mirror of https://github.com/apache/openjpa.git
OPENJPA-1015 fix line wrapping in source files. Based on a patch contributed by B.J. Reed. Updates made to enforce line wrapping during the process-sources phase of the build.
git-svn-id: https://svn.apache.org/repos/asf/openjpa/trunk@761509 13f79535-47bb-0310-9956-ffa450edef68
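The commit message does not show how the wrapping rule is enforced during process-sources. As a rough sketch only — assuming the Maven Checkstyle plugin and an 80-character limit; the plugin, rule file name, and limit actually used by the OpenJPA pom may differ — the binding could look like this:

<!-- pom.xml (hypothetical): run a line-length check before compilation -->
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-checkstyle-plugin</artifactId>
  <executions>
    <execution>
      <phase>process-sources</phase>
      <goals>
        <goal>check</goal>
      </goals>
      <configuration>
        <configLocation>openjpa-checkstyle.xml</configLocation>
        <failOnViolation>true</failOnViolation>
      </configuration>
    </execution>
  </executions>
</plugin>

<!-- openjpa-checkstyle.xml (hypothetical rule file): limit source lines to 80 characters -->
<module name="Checker">
  <module name="TreeWalker">
    <module name="LineLength">
      <property name="max" value="80"/>
    </module>
  </module>
</module>

With a binding like this, any line longer than the limit fails the build in the process-sources phase, which is the constraint the re-wrapped lines in the diff below satisfy.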
This commit is contained in:
parent 318d408ab7
commit b6e661e24a
@@ -317,7 +317,8 @@ public class JDBCConfigurationImpl
  preparedQueryCachePlugin.setDefault(aliases[0]);
  preparedQueryCachePlugin.setClassName(aliases[1]);
  preparedQueryCachePlugin.setDynamic(true);
- preparedQueryCachePlugin.setInstantiatingGetter("getQuerySQLCacheInstance");
+ preparedQueryCachePlugin.setInstantiatingGetter(
+     "getQuerySQLCacheInstance");

  finderCachePlugin = addPlugin("jdbc.FinderCache", true);
  aliases = new String[] {
@@ -92,9 +92,11 @@ public abstract class AbstractUpdateManager
  StateManagerImpl sm = (StateManagerImpl) obj;
  if (sm.getMappedByIdFields() != null)
      mappedByIdStates.add(sm);
- else exceps = populateRowManager(sm, rowMgr, store, exceps, customs);
+ else exceps = populateRowManager(sm, rowMgr, store, exceps,
+     customs);
  } else
-     exceps = populateRowManager(obj, rowMgr, store, exceps, customs);
+     exceps = populateRowManager(obj, rowMgr, store, exceps,
+         customs);
  }

  // flush rows
@@ -364,8 +366,9 @@ public abstract class AbstractUpdateManager
  /**
   * Update version and discriminator indicators.
   */
- protected void updateIndicators(OpenJPAStateManager sm, ClassMapping mapping,
-     RowManager rowMgr, JDBCStore store, Collection customs,
+ protected void updateIndicators(OpenJPAStateManager sm,
+     ClassMapping mapping, RowManager rowMgr, JDBCStore store,
+     Collection customs,
      boolean versionUpdateOnly) throws SQLException {
  while (mapping.getJoinablePCSuperclassMapping() != null)
      mapping = mapping.getJoinablePCSuperclassMapping();
@@ -410,7 +413,8 @@ public abstract class AbstractUpdateManager
  private final OpenJPAStateManager _sm;
  private final Strategy _strat;

- public CustomMapping(int action, OpenJPAStateManager sm, Strategy strat) {
+ public CustomMapping(int action, OpenJPAStateManager sm, Strategy strat)
+ {
      _action = action;
      _sm = sm;
      _strat = strat;
@@ -511,8 +511,9 @@ public class ConstraintUpdateManager

  if (!breakableLink.isRemovedFromGraph()) {

-     // use a primary row update to prevent setting pk and fk values
-     // until after flush, to get latest auto-increment values
+     // use a primary row update to prevent setting pk and fk
+     // values until after flush, to get latest auto-increment
+     // values
      PrimaryRow row = (PrimaryRow) breakableLink.getFrom();
      if (row.getAction() == Row.ACTION_DELETE) {
          addDeleteUpdate(breakableLink, deleteUpdates);
@@ -108,7 +108,8 @@ public class JDBCStoreManager
  private static final Class<ClientConnection> clientConnectionImpl;
  private static final Class<RefCountConnection> refCountConnectionImpl;
  private static final Class<CancelStatement> cancelStatementImpl;
- private static final Class<CancelPreparedStatement> cancelPreparedStatementImpl;
+ private static final Class<CancelPreparedStatement>
+     cancelPreparedStatementImpl;

  static {
      try {
@@ -1415,7 +1416,8 @@ public class JDBCStoreManager
   * Connection returned to client code. Makes sure its wrapped connection
   * ref count is decremented on finalize.
   */
- protected abstract static class ClientConnection extends DelegatingConnection {
+ protected abstract static class ClientConnection extends
+     DelegatingConnection {

      private boolean _closed = false;

@@ -1563,7 +1565,8 @@ public class JDBCStoreManager
   * Statement type that adds and removes itself from the set of active
   * statements so that it can be canceled.
   */
- protected abstract class CancelPreparedStatement extends DelegatingPreparedStatement {
+ protected abstract class CancelPreparedStatement extends
+     DelegatingPreparedStatement {

      public CancelPreparedStatement(PreparedStatement stmnt,
          Connection conn) {
@@ -133,7 +133,8 @@ public class PreparedSQLStoreQuery extends SQLStoreQuery {
   * The given userParams is already re-parameterized, so this method have
   * to merely copy the given Map values.
   *
-  * @see PreparedQueryImpl#reparametrize(Map, org.apache.openjpa.kernel.Broker)
+  * @see PreparedQueryImpl#reparametrize(Map,
+  * org.apache.openjpa.kernel.Broker)
   */
  public Object[] toParameterArray(StoreQuery q, Map userParams) {
      Object[] array = new Object[userParams.size()];
@@ -125,7 +125,8 @@ public class PreparedStatementManagerImpl
      sql).getMessage());
  }
  if (autoAssignColNames != null)
-     populateAutoAssignCols(stmnt, autoAssign, autoAssignColNames, row);
+     populateAutoAssignCols(stmnt, autoAssign, autoAssignColNames,
+         row);
  else {
      StateManagerImpl sm = (StateManagerImpl)row.getPrimaryKey();
      if (sm != null) {
@@ -377,7 +377,8 @@ public class SQLStoreQuery
  tok.wordChars('?', '?');

  StringBuffer buf = new StringBuffer(sql.length());
- for (int ttype; (ttype = tok.nextToken()) != StreamTokenizer.TT_EOF;) {
+ for (int ttype; (ttype = tok.nextToken()) !=
+     StreamTokenizer.TT_EOF;) {
      switch (ttype) {
      case StreamTokenizer.TT_WORD:
          // a token is a positional parameter if it starts with
@@ -391,7 +391,8 @@ public class TableJDBCSeq
  for (String columnName : _uniqueColumnNames) {
      if (!table.containsColumn(columnName))
          throw new UserException(_loc.get("unique-missing-column",
-             columnName, table.getName(), table.getColumnNames()));
+             columnName, table.getName(),
+             table.getColumnNames()));
      Column col = table.getColumn(columnName);
      u.addColumn(col);
  }
@@ -416,7 +417,8 @@ public class TableJDBCSeq
      runnable);
  }
  catch(NotSupportedException nse) {
-     SQLException sqlEx = new SQLException(nse.getLocalizedMessage());
+     SQLException sqlEx = new SQLException(
+         nse.getLocalizedMessage());
      sqlEx.initCause(nse);
      throw sqlEx;
  }
@@ -560,7 +562,8 @@ public class TableJDBCSeq

  // update the value
  upd = new SQLBuffer(dict);
- String tableName = resolveTableName(mapping, _seqColumn.getTable());
+ String tableName = resolveTableName(mapping,
+     _seqColumn.getTable());
  upd.append("UPDATE ").append(tableName).
      append(" SET ").append(_seqColumn).append(" = ").
      appendValue(Numbers.valueOf(cur + inc), _seqColumn).
@@ -570,7 +573,8 @@ public class TableJDBCSeq

  stmnt = prepareStatement(conn, upd);
  dict.setTimeouts(stmnt, _conf, true);
- updates = executeUpdate(_conf, conn, stmnt, upd, RowImpl.ACTION_UPDATE);
+ updates = executeUpdate(_conf, conn, stmnt, upd,
+     RowImpl.ACTION_UPDATE);
  } finally {
      if (rs != null)
          try { rs.close(); } catch (SQLException se) {}
@@ -765,7 +769,8 @@ public class TableJDBCSeq
   * implementation of executing update.
   */
  protected int executeUpdate(JDBCConfiguration conf, Connection conn,
-     PreparedStatement stmnt, SQLBuffer buf, int opcode) throws SQLException {
+     PreparedStatement stmnt, SQLBuffer buf, int opcode) throws SQLException
+     {
      return stmnt.executeUpdate();
  }

@@ -782,7 +787,8 @@ public class TableJDBCSeq
   * This method is to provide override for non-JDBC or JDBC-like
   * implementation of getting sequence from the result set.
   */
- protected long getSequence(ResultSet rs, DBDictionary dict) throws SQLException {
+ protected long getSequence(ResultSet rs, DBDictionary dict)
+     throws SQLException {
      if (rs == null || !rs.next())
          return -1;
      return dict.getLong(rs, 1);
@@ -180,11 +180,13 @@ public class CollectionParam
  sql.appendValue(((Object[]) pstate.sqlValue[i])[index],
      pstate.getColumn(index), this);
  else if (pstate.cols != null)
-     sql.appendValue(pstate.sqlValue[i], pstate.getColumn(index), this);
+     sql.appendValue(pstate.sqlValue[i], pstate.getColumn(index),
+         this);
  else if (pstate.discValue[i] != null)
      sql.appendValue(pstate.discValue[i]);
  else
-     sql.appendValue(pstate.sqlValue[i], pstate.getColumn(index), this);
+     sql.appendValue(pstate.sqlValue[i], pstate.getColumn(index),
+         this);
  }
  }
  }
@@ -486,7 +486,8 @@ public class JDBCExpressionFactory
  for (int i = 0; i < vals.length; i++) {
      if (vals[i] instanceof Lit) {
          Lit val = (Lit) vals[i];
-         StringBuffer value = new StringBuffer(val.getValue().toString());
+         StringBuffer value =
+             new StringBuffer(val.getValue().toString());
          if (val.getParseType() == Literal.TYPE_SQ_STRING)
              value.insert(0, "'").append("'");
          val.setValue(new Raw(value.toString()));
@@ -97,10 +97,12 @@ public class MapEntry
  return new Entry(key, val);
  }

- public void orderBy(Select sel, ExpContext ctx, ExpState state, boolean asc) {
+ public void orderBy(Select sel, ExpContext ctx, ExpState state, boolean asc)
+ {
  }

- public void select(Select sel, ExpContext ctx, ExpState state, boolean pks) {
+ public void select(Select sel, ExpContext ctx, ExpState state, boolean pks)
+ {
      selectColumns(sel, ctx, state, pks);
  }

@@ -92,7 +92,8 @@ public class MapKey
  return key;
  }

- public void select(Select sel, ExpContext ctx, ExpState state, boolean pks) {
+ public void select(Select sel, ExpContext ctx, ExpState state, boolean pks)
+ {
      selectColumns(sel, ctx, state, pks);
  }

@@ -296,7 +296,8 @@ public class PCPath
  case JavaTypes.ARRAY:
  case JavaTypes.COLLECTION:
      ValueMapping elem = pstate.field.getElementMapping();
-     if (pstate.joinedRel && elem.getTypeCode() == JavaTypes.PC) {
+     if (pstate.joinedRel && elem.getTypeCode() == JavaTypes.PC)
+     {
          if (pstate.field.isElementCollection() &&
              pstate.field.getElement().isEmbedded())
              return ((HandlerCollectionTableFieldStrategy)
@@ -544,7 +545,8 @@ public class PCPath
      pstate.joins = from.joinSuperclass(pstate.joins, false);
  }
  }
- // nothing more to do from here on as we encountered an xpath action
+ // nothing more to do from here on as we encountered an xpath
+ // action
  if (action.op == Action.GET_XPATH)
      break;
  }
@@ -628,8 +630,8 @@ public class PCPath
  if (action.op != Action.GET && action.op != Action.GET_OUTER)
      continue;
  try {
-     candidate = sm.fetchField(((FieldMapping)action.data).getIndex(),
-         true);
+     candidate = sm.fetchField(
+         ((FieldMapping)action.data).getIndex(), true);
  } catch (ClassCastException cce) {
      throw new RuntimeException(action.data + " not a field path");
  } finally {
@@ -430,7 +430,8 @@ public class ClassMappingInfo
  for (int i=0; i<uniqueColumns.length; i++) {
      String columnName = templateColumns[i].getName();
      if (!table.containsColumn(columnName)) {
-         throw new UserException(_loc.get("unique-missing-column",
+         throw new UserException(_loc.get(
+             "unique-missing-column",
              new Object[]{cm, columnName, tableName,
              Arrays.toString(table.getColumnNames())}));
      }
@@ -1187,9 +1187,9 @@ public class FieldMapping
  ForeignKey rjfk = rfm.getJoinForeignKey();
  if (rjfk == null)
      continue;
- if (rjfk.getTable() == jfk.getTable()
-     && jfk.getTable().getColumns().length
-     == jfk.getColumns().length + rjfk.getColumns().length) {
+ if (rjfk.getTable() == jfk.getTable() &&
+     jfk.getTable().getColumns().length ==
+     jfk.getColumns().length + rjfk.getColumns().length) {
      _bidirectionalJoinTableOwner = true;
      break;
  }
@@ -1227,13 +1227,14 @@ public class FieldMapping
  for (int i=0; i<relFmds.length;i++) {
      FieldMapping rfm = relFmds[i];
      ValueMapping relem = rfm.getElementMapping();
-     if (relem != null && relem.getDeclaredTypeMapping() == getDeclaringMapping()) {
+     if (relem != null && relem.getDeclaredTypeMapping() ==
+         getDeclaringMapping()) {
          ForeignKey rjfk = rfm.getJoinForeignKey();
          if (rjfk == null)
              continue;
-         if (rjfk.getTable() == jfk.getTable()
-             && jfk.getTable().getColumns().length
-             == jfk.getColumns().length + rjfk.getColumns().length) {
+         if (rjfk.getTable() == jfk.getTable() &&
+             jfk.getTable().getColumns().length ==
+             jfk.getColumns().length + rjfk.getColumns().length) {
              _bidirectionalJoinTableNonOwner = true;
              break;
          }
@@ -58,7 +58,7 @@ public class FieldMappingInfo
  private Column _orderCol = null;
  private boolean _canOrderCol = true;
  private String[] _comments = null;
- private List<Unique> _joinTableUniques; // Unique constraints on the JoinTable
+ private List<Unique> _joinTableUniques; // Unique constraints on JoinTable

  /**
   * The user-supplied name of the table for this field.
@@ -368,7 +368,8 @@ public class FieldMappingInfo
  /**
   * Sets internal constraint information to match given mapped constraint.
   */
- protected void syncJoinTableUniques(MetaDataContext context, Unique[] unqs) {
+ protected void syncJoinTableUniques(MetaDataContext context, Unique[] unqs)
+ {
      if (unqs == null) {
          _joinTableUniques = null;
          return;
@@ -573,7 +573,8 @@ public abstract class MappingInfo
  return cols;
  }

- boolean canMerge(List given, Column[] templates, boolean adapt, boolean fill) {
+ boolean canMerge(List given, Column[] templates, boolean adapt,
+     boolean fill) {
      return !((!given.isEmpty() || (!adapt && !fill))
          && given.size() != templates.length);
  }
@@ -51,7 +51,8 @@ import org.apache.openjpa.jdbc.meta.strats.MaxEmbeddedByteArrayFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.MaxEmbeddedCharArrayFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.MaxEmbeddedClobFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.MultiColumnVersionStrategy;
- import org.apache.openjpa.jdbc.meta.strats.NanoPrecisionTimestampVersionStrategy;
+ import org.apache.openjpa.jdbc.meta.strats.
+     NanoPrecisionTimestampVersionStrategy;
  import org.apache.openjpa.jdbc.meta.strats.NoneClassStrategy;
  import org.apache.openjpa.jdbc.meta.strats.NoneDiscriminatorStrategy;
  import org.apache.openjpa.jdbc.meta.strats.NoneFieldStrategy;
@@ -60,13 +61,15 @@ import org.apache.openjpa.jdbc.meta.strats.NumberVersionStrategy;
  import org.apache.openjpa.jdbc.meta.strats.ObjectIdClassStrategy;
  import org.apache.openjpa.jdbc.meta.strats.ObjectIdValueHandler;
  import org.apache.openjpa.jdbc.meta.strats.PrimitiveFieldStrategy;
- import org.apache.openjpa.jdbc.meta.strats.RelationCollectionInverseKeyFieldStrategy;
+ import org.apache.openjpa.jdbc.meta.strats.
+     RelationCollectionInverseKeyFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationCollectionTableFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationHandlerMapTableFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationMapInverseKeyFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationMapTableFieldStrategy;
- import org.apache.openjpa.jdbc.meta.strats.RelationRelationMapTableFieldStrategy;
+ import org.apache.openjpa.jdbc.meta.strats.
+     RelationRelationMapTableFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.StateComparisonVersionStrategy;
  import org.apache.openjpa.jdbc.meta.strats.StringFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.SubclassJoinDiscriminatorStrategy;
@@ -54,7 +54,8 @@ import org.apache.openjpa.jdbc.meta.strats.MaxEmbeddedBlobFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.MaxEmbeddedClobFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.NoneDiscriminatorStrategy;
  import org.apache.openjpa.jdbc.meta.strats.PrimitiveFieldStrategy;
- import org.apache.openjpa.jdbc.meta.strats.RelationCollectionInverseKeyFieldStrategy;
+ import org.apache.openjpa.jdbc.meta.strats.
+     RelationCollectionInverseKeyFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationCollectionTableFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.RelationFieldStrategy;
  import org.apache.openjpa.jdbc.meta.strats.StateComparisonVersionStrategy;
@@ -71,7 +71,8 @@ public class VersionMappingInfo
  Table primaryTable = vers.getClassMapping().getTable();
  List<String> secondaryTableNames = Arrays.asList(vers
      .getClassMapping().getMappingInfo().getSecondaryTableNames());
- Map<Table, List<Column>> assign = new LinkedHashMap<Table, List<Column>>();
+ Map<Table, List<Column>> assign = new LinkedHashMap<Table,
+     List<Column>>();
  for (Column col : templates) {
      String tableName = col.getTableName();
      Table table;
@@ -65,8 +65,8 @@ public abstract class ColumnVersionStrategy
   * one column.
   */
  protected int getJavaType(int i) {
-     throw new AbstractMethodError(
-         _loc.get("multi-column-version-unsupported",getAlias()).toString());
+     throw new AbstractMethodError(_loc.get(
+         "multi-column-version-unsupported",getAlias()).toString());
  }

  /**
@@ -111,9 +111,9 @@ public abstract class ColumnVersionStrategy
   * whether first version is earlier, same or later than the second one.
   *
   * @return If any element of a1 is later than corresponding element of
-  * a2 then returns 1 i.e. the first version is later than the second version.
-  * If each element of a1 is equal to corresponding element of a2 then return
-  * 0 i.e. the first version is same as the second version.
+  * a2 then returns 1 i.e. the first version is later than the second
+  * version. If each element of a1 is equal to corresponding element of a2
+  * then return 0 i.e. the first version is same as the second version.
   * else return a negative number i.e. the first version is earlier than
   * the second version.
   */
@@ -162,7 +162,8 @@ public abstract class ColumnVersionStrategy
  tmplate.setJavaType(getJavaType());
  tmplate.setName("versn");

- Column[] cols = info.getColumns(vers, new Column[]{ tmplate }, adapt);
+ Column[] cols = info.getColumns(vers, new Column[]{ tmplate },
+     adapt);
  cols[0].setVersionStrategy(this);
  vers.setColumns(cols);
  vers.setColumnIO(info.getColumnIO());
@@ -178,7 +179,8 @@ public abstract class ColumnVersionStrategy
  ColumnIO io = vers.getColumnIO();
  Object initial = nextVersion(null);
  for (int i = 0; i < cols.length; i++) {
-     Row row = rm.getRow(cols[i].getTable(), Row.ACTION_INSERT, sm, true);
+     Row row = rm.getRow(cols[i].getTable(), Row.ACTION_INSERT, sm,
+         true);
      if (io.isInsertable(i, initial == null))
          row.setObject(cols[i], getColumnValue(initial, i));
  }
@@ -201,7 +203,8 @@ public abstract class ColumnVersionStrategy

  // set where and update conditions on row
  for (int i = 0; i < cols.length; i++) {
-     Row row = rm.getRow(cols[i].getTable(), Row.ACTION_UPDATE, sm, true);
+     Row row = rm.getRow(cols[i].getTable(), Row.ACTION_UPDATE, sm,
+         true);
      row.setFailedObject(sm.getManagedInstance());
      if (curVersion != null && sm.isVersionCheckRequired()) {
          row.whereObject(cols[i], getColumnValue(curVersion, i));
@@ -152,9 +152,11 @@ public abstract class EmbedValueHandler
  if (cval instanceof PersistenceCapable) {
      OpenJPAStateManager embedSm = (OpenJPAStateManager)
          ((PersistenceCapable)cval).pcGetStateManager();
-     idx = toDataStoreValue1(embedSm, val, store, cols, rvals, idx);
+     idx = toDataStoreValue1(embedSm, val, store, cols, rvals,
+         idx);
  } else if (cval instanceof ObjectIdStateManager) {
-     idx = toDataStoreValue1((ObjectIdStateManager)cval, val, store, cols, rvals, idx);
+     idx = toDataStoreValue1((ObjectIdStateManager)cval, val,
+         store, cols, rvals, idx);
  } else if (cval == null) {
      idx = toDataStoreValue1(null, val, store, cols, rvals, idx);
  }
@@ -222,7 +224,8 @@ public abstract class EmbedValueHandler
      idx = toObjectValue1(em1, vm1, val, store, fetch, cols, idx);
  } else if (em instanceof ObjectIdStateManager) {
      em1 = new ObjectIdStateManager(null, null, vm1);
-     idx = toObjectValue1(em1, vm1, val, store, null, getColumns(fms[i]), idx);
+     idx = toObjectValue1(em1, vm1, val, store, null,
+         getColumns(fms[i]), idx);
  }
  cval = em1.getManagedInstance();
  } else {
@@ -115,7 +115,8 @@ public class HandlerHandlerMapTableFieldStrategy
  } else {
      DBDictionary dict = field.getMappingRepository().getDBDictionary();
      _kcols = HandlerStrategies.map(key,
-         dict.getValidColumnName("key", field.getTable()), _kio, adapt);
+         dict.getValidColumnName("key", field.getTable()), _kio,
+         adapt);
  }
  _vio = new ColumnIO();
  _vcols = HandlerStrategies.map(val, "value", _vio, adapt);
@@ -204,7 +204,8 @@ public abstract class MapTableFieldStrategy
  throw new MetaDataException(_loc.get
      ("mapped-inverse-unjoined", field.getName(),
      field.getDefiningMapping(), mapped));
- ForeignKey fk = mapped.getForeignKey(field.getDefiningMapping());
+ ForeignKey fk = mapped.getForeignKey(
+     field.getDefiningMapping());
  field.setForeignKey(fk);
  field.setJoinForeignKey(fk);
  } else if (mapped.getElement().getTypeCode() == JavaTypes.PC) {
@@ -220,7 +221,8 @@ public abstract class MapTableFieldStrategy
  ValueMapping elem = mapped.getElementMapping();
  ForeignKey fk = elem.getForeignKey();
  field.setJoinForeignKey(fk);
- field.getElementMapping().setForeignKey(mapped.getJoinForeignKey());
+ field.getElementMapping().setForeignKey(
+     mapped.getJoinForeignKey());
  } else
      throw new MetaDataException(_loc.get("not-inv-relation",
          field, mapped));
@@ -256,8 +258,9 @@ public abstract class MapTableFieldStrategy
  return true;
  }

- protected boolean populateKey(Row row, OpenJPAStateManager valsm, Object obj,
-     StoreContext ctx, RowManager rm, JDBCStore store) throws SQLException {
+ protected boolean populateKey(Row row, OpenJPAStateManager valsm,
+     Object obj, StoreContext ctx, RowManager rm, JDBCStore store)
+     throws SQLException {
      ClassMapping meta = (ClassMapping)valsm.getMetaData();
      FieldMapping fm = getFieldMapping(meta);
      if (fm == null)
@@ -213,15 +213,18 @@ public class RelationFieldStrategy
  if ((fmds[i].getName().equals(mappedByIdValue)) ||
      mappedByIdValue.length() == 0) {
      if (fmds[i].getValue().getEmbeddedMetaData() != null) {
-         EmbedValueHandler.getEmbeddedIdCols((FieldMapping)fmds[i], cols);
+         EmbedValueHandler.getEmbeddedIdCols(
+             (FieldMapping)fmds[i], cols);
      } else
-         EmbedValueHandler.getIdColumns((FieldMapping)fmds[i], cols);
+         EmbedValueHandler.getIdColumns(
+             (FieldMapping)fmds[i], cols);
  }
  }
  return cols;
  } else { // primary key is single-value
      Class pkType = pk.getDeclaredType();
-     FieldMetaData[] pks = field.getValue().getDeclaredTypeMetaData().getPrimaryKeyFields();
+     FieldMetaData[] pks = field.getValue().getDeclaredTypeMetaData().
+         getPrimaryKeyFields();
      if (pks.length != 1 || pks[0].getDeclaredType() != pkType)
          return Collections.EMPTY_LIST;
      pkCols = pk.getColumns();
@@ -346,8 +349,8 @@ public class RelationFieldStrategy
  nullInverse(sm, rm);
  updateInverse(sm, rel, store, rm);
  } else {
-     int action = (rel == null
-         && field.isBidirectionalJoinTableMappingNonOwner()) ?
+     int action = (rel == null &&
+         field.isBidirectionalJoinTableMappingNonOwner()) ?
          Row.ACTION_DELETE : Row.ACTION_UPDATE;
      Row row = field.getRow(sm, store, rm, action);
      if (row != null) {
@@ -213,8 +213,8 @@ public class SchemaGroup
  // We can't handle the case that one entity has schema name
  // and other entity does not have schema name but both entities
  // map to the same table.
- if (tab != null
-     && (schemas[i] == inSchema || inSchema.getName() == null))
+ if (tab != null &&
+     (schemas[i] == inSchema || inSchema.getName() == null))
      return tab;

  }
@@ -297,8 +297,8 @@ public class SchemaGroup
  Sequence seq;
  for (int i = 0; i < schemas.length; i++) {
      seq = schemas[i].getSequence(name);
-     if ((seq != null)
-         && (schemas[i] == inSchema || inSchema.getName() == null))
+     if ((seq != null) &&
+         (schemas[i] == inSchema || inSchema.getName() == null))
          return seq;
  }

@@ -450,7 +450,8 @@ public class SchemaTool {
  for (int i = 0; i < schemas.length; i++) {
      seqs = schemas[i].getSequences();
      for (int j = 0; j < seqs.length; j++) {
-         if (db.findSequence(schemas[i], seqs[j].getFullName()) != null)
+         if (db.findSequence(schemas[i], seqs[j].getFullName()) !=
+             null)
              continue;

          if (createSequence(seqs[j])) {
@@ -233,8 +233,9 @@ public class DB2Dictionary
  // TRANSACTION_READ_COMMITTED
  conn = super.decorate(conn);

- if (conf.getTransactionIsolationConstant() == -1
-     && conn.getTransactionIsolation() < Connection.TRANSACTION_READ_COMMITTED)
+ if (conf.getTransactionIsolationConstant() == -1 &&
+     conn.getTransactionIsolation() <
+     Connection.TRANSACTION_READ_COMMITTED)
      conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED);

  return conn;
@@ -843,9 +844,11 @@ public class DB2Dictionary
  if (subtype == StoreException.LOCK && errorState.equals("57033")
      && ex.getMessage().indexOf("80") != -1) {
      recoverable = Boolean.TRUE;
- } else if (subtype == StoreException.QUERY && errorState.equals("57014")
-     && ex.getMessage().indexOf("40001") == -1) {
-     // FIXME drwoods - OPENJPA-964 - Need to determine expected DB2 behavior for query timeouts
+ } else if (subtype == StoreException.QUERY &&
+     errorState.equals("57014") &&
+     ex.getMessage().indexOf("40001") == -1) {
+     // FIXME drwoods - OPENJPA-964 - Need to determine expected DB2
+     // behavior for query timeouts
      recoverable = Boolean.TRUE;
  }
  }
@@ -399,7 +399,8 @@ public class DBDictionary
  // JDBC3-only method, so it might throw a
  // AbstractMethodError
  isJDBC3 = metaData.getJDBCMajorVersion() >= 3;
- supportsGetGeneratedKeys = metaData.supportsGetGeneratedKeys();
+ supportsGetGeneratedKeys =
+     metaData.supportsGetGeneratedKeys();
  } catch (Throwable t) {
      // ignore if not JDBC3
  }
@@ -2387,7 +2388,8 @@ public class DBDictionary
  boolean subselect, boolean checkTableForUpdate) {
  return toOperation(getSelectOperation(fetch), selects, from, where,
      group, having, order, distinct, start, end,
-     getForUpdateClause(fetch, forUpdate, null), subselect, checkTableForUpdate);
+     getForUpdateClause(fetch, forUpdate, null), subselect,
+     checkTableForUpdate);
  }

  /**
@@ -2460,10 +2462,10 @@ public class DBDictionary
  /**
   * Return the SQL for the given selecting operation.
   */
- private SQLBuffer toOperation(String op, SQLBuffer selects,
-     SQLBuffer from, SQLBuffer where, SQLBuffer group, SQLBuffer having,
-     SQLBuffer order, boolean distinct, long start, long end,
-     String forUpdateClause, boolean subselect, boolean checkTableForUpdate) {
+ private SQLBuffer toOperation(String op, SQLBuffer selects, SQLBuffer from,
+     SQLBuffer where, SQLBuffer group, SQLBuffer having, SQLBuffer order,
+     boolean distinct, long start, long end, String forUpdateClause,
+     boolean subselect, boolean checkTableForUpdate) {
      SQLBuffer buf = new SQLBuffer(this);
      buf.append(op);

@@ -4093,7 +4095,8 @@ public class DBDictionary
  }

  private boolean isSupported() {
-     // if this is a custom dict, traverse to whatever openjpa dict it extends
+     // if this is a custom dict, traverse to whatever openjpa dict it
+     // extends
      Class c = getClass();
      while (!c.getName().startsWith("org.apache.openjpa."))
          c = c.getSuperclass();
@@ -4158,7 +4161,8 @@ public class DBDictionary
  InputStream stream = getClass().getResourceAsStream(rsrc);
  String dictionaryClassName = getClass().getName();
  if (stream == null) { // User supplied dictionary but no error codes xml
-     stream = DBDictionary.class.getResourceAsStream(rsrc); // use default
+     // use default
+     stream = DBDictionary.class.getResourceAsStream(rsrc);
      dictionaryClassName = getClass().getSuperclass().getName();
  }
  codeReader.parse(stream, dictionaryClassName, this);
@@ -4614,7 +4618,8 @@ public class DBDictionary
   * This method is to provide override for non-JDBC or JDBC-like
   * implementation of executing query.
   */
- protected ResultSet executeQuery(Connection conn, PreparedStatement stmnt, String sql
+ protected ResultSet executeQuery(Connection conn, PreparedStatement stmnt,
+     String sql
      ) throws SQLException {
      return stmnt.executeQuery();
  }
@@ -48,7 +48,7 @@ public class DerbyDictionary
  substringFunctionName = "SUBSTR";

  // Derby name length restriction has been relaxed
- // http://www.archivum.info/derby-dev@db.apache.org/2004-12/msg00270.html
+ //http://www.archivum.info/derby-dev@db.apache.org/2004-12/msg00270.html
  maxConstraintNameLength = 128;
  maxIndexNameLength = 128;
  maxColumnNameLength = 128;
@@ -113,7 +113,8 @@ public class DerbyDictionary
  int errorCode = ex.getErrorCode();
  if (errorStates.contains(errorState)) {
      recoverable = Boolean.FALSE;
-     if ((subtype == StoreException.LOCK || subtype == StoreException.QUERY) && errorCode < 30000) {
+     if ((subtype == StoreException.LOCK ||
+         subtype == StoreException.QUERY) && errorCode < 30000) {
          recoverable = Boolean.TRUE;
      }
  }
@@ -162,7 +162,8 @@ public class InformixDictionary
  else
      driverVendor = VENDOR_OTHER;

- if (driverName.equals("IBM DB2 JDBC Universal Driver Architecture")) {
+ if (driverName.equals("IBM DB2 JDBC Universal Driver Architecture"))
+ {
      useJCC = true;
      try {
          if (meta.storesLowerCaseIdentifiers())
@@ -58,7 +58,8 @@ public class PostgresDictionary
  (PostgresDictionary.class);

  private static Class<PostgresConnection> postgresConnectionImpl;
- private static Class<PostgresPreparedStatement> postgresPreparedStatementImpl;
+ private static Class<PostgresPreparedStatement>
+     postgresPreparedStatementImpl;

  static {
      try {
@@ -378,7 +379,7 @@ public class PostgresDictionary
  .getConnection();
  try {
      conn.setAutoCommit(false);
-     PGConnection pgconn = (PGConnection) conn.getInnermostDelegate();
+     PGConnection pgconn = (PGConnection)conn.getInnermostDelegate();
      LargeObjectManager lom = pgconn.getLargeObjectAPI();
      // The create method is valid in versions previous 8.3
      // in 8.3 this methos is deprecated, use createLO
@@ -350,9 +350,9 @@ public interface Result
   * Return the value stored in the given column or id.
   *
   * @param obj the column or id whose data to fetch
-  * @param metaType the type code from {@link org.apache.openjpa.meta.JavaTypes} or
-  * {@link JavaSQLTypes} for the type of the data; if
-  * <code>obj</code> is a column, you may specify -1
+  * @param metaType the type code from
+  * {@link org.apache.openjpa.meta.JavaTypes} or {@link JavaSQLTypes} for the
+  * type of the data; if <code>obj</code> is a column, you may specify -1
   * to use the column's recorded java type
   * @param arg some JDBC data access methods use an argument, such
   * as a {@link Calendar} or {@link Map}
@@ -41,8 +41,8 @@ import org.apache.openjpa.kernel.OpenJPAStateManager;

  /**
   * Logical representation of a table row for insert/update/delete. The
-  * {@link org.apache.openjpa.jdbc.kernel.UpdateManager} is responsible for implementing
-  * rows to do something useful when the values are set.
+  * {@link org.apache.openjpa.jdbc.kernel.UpdateManager} is responsible for
+  * implementing rows to do something useful when the values are set.
   *
   * @author Abe White
   */
@@ -55,8 +55,8 @@ public class SQLErrorCodeReader {
  static {
      storeErrorTypes.put("lock", StoreException.LOCK);
      storeErrorTypes.put("object-exists", StoreException.OBJECT_EXISTS);
-     storeErrorTypes
-         .put("object-not-found", StoreException.OBJECT_NOT_FOUND);
+     storeErrorTypes.put("object-not-found",
+         StoreException.OBJECT_NOT_FOUND);
      storeErrorTypes.put("optimistic", StoreException.OPTIMISTIC);
      storeErrorTypes.put("referential-integrity",
          StoreException.REFERENTIAL_INTEGRITY);
@@ -141,7 +141,8 @@ public class SQLServerDictionary

  protected void appendLength(SQLBuffer buf, int type) {
      if (type == Types.VARCHAR)
-         buf.append("(").append(Integer.toString(characterColumnSize)).append(")");
+         buf.append("(").append(Integer.toString(characterColumnSize)).
+             append(")");
  }

  /**
@@ -487,7 +487,8 @@ public interface Select
   * Add an ORDER BY clause.
   * Optionally selects ordering data if not already selected.
   */
- public boolean orderBy(SQLBuffer sql, boolean asc, boolean sel, Value selAs);
+ public boolean orderBy(SQLBuffer sql, boolean asc, boolean sel,
+     Value selAs);

  /**
   * Add an ORDER BY clause.
@@ -322,7 +322,8 @@ public class SelectImpl
  stmnt = prepareStatement(conn, sql, null,
      ResultSet.TYPE_FORWARD_ONLY,
      ResultSet.CONCUR_READ_ONLY, false);
- _dict.setQueryTimeout(stmnt, store.getFetchConfiguration().getQueryTimeout());
+ _dict.setQueryTimeout(stmnt,
+     store.getFetchConfiguration().getQueryTimeout());
  rs = executeQuery(conn, stmnt, sql, false, store);
  return getCount(rs);
  } finally {
@@ -1220,7 +1221,8 @@ public class SelectImpl
  return seld;
  }

- public boolean orderBy(SQLBuffer sql, boolean asc, boolean sel, Value selAs) {
+ public boolean orderBy(SQLBuffer sql, boolean asc, boolean sel, Value selAs)
+ {
      return orderBy(sql, asc, (Joins) null, sel, selAs);
  }

@@ -1407,7 +1409,8 @@ public class SelectImpl
  int count = 0;
  for (int i = 0; i < toCols.length; i++, count++) {
      if (pks == null)
-         val = (oid == null) ? null : Numbers.valueOf(((Id) oid).getId());
+         val = (oid == null) ? null :
+             Numbers.valueOf(((Id) oid).getId());
      else {
          // must be app identity; use pk index to get correct pk value
          join = mapping.assertJoinable(toCols[i]);
@@ -151,8 +151,8 @@ public class SybaseDictionary
  "USER_OPTION", "WAITFOR", "WHILE", "WRITETEXT",
  }));

- // Sybase does not support foreign key delete/update action NULL, DEFAULT,
- // CASCADE
+ // Sybase does not support foreign key delete/update action NULL,
+ // DEFAULT, CASCADE
  supportsNullDeleteAction = false;
  supportsDefaultDeleteAction = false;
  supportsCascadeDeleteAction = false;
@@ -58,9 +58,10 @@ public interface CacheMarshaller {
  /**
   * The {@link ValidationPolicy} that this marshaller should use.
   * A value for this parameter is required. The class will be instantiated
-  * via the {@link org.apache.openjpa.lib.conf.Configurations} mechanism, ensuring that if the class
-  * implements {@link Configurable} or {@link org.apache.openjpa.lib.conf.GenericConfigurable}, it will
-  * be taken through the appropriate lifecycle.
+  * via the {@link org.apache.openjpa.lib.conf.Configurations} mechanism,
+  * ensuring that if the class implements {@link Configurable} or
+  * {@link org.apache.openjpa.lib.conf.GenericConfigurable}, it will be taken
+  * through the appropriate lifecycle.
   */
  public void setValidationPolicy(String policy)
      throws InstantiationException, IllegalAccessException;
@@ -1545,7 +1545,8 @@ public class OpenJPAConfigurationImpl

  public PreparedQueryCache getQuerySQLCacheInstance() {
      if (preparedQueryCachePlugin.get() == null) {
-         preparedQueryCachePlugin.instantiate(PreparedQueryCache.class, this);
+         preparedQueryCachePlugin.instantiate(PreparedQueryCache.class,
+             this);
      }
      return (PreparedQueryCache)preparedQueryCachePlugin.get();
  }
@@ -378,7 +378,8 @@ public abstract class AbstractQueryCache

  /**
   * Updates the entity timestamp map with the current time in milliseconds
-  * @param timestampMap -- a map that contains entityname and its last updated timestamp
+  * @param timestampMap -- a map that contains entityname and its last
+  * updated timestamp
   */
  protected void updateEntityTimestamp(Map<String,Long> timestampMap) {
      if (entityTimestampMap != null)
@@ -122,7 +122,8 @@ public class DataCachePCDataImpl

  /**
   * Store field-level information from the given state manager.
-  * Special process of checking if the cached collection data is out of order.
+  * Special process of checking if the cached collection data is out of
+  * order.
   */
  protected void storeField(OpenJPAStateManager sm, FieldMetaData fmd) {
      if (fmd.getManagement() != fmd.MANAGE_PERSISTENT)
@@ -141,7 +141,8 @@ public class ManagedClassSubclasser {
  // reconfiguration at the end of this method.
  ClassMetaData meta = enhancer.getMetaData();
  if (meta == null) {
-     throw new MetaDataException(_loc.get("no-meta", cls)).setFatal(true);
+     throw new MetaDataException(_loc.get("no-meta", cls)).
+         setFatal(true);
  }
  configureMetaData(meta, conf, redefine, false);
@@ -2493,7 +2493,8 @@ public class PCEnhancer {
  code.dup();
  if(_meta.isEmbeddedOnly()) {
      code.aload().setThis();
-     code.invokevirtual().setMethod(Object.class, "getClass", Class.class, null);
+     code.invokevirtual().setMethod(Object.class, "getClass",
+         Class.class, null);
  }else
      code.classconstant().setClass(getType(_meta));
  }
@@ -2501,10 +2502,12 @@ public class PCEnhancer {
  // new <oid class> ();
  code.anew().setType(oidType);
  code.dup();
- if (_meta.isOpenJPAIdentity() || (obj && usesClsString == Boolean.TRUE)) {
+ if (_meta.isOpenJPAIdentity() || (obj && usesClsString ==
+     Boolean.TRUE)) {
      if(_meta.isEmbeddedOnly()) {
          code.aload().setThis();
-         code.invokevirtual().setMethod(Object.class, "getClass", Class.class, null);
+         code.invokevirtual().setMethod(Object.class, "getClass",
+             Class.class, null);
      }else
          code.classconstant().setClass(getType(_meta));
  }
@@ -65,10 +65,10 @@ import serp.bytecode.BCMethod;
   * </li>
   * </ul>
   *
-  * <p>If you use this technique and use the <code>new</code> keyword instead of
-  * a OpenJPA-supplied construction routine, OpenJPA will need to do extra work
-  * with persistent-new-flushed instances, since OpenJPA cannot in this case
-  * track what happens to such an instance.</p>
+  * <p>If you use this technique and use the <code>new</code> keyword instead
+  * of a OpenJPA-supplied construction routine, OpenJPA will need to do extra
+  * work with persistent-new-flushed instances, since OpenJPA cannot in this
+  * case track what happens to such an instance.</p>
   *
   * @since 1.0.0
   */
@@ -41,14 +41,16 @@ public class LogOrphanedKeyAction
  private short _level = Log.WARN;

  /**
-  * The channel to log to. Defaults to <code>org.apache.openjpa.Runtime</code>.
+  * The channel to log to. Defaults to
+  * <code>org.apache.openjpa.Runtime</code>.
   */
  public String getChannel() {
      return _channel;
  }

  /**
-  * The channel to log to. Defaults to <code>org.apache.openjpa.Runtime</code>.
+  * The channel to log to. Defaults to
+  * <code>org.apache.openjpa.Runtime</code>.
   */
  public void setChannel(String channel) {
      _channel = channel;
@@ -31,7 +31,8 @@ import org.apache.openjpa.lib.util.Closeable;
   * <code><pre> import org.apache.openjpa.event.*;
   * import org.apache.openjpa.conf.*;
   *
-  * {@link org.apache.openjpa.conf.OpenJPAConfiguration} conf = factory.getConfiguraiton ();
+  * {@link org.apache.openjpa.conf.OpenJPAConfiguration} conf =
+  * factory.getConfiguraiton ();
   * RemoteCommitListener l = new RemoteCommitListener () {
   * public void afterCommit ({@link RemoteCommitEvent} e) {
   * // update a Swing widget when remote brokers make
@@ -410,7 +410,8 @@ public class BrokerImpl

  public void popFetchConfiguration() {
      if (_fcs == null || _fcs.isEmpty())
-         throw new UserException(_loc.get("fetch-configuration-stack-empty"));
+         throw new UserException(
+             _loc.get("fetch-configuration-stack-empty"));
      _fc = (FetchConfiguration) _fcs.removeLast();
  }
@@ -4359,7 +4360,8 @@ public class BrokerImpl
  public boolean isPersistent(Object obj) {
      assertOpen();
      if (ImplHelper.isManageable(obj))
-         return (ImplHelper.toPersistenceCapable(obj, _conf)).pcIsPersistent();
+         return (ImplHelper.toPersistenceCapable(obj, _conf)).
+             pcIsPersistent();
      return false;
  }

@@ -55,7 +55,8 @@ import org.apache.openjpa.lib.conf.Configurable;
   *
   * This cache allows customization of whether a query can be cached or not
   * via either explicit marking of certain keys as non-cachable (which is
-  * irreversible) or addition/removal of exclusion patterns (which is reversible).
+  * irreversible) or addition/removal of exclusion patterns (which is
+  * reversible).
   *
   * @see #markUncachable(String)
   * @see #addExclusionPattern(String)
@@ -900,7 +900,8 @@ public class QueryImpl
   * key and index. The index set on the Parameter by the parser is the
   * same index used to access the Object[] elements returned by this method.
   *
-  * {@link JPQLExpressionBuilder} creates and populates parameters as follows:
+  * {@link JPQLExpressionBuilder} creates and populates parameters as
+  * follows:
   * The parameter key is not the token encountered by the parser, but
   * converted to Integer or String based on the context in which the token
   * appeared.
@@ -914,11 +915,13 @@ public class QueryImpl
   * This LinkedMap contains the parameter key and their expected
   * (if determinable) value types. That it is a LinkedMap points to the
   * fact that an ordering is implicit. The ordering of the keys in this Map
-  * is the same as the order in which parser encountered the parameter tokens.
+  * is the same as the order in which parser encountered the parameter
+  * tokens.
   *
   * For example, parsing result of the following two JPQL queries
   * a) UPDATE CompUser e SET e.name= ?1, e.age = ?2 WHERE e.userid = ?3
-  * b) UPDATE CompUser e SET e.name= :name, e.age = :age WHERE e.userid = :id
+  * b) UPDATE CompUser e SET e.name= :name, e.age = :age WHERE e.userid =
+  * :id
   * The parameter keys will appear in the order (3,2,1) or (:id, :name, :age)
   * in the given LinkedMap because WHERE clause is parsed before SET clause.
   * The corresponding Parameter Expressions created by the parser will have
@@ -1083,7 +1086,8 @@ public class QueryImpl
  try {
      val = q.evaluate(value, ob, params, sm);
  } catch (UnsupportedException e1) {
-     throw new UserException(_loc.get("fail-to-get-update-value"));
+     throw new UserException(
+         _loc.get("fail-to-get-update-value"));
  }
  }

@@ -205,8 +205,8 @@ public interface QueryStatistics<T> extends Serializable {
  out.println("\tTotal \t\tQuery");
  } else {
      out.println(" last reset on " + since);
-     out.println("Total Query Execution since start "
-         + toString(astat) + " since reset " + toString(stat));
+     out.println("Total Query Execution since start " +
+         toString(astat) + " since reset " + toString(stat));
      out.println("\tSince Start \tSince Reset \t\tQuery");
  }
  int i = 0;
@@ -29,7 +29,8 @@ import org.apache.openjpa.meta.ClassMetaData;
  public class UUIDType4StringSeq
      implements Seq {

- private static final UUIDType4StringSeq _instance = new UUIDType4StringSeq();
+ private static final UUIDType4StringSeq _instance =
+     new UUIDType4StringSeq();

  private String _last = null;

@@ -1430,7 +1430,8 @@ public class JPQLExpressionBuilder
  if (c != null) {
      Value lit = factory.newTypeLiteral(c, Literal.TYPE_CLASS);
      Class<?> candidate = getCandidateType();
-     ClassMetaData can = getClassMetaData(candidate.getName(), false);
+     ClassMetaData can = getClassMetaData(candidate.getName(),
+         false);
      ClassMetaData meta = getClassMetaData(name, false);
      if (candidate.isAssignableFrom(c))
          lit.setMetaData(meta);
@@ -1501,7 +1502,8 @@ public class JPQLExpressionBuilder
  return getQualifiedPath(node, false, true);
  }

- private Value getQualifiedPath(JPQLNode node, boolean pcOnly, boolean inner) {
+ private Value getQualifiedPath(JPQLNode node, boolean pcOnly, boolean inner)
+ {
      int nChild = node.getChildCount();
      JPQLNode firstChild = firstChild(node);
      JPQLNode id = firstChild.id == JJTKEY ? onlyChild(firstChild) :
@@ -756,7 +756,8 @@ public abstract class AbstractCFMetaDataFactory
  (url));
  List<String> newNames = Arrays.asList(clss);
  if (log.isTraceEnabled())
-     log.trace(_loc.get("scan-found-names", newNames, rsrc));
+     log.trace(_loc.get("scan-found-names", newNames,
+         rsrc));
  names.addAll(newNames);
  mapPersistentTypeNames(url, clss);
  }
@@ -1952,8 +1952,8 @@ public class ClassMetaData
  if (f != null)
      _useIdClassFromParent = false;
  else
-     throw new MetaDataException(_loc.get("invalid-id",
-         _type, pks[0].getName()));
+     throw new MetaDataException(_loc.get(
+         "invalid-id", _type, pks[0].getName()));
  }
  }
  }
@@ -287,7 +287,8 @@ public class ProxyCollections
  public static Object afterRemove(ProxyCollection coll, int index,
      Object removed) {
  if (coll.getChangeTracker() != null)
-     ((CollectionChangeTracker) coll.getChangeTracker()).removed(removed);
+     ((CollectionChangeTracker) coll.getChangeTracker()).
+         removed(removed);
  removed(coll, removed, false);
  return removed;
  }
@@ -34,7 +34,8 @@ public class QueryException

  private static final long serialVersionUID = 7375049808087780437L;

- private static final transient Localizer _loc = Localizer.forPackage(QueryException.class);
+ private static final transient Localizer _loc =
+     Localizer.forPackage(QueryException.class);

  private int timeout = -1;

@@ -86,7 +86,8 @@ public class ObjectValue extends Value {
  /**
   * Instantiate the object as an instance of the given class.
   */
- public Object instantiate(Class<?> type, Configuration conf, boolean fatal) {
+ public Object instantiate(Class<?> type, Configuration conf, boolean fatal)
+ {
      throw new UnsupportedOperationException();
  }

@@ -98,7 +98,8 @@ public class PluginValue extends ObjectValue {
  /**
   * Instantiate the plugin as an instance of the given class.
   */
- public Object instantiate(Class<?> type, Configuration conf, boolean fatal) {
+ public Object instantiate(Class<?> type, Configuration conf, boolean fatal)
+ {
      Object obj = newInstance(_name, type, conf, fatal);
      Configurations.configureInstance(obj, conf, _props,
          (fatal) ? getProperty() : null);
@@ -113,7 +113,8 @@ public interface ProductDerivation {
   *
   * @since 1.1.0
   */
- public List<String> getAnchorsInFile(File file) throws IOException, Exception;
+ public List<String> getAnchorsInFile(File file) throws IOException,
+     Exception;

  /**
   * Return a List<String> of all the anchors defined in
@@ -94,7 +94,8 @@ public class ProductDerivations {
  for (int i = 0; i < _derivationErrors.length; i++) {
      if (_derivationErrors[i] == null)
          continue;
-     System.err.println(_derivationNames[i] + ":" + _derivationErrors[i]);
+     System.err.println(_derivationNames[i] + ":" +
+         _derivationErrors[i]);
      break;
  }

@@ -180,14 +180,16 @@ public class Edge {
  }

  /**
-  * List of edges forming a cycle. Only set for TYPE_BACK and TYPE_FORWARD edges.
+  * List of edges forming a cycle. Only set for TYPE_BACK and TYPE_FORWARD
+  * edges.
   */
  public List getCycle() {
      return _cycle;
  }

  /**
-  * List of edges forming a cycle. Only set for TYPE_BACK and TYPE_FORWARD edges.
+  * List of edges forming a cycle. Only set for TYPE_BACK and TYPE_FORWARD
+  * edges.
   */
  public void setCycle(List cycle) {
      _cycle = cycle;
@@ -788,7 +788,8 @@ public abstract class DelegatingCallableStatement
  throw new UnsupportedOperationException();
  }

- public Object getObject(String a, Map<String, Class<?>>b) throws SQLException {
+ public Object getObject(String a, Map<String, Class<?>>b) throws
+     SQLException {
      throw new UnsupportedOperationException();
  }

@@ -595,7 +595,8 @@ public abstract class DelegatingResultSet implements ResultSet, Closeable {
  return _stmnt;
  }

- public Object getObject(int a, Map<String, Class<?>> b) throws SQLException {
+ public Object getObject(int a, Map<String, Class<?>> b) throws
+     SQLException {
      return _rs.getObject(a, b);
  }

@@ -615,7 +616,8 @@ public abstract class DelegatingResultSet implements ResultSet, Closeable {
  return _rs.getArray(a);
  }

- public Object getObject(String a, Map<String, Class<?>> b) throws SQLException {
+ public Object getObject(String a, Map<String, Class<?>> b) throws
+     SQLException {
      return _rs.getObject(a, b);
  }

@@ -39,7 +39,8 @@ public class JDBCEventConnectionDecorator extends AbstractConcurrentEventManager

  private static final Class<EventConnection> eventConnectionImpl;
  private static final Class<EventStatement> eventStatementImpl;
- private static final Class<EventPreparedStatement> eventPreparedStatementImpl;
+ private static final Class<EventPreparedStatement>
+     eventPreparedStatementImpl;

  static {
      try {
@@ -250,7 +251,8 @@ public class JDBCEventConnectionDecorator extends AbstractConcurrentEventManager
  /**
   * Fires events as appropriate.
   */
- protected abstract class EventPreparedStatement extends DelegatingPreparedStatement {
+ protected abstract class EventPreparedStatement extends
+     DelegatingPreparedStatement {

      private final EventConnection _conn;
      private final String _sql;
@@ -45,11 +45,16 @@ import java.util.Calendar;
  import java.util.Iterator;
  import java.util.List;

- import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.LoggingCallableStatement;
- import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.LoggingDatabaseMetaData;
- import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.LoggingPreparedStatement;
- import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.LoggingResultSet;
- import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.LoggingStatement;
+ import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.
+     LoggingCallableStatement;
+ import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.
+     LoggingDatabaseMetaData;
+ import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.
+     LoggingPreparedStatement;
+ import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.
+     LoggingResultSet;
+ import org.apache.openjpa.lib.jdbc.LoggingConnectionDecorator.LoggingConnection.
+     LoggingStatement;
  import org.apache.openjpa.lib.log.Log;
  import org.apache.openjpa.lib.util.ConcreteClassGenerator;
  import org.apache.openjpa.lib.util.J2DoPrivHelper;
@@ -1277,7 +1282,8 @@ public class LoggingConnectionDecorator implements ConnectionDecorator {
  _paramBatch = new ArrayList<List<String>>();
  // copy parameters since they will be re-used
  if (_params != null) {
-     List<String> copyParms = new ArrayList<String>(_params);
+     List<String> copyParms =
+         new ArrayList<String>(_params);
      _paramBatch.add(copyParms);
  }
  else
@@ -37,8 +37,8 @@ public class CommonsLogFactory extends LogFactoryAdapter {
  }

  /**
-  * Adapts a commons logging log to the {@link org.apache.openjpa.lib.log.Log}
-  * interface.
+  * Adapts a commons logging log to the
+  * {@link org.apache.openjpa.lib.log.Log} interface.
   */
  public static class LogAdapter implements org.apache.openjpa.lib.log.Log {

@ -34,7 +34,8 @@ public class Log4JLogFactory extends LogFactoryAdapter {
|
|||
}
|
||||
|
||||
/**
|
||||
* Adapts a Log4J logger to the {@link org.apache.openjpa.lib.log.Log} interface.
|
||||
* Adapts a Log4J logger to the {@link org.apache.openjpa.lib.log.Log}
|
||||
* interface.
|
||||
*/
|
||||
public static class LogAdapter implements Log {
|
||||
|
||||
|
|
|

@@ -63,8 +63,8 @@ public class ClassMetaDataIterator implements MetaDataIterator {
* Constructor; supply the class whose metadata to find, the suffix
* of metadata files, and whether to parse top-down or bottom-up.
*/
public ClassMetaDataIterator(Class<?> cls, String suffix, ClassLoader loader,
boolean topDown) {
public ClassMetaDataIterator(Class<?> cls, String suffix,
ClassLoader loader, boolean topDown) {
// skip classes that can't have metadata
if (cls != null && (cls.isPrimitive()
|| cls.getName().startsWith("java.")

@@ -351,7 +351,8 @@ public abstract class XMLMetaDataSerializer implements MetaDataSerializer {
/**
* Serialize the given set of objects.
*/
protected abstract void serialize(Collection<Object> objs) throws SAXException;
protected abstract void serialize(Collection<Object> objs) throws
SAXException;

/**
* Return the current set of objects for serialization.

@@ -482,7 +482,8 @@ public abstract class J2DoPrivHelper {
*
* @return File
*/
public static final PrivilegedAction<File> getAbsoluteFileAction(final File f) {
public static final PrivilegedAction<File> getAbsoluteFileAction(
final File f) {
return new PrivilegedAction<File>() {
public File run() {
return f.getAbsoluteFile();

@@ -746,7 +747,8 @@ public abstract class J2DoPrivHelper {
* @return ServerSocket
* @throws IOException
*/
public static final PrivilegedExceptionAction<ServerSocket> newServerSocketAction(
public static final PrivilegedExceptionAction<ServerSocket>
newServerSocketAction(
final int port) throws IOException {
return new PrivilegedExceptionAction<ServerSocket>() {
public ServerSocket run() throws IOException {

@@ -1020,7 +1022,8 @@ public abstract class J2DoPrivHelper {
*
* @return BCField
*/
public static final PrivilegedAction<BCField> getFieldInstructionFieldAction(
public static final PrivilegedAction<BCField> getFieldInstructionFieldAction
(
final FieldInstruction instruction) {
return new PrivilegedAction<BCField>() {
public BCField run() {

@@ -94,7 +94,8 @@ public class Services {
try {
Set resourceList = new TreeSet();
Enumeration resources = AccessController.doPrivileged(
J2DoPrivHelper.getResourcesAction(loader, PREFIX + serviceName));
J2DoPrivHelper.getResourcesAction(loader,
PREFIX + serviceName));
while (resources.hasMoreElements())
addResources((URL) resources.nextElement(), resourceList);

@@ -750,7 +750,8 @@ public class AnnotationPersistenceMappingParser
private int toForeignKeyAction(ForeignKeyAction action) {
switch (action) {
case RESTRICT:
return org.apache.openjpa.jdbc.schema.ForeignKey.ACTION_RESTRICT;
return org.apache.openjpa.jdbc.schema.ForeignKey.
ACTION_RESTRICT;
case CASCADE:
return org.apache.openjpa.jdbc.schema.ForeignKey.ACTION_CASCADE;
case NULL:

@@ -1267,7 +1268,8 @@ public class AnnotationPersistenceMappingParser
}
}

public static FieldMapping getEmbeddedFieldMapping(FieldMapping fm, String attrName) {
public static FieldMapping getEmbeddedFieldMapping(FieldMapping fm,
String attrName) {
ClassMapping embed = null;
boolean isKey = false;
boolean isValue = false;

@@ -1318,7 +1320,8 @@ public class AnnotationPersistenceMappingParser

}

public static FieldMapping getAttributeOverrideField(String attrName, FieldMapping fm, ClassMapping embed) {
public static FieldMapping getAttributeOverrideField(String attrName,
FieldMapping fm, ClassMapping embed) {
FieldMapping efm;
int idxOfDot = attrName.indexOf(".");
if (idxOfDot == -1) {

@@ -1872,7 +1875,8 @@ public class AnnotationPersistenceMappingParser
/**
* Parse @MapKeyJoinColumn(s).
*/
private void parseMapKeyJoinColumns(FieldMapping fm, MapKeyJoinColumn... joins) {
private void parseMapKeyJoinColumns(FieldMapping fm,
MapKeyJoinColumn... joins) {
if (joins.length == 0)
return;

@@ -167,9 +167,11 @@ public class AnnotationPersistenceMappingSerializer
* Add an annotation builder to list of builders for the specified
* class metadata.
*/
protected void addAnnotation(AnnotationBuilder ab, QueryResultMapping meta) {
protected void addAnnotation(AnnotationBuilder ab, QueryResultMapping meta)
{
if (_rsmAnnos == null)
_rsmAnnos = new HashMap<QueryResultMapping, List<AnnotationBuilder>>();
_rsmAnnos = new HashMap<QueryResultMapping,
List<AnnotationBuilder>>();
List<AnnotationBuilder> list = _rsmAnnos.get(meta);
if (list == null) {
list = new ArrayList<AnnotationBuilder>();

@@ -226,7 +228,8 @@ public class AnnotationPersistenceMappingSerializer
for (String second : info.getSecondaryTableNames()) {
AnnotationBuilder abSecTable =
addAnnotation(SecondaryTable.class, mapping);
serializeTable(second, null, info, info.getUniques(second), abSecTable);
serializeTable(second, null, info, info.getUniques(second),
abSecTable);
}
}

@@ -116,7 +116,8 @@ public class JDBCFetchPlanImpl
return LRSSizeAlgorithm.fromKernelConstant(_fetch.getLRSSize());
}

public JDBCFetchPlanImpl setLRSSizeAlgorithm(LRSSizeAlgorithm lrsSizeAlgorithm) {
public JDBCFetchPlanImpl setLRSSizeAlgorithm(
LRSSizeAlgorithm lrsSizeAlgorithm) {
_fetch.setLRSSize(lrsSizeAlgorithm.toKernelConstant());
return this;
}

@@ -153,7 +153,8 @@ public class PersistenceMappingDefaults
private Table getTable(ClassMapping clm) {
Table table = clm.getTable();
if (table == null) {
ValueMappingImpl value = (ValueMappingImpl)clm.getEmbeddingMetaData();
ValueMappingImpl value =
(ValueMappingImpl)clm.getEmbeddingMetaData();
if (value == null)
return table;
FieldMetaData field = value.getFieldMetaData();

@@ -189,7 +189,8 @@ public class XMLPersistenceMappingSerializer
.getClassName(mapping.getDescribedType()), null,
info.getUniques(info.getTableName()));
for (String second : info.getSecondaryTableNames())
serializeTable(second, "secondary-table", null, info, info.getUniques(second));
serializeTable(second, "secondary-table", null, info,
info.getUniques(second));
serializeColumns(info, ColType.PK_JOIN, null);
}

@@ -408,7 +409,8 @@ public class XMLPersistenceMappingSerializer
}
}
startElement("collection-table");
ValueMappingImpl elem = (ValueMappingImpl) field.getElement();
ValueMappingImpl elem =
(ValueMappingImpl) field.getElement();
serializeColumns(elem.getValueInfo(), ColType.COL,
null);
endElement("collection-table");

@@ -175,7 +175,8 @@ public class AnnotationBuilder {
if (value instanceof List) {
sb.append("{");
List<AnnotationBuilder> l = (List<AnnotationBuilder>) value;
for (Iterator<AnnotationBuilder> i = l.iterator(); i.hasNext();) {
for (Iterator<AnnotationBuilder> i = l.iterator(); i.hasNext();)
{
AnnotationBuilder ab = i.next();
sb.append(ab.toString());
if (i.hasNext())

@@ -27,8 +27,10 @@ import static org.apache.openjpa.persistence.MetaDataTag.ELEM_DEPENDENT;
import static org.apache.openjpa.persistence.MetaDataTag.ELEM_TYPE;
import static org.apache.openjpa.persistence.MetaDataTag.EMBEDDED_ID;
import static org.apache.openjpa.persistence.MetaDataTag.ENTITY_LISTENERS;
import static org.apache.openjpa.persistence.MetaDataTag.EXCLUDE_DEFAULT_LISTENERS;
import static org.apache.openjpa.persistence.MetaDataTag.EXCLUDE_SUPERCLASS_LISTENERS;
import static org.apache.openjpa.persistence.MetaDataTag.
EXCLUDE_DEFAULT_LISTENERS;
import static org.apache.openjpa.persistence.MetaDataTag.
EXCLUDE_SUPERCLASS_LISTENERS;
import static org.apache.openjpa.persistence.MetaDataTag.EXTERNALIZER;
import static org.apache.openjpa.persistence.MetaDataTag.EXTERNAL_VALS;
import static org.apache.openjpa.persistence.MetaDataTag.FACTORY;

@@ -534,7 +536,7 @@ public class AnnotationPersistenceMetaDataParser

Entity entity = (Entity) _cls.getAnnotation(Entity.class);
if (isMetaDataMode()) {
meta.setAbstract(_cls.getAnnotation(MappedSuperclass.class) != null);
meta.setAbstract(_cls.getAnnotation(MappedSuperclass.class) !=null);
// while the spec only provides for embedded exclusive, it doesn't
// seem hard to support otherwise
if (entity == null)

@@ -586,20 +588,20 @@ public class AnnotationPersistenceMetaDataParser
meta.setObjectIdType(((IdClass) anno).value(), true);
break;
case NATIVE_QUERIES:
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY) == 0)
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY)==0)
parseNamedNativeQueries(_cls,
((NamedNativeQueries) anno).value());
break;
case NATIVE_QUERY:
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY) == 0)
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY)==0)
parseNamedNativeQueries(_cls, (NamedNativeQuery) anno);
break;
case QUERIES:
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY) == 0)
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY)==0)
parseNamedQueries(_cls, ((NamedQueries) anno).value());
break;
case QUERY:
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY) == 0)
if (isQueryMode() && (meta.getSourceMode() & MODE_QUERY)==0)
parseNamedQueries(_cls, (NamedQuery) anno);
break;
case SEQ_GENERATOR:

@@ -488,7 +488,8 @@ public class AnnotationPersistenceMetaDataSerializer
*/
protected void addAnnotation(AnnotationBuilder ab, SequenceMetaData meta) {
if (_seqAnnos == null)
_seqAnnos = new HashMap<SequenceMetaData, List<AnnotationBuilder>>();
_seqAnnos = new HashMap<SequenceMetaData,
List<AnnotationBuilder>>();
List<AnnotationBuilder> list = _seqAnnos.get(meta);
if (list == null) {
list = new ArrayList<AnnotationBuilder>();

@@ -1236,8 +1237,9 @@ public class AnnotationPersistenceMetaDataSerializer
}

/**
* Represents ordered set of {@link org.apache.openjpa.meta.SequenceMetaData}s with a
* common class scope.
* Represents ordered set of
* {@link org.apache.openjpa.meta.SequenceMetaData}s with a common class
* scope.
*
* @author Stephen Kim
* @author Pinaki Poddar

@@ -1304,8 +1306,8 @@ public class AnnotationPersistenceMetaDataSerializer
}

/**
* Represents ordered set of {@link org.apache.openjpa.meta.QueryMetaData}s with a
* common class scope.
* Represents ordered set of {@link org.apache.openjpa.meta.QueryMetaData}s
* with a common class scope.
*
* @author Stephen Kim
* @author Pinaki Poddar

@@ -1,7 +1,8 @@
package org.apache.openjpa.persistence;

import static org.apache.openjpa.kernel.QueryHints.HINT_IGNORE_PREPARED_QUERY;
import static org.apache.openjpa.kernel.QueryHints.HINT_INVALIDATE_PREPARED_QUERY;
import static org.apache.openjpa.kernel.QueryHints.
HINT_INVALIDATE_PREPARED_QUERY;
import static org.apache.openjpa.kernel.QueryHints.HINT_RESULT_COUNT;

import java.lang.reflect.Method;

@@ -151,8 +151,8 @@ public class JPAFacadeHelper {
}

/**
* Returns the {@link org.apache.openjpa.meta.ClassMetaData} associated with the
* persistent object <code>o</code>.
* Returns the {@link org.apache.openjpa.meta.ClassMetaData} associated with
* the persistent object <code>o</code>.
*/
public static ClassMetaData getMetaData(Object o) {
if (o == null)

@@ -163,7 +163,8 @@ public class OpenJPAPersistence {
public static OpenJPAEntityManager getEntityManager(Object o) {
try {
if (ImplHelper.isManageable(o)) {
PersistenceCapable pc = ImplHelper.toPersistenceCapable(o, null);
PersistenceCapable pc = ImplHelper.toPersistenceCapable(o,
null);
if (pc != null)
return JPAFacadeHelper.toEntityManager(
(Broker) pc.pcGetGenericContext());

@@ -108,7 +108,8 @@ public class QueryImpl implements OpenJPAQuerySPI, Serializable {
*
* @deprecated
*/
public QueryImpl(EntityManagerImpl em, org.apache.openjpa.kernel.Query query) {
public QueryImpl(EntityManagerImpl em,
org.apache.openjpa.kernel.Query query) {
this(em, null, query);
}

@@ -362,7 +363,8 @@ public class QueryImpl implements OpenJPAQuerySPI, Serializable {
return setParameter(position, convertTemporalType(value, t));
}

public OpenJPAQuery setParameter(int position, Date value, TemporalType type) {
public OpenJPAQuery setParameter(int position, Date value,
TemporalType type) {
return setParameter(position, convertTemporalType(value, type));
}

@@ -421,7 +423,8 @@ public class QueryImpl implements OpenJPAQuerySPI, Serializable {
return setParameter(name, convertTemporalType(value, type));
}

public OpenJPAQuery setParameter(String name, Date value, TemporalType type) {
public OpenJPAQuery setParameter(String name, Date value, TemporalType type)
{
return setParameter(name, convertTemporalType(value, type));
}

@@ -1475,7 +1475,8 @@ public class XMLPersistenceMetaDataParser

if (fmd.getDeclaredTypeCode() != JavaTypes.COLLECTION &&
fmd.getDeclaredTypeCode() != JavaTypes.MAP)
throw getException(_loc.get("bad-meta-anno", fmd, "ElementCollection"));
throw getException(_loc.get("bad-meta-anno", fmd,
"ElementCollection"));

val = attrs.getValue("fetch");
if (val != null)

@@ -1558,7 +1559,8 @@ public class XMLPersistenceMetaDataParser
if (meta != null) {
Class defType = meta.getDefiningType();
if ((defType != _cls) && log.isWarnEnabled()) {
log.warn(_loc.get("dup-query", name, currentLocation(), defType));
log.warn(_loc.get("dup-query", name, currentLocation(),
defType));
}
pushElement(meta);
return true;

@@ -1569,7 +1571,8 @@ public class XMLPersistenceMetaDataParser
meta.setQueryString(attrs.getValue("query"));
meta.setLanguage(JPQLParser.LANG_JPQL);
/** TODO: Uncomment when orm.xsd defines lockmode
LockModeType lockMode = LockModeType.valueOf(attrs.getValue("lockMode"));
LockModeType lockMode =
LockModeType.valueOf(attrs.getValue("lockMode"));
meta.addHint("openjpa.FetchPlan.ReadLockMode",
JPA2LockLevels.toLockLevel(lockMode));
**/

@@ -44,8 +44,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Expression when);

@@ -56,8 +56,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Number when);

@@ -80,8 +80,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Date when);

@@ -92,8 +92,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Calendar when);

@@ -104,8 +104,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Class when);

@@ -116,8 +116,8 @@ public interface CaseExpression {
* the result of the specific case. Clauses are evaluated in the order added
*
* @param when -
* corresponds to the value against which the case operand of the
* simple case is tested
* corresponds to the value against which the case operand of
* the simple case is tested
* @return CaseExpression corresponding to the case with the added clause
*/
CaseExpression when(Enum<?> when);

@@ -35,7 +35,8 @@ public interface DomainObject extends PathExpression, QueryDefinition {
* object.
*
* @param attribute -
* name of the attribute that references the target of the join
* name of the attribute that references the target of the
* join
* @return the new DomainObject that is added for the target of the join
*/
DomainObject join(String attribute);

@@ -51,7 +52,8 @@ public interface DomainObject extends PathExpression, QueryDefinition {
* newly joined domain object.
*
* @param attribute -
* name of the attribute that references the target of the join
* name of the attribute that references the target of the
* join
* @return the new DomainObject that is added for the target of the join
*/
DomainObject leftJoin(String attribute);

@@ -63,7 +65,8 @@ public interface DomainObject extends PathExpression, QueryDefinition {
* The query is modified to include the joined domain object.
*
* @param attribute -
* name of the attribute that references the target of the join
* name of the attribute that references the target of the
* join
* @return the FetchJoinObject that is added for the target of the join
*/
FetchJoinObject joinFetch(String attribute);

@@ -75,7 +78,8 @@ public interface DomainObject extends PathExpression, QueryDefinition {
* attribute. The query is modified to include the joined domain object.
*
* @param attribute -
* name of the attribute that references the target of the join
* name of the attribute that references the target of the
* join
* @return the FetchJoinObject that is added for the target of the join
*/
FetchJoinObject leftJoinFetch(String attribute);

@@ -341,8 +341,8 @@ public interface Expression extends SelectItem, PredicateOperand {
* @param str -
* string to be located
* @param position -
* expression corresponding to position at which to start the
* search
* expression corresponding to position at which to start
* the search
* @return expression denoting the first position at which the string was
* found or expression denoting 0 if the string was not found
*/

@@ -372,8 +372,8 @@ public interface Expression extends SelectItem, PredicateOperand {
* @param str -
* expression corresponding to the string to be located
* @param position -
* expression corresponding to position at which to start the
* search
* expression corresponding to position at which to start
* the search
* @return expression denoting the first position at which the string was
* found or expression denoting 0 if the string was not found
*/

@@ -164,11 +164,13 @@ abstract class ExpressionImpl extends AbstractVisitable
}

public Expression substring(int start, Expression len) {
return new SubStringExpression(this, new ConstantExpression(start), len);
return new SubStringExpression(this, new ConstantExpression(start),
len);
}

public Expression substring(Expression start, int len) {
return new SubStringExpression(this, start, new ConstantExpression(len));
return new SubStringExpression(this, start,
new ConstantExpression(len));
}

public Expression substring(Expression start, Expression len) {

@@ -26,7 +26,8 @@ package org.apache.openjpa.persistence.query;
*/
public class GreaterThanExpression extends BinaryExpressionPredicate {
public GreaterThanExpression(Expression op1, Expression op2) {
super(op1, BinaryConditionalOperator.GREATER, BinaryConditionalOperator.LESSEQUAL, op2);
super(op1, BinaryConditionalOperator.GREATER,
BinaryConditionalOperator.LESSEQUAL, op2);
}

}

@@ -27,7 +27,8 @@ import static org.apache.openjpa.persistence.query.PathOperator.NAVIGATION;
*
*/
public class JoinPath extends AbstractDomainObject implements DomainObject {
public JoinPath(AbstractDomainObject parent, PathOperator join, String attr) {
public JoinPath(AbstractDomainObject parent, PathOperator join, String attr)
{
super(parent.getOwner(), parent, join, attr);
}

@@ -45,8 +45,8 @@ public class LikeExpression extends BinaryExpressionPredicate {

@Override
public String asExpression(AliasContext ctx) {
return super.asExpression(ctx)
+ (_escaped ? "ESCAPE " + JPQLHelper.toJPQL(ctx, _echar) : EMPTY);
return super.asExpression(ctx) +
(_escaped ? "ESCAPE " + JPQLHelper.toJPQL(ctx, _echar) : EMPTY);
}

}

@@ -31,7 +31,8 @@ public class LocateExpression extends BinaryOperatorExpression {
private final Expression _start;

public LocateExpression(Expression key, String str, int start) {
super(key, BinaryFunctionalOperator.LOCATE, new ConstantExpression(str));
super(key, BinaryFunctionalOperator.LOCATE,
new ConstantExpression(str));
_start = new ConstantExpression(start);
}

@@ -41,7 +42,8 @@ public class LocateExpression extends BinaryOperatorExpression {
}

public LocateExpression(Expression key, String str, Expression start) {
super(key, BinaryFunctionalOperator.LOCATE, new ConstantExpression(str));
super(key, BinaryFunctionalOperator.LOCATE,
new ConstantExpression(str));
_start = start;
}

@@ -51,7 +53,8 @@ public class LocateExpression extends BinaryOperatorExpression {
}

public String asExpression(AliasContext ctx) {
String start = _start == null ? EMPTY : COMMA + ((Visitable)_start).asExpression(ctx);
String start = _start == null ? EMPTY : COMMA +
((Visitable)_start).asExpression(ctx);
return new StringBuffer(_op.toString())
.append(OPEN_BRACE)
.append(((Visitable)_e1).asExpression(ctx))

@@ -558,8 +558,8 @@ public interface PredicateOperand {
Predicate like(PredicateOperand pattern, char escapeChar);

/**
* Create a predicate for testing whether the PredicateOperand satisfies the
* given pattern.
* Create a predicate for testing whether the PredicateOperand satisfies
* the given pattern.
*
* @param pattern
* @return conditional predicate

@@ -567,8 +567,8 @@ public interface PredicateOperand {
Predicate like(String pattern);

/**
* Create a predicate for testing whether the PredicateOperand satisfies the
* given pattern.
* Create a predicate for testing whether the PredicateOperand satisfies
* the given pattern.
*
* @param pattern
* @param escapeChar

@@ -577,8 +577,8 @@ public interface PredicateOperand {
Predicate like(String pattern, PredicateOperand escapeChar);

/**
* Create a predicate for testing whether the PredicateOperand satisfies the
* given pattern.
* Create a predicate for testing whether the PredicateOperand satisfies
* the given pattern.
*
* @param pattern
* @param escapeChar

@@ -126,7 +126,8 @@ public interface QueryDefinition extends Subquery {
* results. Replaces the previous order-by list, if any.
*
* @param orderByItemList -
* a list containing one or more OrderByItem instances
* a list containing one or more OrderByItem
* instances
* @return the modified QueryDefinition instance
*/
QueryDefinition orderBy(List<OrderByItem> orderByItemList);

@@ -167,8 +168,8 @@ public interface QueryDefinition extends Subquery {
* @param cls -
* a class with the correponding constructor
* @param args -
* select items that correspond to result types that are valid as
* arguments to the constructor
* select items that correspond to result types that are valid
* as arguments to the constructor
* @result SelectItem instance representing the constructor
*/
SelectItem newInstance(Class cls, SelectItem... args);

@@ -252,7 +253,8 @@ public interface QueryDefinition extends Subquery {
* .elseCase(scalar-expression)
*
* @param caseOperand -
* value used for testing against the when scalar expressions
* value used for testing against the when scalar
* expressions
* @return case expression with the given case operand
*/
CaseExpression simpleCase(String caseOperand);

@@ -266,7 +268,8 @@ public interface QueryDefinition extends Subquery {
* .elseCase(scalar-expression)
*
* @param caseOperand -
* value used for testing against the when scalar expressions
* value used for testing against the when scalar
* expressions
* @return case expression with the given case operand
*/
CaseExpression simpleCase(Date caseOperand);

@@ -280,7 +283,8 @@ public interface QueryDefinition extends Subquery {
* .elseCase(scalar-expression)
*
* @param caseOperand -
* value used for testing against the when scalar expressions
* value used for testing against the when scalar
* expressions
* @return case expression with the given case operand
*/
CaseExpression simpleCase(Calendar caseOperand);

@@ -294,7 +298,8 @@ public interface QueryDefinition extends Subquery {
* .elseCase(scalar-expression)
*
* @param caseOperand -
* value used for testing against the when scalar expressions
* value used for testing against the when scalar
* expressions
* @return case expression with the given case operand
*/
CaseExpression simpleCase(Class caseOperand);

@@ -308,7 +313,8 @@ public interface QueryDefinition extends Subquery {
* .elseCase(scalar-expression)
*
* @param caseOperand -
* value used for testing against the when scalar expressions
* value used for testing against the when scalar
* expressions
* @return case expression with the given case operand
*/
CaseExpression simpleCase(Enum<?> caseOperand);

@@ -54,7 +54,7 @@ public class SubStringExpression extends UnaryOperatorExpression {
public String asExpression(AliasContext ctx) {
return _op + "(" + ((Visitable)_e).asExpression(ctx)
+ "," + ((Visitable)_start).asExpression(ctx)
+ (_length == null ? "" : "," + ((Visitable)_length).asExpression(ctx))
+ ")";
+ (_length == null ? "" : ","
+ ((Visitable)_length).asExpression(ctx)) + ")";
}
}

@@ -60,7 +60,8 @@ public class DistributedBrokerImpl extends FinalizingBrokerImpl
}

public DistributedStoreManager getDistributedStoreManager() {
return (DistributedStoreManager)getStoreManager().getInnermostDelegate();
return (DistributedStoreManager)getStoreManager().
getInnermostDelegate();
}

public Slice addSlice(String name, Map properties) {

@@ -84,8 +85,10 @@ public class DistributedBrokerImpl extends FinalizingBrokerImpl
OpCallbacks call) {
OpenJPAStateManager sm = getStateManager(pc);
SliceInfo info = null;
boolean replicated = SliceImplHelper.isReplicated(pc, getConfiguration());
if (getOperatingSet().isEmpty() && !SliceImplHelper.isSliceAssigned(sm)) {
boolean replicated = SliceImplHelper.isReplicated(pc,
getConfiguration());
if (getOperatingSet().isEmpty() && !SliceImplHelper.isSliceAssigned(sm))
{
info = SliceImplHelper.getSlicesByPolicy(pc, getConfiguration(),
this);
_rootSlice = info.getSlices()[0];

@@ -30,7 +30,8 @@ import org.apache.openjpa.kernel.StoreQuery;
*/
public class DistributedQueryImpl extends QueryImpl {
private final ReentrantSliceLock _lock;
public DistributedQueryImpl(Broker broker, String language, StoreQuery storeQuery) {
public DistributedQueryImpl(Broker broker, String language,
StoreQuery storeQuery) {
super(broker, language, storeQuery);
_lock = new ReentrantSliceLock();
}

@@ -43,7 +43,8 @@ public interface DistributionPolicy {
* the list is either explicit <code>openjpa.slice.Names</code> property
* or implicit i.e. alphabetic order of available identifiers if
* <code>openjpa.slice.Names</code> is unspecified.
* @param context the generic persistence context managing the given instance.
* @param context the generic persistence context managing the given
* instance.
*
* @return identifier of the slice. This name must match one of the
* given slice names.

Some files were not shown because too many files have changed in this diff.