OPENJPA-957 - Create Fetch*HintHandler(s) for property processing in EntityManager/Query interface methods.

git-svn-id: https://svn.apache.org/repos/asf/openjpa/trunk@762161 13f79535-47bb-0310-9956-ffa450edef68
Albert Lee 2009-04-05 21:29:42 +00:00
parent 616f8fdbee
commit 718370e1ef
32 changed files with 8455 additions and 5139 deletions
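The change wires JPA hint processing (EntityManager/Query properties) into OpenJPA's fetch configuration. A minimal usage sketch of the call path these handlers serve is shown below; it is not part of the commit, "Employee" is a hypothetical entity, and the accepted hint values depend on the OpenJPA version in use. The hint keys themselves are taken from the handlers and messages added in this change.

import javax.persistence.EntityManager;
import javax.persistence.Query;

// Illustrative sketch only; not from this commit.
public class FetchHintSketch {
    public static void queryWithHints(EntityManager em) {
        Query q = em.createQuery("select e from Employee e");
        // Routed by FetchConfigurationHintHandler to the JDBC fetch plan
        // (mapped to the "Isolation" property in this change).
        q.setHint("openjpa.jdbc.TransactionIsolation", "serializable");
        // Lock-mode hints are deferred and applied once a transaction is
        // active, per the getReadLockLevel() changes below.
        q.setHint("openjpa.FetchPlan.ReadLockMode", "pessimistic-read");
        q.getResultList();
    }
}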

View File

@@ -19,8 +19,8 @@
package org.apache.openjpa.jdbc.kernel;
import java.io.Serializable;
+import java.sql.ResultSet;
import java.sql.Connection;
-import java.sql.ResultSet;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
@@ -31,6 +31,7 @@ import org.apache.commons.lang.StringUtils;
import org.apache.openjpa.conf.OpenJPAConfiguration;
import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
import org.apache.openjpa.jdbc.meta.ClassMapping;
+import org.apache.openjpa.jdbc.sql.JoinSyntaxes;
import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.FetchConfigurationImpl;
import org.apache.openjpa.kernel.StoreContext;
@@ -121,6 +122,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setEagerFetchMode(int mode) {
+        if (mode != DEFAULT
+            && mode != EagerFetchModes.EAGER_NONE
+            && mode != EagerFetchModes.EAGER_JOIN
+            && mode != EagerFetchModes.EAGER_PARALLEL)
+            throw new IllegalArgumentException(
+                _loc.get("bad-fetch-mode", new Integer(mode)).getMessage());
        if (mode == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
@@ -145,6 +153,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setSubclassFetchMode(int mode) {
+        if (mode != DEFAULT
+            && mode != EagerFetchModes.EAGER_NONE
+            && mode != EagerFetchModes.EAGER_JOIN
+            && mode != EagerFetchModes.EAGER_PARALLEL)
+            throw new IllegalArgumentException(
+                _loc.get("bad-fetch-mode", new Integer(mode)).getMessage());
        if (mode == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
@@ -160,6 +175,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setResultSetType(int type) {
+        if (type != DEFAULT
+            && type != ResultSet.TYPE_FORWARD_ONLY
+            && type != ResultSet.TYPE_SCROLL_INSENSITIVE
+            && type != ResultSet.TYPE_SCROLL_SENSITIVE)
+            throw new IllegalArgumentException(_loc.get("bad-resultset-type",
+                new Integer(type)).getMessage());
        if (type == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
@@ -174,6 +196,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setFetchDirection(int direction) {
+        if (direction != DEFAULT
+            && direction != ResultSet.FETCH_FORWARD
+            && direction != ResultSet.FETCH_REVERSE
+            && direction != ResultSet.FETCH_UNKNOWN)
+            throw new IllegalArgumentException(_loc.get("bad-fetch-direction",
+                new Integer(direction)).getMessage());
        if (direction == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
@@ -188,6 +217,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setLRSSize(int size) {
+        if (size != DEFAULT
+            && size != LRSSizes.SIZE_QUERY
+            && size != LRSSizes.SIZE_LAST
+            && size != LRSSizes.SIZE_UNKNOWN)
+            throw new IllegalArgumentException(
+                _loc.get("bad-lrs-size", new Integer(size)).getMessage());
        if (size == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
@@ -202,6 +238,13 @@ public class JDBCFetchConfigurationImpl
    }
    public JDBCFetchConfiguration setJoinSyntax(int syntax) {
+        if (syntax != DEFAULT
+            && syntax != JoinSyntaxes.SYNTAX_SQL92
+            && syntax != JoinSyntaxes.SYNTAX_TRADITIONAL
+            && syntax != JoinSyntaxes.SYNTAX_DATABASE)
+            throw new IllegalArgumentException(
+                _loc.get("bad-join-syntax", new Integer(syntax)).getMessage());
        if (syntax == DEFAULT) {
            JDBCConfiguration conf = getJDBCConfiguration();
            if (conf != null)
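A small test-style sketch (not from this commit) of the argument validation added above; 42 is just an arbitrary unrecognized mode, and the thrown message is localized, so only the exception type is checked.

import org.apache.openjpa.jdbc.kernel.JDBCFetchConfiguration;

// Hypothetical illustration of the new setter validation.
public class EagerFetchModeValidationSketch {
    static boolean rejectsUnknownMode(JDBCFetchConfiguration fetch) {
        try {
            fetch.setEagerFetchMode(42); // neither DEFAULT nor an EagerFetchModes constant
            return false;
        } catch (IllegalArgumentException expected) {
            return true; // rejected with the localized "bad-fetch-mode" message
        }
    }
}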

View File

@@ -38,6 +38,7 @@ import org.apache.openjpa.jdbc.schema.Schema;
import org.apache.openjpa.jdbc.schema.Sequence;
import org.apache.openjpa.jdbc.schema.Table;
import org.apache.openjpa.kernel.Filters;
+import org.apache.openjpa.kernel.MixedLockLevels;
import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.meta.JavaTypes;
import org.apache.openjpa.util.OpenJPAException;
@@ -349,6 +350,9 @@ public class DB2Dictionary
        else
            isolationLevel = conf.getTransactionIsolationConstant();
+        if (fetch.getReadLockLevel() >= MixedLockLevels.LOCK_PESSIMISTIC_WRITE)
+            isolationLevel = Connection.TRANSACTION_SERIALIZABLE;
        if (isForUpdate) {
            switch (db2ServerType) {
            case db2ISeriesV5R3OrEarlier:

View File

@@ -1,145 +1,154 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
error-rollback: An error occurred attempting to rollback to the savepoint "{0}"
error-save: An error occurred attempting to set the savepoint "{0}". This \
    driver may not support JDBC 3 savepoints.
mult-mapping-aggregate: Cannot perform an aggregate query on a hierarchy with \
    unjoined subclasses: {0}
sqlquery-missing-params: SQL query "{0}" declares a parameter index "{1}" for \
    which no value was given. The given parameters were: {2}
sqlquery-fewer-params: SQL query "{0}" declares {1} distinct parameter(s), \
    but only {2} parameters are given. Given parameter values are "{3}".
no-sql: You have not specified a SQL filter to execute in your SQL query.
del-ins-cycle: An unresolvable constraint cycle was detected. This typically \
    means that you are persisting a new object with the same primary key value \
    as an object you are deleting in the same transaction, and at the same \
    time you have circular foreign key dependencies in the transaction. The \
    combination of these two factors can sometimes lead to a situation in \
    which OpenJPA cannot meet all the database constraints.
ref-cycle: An unresolvable constraint cycle was detected. This typically \
    means that a mapping in a table other than the class'' primary table has \
    a foreign key that is part of a circular foreign key dependency. OpenJPA \
    sometimes cannot meet circular dependencies when some of the involved \
    mappings are in secondary tables.
update-failed-no-failed-obj: Database operation failed. Update count for SQL \
    statement was {0}. Statement: {1}
virtual-mapping: Cannot instantiate virtual mapping "{0}".
press-key-end: Server running. Press enter to stop.
no-server-conf: There is no persistence server configured.
server-usage: Usage: \
    java org.apache.openjpa.jdbc.kernel.StartPersistenceServer\n\
    \t[-properties/-p <properties file or resource>]\n\
    \t[-<property name> <property value>]*
cant-lock-on-load: The database is unable to lock this query. Each object \
    matching the query will be locked individually after it is loaded; \
    however, it is technically possible that another transaction could modify \
    the data before the lock is obtained. See the documentation on Object \
    Locking for details.\n"{0}"
start-trans-for-lock: Though you are using optimistic transactions, OpenJPA is \
    now beginning a datastore transaction because you have requested a lock \
    on some data.
millis-query-timeout: JDBC locking does not support millisecond-granularity \
    timeouts. Use timeouts that are multiples of 1000 for even second values.
batch-not-supported: The update count for the statement was an invalid \
    value ({0}). This indicates that your database or JDBC driver does not \
    have complete support for executing batch statements. Batch \
    functionality should be disabled by including "BatchLimit=0" in \
    your openjpa.jdbc.DBDictionary configuration property. Statement: {1}
bad-synch-mappings: Invalid SynchronizeMappings operation ("{0}") specified. \
    Valid operations are: {1}
make-native-seq: Creating sequence.
drop-native-seq: Dropping sequence.
make-seq-table: Creating sequence table.
drop-seq-table: Dropping sequence table.
bad-seq-up: Attempt to update the sequence table "{0}" failed. The sequence \
    table is typically created when you run the mappingtool''s refresh action \
    on any datastore identity class. If you have not run the mappingtool but \
    want to create the sequence table, run:\n\
    java org.apache.openjpa.jdbc.kernel.TableJDBCSeq -action add
bad-seq-type: This sequence of type "{0}" cannot generate values for \
    persistent type "{1}".
no-seq-sql: Error instantiating named sequence "{0}": Your database dictionary \
    does not support native sequences. To tell the dictionary how to select \
    sequence values, use:\n\
    openjpa.jdbc.DBDictionary: NextSequenceQuery="SELECT NEXT VALUE \
    FOR '{0}"\n\
    Where the above string is replaced with the proper SQL for your database.
invalid-seq-sql: No rows returned for sql "{0}". Check your configuration.
insert-seq: Inserting row for this mapping into sequence table.
no-seq-row: There is no row for mapping "{0}" in sequence table "{1}", and \
    the attempt to insert a row has apparently failed.
update-seq: Updating sequence values.
null-join: Attempt to add a null/empty fetch join field.
get-seq: Getting current sequence values.
seq-usage: Usage: java org.apache.openjpa.jdbc.kernel.TableJDBCSeq\n\
    \t[-properties/-p <properties file or resource>]\n\
    \t[-<property name> <property value>]*\n\
    \t-action/-a <add | drop | get | set> [value]
clstable-seq-usage: Usage: \
    java org.apache.openjpa.jdbc.kernel.ClassTableJDBCSeq\n\
    \t[-properties/-p <properties file or resource>]\n\
    \t[-<property name> <property value>]*\n\
    \t-action/-a <add | drop | get | set>\n\
    \t[class name | .java file | .class file | .jdo file] [value]
native-seq-usage: Usage: java org.apache.openjpa.jdbc.kernel.NativeJDBCSeq\n\
    \t[-properties/-p <properties file or resource>]\n\
    \t[-<property name> <property value>]*
bad-level: Invalid isolation level. Valid levels are -1, \
-    Connection.TRANSACTION_NONE, Connection.TRANSACTION_READ_UNCOMMITTED, \
-    Connection.TRANSACTION_READ_COMMITTED, \
-    Connection.TRANSACTION_REPEATABLE_READ, or \
-    Connection.TRANSACTION_SERIALIZABLE. Specified value: {0}.
+    "none"(0), "read-uncommitted"(1), "read-committed"(2), \
+    "repeatable-read"(4) or "serializable"(8). Specified value: {0}.
no-nullable-fk: No nullable foreign key found to resolve circular flush\n\
    dependency. During flush processing, changes to instances, new\n\
    instances, and deleted instances must be processed in a specific sequence\n\
    to avoid foreign key constraint violations. The changes required in this\n\
    transaction cannot be reordered because none of the foreign key constraints\n\
    is nullable (optional).
graph-not-cycle-free: A circular flush dependency has been found after all \
    circular dependencies should have been resolved.
batch_limit: The batch limit is set to {0}.
batch_update_info: ExecuteBatch command returns update count {0} for \
    statement {1}.
prepared-query-cached: Query "{0}" is cached as target query "{1}"
prepared-query-not-cachable: Query "{0}" is not fit for caching.
prepared-query-invalidate: Query "{0}" is invalidated and removed from cache.
prepared-query-uncache-strong: Query "{0}" is permanently excluded from cache.
prepared-query-uncache-weak: Query "{0}" is excluded temporarily due to "{1}".
prepared-query-add-pattern: Adding a Query exclusion pattern "{0}" has caused \
    following {1} cached queries to be removed from the cache: "{2}".
prepared-query-remove-pattern: Removing a Query exclusion pattern "{0}" caused \
    following {1} queries to be re-inserted in the cache: "{2}".
uparam-mismatch: Supplied user parameters "{1}" do not match expected \
    parameters "{0}" for the prepared query "{2}".
uparam-null: No user parameter was given. Expected parameters "{0}" for the \
    prepared query "{1}".
uparam-coll-size: Parameter "{0}" has a value "{1}" which is not compatible \
    with the available positions {2} in the parameter list of the prepared query
uparam-no-pos: User parameter "{0}" does not appear in any position in the \
    prepared query "{1}".
uparam-pc-key: Class "{0}" uses {1} primary key columns but corresponding \
    positions {2} in the parameter list of the prepared query is not compatible.
uparam-missing: Parameter {0} in SQL Query "{1}" is not given a value. The \
    parameters given is "{2}".
finder-cached: Cached finder for "{0}" SQL: "{1}"
finder-not-cachable: Finder for "{0}" is not cachable.
finder-add-pattern: Exclusion pattern "{0}" for finder query has invalidated \
    {1} existing entries "{2}"
optimistic-violation-lock: An optimistic lock violation was detected when \
    locking object instance.
sql-warning: The statement resulted in SQL warning: {0}
+bad-fetch-mode: Invalid fetch mode. Valid values are \
+    "none"(0), "join"(1) or "parallel"(2). Specified value: {0}.
+bad-resultset-type: Invalid result set type. Valid values are \
+    "forward-only"(1003), "scroll-insensitive"(1004) or \
+    "scroll-sensitive"(1005). Specified value: {0}.
+bad-fetch-direction: Invalid fetch direction. Valid values are \
+    "forward"(1000), "reverse"(1001) or "unknown"(1002). Specified value: {0}.
+bad-lrs-size: Invalid LRS size. Valid values are \
+    "unknown"(0), "last"(1) or "query"(2). Specified value: {0}.
+bad-join-syntax: Invalid join syntax. Valid values are \
+    "sql92"(0), "tradition"(1) or "database"(2). Specified value: {0}.

View File

@@ -0,0 +1,124 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.openjpa.kernel;

import java.io.Serializable;
import java.lang.reflect.Method;

import org.apache.openjpa.conf.OpenJPAConfiguration;
import org.apache.openjpa.enhance.Reflection;
import org.apache.openjpa.lib.util.Localizer;

/**
 * Default hint handler abstract base class.
 *
 * @since 2.0.0
 * @nojavadoc
 */
public abstract class AbstractHintHandler implements Serializable {

    private static final Localizer _loc = Localizer
        .forPackage(AbstractHintHandler.class);

    protected static final String DOT = ".";
    protected static final String BLANK = "";
    protected static final String PREFIX_OPENJPA = "openjpa.";
    protected static final String PREFIX_JDBC = PREFIX_OPENJPA + "jdbc.";

    protected FetchConfigurationImpl _fConfig;

    /**
     * Constructor; supply delegate.
     */
    public AbstractHintHandler(FetchConfigurationImpl fConfig) {
        _fConfig = fConfig;
    }

    protected abstract boolean setHintInternal(String hintName, Object value,
        boolean validateThrowException);

    public boolean setHint(String hintName, Object value,
        boolean validateThrowException) {
        String key = hintToKey(hintName);
        boolean valueSet = !hintName.equals(key);
        if (hasPrecedent(hintName)) {
            try {
                valueSet |= setHintInternal(key, value, validateThrowException);
            } catch (RuntimeException rte) {
                if (validateThrowException) {
                    if (rte instanceof IllegalArgumentException)
                        throw rte;
                    else if (rte instanceof ClassCastException)
                        throw new IllegalArgumentException(_loc.get(
                            "bad-hint-value", key, value, rte.getMessage())
                            .getMessage());
                    else
                        handleException(rte);
                } else
                    _fConfig.getContext().getConfiguration().getLog(
                        OpenJPAConfiguration.LOG_RUNTIME).warn(
                        _loc.get("bad-hint-value", key, value,
                            rte.getMessage()));
            }
        } else
            valueSet = true;
        return valueSet;
    }

    protected String hintToKey(String key) {
        return key;
    }

    protected boolean hasPrecedent(String key) {
        return true;
    }

    protected void handleException(RuntimeException e) {
        throw e;
    }

    protected final boolean hintToSetter(Object target, String k,
        Object value) {
        if (target == null || k == null)
            return false;
        // remove key prefix as the source of property name
        k = getSuffixOf(k);
        Method setter = Reflection.findSetter(target.getClass(), k, true);
        Class paramType = setter.getParameterTypes()[0];
        if (Enum.class.isAssignableFrom(paramType) && value instanceof String) {
            // to accomodate alias name input in relationship with enum values
            String strValue = ((String) value).toUpperCase().replace('-', '_');
            value = Enum.valueOf(paramType, strValue);
        }
        Filters.hintToSetter(target, k, value);
        return true;
    }

    protected static String getPrefixOf(String key) {
        int firstDot = key == null ? -1 : key.indexOf(DOT);
        return (firstDot != -1) ? key.substring(0, firstDot) : key;
    }

    protected static String getSuffixOf(String key) {
        int lastDot = key == null ? -1 : key.lastIndexOf(DOT);
        return (lastDot != -1) ? key.substring(lastDot + 1) : key;
    }
}
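For reference, a minimal hypothetical subclass (not part of this commit) illustrating the template-method contract above: setHintInternal reports whether the hint was consumed, and hintToSetter pushes the value onto the fetch configuration's matching setter by reflection.

package org.apache.openjpa.kernel;

// Hypothetical example: a trivial handler that forwards every hint it sees
// to the fetch configuration.
public class PassThroughHintHandler extends AbstractHintHandler {

    public PassThroughHintHandler(FetchConfigurationImpl fConfig) {
        super(fConfig);
    }

    protected boolean setHintInternal(String hintName, Object value,
        boolean validateThrowException) {
        // getSuffixOf() inside hintToSetter() strips the "openjpa." prefix,
        // so a key like "openjpa.FetchBatchSize" resolves to setFetchBatchSize.
        return hintToSetter(_fConfig, hintName, value);
    }
}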

View File

@@ -475,6 +475,14 @@ public class DelegatingFetchConfiguration
        }
    }
+    public void addHint(String name, Object value) {
+        try {
+            _fetch.addHint(name, value);
+        } catch (RuntimeException re) {
+            throw translate(re);
+        }
+    }
    public Map<String, Object> getHints() {
        try {
            return _fetch.getHints();

View File

@@ -337,6 +337,15 @@ public interface FetchConfiguration
     */
    public Object getHint (String name);
+    /**
+     * Adds the hint and the associated value to the list.
+     *
+     * @param name the name of the hint
+     * @param value the value of the hint
+     * @since 2.0.0
+     */
+    public void addHint(String name, Object value);
    /**
     * Returns an immutable view of the currently active hints and their values.
     *

View File

@@ -0,0 +1,68 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.openjpa.kernel;

import java.util.HashMap;
import java.util.Map;

/**
 * Fetch configuration hint handler. Handles openjpa.* and openjpa.jdbc.* hints.
 *
 * @since 2.0.0
 * @nojavadoc
 */
public class FetchConfigurationHintHandler extends AbstractHintHandler {

    protected static final Map<String, String> hintsMap =
        new HashMap<String, String>();

    static {
        // Initialize hint to property name mapping.
        hintsMap.put(PREFIX_JDBC + "TransactionIsolation", "Isolation");
    }

    /**
     * Constructor; supply delegate.
     */
    public FetchConfigurationHintHandler(FetchConfigurationImpl fConfig) {
        super(fConfig);
    }

    public boolean setHintInternal(String hintName, Object value,
        boolean validateThrowException) {
        boolean valueSet = false;
        String longPrefix = hintName
            .substring(0, hintName.lastIndexOf(DOT) + 1);
        if ((longPrefix.equals(PREFIX_JDBC) || longPrefix
            .equals(PREFIX_OPENJPA))) {
            valueSet = hintToSetter(_fConfig, hintToPropName(hintName), value);
        } else {
            valueSet = true;
        }
        return valueSet;
    }

    private String hintToPropName(String hintName) {
        String propName = hintsMap.get(hintName);
        if (propName == null) {
            propName = hintName;
        }
        return propName;
    }
}

View File

@@ -98,6 +98,7 @@ public class FetchConfigurationImpl
    private boolean _load = true;
    private int _availableRecursion;
    private int _availableDepth;
+    private FetchConfigurationHintHandler _hintHandler;
    public FetchConfigurationImpl() {
        this(null);
@@ -106,6 +107,7 @@ public class FetchConfigurationImpl
    protected FetchConfigurationImpl(ConfigurationState state) {
        _state = (state == null) ? new ConfigurationState() : state;
        _availableDepth = _state.maxFetchDepth;
+        _hintHandler = new FetchConfigurationHintHandler(this);
    }
    public StoreContext getContext() {
@@ -239,6 +241,13 @@ public class FetchConfigurationImpl
    }
    public FetchConfiguration setFlushBeforeQueries(int flush) {
+        if (flush != DEFAULT
+            && flush != QueryFlushModes.FLUSH_TRUE
+            && flush != QueryFlushModes.FLUSH_FALSE
+            && flush != QueryFlushModes.FLUSH_WITH_CONNECTION)
+            throw new IllegalArgumentException(_loc.get(
+                "bad-flush-before-queries", new Integer(flush)).getMessage());
        if (flush == DEFAULT && _state.ctx != null)
            _state.flushQuery = _state.ctx.getConfiguration().
                getFlushBeforeQueriesConstant();
@@ -455,13 +464,15 @@ public class FetchConfigurationImpl
    public int getReadLockLevel() {
-        String hintKey = "openjpa.FetchPlan.ReadLockLevel";
-        if (getHint(hintKey) != null) {
+        String lockModeKey = "openjpa.FetchPlan.ReadLockMode";
+        String deferLockModeKey = lockModeKey + ".Defer";
+        Integer value = (Integer)getHint(deferLockModeKey);
+        if (value != null) {
            if (isActiveTransaction()) {
-                setReadLockLevel((Integer)removeHint(hintKey));
-            } else {
-                return (Integer)getHint(hintKey);
-            }
+                removeHint(deferLockModeKey);
+                setReadLockLevel(value);
+            } else
+                return value;
        }
        return _state.readLockLevel;
    }
@@ -470,6 +481,16 @@ public class FetchConfigurationImpl
        if (_state.ctx == null)
            return this;
+        if (level != DEFAULT
+            && level != MixedLockLevels.LOCK_NONE
+            && level != MixedLockLevels.LOCK_OPTIMISTIC
+            && level != MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_READ
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_WRITE
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT)
+            throw new IllegalArgumentException(_loc.get(
+                "bad-lock-level", new Integer(level)).getMessage());
        lock();
        try {
            assertActiveTransaction();
@@ -485,13 +506,15 @@ public class FetchConfigurationImpl
    }
    public int getWriteLockLevel() {
-        String hintKey = "openjpa.FetchPlan.WriteLockLevel";
-        if (getHint(hintKey) != null) {
+        String lockModeKey = "openjpa.FetchPlan.WriteLockMode";
+        String deferLockModeKey = lockModeKey + ".Defer";
+        Integer value = (Integer)getHint(deferLockModeKey);
+        if (value != null) {
            if (isActiveTransaction()) {
-                setReadLockLevel((Integer)removeHint(hintKey));
-            } else {
-                return (Integer)getHint(hintKey);
-            }
+                removeHint(deferLockModeKey);
+                setWriteLockLevel(value);
+            } else
+                return value;
        }
        return _state.writeLockLevel;
    }
@@ -500,6 +523,16 @@ public class FetchConfigurationImpl
        if (_state.ctx == null)
            return this;
+        if (level != DEFAULT
+            && level != MixedLockLevels.LOCK_NONE
+            && level != MixedLockLevels.LOCK_OPTIMISTIC
+            && level != MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_READ
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_WRITE
+            && level != MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT)
+            throw new IllegalArgumentException(_loc.get(
+                "bad-lock-level", new Integer(level)).getMessage());
        lock();
        try {
            assertActiveTransaction();
@@ -537,6 +570,16 @@ public class FetchConfigurationImpl
    }
    public void setHint(String name, Object value) {
+        setHint(name, value, false);
+    }
+    public void setHint(String name, Object value,
+        boolean validThrowException) {
+        if(_hintHandler.setHint(name, value, validThrowException))
+            addHint(name, value);
+    }
+    public void addHint(String name, Object value) {
        lock();
        try {
            if (_state.hints == null)
@@ -550,7 +593,7 @@ public class FetchConfigurationImpl
    public Object getHint(String name) {
        return (_state.hints == null) ? null : _state.hints.get(name);
    }
    public Object removeHint(String name) {
        return (_state.hints == null) ? null : _state.hints.remove(name);
    }

View File

@ -1,411 +1,419 @@
# Licensed to the Apache Software Foundation (ASF) under one # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file # or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information # distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file # regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the # to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance # "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at # with the License. You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0 # http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, # Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an # software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
cant-convert-result: There is no conversion between query projection type \ cant-convert-result: There is no conversion between query projection type \
"{0}" and result type "{1}". "{0}" and result type "{1}".
unloaded-detached: Attempt to access an unloaded field of detached instance \ unloaded-detached: Attempt to access an unloaded field of detached instance \
"{0}". "{0}".
meta-unknownid: Cannot manipulate identity of type "{0}": it''s identity type \ meta-unknownid: Cannot manipulate identity of type "{0}": it''s identity type \
is unknown. is unknown.
new-abstract: Cannot create an instance of "{0}": abstract classes are not \ new-abstract: Cannot create an instance of "{0}": abstract classes are not \
yet supported. yet supported.
bad-new-query: Attempt to construct a query from an extent or class. You must \ bad-new-query: Attempt to construct a query from an extent or class. You must \
pass a (possibly null) query string or template to the query factory \ pass a (possibly null) query string or template to the query factory \
method when creating the query. method when creating the query.
update-restrict: Detected attempt to modify field "{0}" with value strategy \ update-restrict: Detected attempt to modify field "{0}" with value strategy \
"restrict". "restrict".
reentrant-flush: Detected reentrant flush. Make sure your flush-time instance \ reentrant-flush: Detected reentrant flush. Make sure your flush-time instance \
callback methods or event listeners do not invoke any operations that \ callback methods or event listeners do not invoke any operations that \
require the in-progress flush to complete. require the in-progress flush to complete.
rolled-back: The transaction has been rolled back. See the nested exceptions \ rolled-back: The transaction has been rolled back. See the nested exceptions \
for details on the errors that occurred. for details on the errors that occurred.
bad-lock-level: This lock manager does not recognize lock level "{0}". bad-lock-level: This lock manager does not recognize lock level "{0}".
pessimistic-mutate: You are attempting to directly mutate a persistent second \ pessimistic-mutate: You are attempting to directly mutate a persistent second \
class object (such as a collection or map field) that you obtained before \ class object (such as a collection or map field) that you obtained before \
the transaction began. After beginning a datastore transaction, you \ the transaction began. After beginning a datastore transaction, you \
must re-obtain any references to mutable second class objects from the \ must re-obtain any references to mutable second class objects from the \
owning persistent object. Detected attempt to mutate value of field "{0}" \ owning persistent object. Detected attempt to mutate value of field "{0}" \
in instance "{1}". This instance may not be locked correctly. in instance "{1}". This instance may not be locked correctly.
not-derefed: Encountered unknown dependent instance "{0}". This error is \ not-derefed: Encountered unknown dependent instance "{0}". This error is \
often caused by either removing a dependent instance from one dependent \ often caused by either removing a dependent instance from one dependent \
field, but not other dependent fields that reference it, or by removing a \ field, but not other dependent fields that reference it, or by removing a \
dependent instance from its owning field, flushing \ dependent instance from its owning field, flushing \
(causing the unreferenced dependent instance to be deleted), and then \ (causing the unreferenced dependent instance to be deleted), and then \
trying to assign the deleted instance to another field before commit. \ trying to assign the deleted instance to another field before commit. \
The instance cannot be un-deleted, resulting in an error. Make sure not \ The instance cannot be un-deleted, resulting in an error. Make sure not \
to leave dangling dependent references to objects, and to reassign \ to leave dangling dependent references to objects, and to reassign \
dependent objects before flush so that OpenJPA does not think they are \ dependent objects before flush so that OpenJPA does not think they are \
unreferenced and therefore safe to delete. unreferenced and therefore safe to delete.
init-null-pc: Attempt to initialize a state manager with a null \ init-null-pc: Attempt to initialize a state manager with a null \
persistence-capable instance for type "{0}". This is often caused by \ persistence-capable instance for type "{0}". This is often caused by \
attempting to load an instance of an abstract class, or \ attempting to load an instance of an abstract class, or \
neglecting to use a class indicator when the base persistence-capable \ neglecting to use a class indicator when the base persistence-capable \
class in an inheritance tree is abstract. class in an inheritance tree is abstract.
init-sm-pc: Attempt to initialize a state manager with an instance that is \ init-sm-pc: Attempt to initialize a state manager with an instance that is \
already managed ("{0}"). You might be trying to persist this instance \ already managed ("{0}"). You might be trying to persist this instance \
in two threads at the same time. in two threads at the same time.
bad-ds-oid: The type "{0}" declares datastore identity but the value \ bad-ds-oid: The type "{0}" declares datastore identity but the value \
passed to lookup of type "{1}" is not a OpenJPA id instance. passed to lookup of type "{1}" is not a OpenJPA id instance.
null-oids: Some of the object ids passed to getObjectsById were null. null-oids: Some of the object ids passed to getObjectsById were null.
marked-rollback: The transaction cannot be committed, because it was already \ marked-rollback: The transaction cannot be committed, because it was already \
marked for rollback only. The transaction will be rolled back instead. \ marked for rollback only. The transaction will be rolled back instead. \
The cause of the rollback-only status is reported in the embedded stack. The cause of the rollback-only status is reported in the embedded stack.
refresh-flushed: You cannot refresh an instance that has been flushed to the \ refresh-flushed: You cannot refresh an instance that has been flushed to the \
data store. data store.
pc-loader-different: Attempt to cast instance "{0}" to PersistenceCapable failed. \ pc-loader-different: Attempt to cast instance "{0}" to PersistenceCapable failed. \
The object implemented org.apache.openjpa.enhance.PersistenceCapable, \ The object implemented org.apache.openjpa.enhance.PersistenceCapable, \
but the instance of that interface was loaded by two different ClassLoaders: \ but the instance of that interface was loaded by two different ClassLoaders: \
"{1}" and "{2}". "{1}" and "{2}".
pc-cast: Attempt to cast instance "{0}" to PersistenceCapable failed. Ensure \ pc-cast: Attempt to cast instance "{0}" to PersistenceCapable failed. Ensure \
that it has been enhanced. that it has been enhanced.
del-instance: The instance of type "{0}" with oid "{1}" no longer exists in \ del-instance: The instance of type "{0}" with oid "{1}" no longer exists in \
the data store. This may mean that you deleted the instance in a separate \ the data store. This may mean that you deleted the instance in a separate \
transaction, but this context still has a cached version. transaction, but this context still has a cached version.
no-broker-class: The specified type "{0}" could not be loaded. Please ensure \ no-broker-class: The specified type "{0}" could not be loaded. Please ensure \
that the class exists in the project class path. that the class exists in the project class path.
bad-embed: Attempt to set an embedded value for unembeddable field "{0}". \ bad-embed: Attempt to set an embedded value for unembeddable field "{0}". \
Please report this error to OpenJPA support. Please report this error to OpenJPA support.
embed-ref: You are attempting to access an embedded object reference \ embed-ref: You are attempting to access an embedded object reference \
that was obtained before the last transaction status change. After \ that was obtained before the last transaction status change. After \
transactions begin and end, all references to embedded objects become \ transactions begin and end, all references to embedded objects become \
invalid; you have to re-acquire the reference from the owning persistent \ invalid; you have to re-acquire the reference from the owning persistent \
object. object.
deleted: Operation attempted on a deleted instance. deleted: Operation attempted on a deleted instance.
dirty: Illegal operation attempted on a dirty instance: dirty objects cannot \ dirty: Illegal operation attempted on a dirty instance: dirty objects cannot \
be evicted or made transient or non-transactional. be evicted or made transient or non-transactional.
nested-exceps: This operation failed for some instances. See the nested \ nested-exceps: This operation failed for some instances. See the nested \
exceptions array for details. exceptions array for details.
new: Illegal operation attempted on a newly persisted instance: new objects \ new: Illegal operation attempted on a newly persisted instance: new objects \
cannot be evicted or made transient or non-transactional. cannot be evicted or made transient or non-transactional.
transient: Illegal operation attempted on a transient instance. transient: Illegal operation attempted on a transient instance.
not-active: Can only perform operation while a transaction is active. not-active: Can only perform operation while a transaction is active.
trans-active: The "{0}" transaction property cannot be set during an \ trans-active: The "{0}" transaction property cannot be set during an \
active transaction. active transaction.
active: This operation cannot be performed while a Transaction is active. active: This operation cannot be performed while a Transaction is active.
closed: The context has been closed. The stack trace at which the \ closed: The context has been closed. The stack trace at which the \
context was closed is held in the embedded exception. context was closed is held in the embedded exception.
closed-notrace: The context has been closed. The stack trace at which the \ closed-notrace: The context has been closed. The stack trace at which the \
context was closed is available if Runtime=TRACE logging is enabled. context was closed is available if Runtime=TRACE logging is enabled.
closed-factory: The factory has been closed. The stack trace at \ closed-factory: The factory has been closed. The stack trace at \
which the factory was closed is held in the embedded exception. which the factory was closed is held in the embedded exception.
closed-factory-notrace: The factory has been closed. The stack trace at \ closed-factory-notrace: The factory has been closed. The stack trace at \
which the factory was closed is available if Runtime=TRACE logging is \ which the factory was closed is available if Runtime=TRACE logging is \
enabled. enabled.
non-trans-read: To perform reads on persistent data outside of a transaction, \ non-trans-read: To perform reads on persistent data outside of a transaction, \
the "NontransactionalRead" property must be set on the Transaction. the "NontransactionalRead" property must be set on the Transaction.
non-trans-write: To perform writes on persistent data outside of a \ non-trans-write: To perform writes on persistent data outside of a \
transaction, the "NontransactionalWrite" property must be set to true. transaction, the "NontransactionalWrite" property must be set to true.
write-operation: To perform this operation, it must be written within a transaction, \ write-operation: To perform this operation, it must be written within a transaction, \
or your settings must allow nontransactional writes and must not detach \ or your settings must allow nontransactional writes and must not detach \
all nontransactional reads. all nontransactional reads.
not-managed: The given instance "{0}" is not managed by this context. not-managed: The given instance "{0}" is not managed by this context.
trans-not-managed: This broker is not configured to use managed transactions. trans-not-managed: This broker is not configured to use managed transactions.
bad-detached-op: You cannot perform operation {0} on detached object "{1}". \ bad-detached-op: You cannot perform operation {0} on detached object "{1}". \
This operation only applies to managed objects. This operation only applies to managed objects.
persist-detached: Attempt to persist detached object "{0}". If this is a new \ persist-detached: Attempt to persist detached object "{0}". If this is a new \
instance, make sure any version and/or auto-generated primary key fields are \ instance, make sure any version and/or auto-generated primary key fields are \
null/default when persisting. null/default when persisting.
null-value: The field "{0}" of instance "{1}" contained a null value; \ null-value: The field "{0}" of instance "{1}" contained a null value; \
the metadata for this field specifies that nulls are illegal. the metadata for this field specifies that nulls are illegal.
change-identity: Attempt to change a primary key field of an instance that \ change-identity: Attempt to change a primary key field of an instance that \
already has a final object id. Only new, unflushed instances whose id you \ already has a final object id. Only new, unflushed instances whose id you \
have not retrieved can have their primary keys changed. have not retrieved can have their primary keys changed.
managed-oid: You cannot assign managed object "{0}" to the primary key field \ managed-oid: You cannot assign managed object "{0}" to the primary key field \
of "{1}". Embedded primary key values must be transient objects. of "{1}". Embedded primary key values must be transient objects.
changed-oid: You have modified the object id field of "{2}". Its old value \ changed-oid: You have modified the object id field of "{2}". Its old value \
was "{0}", and its new value is "{1}". Object id fields are immutable \ was "{0}", and its new value is "{1}". Object id fields are immutable \
once the object id of the instance has been assigned. once the object id of the instance has been assigned.
cache-exists: An object of type "{0}" with oid "{1}" already exists in this \ cache-exists: An object of type "{0}" with oid "{1}" already exists in this \
context; another cannot be persisted. context; another cannot be persisted.
null-trans: Attempt to commit a null javax.transaction.Transaction. Some \ null-trans: Attempt to commit a null javax.transaction.Transaction. Some \
application servers set the transaction to null if a rollback occurs. application servers set the transaction to null if a rollback occurs.
end-trans-error: An exception occurred while ending the transaction. This \ end-trans-error: An exception occurred while ending the transaction. This \
exception will be re-thrown. exception will be re-thrown.
not-bound: The file named "{0}" could not be found. not-bound: The file named "{0}" could not be found.
naming-exception: A NamingException was thrown while obtaining the \ naming-exception: A NamingException was thrown while obtaining the \
factory at "{0}" from JNDI. factory at "{0}" from JNDI.
attach-deleted: The object "{0}" with id "{1}" has been deleted and \ attach-deleted: The object "{0}" with id "{1}" has been deleted and \
cannot be attached. cannot be attached.
not-detachable: The class "{0}" does not declare the "detachable" metadata \ not-detachable: The class "{0}" does not declare the "detachable" metadata \
extension, so cannot be detached. extension, so cannot be detached.
not-copyable: Attempt to copy field "{0}" failed. The field is \ not-copyable: Attempt to copy field "{0}" failed. The field is \
not copyable. This can occur with custom SCO types. Only standard or \ not copyable. This can occur with custom SCO types. Only standard or \
immutable SCO types can be attached and detached. immutable SCO types can be attached and detached.
no-detach-object-id: Cannot access the detached object id of class "{0}". \ no-detach-object-id: Cannot access the detached object id of class "{0}". \
Ensure that the class has the "detachable" metadata extension, and \ Ensure that the class has the "detachable" metadata extension, and \
the the class has been re-enhanced. the the class has been re-enhanced.
attach-version-del: Attempted to attach deleted instance type "{0}" with oid \ attach-version-del: Attempted to attach deleted instance type "{0}" with oid \
"{1}". If the instance is new, the version field should be left to its \ "{1}". If the instance is new, the version field should be left to its \
default value. default value.
attach-wrongclass: Attempted to attach instance "{0}" of type "{1}", but this \ attach-wrongclass: Attempted to attach instance "{0}" of type "{1}", but this \
instance is already in the datastore as type "{2}". instance is already in the datastore as type "{2}".
sjvm-acquired-lock: Acquired an exclusive lock "{0}" on oid "{1}". sjvm-acquired-lock: Acquired an exclusive lock "{0}" on oid "{1}".
sjvm-released-lock: Released lock "{0}". sjvm-released-lock: Released lock "{0}".
conn-not-supported: This data store cannot return connections. conn-not-supported: This data store cannot return connections.
incremental-flush-not-supported: This data store does not support incremental \ incremental-flush-not-supported: This data store does not support incremental \
flushing. If you got this error while performing a query, make sure that \ flushing. If you got this error while performing a query, make sure that \
you have not set the FlushBeforeQueries option to true, either by \ you have not set the FlushBeforeQueries option to true, either by \
setting that option in your configuration file, or by programmatically \ setting that option in your configuration file, or by programmatically \
setting the current FetchConfiguration''s FlushBeforeQueries property \ setting the current FetchConfiguration''s FlushBeforeQueries property \
to QueryFlushModes.FLUSH_TRUE. to QueryFlushModes.FLUSH_TRUE.
nontrans-read-not-supported: This data store does not support nontransactional \ nontrans-read-not-supported: This data store does not support nontransactional \
reads. Set the NontransactionalRead property to false. reads. Set the NontransactionalRead property to false.
optimistic-not-supported: This datastore does not support optimistic \ optimistic-not-supported: This datastore does not support optimistic \
transactions. Set the Optimistic property to false. transactions. Set the Optimistic property to false.
restore-unset: The RestoreValues option is off, so initial values are \ restore-unset: The RestoreValues option is off, so initial values are \
not available. Turn on RestoreValues to be able to obtain initial \ not available. Turn on RestoreValues to be able to obtain initial \
values. values.
mutable-restore-unset: The RestoreValues option is not set to "all", \ mutable-restore-unset: The RestoreValues option is not set to "all", \
initial values of mutable fields are not available. Set RestoreValues to \ initial values of mutable fields are not available. Set RestoreValues to \
"all" in your properties to be able to obtain initial mutable values. "all" in your properties to be able to obtain initial mutable values.
initial-unloaded: OpenJPA cannot provide the initial value for field "{0}" \ initial-unloaded: OpenJPA cannot provide the initial value for field "{0}" \
because the field was never loaded before being changed. because the field was never loaded before being changed.
dup-oid-assign: Attempt to assign id "{0}" to new instance "{1}" failed; \ dup-oid-assign: Attempt to assign id "{0}" to new instance "{1}" failed; \
there is already an object in the L1 cache with this id. \ there is already an object in the L1 cache with this id. \
You must delete this object (in a previous transaction or the current one) \ You must delete this object (in a previous transaction or the current one) \
before reusing its id. This error can also occur when a horizontally \ before reusing its id. This error can also occur when a horizontally \
or vertically mapped class uses auto-increment application identity and \ or vertically mapped class uses auto-increment application identity and \
does not use a hierarchy of application identity classes. does not use a hierarchy of application identity classes.
dup-load: Cannot load object with id "{0}". Instance "{1}" with the same id \ dup-load: Cannot load object with id "{0}". Instance "{1}" with the same id \
already exists in the L1 cache. This can occur when you \ already exists in the L1 cache. This can occur when you \
assign an existing id to a new instance, and before flushing attempt to \ assign an existing id to a new instance, and before flushing attempt to \
load the existing instance for that id. load the existing instance for that id.
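For illustration only, a sketch of the sequence the dup-oid-assign/dup-load messages describe; the Employee entity and the id value 42 are hypothetical:
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Id;

public class DuplicateIdScenario {

    @Entity
    static class Employee {      // illustrative entity with application identity
        @Id
        long id;
    }

    static void demonstrate(EntityManager em) {
        Employee fresh = new Employee();
        fresh.id = 42L;          // assume id 42 already exists in the datastore
        em.persist(fresh);       // the new instance now occupies id 42 in the L1 cache
        // Per the dup-load message, loading the existing instance for the
        // same id before flushing is reported as an error.
        em.find(Employee.class, 42L);
    }
}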
bad-id-value: The given value "{0}" cannot be converted into an identity \ bad-id-value: The given value "{0}" cannot be converted into an identity \
for "{2}". The value is the wrong type ({1}). for "{2}". The value is the wrong type ({1}).
factory-init: Starting OpenJPA {0} factory-init: Starting OpenJPA {0}
factory-properties: Properties: {0} factory-properties: Properties: {0}
inverse-consistency: An inverse inconsistency in the object model was \ inverse-consistency: An inverse inconsistency in the object model was \
detected while flushing the field "{0}" of the instance with id "{1}" \ detected while flushing the field "{0}" of the instance with id "{1}" \
in context "{2}". in context "{2}".
no-brokerfactory: You did not name the factory class with the required \ no-brokerfactory: You did not name the factory class with the required \
property openjpa.BrokerFactory. Normally this property defaults \ property openjpa.BrokerFactory. Normally this property defaults \
appropriately; have you forgotten to include all the OpenJPA jars in your \ appropriately; have you forgotten to include all the OpenJPA jars in your \
classpath? classpath?
brokerfactory-excep: There was an error when invoking the static \ brokerfactory-excep: There was an error when invoking the static \
getInstance method on the named factory class "{0}". See the \ getInstance method on the named factory class "{0}". See the \
nested exception for details. nested exception for details.
new-brokerfactory-excep: There was an error when invoking the static \ new-brokerfactory-excep: There was an error when invoking the static \
newInstance method on the named factory class "{0}". See the \ newInstance method on the named factory class "{0}". See the \
nested exception for details. nested exception for details.
bad-brokerfactory: Could not invoke the static getInstance method on the \ bad-brokerfactory: Could not invoke the static getInstance method on the \
named factory class "{0}". named factory class "{0}".
bad-new-brokerfactory: Could not invoke the static newInstance method on the \ bad-new-brokerfactory: Could not invoke the static newInstance method on the \
named factory class "{0}". named factory class "{0}".
bad-brokerfactory-class: The named BrokerFactory "{0}" is not valid. bad-brokerfactory-class: The named BrokerFactory "{0}" is not valid.
instantiate-abstract: Cannot instantiate abstract class of type "{0}" with \ instantiate-abstract: Cannot instantiate abstract class of type "{0}" with \
object id "{1}"; this may indicate that the inheritance discriminator \ object id "{1}"; this may indicate that the inheritance discriminator \
for the class is not configured correctly. for the class is not configured correctly.
nontrans-proxied: You cannot make a property access object created with "new" \ nontrans-proxied: You cannot make a property access object created with "new" \
nontransactional. nontransactional.
no-field: Field "{0}" is not declared in "{1}", or is not managed. no-field: Field "{0}" is not declared in "{1}", or is not managed.
no-field-index: "{0}" is not the index of any managed field in "{1}". no-field-index: "{0}" is not the index of any managed field in "{1}".
cant-cascade-persist: Encountered unmanaged object in persistent field \ cant-cascade-persist: Encountered unmanaged object in persistent field \
"{0}" during flush. However, this field does not \ "{0}" during flush. However, this field does not \
allow cascade persist. Set the cascade attribute for this field to \ allow cascade persist. Set the cascade attribute for this field to \
CascadeType.PERSIST or CascadeType.ALL (JPA annotations) or \ CascadeType.PERSIST or CascadeType.ALL (JPA annotations) or \
"persist" or "all" (JPA orm.xml), or enable cascade-persist globally, \ "persist" or "all" (JPA orm.xml), or enable cascade-persist globally, \
or manually persist the related field value prior to flushing. \ or manually persist the related field value prior to flushing. \
You cannot flush unmanaged objects or graphs that have persistent \ You cannot flush unmanaged objects or graphs that have persistent \
associations to unmanaged objects. associations to unmanaged objects.
cant-cascade-attach: Encountered new object in persistent field \ cant-cascade-attach: Encountered new object in persistent field \
"{0}" during attach. However, this field does not \ "{0}" during attach. However, this field does not \
allow cascade attach. Set the cascade attribute for this field to \ allow cascade attach. Set the cascade attribute for this field to \
CascadeType.MERGE or CascadeType.ALL (JPA annotations) or \ CascadeType.MERGE or CascadeType.ALL (JPA annotations) or \
"merge" or "all" (JPA orm.xml). \ "merge" or "all" (JPA orm.xml). \
You cannot attach a reference to a new object without cascading. You cannot attach a reference to a new object without cascading.
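The cascade settings that the two messages above ask for would look roughly like this with JPA annotations (PurchaseOrder and Address are hypothetical entities):
import javax.persistence.CascadeType;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.OneToOne;

@Entity
public class PurchaseOrder {
    @Id @GeneratedValue
    private long id;

    // Cascading persist and merge lets a flush or attach of PurchaseOrder
    // reach the referenced Address instead of failing with the errors above.
    @OneToOne(cascade = { CascadeType.PERSIST, CascadeType.MERGE })
    private Address address;
}

@Entity
class Address {
    @Id @GeneratedValue
    private long id;
}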
ref-to-deleted: Encountered deleted object "{0}" in persistent field \ ref-to-deleted: Encountered deleted object "{0}" in persistent field \
"{1}" of managed object "{2}" during flush. "{1}" of managed object "{2}" during flush.
no-version-field: Encountered object "{0}" without a version field during \ no-version-field: Encountered object "{0}" without a version field during \
attach. In order to attach an object, it must either be enhanced or must \ attach. In order to attach an object, it must either be enhanced or must \
have a version field. have a version field.
inmem-agg-proj-var: Queries with aggregates or projections using variables \ inmem-agg-proj-var: Queries with aggregates or projections using variables \
currently cannot be executed in-memory. Either set IgnoreCache to true, \ currently cannot be executed in-memory. Either set IgnoreCache to true, \
set the openjpa.FlushBeforeQueries property to true, or execute the query \ set the openjpa.FlushBeforeQueries property to true, or execute the query \
before changing any instances in the transaction. The offending query was \ before changing any instances in the transaction. The offending query was \
on type "{0}" with filter "{1}". on type "{0}" with filter "{1}".
merged-order-with-result: This query on candidate type "{0}" with filter "{1}" \ merged-order-with-result: This query on candidate type "{0}" with filter "{1}" \
involves combining the results of multiple queries in memory. \ involves combining the results of multiple queries in memory. \
You have chosen to order the results on "{2}", but you have not selected \ You have chosen to order the results on "{2}", but you have not selected \
this data in your setResult() clause. Please include this ordering data \ this data in your setResult() clause. Please include this ordering data \
in setResult() so that OpenJPA can extract it for in-memory ordering. in setResult() so that OpenJPA can extract it for in-memory ordering.
bad-grouping: Your query on type "{0}" with filter "{1}" is invalid. Your \ bad-grouping: Your query on type "{0}" with filter "{1}" is invalid. Your \
select and having clauses must only include aggregates or values that also \ select and having clauses must only include aggregates or values that also \
appear in your grouping clause. appear in your grouping clause.
query-nosupport: The "{0}" query type does not support this operation. query-nosupport: The "{0}" query type does not support this operation.
query-unmapped: You cannot query unmapped type "{0}". query-unmapped: You cannot query unmapped type "{0}".
range-too-big: The range of the query is too big. Start index: "{0}", end \ range-too-big: The range of the query is too big. Start index: "{0}", end \
index: "{1}". The range must be less than Integer.MAX_VALUE. index: "{1}". The range must be less than Integer.MAX_VALUE.
invalid-range: The query range from {0} to {1} is not valid. invalid-range: The query range from {0} to {1} is not valid.
no-impls: Unable to execute a query on type "{0}". This class or interface \ no-impls: Unable to execute a query on type "{0}". This class or interface \
is not mapped, and does not have any mapped implementors. is not mapped, and does not have any mapped implementors.
bad-param-name: The parameter name or position "{0}" passed to \ bad-param-name: The parameter name or position "{0}" passed to \
execute() is not valid. Each map key must be a declared parameter \ execute() is not valid. Each map key must be a declared parameter \
name or a number matching the parameter position. Native queries only \ name or a number matching the parameter position. Native queries only \
allow the use of positional parameters. allow the use of positional parameters.
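A hedged sketch of the distinction this message draws; the Employee entity and parameter values are invented, while the LockEmployee table name is taken from the tests later in this change:
import javax.persistence.EntityManager;
import javax.persistence.Query;

public class ParameterBindingSketch {
    static void bind(EntityManager em) {
        // JPQL accepts named (or positional) parameters.
        Query byName = em.createQuery(
            "select e from Employee e where e.firstName = :first");
        byName.setParameter("first", "Albert");

        // Native queries accept positional parameters only.
        Query byId = em.createNativeQuery(
            "SELECT * FROM LockEmployee WHERE id = ?1");
        byId.setParameter(1, 1);
    }
}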
force-in-mem: This query on type "{0}" must load the entire candidate class \ force-in-mem: This query on type "{0}" must load the entire candidate class \
extent and evaluate the query in-memory. This may be very slow. The \ extent and evaluate the query in-memory. This may be very slow. The \
query must be executed in memory because OpenJPA is configured with \ query must be executed in memory because OpenJPA is configured with \
IgnoreCache=false and FlushBeforeQueries=false and \ IgnoreCache=false and FlushBeforeQueries=false and \
there are dirty instances that may affect the query''s outcome in the \ there are dirty instances that may affect the query''s outcome in the \
cache. cache.
cant-exec-inmem: Queries of this type ("{0}") cannot be executed in-memory. \ cant-exec-inmem: Queries of this type ("{0}") cannot be executed in-memory. \
Either set IgnoreCache to true, set the openjpa.FlushBeforeQueries \ Either set IgnoreCache to true, set the openjpa.FlushBeforeQueries \
property to true, or execute the query before changing any instances in \ property to true, or execute the query before changing any instances in \
the transaction. the transaction.
executing-query: Executing query: {0} executing-query: Executing query: {0}
executing-query-with-params: Executing query: [{0}] with parameters: {1} executing-query-with-params: Executing query: [{0}] with parameters: {1}
not-unique: The query on candidate type "{0}" with filter "{1}" was \ not-unique: The query on candidate type "{0}" with filter "{1}" was \
configured to have a unique result, but more than one instance matched \ configured to have a unique result, but more than one instance matched \
the query. the query.
no-result: The query on candidate type "{0}" with filter "{1}" was \ no-result: The query on candidate type "{0}" with filter "{1}" was \
configured to have a unique result, but no instance matched \ configured to have a unique result, but no instance matched \
the query. the query.
serialized: Queries that have been serialized do not support this operation. serialized: Queries that have been serialized do not support this operation.
read-only: Attempt to modify a read-only query object. read-only: Attempt to modify a read-only query object.
no-class: A candidate Class must be specified before executing a query. no-class: A candidate Class must be specified before executing a query.
no-modify-resultclass: A query that declares a result class cannot be used \ no-modify-resultclass: A query that declares a result class cannot be used \
to perform bulk updates. to perform bulk updates.
no-modify-unique: A query that declares unique results cannot be used \ no-modify-unique: A query that declares unique results cannot be used \
to perform bulk updates. to perform bulk updates.
no-modify-range: A query that declares a result range cannot be used \ no-modify-range: A query that declares a result range cannot be used \
to perform bulk updates. to perform bulk updates.
unbound-param: Cannot execute query; the declared parameter "{0}" was \ unbound-param: Cannot execute query; the declared parameter "{0}" was \
not given a value. not given a value.
unbound-params: Cannot execute query; declared parameters "{0}" were not given \ unbound-params: Cannot execute query; declared parameters "{0}" were not given \
values. You must supply a value for each of the following parameters, \ values. You must supply a value for each of the following parameters, \
in the given order: {1} in the given order: {1}
extra-params: More parameters were passed to execute() than were declared: \ extra-params: More parameters were passed to execute() than were declared: \
{1} parameters were specified for query execution, but only {0} \ {1} parameters were specified for query execution, but only {0} \
parameters were declared in the query. parameters were declared in the query.
null-primitive-param: Parameter "{0}" expects a value of primitive "{1}" \ null-primitive-param: Parameter "{0}" expects a value of primitive "{1}" \
but was given a null value. but was given a null value.
param-value-mismatch: Parameter "{0}" expects a value of "{1}" but was given \ param-value-mismatch: Parameter "{0}" expects a value of "{1}" but was given \
a value of "{2}" of "{3}". a value of "{2}" of "{3}".
merged-aggregate: This query on candidate type "{0}" with filter "{1}" \ merged-aggregate: This query on candidate type "{0}" with filter "{1}" \
involves combining the results of multiple sub-queries. However, because \ involves combining the results of multiple sub-queries. However, because \
this query is for aggregate data, OpenJPA cannot combine the sub-query \ this query is for aggregate data, OpenJPA cannot combine the sub-query \
aggregates into correct final values. aggregates into correct final values.
bad-dec: The {1} declaration "{0}" is \ bad-dec: The {1} declaration "{0}" is \
not valid. Variables and imports must be delimited with ";". Parameters \ not valid. Variables and imports must be delimited with ";". Parameters \
and orderings must be delimited with ",". Imports require the "import" \ and orderings must be delimited with ",". Imports require the "import" \
keyword, and orderings require the "ascending" or "descending" keyword. keyword, and orderings require the "ascending" or "descending" keyword.
mod-bigdecimal: You cannot use the modulo operator (%) on numbers of type \ mod-bigdecimal: You cannot use the modulo operator (%) on numbers of type \
BigDecimal. BigDecimal.
cant-convert: Cannot convert object "{0}" of type "{1}" into an instance of \ cant-convert: Cannot convert object "{0}" of type "{1}" into an instance of \
"{2}". "{2}".
bad-method-class: You set the method name of this openjpa.MethodQL query to \ bad-method-class: You set the method name of this openjpa.MethodQL query to \
"{1}", but class "{0}" is not a valid class name. Make sure to fully \ "{1}", but class "{0}" is not a valid class name. Make sure to fully \
qualify the class name or to import its package into this query if the \ qualify the class name or to import its package into this query if the \
class is not in the query candidate class'' package. class is not in the query candidate class'' package.
method-not-static: Method "{0}" named in the MethodQL query must be static. method-not-static: Method "{0}" named in the MethodQL query must be static.
method-return-type-invalid: Method "{0}" named in the MethodQL query must \ method-return-type-invalid: Method "{0}" named in the MethodQL query must \
have a return type that is assignable from ResultObjectProvider. Return \ have a return type that is assignable from ResultObjectProvider. Return \
type is: {1}. type is: {1}.
no-method: You must set the query filter to the name of the method to execute \ no-method: You must set the query filter to the name of the method to execute \
for this MethodQL query instance. for this MethodQL query instance.
method-error: There was an error invoking method "{0}" with arguments "{1}". method-error: There was an error invoking method "{0}" with arguments "{1}".
bad-param-type: The type "{0}" as used in the parameter declarations \ bad-param-type: The type "{0}" as used in the parameter declarations \
could not be found in the imports. could not be found in the imports.
cant-set: Result type "{0}" does not have any public fields or setter methods \ cant-set: Result type "{0}" does not have any public fields or setter methods \
for the projection or aggregate result element "{1}", nor does it have a \ for the projection or aggregate result element "{1}", nor does it have a \
generic put(Object,Object) method that can be used, nor does it have a \ generic put(Object,Object) method that can be used, nor does it have a \
public constructor that takes the types {2}. public constructor that takes the types {2}.
pack-err: There was an error packing the projection and/or aggregate results \ pack-err: There was an error packing the projection and/or aggregate results \
of the query into result type "{0}". See the nested Throwable exception \ of the query into result type "{0}". See the nested Throwable exception \
for details. for details.
pack-instantiation-err: There was an error creating an instance of type "{0}" \ pack-instantiation-err: There was an error creating an instance of type "{0}" \
when packing the projection and/or aggregate results of the query. Ensure \ when packing the projection and/or aggregate results of the query. Ensure \
that you have defined a public no-args constructor in "{0}". that you have defined a public no-args constructor in "{0}".
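To make the requirements of cant-set and pack-instantiation-err concrete, a hypothetical result class that satisfies them:
public class NameAndSalary {

    // Public no-args constructor, needed when OpenJPA instantiates the
    // result class while packing projection or aggregate results.
    public NameAndSalary() {
    }

    // Public fields (setter methods work equally well) matching the
    // projection elements by name.
    public String firstName;
    public double salary;

    // Alternatively, a public constructor taking the projected types
    // can be used directly.
    public NameAndSalary(String firstName, double salary) {
        this.firstName = firstName;
        this.salary = salary;
    }
}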
bad-inmem-method: Method "{0}(StoreContext, ClassMetaData, boolean, Object, \ bad-inmem-method: Method "{0}(StoreContext, ClassMetaData, boolean, Object, \
Map, FetchConfiguration)" is not declared in type "{1}". \ Map, FetchConfiguration)" is not declared in type "{1}". \
Check the method name supplied in your MethodQL query filter. \ Check the method name supplied in your MethodQL query filter. \
OpenJPA is attempting to execute this query in-memory; if you implemented \ OpenJPA is attempting to execute this query in-memory; if you implemented \
the datastore method instead (a method with the same signature but without \ the datastore method instead (a method with the same signature but without \
the Object argument) and want this query to execute in the datastore, \ the Object argument) and want this query to execute in the datastore, \
either create the query before modifying objects in the current transaction, \ either create the query before modifying objects in the current transaction, \
set IgnoreCache to true, or set the openjpa.FlushBeforeQueries property to \ set IgnoreCache to true, or set the openjpa.FlushBeforeQueries property to \
true. true.
bad-datastore-method: Method "{0}(StoreContext, ClassMetaData, boolean, Map, \ bad-datastore-method: Method "{0}(StoreContext, ClassMetaData, boolean, Map, \
FetchConfiguration)" is not declared in type "{1}". Check \ FetchConfiguration)" is not declared in type "{1}". Check \
the method name supplied in your MethodQL query filter. OpenJPA is \ the method name supplied in your MethodQL query filter. OpenJPA is \
attempting to execute this query against the datastore; if you implemented \ attempting to execute this query against the datastore; if you implemented \
the in-memory method instead (a method with the same signature but with an \ the in-memory method instead (a method with the same signature but with an \
Object argument) and want this query to execute in-memory, supply a \ Object argument) and want this query to execute in-memory, supply a \
Collection of candidates to filter. Collection of candidates to filter.
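The two signatures spelled out in bad-inmem-method and bad-datastore-method, sketched for a hypothetical method name findByStatus (bodies omitted; a real implementation would return a ResultObjectProvider over the matching instances):
import java.util.Map;
import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.StoreContext;
import org.apache.openjpa.lib.rop.ResultObjectProvider;
import org.apache.openjpa.meta.ClassMetaData;

public class MethodQLSignatures {

    // Datastore variant: used when the query runs against the datastore.
    public static ResultObjectProvider findByStatus(StoreContext ctx,
        ClassMetaData meta, boolean subclasses, Map params,
        FetchConfiguration fetch) {
        return null; // build and return a provider over the matching instances
    }

    // In-memory variant: same signature plus the candidate Object argument.
    public static ResultObjectProvider findByStatus(StoreContext ctx,
        ClassMetaData meta, boolean subclasses, Object candidate, Map params,
        FetchConfiguration fetch) {
        return null; // evaluate the candidate in memory
    }
}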
only-update-constants: Bulk update queries, when executed in memory, \ only-update-constants: Bulk update queries, when executed in memory, \
may only update fields to constant values. may only update fields to constant values.
only-range-constants: Range values must be numeric constants. Illegal query: \ only-range-constants: Range values must be numeric constants. Illegal query: \
{0} {0}
no-savepoint-copy: Unable to copy field "{0}" for savepoint. no-savepoint-copy: Unable to copy field "{0}" for savepoint.
savepoint-exists: A savepoint with the name "{0}" already exists. \ savepoint-exists: A savepoint with the name "{0}" already exists. \
Each savepoint name must be unique. Each savepoint name must be unique.
no-lastsavepoint: Cannot rollback/release last savepoint as no savepoint \ no-lastsavepoint: Cannot rollback/release last savepoint as no savepoint \
has been set. has been set.
no-savepoint: You have not set a savepoint with the name "{0}". no-savepoint: You have not set a savepoint with the name "{0}".
savepoint-init: This savepoint has already been initialized. savepoint-init: This savepoint has already been initialized.
savepoint-flush-not-supported: The configured SavepointManager does not \ savepoint-flush-not-supported: The configured SavepointManager does not \
support incremental flushing when a savepoint has been set. You must \ support incremental flushing when a savepoint has been set. You must \
release your savepoints before flushing. release your savepoints before flushing.
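A sketch of the savepoint usage these messages refer to, assuming the setSavepoint/rollbackToSavepoint/releaseSavepoint methods exposed on OpenJPAEntityManager:
import javax.persistence.EntityManager;
import org.apache.openjpa.persistence.OpenJPAEntityManager;
import org.apache.openjpa.persistence.OpenJPAPersistence;

public class SavepointSketch {
    static void editWithSavepoint(EntityManager em) {
        OpenJPAEntityManager oem = OpenJPAPersistence.cast(em);
        oem.getTransaction().begin();
        oem.setSavepoint("beforeEdits");        // names must be unique (savepoint-exists)
        // ... modify managed instances ...
        oem.rollbackToSavepoint("beforeEdits"); // or releaseSavepoint("beforeEdits")
        oem.getTransaction().commit();
    }
}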
callback-err: Errors occurred processing listener callbacks. See the nested \ callback-err: Errors occurred processing listener callbacks. See the nested \
exceptions for details. exceptions for details.
bad-agg-listener-hint: Query hint value "{0}" ({1}) cannot be converted into \ bad-agg-listener-hint: Query hint value "{0}" ({1}) cannot be converted into \
an aggregate listener. an aggregate listener.
bad-filter-listener-hint: Query hint value "{0}" ({1}) cannot be converted \ bad-filter-listener-hint: Query hint value "{0}" ({1}) cannot be converted \
into a filter listener. into a filter listener.
bad-setter-hint-arg: In query hint "{0}", cannot convert hint value "{1}" to \ bad-setter-hint-arg: In query hint "{0}", cannot convert hint value "{1}" to \
type "{2}". type "{2}".
detach-val-mismatch: The instance "{0}" is managed by another context and \ detach-val-mismatch: The instance "{0}" is managed by another context and \
cannot be inspected for field values. cannot be inspected for field values.
detach-val-badsm: The instance "{0}" has an unknown state manager which \ detach-val-badsm: The instance "{0}" has an unknown state manager which \
prevents field inspection. prevents field inspection.
null-oid: Cannot perform find using null object id. null-oid: Cannot perform find using null object id.
illegal-op-in-prestore: This operation is illegal when called during \ illegal-op-in-prestore: This operation is illegal when called during \
transaction completion. transaction completion.
no-expressions: The query cannot be executed because it has no \ no-expressions: The query cannot be executed because it has no \
valid expressions. valid expressions.
null-fg: Attempt to add null/empty fetch group name to fetch configuration. null-fg: Attempt to add null/empty fetch group name to fetch configuration.
null-field: Attempt to add null/empty field name to fetch configuration. null-field: Attempt to add null/empty field name to fetch configuration.
container-projection: Query projections cannot include array, collection, or \ container-projection: Query projections cannot include array, collection, or \
map fields. Invalid query: "{0}" map fields. Invalid query: "{0}"
existing-value-override-excep: The generated value processing detected an \ existing-value-override-excep: The generated value processing detected an \
existing value assigned to this field: {0}. This existing value was either \ existing value assigned to this field: {0}. This existing value was either \
provided via an initializer or by calling the setter method. You either need \ provided via an initializer or by calling the setter method. You either need \
to remove the @GeneratedValue annotation or modify the code to remove the \ to remove the @GeneratedValue annotation or modify the code to remove the \
initializer processing. initializer processing.
invalid-tran-status: The transaction was not in a valid state ({0}) to \ invalid-tran-status: The transaction was not in a valid state ({0}) to \
accept the "{1}" method invocation. Processing will continue. accept the "{1}" method invocation. Processing will continue.
multi-threaded-access: Multiple concurrent threads attempted to access a \ multi-threaded-access: Multiple concurrent threads attempted to access a \
single broker. By default brokers are not thread safe; if you require \ single broker. By default brokers are not thread safe; if you require \
and/or intend a broker to be accessed by more than one thread, set the \ and/or intend a broker to be accessed by more than one thread, set the \
openjpa.Multithreaded property to true to override the default behavior. openjpa.Multithreaded property to true to override the default behavior.
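The override mentioned above, sketched with a hypothetical persistence-unit name:
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

public class MultithreadedBrokerConfig {
    static EntityManagerFactory createFactory() {
        // Only enable this when an EntityManager is intentionally shared
        // across threads; the default (false) avoids the locking overhead.
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("openjpa.Multithreaded", "true");
        return Persistence.createEntityManagerFactory("test", props);
    }
}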
no-saved-fields: No state snapshot is available for instance of type "{0}", \ no-saved-fields: No state snapshot is available for instance of type "{0}", \
but this instance uses state-comparison for dirty detection. but this instance uses state-comparison for dirty detection.
cant-serialize-flushed-broker: Serialization not allowed once a broker has \ cant-serialize-flushed-broker: Serialization not allowed once a broker has \
been flushed. been flushed.
cant-serialize-pessimistic-broker: Serialization not allowed for brokers with \ cant-serialize-pessimistic-broker: Serialization not allowed for brokers with \
an active datastore (pessimistic) transaction. an active datastore (pessimistic) transaction.
cant-serialize-connected-broker: Serialization not allowed for brokers with \ cant-serialize-connected-broker: Serialization not allowed for brokers with \
an active connection to the database. an active connection to the database.
no-interface-metadata: No metadata was found for managed interface {0}. no-interface-metadata: No metadata was found for managed interface {0}.
fetch-configuration-stack-empty: Fetch configuration stack is empty. fetch-configuration-stack-empty: Fetch configuration stack is empty.
gap-query-param: Parameter {1} for query "{0}" exceeds the number of {2} \ gap-query-param: Parameter {1} for query "{0}" exceeds the number of {2} \
bound parameters with the following values "{3}". This can happen if you have \ bound parameters with the following values "{3}". This can happen if you have \
declared but failed to bind values for one or more parameters. declared but failed to bind values for one or more parameters.
query-execution-error: Failed to execute query "{0}". Check the query syntax \ query-execution-error: Failed to execute query "{0}". Check the query syntax \
for correctness. See nested exception for details. for correctness. See nested exception for details.
invalid-timeout: An invalid timeout of {0} milliseconds was ignored. \ invalid-timeout: An invalid timeout of {0} milliseconds was ignored. \
Expected a value that is greater than or equal to -1. Expected a value that is greater than or equal to -1.
bad-hint-value: "{1}" is not a valid value for hint "{0}". Caused by: {2}.
bad-flush-before-queries: Invalid flush before queries type. Valid values are \
"true"(0), "false"(1) or "with-connection"(2). Specified value: {0}.
bad-lock-level: Invalid lock mode/level. Valid values are \
"none"(0), "read"(10), "write"(20), "optimistic"(10), \
"optimistic-force-increment"(20), "pessimistic-read"(30), \
"pessimistic-write"(40) or "pessimistic-force-increment"(50). \
Specified value: {0}.
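These new messages back the hint validation added in this change; a hedged example of a well-formed hint, reusing the LockEmployee entity from the tests below (an out-of-range value would instead trigger the invalid-timeout/bad-hint-value handling above):
import javax.persistence.EntityManager;
import javax.persistence.Query;

public class QueryHintSketch {
    static void applyTimeoutHint(EntityManager em) {
        Query q = em.createQuery("select e from LockEmployee e");
        // Standard JPA 2.0 hint; values less than -1 are rejected.
        q.setHint("javax.persistence.query.timeout", 5000);
        q.getResultList();
    }
}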

View File

@ -104,8 +104,6 @@ public abstract class SequencedActionsTest extends SQLListenerTestCase {
} }
} }
assertAllSQLInOrder( assertAllSQLInOrder(
"INSERT INTO " + empTableName + " .*",
"INSERT INTO " + empTableName + " .*",
"INSERT INTO " + empTableName + " .*"); "INSERT INTO " + empTableName + " .*");
// dynamic runtime test to determine wait time. // dynamic runtime test to determine wait time.
@ -150,6 +148,8 @@ public abstract class SequencedActionsTest extends SQLListenerTestCase {
em.createQuery("delete from " + empTableName).executeUpdate(); em.createQuery("delete from " + empTableName).executeUpdate();
em.getTransaction().commit(); em.getTransaction().commit();
} catch(Exception e) {
e.printStackTrace();
} finally { } finally {
if (em != null && em.isOpen()) { if (em != null && em.isOpen()) {
em.close(); em.close();
@ -607,9 +607,10 @@ public abstract class SequencedActionsTest extends SQLListenerTestCase {
LockModeType expectedlockMode = (LockModeType)args[2]; LockModeType expectedlockMode = (LockModeType)args[2];
LockModeType testinglockMode = em.getLockMode(employee); LockModeType testinglockMode = em.getLockMode(employee);
log.trace("test version: expected=" + expectedlockMode log.trace("test version: expected=" + expectedlockMode
+ ", testing=" + em.getLockMode(employee)); + ", testing=" + testinglockMode);
assertEquals("", expectedlockMode, testinglockMode); assertEquals("", getCanonical(expectedlockMode),
getCanonical(testinglockMode));
break; break;
case ResetException: case ResetException:
thisThread.throwable = null; thisThread.throwable = null;
@ -825,6 +826,14 @@ public abstract class SequencedActionsTest extends SQLListenerTestCase {
} }
} }
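// JPA 2.0 defines READ and WRITE as synonyms for OPTIMISTIC and
// OPTIMISTIC_FORCE_INCREMENT, so normalize both sides before comparing lock modes.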
private LockModeType getCanonical(LockModeType lockMode) {
if( lockMode == LockModeType.READ )
return LockModeType.OPTIMISTIC;
if( lockMode == LockModeType.WRITE )
return LockModeType.OPTIMISTIC_FORCE_INCREMENT;
return lockMode;
}
private String processException(Act curAction, Throwable t) { private String processException(Act curAction, Throwable t) {
String failStr = "Caught exception: none"; String failStr = "Caught exception: none";
if (t != null) { if (t != null) {

View File

@ -0,0 +1,264 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence.lockmgr;
import javax.persistence.EntityManager;
import javax.persistence.LockModeType;
import org.apache.openjpa.jdbc.conf.JDBCConfiguration;
import org.apache.openjpa.jdbc.kernel.JDBCFetchConfigurationImpl;
import org.apache.openjpa.jdbc.sql.DB2Dictionary;
import org.apache.openjpa.jdbc.sql.DBDictionary;
import org.apache.openjpa.persistence.EntityManagerImpl;
import org.apache.openjpa.persistence.OpenJPAEntityManager;
import org.apache.openjpa.persistence.OpenJPAEntityManagerSPI;
/**
* Test lock mode isolation behaviors using the EntityManager interface.
*/
public class TestEmLockMode extends SequencedActionsTest {
private static String NON_SUPPORTED_OPTIMISTIC_SQL =
"SELECT .* FROM LockEmployee .*";
private static String NON_SUPPORTED_FOR_UPDATE_SQL =
"SELECT .* FROM LockEmployee .* FOR UPDATE.*";
private static String VERSION_UPDATE_SQL =
"UPDATE LockEmployee SET version .* WHERE .*";
private static String DB2_OPTIMISTIC_SQL =
"SELECT .* FROM LockEmployee .* WHERE .*";
private static String DB2_PESSIMISTIC_RS_SQL =
"SELECT .* FROM LockEmployee .* WITH RS USE .*";
private static String DB2_PESSIMISTIC_RR_SQL =
"SELECT .* FROM LockEmployee .* WITH RR USE .*";
public void setUp() {
setUp(LockEmployee.class, "openjpa.LockManager", "mixed");
commonSetUp();
}
/*
* Test em.find(lockmode);
*/
public void testFindLockModeIsolations() {
EntityManager em = emf.createEntityManager();
em.getTransaction().begin();
commonTestFindLockModeIsolations(em, LockModeType.NONE, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 0, null);
commonTestFindLockModeIsolations(em, LockModeType.READ, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestFindLockModeIsolations(em, LockModeType.WRITE, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
VERSION_UPDATE_SQL);
commonTestFindLockModeIsolations(em, LockModeType.OPTIMISTIC, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestFindLockModeIsolations(em,
LockModeType.OPTIMISTIC_FORCE_INCREMENT, 1, DB2_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL, 1, VERSION_UPDATE_SQL);
commonTestFindLockModeIsolations(em, LockModeType.PESSIMISTIC_READ, 2,
DB2_PESSIMISTIC_RS_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestFindLockModeIsolations(em, LockModeType.PESSIMISTIC_WRITE, 2,
DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestFindLockModeIsolations(em,
LockModeType.PESSIMISTIC_FORCE_INCREMENT, 2,
DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
VERSION_UPDATE_SQL);
em.getTransaction().rollback();
em.close();
}
private void commonTestFindLockModeIsolations(EntityManager em,
LockModeType lockMode, int expectedSupportSQLCount,
String expectedSupportSQL, int expectedNonSupportSQLCount,
String expectedNonSupportSQL, int expectedVersionUpdateCount,
String expectedVersionUpdateSQL) {
OpenJPAEntityManager oem = (OpenJPAEntityManager) em.getDelegate();
JDBCFetchConfigurationImpl fConfig = (JDBCFetchConfigurationImpl)
((EntityManagerImpl) oem).getBroker().getFetchConfiguration();
DBDictionary dict = ((JDBCConfiguration) ((OpenJPAEntityManagerSPI) oem)
.getConfiguration()).getDBDictionaryInstance();
em.clear();
resetSQL();
int beforeIsolation = fConfig.getIsolation();
em.find(LockEmployee.class, 1, lockMode);
if (dict.supportsIsolationForUpdate() &&
dict instanceof DB2Dictionary) {
assertEquals(expectedSupportSQLCount, getSQLCount());
assertAllSQLInOrder(expectedSupportSQL);
} else {
assertEquals(expectedNonSupportSQLCount, getSQLCount());
assertAllSQLInOrder(expectedNonSupportSQL);
}
resetSQL();
em.flush();
assertEquals(expectedVersionUpdateCount, getSQLCount());
if (expectedVersionUpdateSQL != null)
assertAllSQLInOrder(expectedVersionUpdateSQL);
assertEquals(beforeIsolation, fConfig.getIsolation());
}
/*
* Test em.refresh(lockmode);
*/
public void testRefreshLockModeIsolations() {
EntityManager em = emf.createEntityManager();
em.getTransaction().begin();
commonTestRefreshLockModeIsolations(em, LockModeType.NONE, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 0, null);
commonTestRefreshLockModeIsolations(em, LockModeType.READ, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestRefreshLockModeIsolations(em, LockModeType.WRITE, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
VERSION_UPDATE_SQL);
commonTestRefreshLockModeIsolations(em, LockModeType.OPTIMISTIC, 1,
DB2_OPTIMISTIC_SQL, 1, NON_SUPPORTED_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestRefreshLockModeIsolations(em,
LockModeType.OPTIMISTIC_FORCE_INCREMENT, 1, DB2_OPTIMISTIC_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL, 1, VERSION_UPDATE_SQL);
commonTestRefreshLockModeIsolations(em, LockModeType.PESSIMISTIC_READ,
2, DB2_PESSIMISTIC_RS_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestRefreshLockModeIsolations(em, LockModeType.PESSIMISTIC_WRITE,
2, DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestRefreshLockModeIsolations(em,
LockModeType.PESSIMISTIC_FORCE_INCREMENT, 2,
DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
VERSION_UPDATE_SQL);
em.getTransaction().rollback();
em.close();
}
private void commonTestRefreshLockModeIsolations(EntityManager em,
LockModeType lockMode, int expectedSupportSQLCount,
String expectedSupportSQL, int expectedNonSupportSQLCount,
String expectedNonSupportSQL, int expectedVersionUpdateCount,
String expectedVersionUpdateSQL) {
OpenJPAEntityManager oem = (OpenJPAEntityManager) em.getDelegate();
JDBCFetchConfigurationImpl fConfig = (JDBCFetchConfigurationImpl)
((EntityManagerImpl) oem).getBroker().getFetchConfiguration();
DBDictionary dict = ((JDBCConfiguration) ((OpenJPAEntityManagerSPI) oem)
.getConfiguration()).getDBDictionaryInstance();
em.clear();
LockEmployee employee = em.find(LockEmployee.class, 1);
resetSQL();
int beforeIsolation = fConfig.getIsolation();
em.refresh(employee, lockMode);
if (dict.supportsIsolationForUpdate() &&
dict instanceof DB2Dictionary) {
assertEquals(expectedSupportSQLCount, getSQLCount());
assertAllSQLInOrder(expectedSupportSQL);
} else {
assertEquals(expectedNonSupportSQLCount, getSQLCount());
assertAllSQLInOrder(expectedNonSupportSQL);
}
resetSQL();
em.flush();
assertEquals(expectedVersionUpdateCount, getSQLCount());
if (expectedVersionUpdateSQL != null)
assertAllSQLInOrder(expectedVersionUpdateSQL);
assertEquals(beforeIsolation, fConfig.getIsolation());
}
/*
* Test em.lock(lockmode);
*/
public void testLockLockModeIsolations() {
EntityManager em = emf.createEntityManager();
em.getTransaction().begin();
commonTestLockLockModeIsolations(em, LockModeType.NONE, 0, null, 0,
null, 0, null);
commonTestLockLockModeIsolations(em, LockModeType.READ, 0, null, 0,
null, 1, NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestLockLockModeIsolations(em, LockModeType.WRITE, 0, null, 0,
null, 1, VERSION_UPDATE_SQL);
commonTestLockLockModeIsolations(em, LockModeType.OPTIMISTIC, 0, null,
0, null, 1, NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestLockLockModeIsolations(em,
LockModeType.OPTIMISTIC_FORCE_INCREMENT, 0, null, 0, null, 1,
VERSION_UPDATE_SQL);
commonTestLockLockModeIsolations(em, LockModeType.PESSIMISTIC_READ, 2,
DB2_PESSIMISTIC_RS_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestLockLockModeIsolations(em, LockModeType.PESSIMISTIC_WRITE, 2,
DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
NON_SUPPORTED_OPTIMISTIC_SQL);
commonTestLockLockModeIsolations(em,
LockModeType.PESSIMISTIC_FORCE_INCREMENT, 2,
DB2_PESSIMISTIC_RR_SQL, 2, NON_SUPPORTED_FOR_UPDATE_SQL, 1,
VERSION_UPDATE_SQL);
em.getTransaction().rollback();
em.close();
}
private void commonTestLockLockModeIsolations(EntityManager em,
LockModeType lockMode, int expectedSupportSQLCount,
String expectedSupportSQL, int expectedNonSupportSQLCount,
String expectedNonSupportSQL, int expectedVersionUpdateCount,
String expectedVersionUpdateSQL) {
OpenJPAEntityManager oem = (OpenJPAEntityManager) em.getDelegate();
JDBCFetchConfigurationImpl fConfig = (JDBCFetchConfigurationImpl)
((EntityManagerImpl) oem).getBroker().getFetchConfiguration();
DBDictionary dict = ((JDBCConfiguration) ((OpenJPAEntityManagerSPI) oem)
.getConfiguration()).getDBDictionaryInstance();
em.clear();
LockEmployee employee = em.find(LockEmployee.class, 1);
resetSQL();
int beforeIsolation = fConfig.getIsolation();
em.lock(employee, lockMode);
if (dict.supportsIsolationForUpdate() &&
dict instanceof DB2Dictionary) {
assertEquals(expectedSupportSQLCount, getSQLCount());
if (expectedSupportSQL != null)
assertAllSQLInOrder(expectedSupportSQL);
} else {
assertEquals(expectedNonSupportSQLCount, getSQLCount());
if (expectedNonSupportSQL != null)
assertAllSQLInOrder(expectedNonSupportSQL);
}
resetSQL();
em.flush();
assertEquals(expectedVersionUpdateCount, getSQLCount());
if (expectedVersionUpdateSQL != null)
assertAllSQLInOrder(expectedVersionUpdateSQL);
assertEquals(beforeIsolation, fConfig.getIsolation());
}
}

View File

@ -0,0 +1,111 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence.lockmgr;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import org.apache.openjpa.persistence.OpenJPAEntityManager;
import org.apache.openjpa.persistence.jdbc.JDBCFetchPlan;
import org.apache.openjpa.persistence.test.AllowFailure;
/**
* Test hints using EntityManager interface.
*/
public class TestEmLockTimeout extends SequencedActionsTest {
public void setUp() {
setUp(LockEmployee.class
, "openjpa.LockManager", "mixed"
);
commonSetUp();
emf.close();
}
/*
* Test setting lock.timeout at the createEntityManagerFactory.
*/
public void testSetJavaxLockTimeoutAtProviderCreateEmf() {
setUp(LockEmployee.class
, "openjpa.LockManager", "mixed"
, "javax.persistence.lock.timeout", "13"
);
EntityManager em = emf.createEntityManager();
OpenJPAEntityManager oem = (OpenJPAEntityManager)em.getDelegate();
JDBCFetchPlan fPlan = (JDBCFetchPlan) oem.getFetchPlan();
int lockTmo1 = fPlan.getLockTimeout();
assertEquals(13, lockTmo1);
em.close();
emf.close();
}
/*
* Test setting lock.timeout at the createEntityManagerFactory,
* with multiple equivalent entries.
*/
@AllowFailure(message="OPENJPA-??? - Provider.createEntityManagerFactory" +
" does not suppport multiple equivalent properties.")
public void testSetLockTimeoutsAtProviderCreateEmf() {
setUp(LockEmployee.class
, "openjpa.LockManager", "mixed"
, "openjpa.LockTimeout", 122
, "javax.persistence.lock.timeout", "133"
);
EntityManager em = emf.createEntityManager();
OpenJPAEntityManager oem = (OpenJPAEntityManager)em.getDelegate();
JDBCFetchPlan fPlan = (JDBCFetchPlan) oem.getFetchPlan();
int lockTmo1 = fPlan.getLockTimeout();
assertEquals(133, lockTmo1);
em.close();
emf.close();
}
/*
* Test setting lock.timeout at the em.find(), overriding
* the value set at createEntityManagerFactory and createEm.
*/
public void testSetJavaxLockTimeoutAtFind() {
setUp(LockEmployee.class
, "openjpa.LockManager", "mixed"
, "javax.persistence.lock.timeout", "13"
);
EntityManager em = emf.createEntityManager();
Map<String,Object> props2 = new HashMap<String,Object>();
props2.put("javax.persistence.lock.timeout", 3333);
em.find(LockEmployee.class, 1, props2);
OpenJPAEntityManager oem = (OpenJPAEntityManager)em.getDelegate();
JDBCFetchPlan fPlan = (JDBCFetchPlan) oem.getFetchPlan();
int lockTmo3 = fPlan.getLockTimeout();
assertEquals(13, lockTmo3);
em.close();
emf.close();
}
}

View File

@ -23,7 +23,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 em.find(LockMode) behaviors with "mixed" lock manager. * Test JPA 2.0 em.find(LockMode) behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerFindBasicTest extends SequencedActionsTest { public class TestMixedLockManagerFindBasic extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -26,7 +26,7 @@ import javax.persistence.TransactionRequiredException;
/** /**
* Test JPA 2.0 em.find(LockMode) exception behaviors with "mixed" lock manager. * Test JPA 2.0 em.find(LockMode) exception behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerFindExceptionTest extends SequencedActionsTest { public class TestMixedLockManagerFindException extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -25,7 +25,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager. * Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerFindPermutationTest extends SequencedActionsTest { public class TestMixedLockManagerFindPermutation extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -23,7 +23,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 em.lock(LockMode) basic behaviors with "mixed" lock manager. * Test JPA 2.0 em.lock(LockMode) basic behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerLockBasicTest extends SequencedActionsTest { public class TestMixedLockManagerLockBasic extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -25,7 +25,7 @@ import javax.persistence.TransactionRequiredException;
/** /**
* Test JPA 2.0 em.lock(LockMode) exception behaviors with "mixed" lock manager. * Test JPA 2.0 em.lock(LockMode) exception behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerLockExceptionTest extends SequencedActionsTest { public class TestMixedLockManagerLockException extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -25,7 +25,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager. * Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerLockPermutationTest extends SequencedActionsTest { public class TestMixedLockManagerLockPermutation extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -23,7 +23,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 em.refresh(LockMode) basic behaviors with "mixed" lock manager. * Test JPA 2.0 em.refresh(LockMode) basic behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerRefreshBasicTest extends SequencedActionsTest { public class TestMixedLockManagerRefreshBasic extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"

View File

@ -25,7 +25,7 @@ import javax.persistence.TransactionRequiredException;
* Test JPA 2.0 em.refresh(LockMode) exception behaviors with "mixed" * Test JPA 2.0 em.refresh(LockMode) exception behaviors with "mixed"
* lock manager. * lock manager.
*/ */
public class MixedLockManagerRefreshExceptionTest extends SequencedActionsTest { public class TestMixedLockManagerRefreshException extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class
, "openjpa.LockManager", "mixed" , "openjpa.LockManager", "mixed"
@ -33,6 +33,44 @@ public class MixedLockManagerRefreshExceptionTest extends SequencedActionsTest {
commonSetUp(); commonSetUp();
} }
/**
* TransactionRequiredException if there is no transaction
*/
public void testRefreshNoTxReqExceptions() {
Object[][] threadMainTxReqTest = {
{Act.CreateEm},
{Act.Find},
{Act.SaveVersion},
{Act.TestEmployee, 1, Default_FirstName},
{Act.Refresh, 1, LockModeType.NONE },
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.READ },
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.WRITE },
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.OPTIMISTIC },
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.OPTIMISTIC_FORCE_INCREMENT },
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_READ},
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_WRITE},
{Act.TestException, 0, null },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_FORCE_INCREMENT },
{Act.TestException, 0, null },
};
launchActionSequence("testLockTxReqExceptions()",
null, threadMainTxReqTest);
}
/** /**
* TransactionRequiredException if there is no transaction * TransactionRequiredException if there is no transaction
*/ */
@ -43,28 +81,28 @@ public class MixedLockManagerRefreshExceptionTest extends SequencedActionsTest {
{Act.SaveVersion}, {Act.SaveVersion},
{Act.TestEmployee, 1, Default_FirstName}, {Act.TestEmployee, 1, Default_FirstName},
{Act.Refresh, 1, LockModeType.NONE }, {Act.RefreshWithLock, 1, LockModeType.NONE },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.READ }, {Act.RefreshWithLock, 1, LockModeType.READ },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.WRITE }, {Act.RefreshWithLock, 1, LockModeType.WRITE },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.OPTIMISTIC }, {Act.RefreshWithLock, 1, LockModeType.OPTIMISTIC },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.OPTIMISTIC_FORCE_INCREMENT }, {Act.RefreshWithLock, 1, LockModeType.OPTIMISTIC_FORCE_INCREMENT },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_READ}, {Act.RefreshWithLock, 1, LockModeType.PESSIMISTIC_READ},
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_WRITE}, {Act.RefreshWithLock, 1, LockModeType.PESSIMISTIC_WRITE},
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
{Act.Refresh, 1, LockModeType.PESSIMISTIC_FORCE_INCREMENT }, {Act.RefreshWithLock, 1, LockModeType.PESSIMISTIC_FORCE_INCREMENT },
{Act.TestException, 0, TransactionRequiredException.class }, {Act.TestException, 0, TransactionRequiredException.class },
}; };
launchActionSequence("testLockTxReqExceptions()", launchActionSequence("testLockTxReqExceptions()",

View File

@ -25,7 +25,7 @@ import javax.persistence.LockModeType;
/** /**
* Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager. * Test JPA 2.0 LockMode type permutation behaviors with "mixed" lock manager.
*/ */
public class MixedLockManagerRefreshPermutationTest public class TestMixedLockManagerRefreshPermutation
extends SequencedActionsTest { extends SequencedActionsTest {
public void setUp() { public void setUp() {
setUp(LockEmployee.class setUp(LockEmployee.class

View File

@ -780,7 +780,7 @@ public class TestQueryTimeout extends SQLListenerTestCase {
} catch (Exception e) { } catch (Exception e) {
// expected - setHint(-2000) should cause an IllegalArgumentException // expected - setHint(-2000) should cause an IllegalArgumentException
checkException("testQueryTimeout5()", e, checkException("testQueryTimeout5()", e,
IllegalArgumentException.class, "Invalid value" ); IllegalArgumentException.class, "invalid timeout of -2,000");
} finally { } finally {
if ((em != null) && em.isOpen()) { if ((em != null) && em.isOpen()) {
em.close(); em.close();

View File

@ -56,7 +56,6 @@ import org.apache.openjpa.kernel.Broker;
import org.apache.openjpa.kernel.DelegatingBroker; import org.apache.openjpa.kernel.DelegatingBroker;
import org.apache.openjpa.kernel.FetchConfiguration; import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.FindCallbacks; import org.apache.openjpa.kernel.FindCallbacks;
import org.apache.openjpa.kernel.MixedLockLevels;
import org.apache.openjpa.kernel.OpCallbacks; import org.apache.openjpa.kernel.OpCallbacks;
import org.apache.openjpa.kernel.OpenJPAStateManager; import org.apache.openjpa.kernel.OpenJPAStateManager;
import org.apache.openjpa.kernel.PreparedQuery; import org.apache.openjpa.kernel.PreparedQuery;
@ -65,16 +64,12 @@ import org.apache.openjpa.kernel.QueryFlushModes;
import org.apache.openjpa.kernel.QueryLanguages; import org.apache.openjpa.kernel.QueryLanguages;
import org.apache.openjpa.kernel.Seq; import org.apache.openjpa.kernel.Seq;
import org.apache.openjpa.kernel.jpql.JPQLParser; import org.apache.openjpa.kernel.jpql.JPQLParser;
import org.apache.openjpa.lib.conf.Configuration;
import org.apache.openjpa.lib.conf.IntValue;
import org.apache.openjpa.lib.util.Closeable; import org.apache.openjpa.lib.util.Closeable;
import org.apache.openjpa.lib.util.Localizer; import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.meta.ClassMetaData; import org.apache.openjpa.meta.ClassMetaData;
import org.apache.openjpa.meta.FieldMetaData; import org.apache.openjpa.meta.FieldMetaData;
import org.apache.openjpa.meta.QueryMetaData; import org.apache.openjpa.meta.QueryMetaData;
import org.apache.openjpa.meta.SequenceMetaData; import org.apache.openjpa.meta.SequenceMetaData;
import org.apache.openjpa.persistence.query.OpenJPAQueryBuilder;
import org.apache.openjpa.persistence.query.QueryBuilderImpl;
import org.apache.openjpa.util.Exceptions; import org.apache.openjpa.util.Exceptions;
import org.apache.openjpa.util.ImplHelper; import org.apache.openjpa.util.ImplHelper;
import org.apache.openjpa.util.RuntimeExceptionTranslator; import org.apache.openjpa.util.RuntimeExceptionTranslator;
@ -469,19 +464,24 @@ public class EntityManagerImpl
return find(cls, oid, mode, null); return find(cls, oid, mode, null);
} }
public <T> T find(Class<T> cls, Object oid,
Map<String, Object> properties){
return find(cls, oid, null, properties);
}
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <T> T find(Class<T> cls, Object oid, LockModeType mode, public <T> T find(Class<T> cls, Object oid, LockModeType mode,
Map<String, Object> properties) { Map<String, Object> properties) {
assertNotCloseInvoked(); assertNotCloseInvoked();
if (mode != LockModeType.NONE) if (mode != null && mode != LockModeType.NONE)
_broker.assertActiveTransaction(); _broker.assertActiveTransaction();
boolean fcPushed = pushLockProperties(mode, properties); processLockProperties(pushFetchPlan(), mode, properties);
try { try {
oid = _broker.newObjectId(cls, oid); oid = _broker.newObjectId(cls, oid);
return (T) _broker.find(oid, true, this); return (T) _broker.find(oid, true, this);
} finally { } finally {
popLockProperties(fcPushed); popFetchPlan();
} }
} }
@ -726,6 +726,10 @@ public class EntityManagerImpl
refresh(entity, mode, null); refresh(entity, mode, null);
} }
public void refresh(Object entity, Map<String, Object> properties) {
refresh(entity, null, properties);
}
public void refresh(Object entity, LockModeType mode, public void refresh(Object entity, LockModeType mode,
Map<String, Object> properties) { Map<String, Object> properties) {
assertNotCloseInvoked(); assertNotCloseInvoked();
@ -733,11 +737,11 @@ public class EntityManagerImpl
_broker.assertActiveTransaction(); _broker.assertActiveTransaction();
_broker.assertWriteOperation(); _broker.assertWriteOperation();
boolean fcPushed = pushLockProperties(mode, properties); processLockProperties(pushFetchPlan(), mode, properties);
try { try {
_broker.refresh(entity, this); _broker.refresh(entity, this);
} finally { } finally {
popLockProperties(fcPushed); popFetchPlan();
} }
} }
@ -1096,9 +1100,7 @@ public class EntityManagerImpl
} }
public void lock(Object entity, LockModeType mode) { public void lock(Object entity, LockModeType mode) {
assertNotCloseInvoked(); lock(entity, mode, -1);
assertValidAttchedEntity(entity);
_broker.lock(entity, MixedLockLevelsHelper.toLockLevel(mode), -1, this);
} }
public void lock(Object entity) { public void lock(Object entity) {
@ -1110,8 +1112,14 @@ public class EntityManagerImpl
public void lock(Object entity, LockModeType mode, int timeout) { public void lock(Object entity, LockModeType mode, int timeout) {
assertNotCloseInvoked(); assertNotCloseInvoked();
assertValidAttchedEntity(entity); assertValidAttchedEntity(entity);
_broker.lock(entity, MixedLockLevelsHelper.toLockLevel(mode), timeout,
this); processLockProperties(pushFetchPlan(), mode, null);
try {
_broker.lock(entity, MixedLockLevelsHelper.toLockLevel(mode),
timeout, this);
} finally {
popFetchPlan();
}
} }
public void lock(Object entity, LockModeType mode, public void lock(Object entity, LockModeType mode,
@ -1120,12 +1128,12 @@ public class EntityManagerImpl
assertValidAttchedEntity(entity); assertValidAttchedEntity(entity);
_broker.assertActiveTransaction(); _broker.assertActiveTransaction();
boolean fcPushed = pushLockProperties(mode, properties); processLockProperties(pushFetchPlan(), mode, properties);
try { try {
_broker.lock(entity, MixedLockLevelsHelper.toLockLevel(mode), _broker.lock(entity, MixedLockLevelsHelper.toLockLevel(mode),
_broker.getFetchConfiguration().getLockTimeout(), this); _broker.getFetchConfiguration().getLockTimeout(), this);
} finally { } finally {
popLockProperties(fcPushed); popFetchPlan();
} }
} }
@ -1542,8 +1550,7 @@ public class EntityManagerImpl
throw new PersistenceException(_loc.get("unwrap-em-invalid", cls) throw new PersistenceException(_loc.get("unwrap-em-invalid", cls)
.toString(), null, this, false); .toString(), null, this, false);
} }
public void setQuerySQLCache(boolean flag) { public void setQuerySQLCache(boolean flag) {
_broker.setCachePreparedQuery(flag); _broker.setCachePreparedQuery(flag);
} }
@ -1556,154 +1563,26 @@ public class EntityManagerImpl
return _ret; return _ret;
} }
private enum FetchConfigProperty { private void processLockProperties(FetchPlan fPlan, LockModeType mode,
LockTimeout, ReadLockLevel, WriteLockLevel
};
private boolean setFetchConfigProperty(FetchConfigProperty[] validProps,
Map<String, Object> properties) { Map<String, Object> properties) {
boolean fcPushed = false;
if (properties != null && properties.size() > 0) {
Configuration conf = _broker.getConfiguration();
Set<String> inKeys = properties.keySet();
for (String inKey : inKeys) {
for (FetchConfigProperty validProp : validProps) {
String validPropStr = validProp.toString();
Set<String> validPropKeys = conf
.getPropertyKeys(validPropStr);
if (validPropKeys.contains(inKey)) {
FetchConfiguration fCfg = _broker
.getFetchConfiguration();
IntValue intVal = new IntValue(inKey);
try {
Object setValue = properties.get(inKey);
if (setValue instanceof String) {
intVal.setString((String) setValue);
} else if (Number.class.isAssignableFrom(setValue
.getClass())) {
intVal.setObject(setValue);
} else {
intVal.setString(setValue.toString());
}
int value = intVal.get();
switch (validProp) {
case LockTimeout:
if (!fcPushed) {
fCfg = _broker.pushFetchConfiguration();
fcPushed = true;
}
fCfg.setLockTimeout(value);
break;
case ReadLockLevel:
if (value != MixedLockLevels.LOCK_NONE
&& value != fCfg.getReadLockLevel()) {
if (!fcPushed) {
fCfg = _broker.pushFetchConfiguration();
fcPushed = true;
}
fCfg.setReadLockLevel(value);
}
break;
case WriteLockLevel:
if (value != MixedLockLevels.LOCK_NONE
&& value != fCfg.getWriteLockLevel()) {
if (!fcPushed) {
fCfg = _broker.pushFetchConfiguration();
fcPushed = true;
}
fCfg.setWriteLockLevel(value);
}
break;
}
} catch (Exception e) {
// silently ignore the property
}
break; // for(String inKey : inKeys)
}
}
}
}
return fcPushed;
}
private boolean pushLockProperties(LockModeType mode,
Map<String, Object> properties) {
boolean fcPushed = false;
// handle properties in map first // handle properties in map first
if (properties != null) { fPlan.addHints(properties);
fcPushed = setFetchConfigProperty(new FetchConfigProperty[] {
FetchConfigProperty.LockTimeout,
FetchConfigProperty.ReadLockLevel,
FetchConfigProperty.WriteLockLevel }, properties);
}
// override with the specific lockMode, if needed. // override with the specific lockMode, if needed.
int setReadLevel = MixedLockLevelsHelper.toLockLevel(mode); if (mode != null && mode != LockModeType.NONE) {
if (setReadLevel != MixedLockLevels.LOCK_NONE) {
// Set overriden read lock level // Set overriden read lock level
FetchConfiguration fCfg = _broker.getFetchConfiguration(); LockModeType curReadLockMode = fPlan.getReadLockMode();
int curReadLevel = fCfg.getReadLockLevel(); if (mode != curReadLockMode)
if (setReadLevel != curReadLevel) { fPlan.setReadLockMode(mode);
if (!fcPushed) {
fCfg = _broker.pushFetchConfiguration();
fcPushed = true;
}
fCfg.setReadLockLevel(setReadLevel);
}
// Set overriden isolation level for pessimistic-read/write
switch (setReadLevel) {
case MixedLockLevels.LOCK_PESSIMISTIC_READ:
fcPushed = setIsolationForPessimisticLock(fCfg, fcPushed,
Connection.TRANSACTION_REPEATABLE_READ);
break;
case MixedLockLevels.LOCK_PESSIMISTIC_WRITE:
case MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT:
fcPushed = setIsolationForPessimisticLock(fCfg, fcPushed,
Connection.TRANSACTION_SERIALIZABLE);
break;
default:
}
}
return fcPushed;
}
private boolean setIsolationForPessimisticLock(FetchConfiguration fCfg,
boolean fcPushed, int level) {
if (!fcPushed) {
fCfg = _broker.pushFetchConfiguration();
fcPushed = true;
}
// TODO: refactoring under OPENJPA-957
// ((JDBCFetchConfiguration) fCfg).setIsolation(level);
return fcPushed;
}
private void popLockProperties(boolean fcPushed) {
if (fcPushed) {
_broker.popFetchConfiguration();
} }
} }
public Metamodel getMetamodel() { public Metamodel getMetamodel() {
throw new UnsupportedOperationException( throw new UnsupportedOperationException(
"JPA 2.0 - Method not yet implemented"); "JPA 2.0 - Method not yet implemented");
} }
public void refresh(Object arg0, Map<String, Object> arg1) {
throw new UnsupportedOperationException(
"JPA 2.0 - Method not yet implemented");
}
public void setProperty(String arg0, Object arg1) { public void setProperty(String arg0, Object arg1) {
throw new UnsupportedOperationException( throw new UnsupportedOperationException(
"JPA 2.0 - Method not yet implemented"); "JPA 2.0 - Method not yet implemented");
} }
public <T> T find(Class<T> arg0, Object arg1, Map<String, Object> arg2) {
throw new UnsupportedOperationException(
"JPA 2.0 - Method not yet implemented");
}
} }
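Caller-side usage sketch for the reworked lock-property handling above: the properties map passed to find() is applied to a pushed fetch plan by processLockProperties() and popped again when the call returns. Only the standard JPA 2.0 API and the javax.persistence.lock.timeout hint named in this change are used; the entity class and id are placeholders supplied by the caller.

import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.persistence.LockModeType;

public class LockPropertiesUsage {
    // Finds an entity under a pessimistic write lock with a 5000 ms lock
    // timeout supplied through the standard properties map; the timeout is
    // applied to a temporary fetch plan and discarded after the call.
    public static <T> T findLocked(EntityManager em, Class<T> cls, Object id) {
        Map<String, Object> props = new HashMap<String, Object>();
        props.put("javax.persistence.lock.timeout", 5000);
        return em.find(cls, id, LockModeType.PESSIMISTIC_WRITE, props);
    }
}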

View File

@ -129,6 +129,29 @@ public interface FetchPlan {
*/ */
public void setHint(String key, Object value); public void setHint(String key, Object value);
/**
* Sets the hint for the given key to the given value.
*
* @since 2.0.0
*/
public void setHint(String key, Object value, boolean validThrowException);
/**
* Adds the hint and the associated value to the list.
*
* @param name the name of the hint
* @param value the value of the hint
* @since 2.0.0
*/
public void addHint(String name, Object value);
/**
* Adds the given hint keys and values to this receiver.
*
* @since 2.0.0
*/
public void addHints(Map<String, Object> hints);
/** /**
* Gets the hint keys and values currently set of this receiver. * Gets the hint keys and values currently set of this receiver.
* *
@ -291,10 +314,10 @@ public interface FetchPlan {
public int getLockTimeout(); public int getLockTimeout();
/** /**
* The number of milliseconds to wait for a query, or -1 for no * The number of milliseconds to wait for an object lock, or -1 for no
* limit. * limit.
*/ */
public FetchPlan setQueryTimeout(int timeout); public FetchPlan setLockTimeout(int timeout);
/** /**
* The number of milliseconds to wait for a query, or -1 for no * The number of milliseconds to wait for a query, or -1 for no
@ -303,11 +326,11 @@ public interface FetchPlan {
public int getQueryTimeout(); public int getQueryTimeout();
/** /**
* The number of milliseconds to wait for an object lock, or -1 for no * The number of milliseconds to wait for a query, or -1 for no
* limit. * limit.
*/ */
public FetchPlan setQueryTimeout(int timeout);
public FetchPlan setLockTimeout(int timeout);
/** /**
* The lock level to use for locking loaded objects. * The lock level to use for locking loaded objects.
*/ */
@ -327,7 +350,6 @@ public interface FetchPlan {
* The lock level to use for locking dirtied objects. * The lock level to use for locking dirtied objects.
*/ */
public FetchPlan setWriteLockMode(LockModeType mode); public FetchPlan setWriteLockMode(LockModeType mode);
/** /**
* @deprecated cast to {@link FetchPlanImpl} instead. This * @deprecated cast to {@link FetchPlanImpl} instead. This
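A minimal sketch of driving the FetchPlan hint methods added above from an application; OpenJPAPersistence.cast() and getFetchPlan() are existing OpenJPA APIs, and the hint keys shown are taken from the precedence mapping introduced in this change.

import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import org.apache.openjpa.persistence.FetchPlan;
import org.apache.openjpa.persistence.OpenJPAEntityManager;
import org.apache.openjpa.persistence.OpenJPAPersistence;

public class FetchPlanHintUsage {
    // Applies hints to the current fetch plan using the new methods.
    public static void applyHints(EntityManager em) {
        OpenJPAEntityManager oem = OpenJPAPersistence.cast(em);
        FetchPlan plan = oem.getFetchPlan();

        // setHint(key, value) delegates to setHint(key, value, true),
        // i.e. validation may raise an exception for a bad key or value.
        plan.setHint("openjpa.FetchPlan.LockTimeout", 2000);

        // addHints(Map) routes each entry through setHint(key, value, false),
        // so entries are validated without forcing an exception.
        Map<String, Object> hints = new HashMap<String, Object>();
        hints.put("javax.persistence.query.timeout", 3000);
        plan.addHints(hints);
    }
}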

View File

@ -0,0 +1,214 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import javax.persistence.LockModeType;
import org.apache.openjpa.kernel.FetchConfigurationImpl;
import org.apache.openjpa.kernel.AbstractHintHandler;
import org.apache.openjpa.kernel.MixedLockLevels;
import org.apache.openjpa.lib.conf.ProductDerivations;
import org.apache.openjpa.lib.util.Localizer;
/**
* Fetch plan hint handler. Handles openjpa.FetchPlan.*,
* javax.persistence.lock.* and javax.persistence.query.* hints.
*
* @since 2.0.0
* @nojavadoc
*/
public class FetchPlanHintHandler extends AbstractHintHandler {
private static final Localizer _loc = Localizer
.forPackage(FetchPlanHintHandler.class);
protected static final String PREFIX_JPA = "javax.persistence.";
protected static final String PREFIX_FETCHPLAN = PREFIX_OPENJPA
+ "FetchPlan.";
// Valid defined product derivation prefixes
protected static final Set<String> ValidProductPrefixes =
new HashSet<String>();
// JPA Specification 2.0 keys are mapped to equivalent FetchPlan keys
protected static final Map<String, String> JavaxHintsMap =
new HashMap<String, String>();
// hint precedence definitions
protected static final Map<String, String[]> PrecedenceMap =
new HashMap<String, String[]>();
static {
// Initialize valid product prefixes from available product derivations.
for (String prefix : ProductDerivations.getConfigurationPrefixes()) {
ValidProductPrefixes.add(prefix);
}
// Initialize javax.persistence to openjpa.FetchPlan hint mapping.
JavaxHintsMap.put(PREFIX_JPA + "lock.timeout", PREFIX_FETCHPLAN
+ "LockTimeout");
JavaxHintsMap.put(PREFIX_JPA + "query.timeout", PREFIX_FETCHPLAN
+ "QueryTimeout");
// Initialize hint precedence order mapping from the list.
String[][] precedenceMapList = {
{ PREFIX_JPA + "lock.timeout",
PREFIX_FETCHPLAN + "LockTimeout",
PREFIX_OPENJPA + "LockTimeout" },
{ PREFIX_JPA + "query.timeout",
PREFIX_FETCHPLAN + "QueryTimeout",
PREFIX_OPENJPA + "QueryTimeout" },
{ PREFIX_FETCHPLAN + "Isolation",
PREFIX_JDBC + "TransactionIsolation" },
{ PREFIX_FETCHPLAN + "EagerFetchMode",
PREFIX_JDBC + "EagerFetchMode" },
{ PREFIX_FETCHPLAN + "FetchDirection",
PREFIX_JDBC + "FetchDirection" },
{ PREFIX_FETCHPLAN + "JoinSyntax",
PREFIX_JDBC + "JoinSyntax" },
{ PREFIX_FETCHPLAN + "LRSSizeAlgorithm",
PREFIX_FETCHPLAN + "LRSSize",
PREFIX_JDBC + "LRSSize" },
{ PREFIX_FETCHPLAN + "ResultSetType",
PREFIX_JDBC + "ResultSetType" },
{ PREFIX_FETCHPLAN + "SubclassFetchMode",
PREFIX_JDBC + "SubclassFetchMode" },
{ PREFIX_FETCHPLAN + "ReadLockMode",
PREFIX_OPENJPA + "ReadLockLevel" },
{ PREFIX_FETCHPLAN + "WriteLockMode",
PREFIX_OPENJPA + "WriteLockLevel" },
{ PREFIX_FETCHPLAN + "FetchBatchSize",
PREFIX_OPENJPA + "FetchBatchSize" },
{ PREFIX_FETCHPLAN + "MaxFetchDepth",
PREFIX_OPENJPA + "MaxFetchDepth" }
};
for (String[] list : precedenceMapList) {
for (String hint : list)
PrecedenceMap.put(hint, list);
}
}
protected FetchPlanImpl _fPlan;
/**
* Constructor; supply delegate.
*/
public FetchPlanHintHandler(FetchPlanImpl fetchPlan) {
super((FetchConfigurationImpl) fetchPlan.getDelegate());
_fPlan = fetchPlan;
}
public boolean setHint(String hintName, Object value,
boolean validateThrowException) {
if (!hintName.startsWith(PREFIX_JPA)
&& !ValidProductPrefixes.contains(getPrefixOf(hintName)))
return false;
return super.setHint(hintName, value, validateThrowException);
}
protected boolean setHintInternal(String hintName, Object value,
boolean validateThrowException) {
boolean valueSet = false;
if (hintName.startsWith(PREFIX_FETCHPLAN)) {
if (hintName.endsWith("LockMode")
&& !_fConfig.getContext().isActive()) {
_fConfig.setHint(hintName + ".Defer", toLockLevel(value),
false);
valueSet = true;
} else
valueSet = hintToSetter(_fPlan, hintName, value);
} else
_fConfig.setHint(hintName, value, validateThrowException);
return valueSet;
}
protected String hintToKey(String key) {
// transform product derived prefix to openjpa prefix
if (!key.startsWith(PREFIX_OPENJPA)
&& ValidProductPrefixes.contains(getPrefixOf(key)))
key = PREFIX_OPENJPA + key.substring(key.indexOf('.') + 1);
// transform javax.persistence.* hints to fetch plan hints.
if (JavaxHintsMap.containsKey(key))
key = JavaxHintsMap.get(key);
return key;
}
protected boolean hasPrecedent(String key) {
boolean hasPrecedent = true;
String[] list = PrecedenceMap.get(key);
if (list != null) {
for (String hint : list) {
if (hint.equals(key))
break;
// stop if a higher precedence hint has already been defined
if (_fConfig.getHint(hint) != null) {
hasPrecedent = false;
break;
}
}
}
return hasPrecedent;
}
protected void handleException(RuntimeException e) {
throw PersistenceExceptions.toPersistenceException(e);
}
private Integer toLockLevel(Object value) {
Object origValue = value;
if (value instanceof String) {
// to accommodate alias name input that maps to enum values
// e.g. "optimistic-force-increment" ==
// LockModeType.OPTIMISTIC_FORCE_INCREMENT
String strValue = ((String) value).toUpperCase().replace('-', '_');
value = Enum.valueOf(LockModeType.class, strValue);
}
if (value instanceof LockModeType)
value = MixedLockLevelsHelper.toLockLevel((LockModeType) value);
Integer intValue = null;
if (value instanceof Integer)
intValue = (Integer) value;
if (intValue == null
|| (intValue != MixedLockLevels.LOCK_NONE
&& intValue != MixedLockLevels.LOCK_OPTIMISTIC
&& intValue != MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT
&& intValue != MixedLockLevels.LOCK_PESSIMISTIC_READ
&& intValue != MixedLockLevels.LOCK_PESSIMISTIC_WRITE
&& intValue != MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT)
)
throw new IllegalArgumentException(_loc.get("bad-lock-level",
origValue).getMessage());
return intValue;
}
}
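The alias handling in toLockLevel() above accepts hyphenated, lower-case names for the LockModeType constants. A standalone illustration of that normalization step (the demo class name is arbitrary):

import javax.persistence.LockModeType;

public class LockModeAliasDemo {
    // Mirrors the normalization in toLockLevel(): hyphenated lower-case
    // aliases are upper-cased and mapped onto LockModeType constants.
    static LockModeType fromAlias(String alias) {
        return Enum.valueOf(LockModeType.class,
                alias.toUpperCase().replace('-', '_'));
    }

    public static void main(String[] args) {
        System.out.println(fromAlias("optimistic-force-increment"));
        // prints OPTIMISTIC_FORCE_INCREMENT
    }
}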

View File

@ -28,7 +28,6 @@ import javax.persistence.LockModeType;
import org.apache.openjpa.kernel.DelegatingFetchConfiguration; import org.apache.openjpa.kernel.DelegatingFetchConfiguration;
import org.apache.openjpa.kernel.FetchConfiguration; import org.apache.openjpa.kernel.FetchConfiguration;
import org.apache.openjpa.kernel.MixedLockLevels;
/** /**
* Implements FetchPlan via delegation to FetchConfiguration. * Implements FetchPlan via delegation to FetchConfiguration.
@ -42,13 +41,14 @@ public class FetchPlanImpl
implements FetchPlan { implements FetchPlan {
private final DelegatingFetchConfiguration _fetch; private final DelegatingFetchConfiguration _fetch;
private FetchPlanHintHandler _hintHandler;
/** /**
* Constructor; supply delegate. * Constructor; supply delegate.
*/ */
public FetchPlanImpl(FetchConfiguration fetch) { public FetchPlanImpl(FetchConfiguration fetch) {
_fetch = newDelegatingFetchConfiguration(fetch); _fetch = newDelegatingFetchConfiguration(fetch);
_hintHandler = new FetchPlanHintHandler(this);
} }
/** /**
@ -275,8 +275,24 @@ public class FetchPlanImpl
return _fetch.getHint(key); return _fetch.getHint(key);
} }
public void addHint(String key, Object value) {
_fetch.addHint(key, value);
}
public void setHint(String key, Object value) { public void setHint(String key, Object value) {
_fetch.setHint(key, value); setHint(key, value, true);
}
public void setHint(String key, Object value, boolean validThrowException) {
if( _hintHandler.setHint(key, value, validThrowException) )
_fetch.addHint(key, value);
}
public void addHints(Map<String, Object> hints) {
if (hints != null && hints.size() > 0) {
for (String name : hints.keySet())
setHint(name, hints.get(name), false);
}
} }
public Map<String, Object> getHints() { public Map<String, Object> getHints() {

View File

@ -1,11 +1,28 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.openjpa.persistence; package org.apache.openjpa.persistence;
import static org.apache.openjpa.kernel.QueryHints.HINT_IGNORE_PREPARED_QUERY; import static org.apache.openjpa.kernel.QueryHints.HINT_IGNORE_PREPARED_QUERY;
import static org.apache.openjpa.kernel.QueryHints. import static org.apache.openjpa.kernel.QueryHints
HINT_INVALIDATE_PREPARED_QUERY; .HINT_INVALIDATE_PREPARED_QUERY;
import static org.apache.openjpa.kernel.QueryHints.HINT_RESULT_COUNT; import static org.apache.openjpa.kernel.QueryHints.HINT_RESULT_COUNT;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier; import java.lang.reflect.Modifier;
import java.util.Collections; import java.util.Collections;
import java.util.Comparator; import java.util.Comparator;
@ -26,7 +43,6 @@ import org.apache.openjpa.lib.log.Log;
import org.apache.openjpa.lib.util.Localizer; import org.apache.openjpa.lib.util.Localizer;
import org.apache.openjpa.lib.util.StringDistance; import org.apache.openjpa.lib.util.StringDistance;
/** /**
* Manages query hint keys and handles their values on behalf of an owning * Manages query hint keys and handles their values on behalf of an owning
* {@link QueryImpl}. Uses specific knowledge of hint keys declared in * {@link QueryImpl}. Uses specific knowledge of hint keys declared in
@ -81,15 +97,16 @@ import org.apache.openjpa.lib.util.StringDistance;
* *
* @nojavadoc * @nojavadoc
*/ */
public class HintHandler { public class HintHandler extends FetchPlanHintHandler {
private static final Localizer _loc = Localizer.forPackage(
HintHandler.class);
private final QueryImpl owner; private final QueryImpl owner;
private Map<String, Object> _hints; private Map<String, Object> _hints;
private Set<String> _supportedKeys; private Set<String> _supportedKeys;
private Set<String> _supportedPrefixes; private Set<String> _supportedPrefixes;
static final String PREFIX_JPA = "javax.persistence.";
static final String PREFIX_FETCHPLAN = "openjpa.FetchPlan.";
// These keys are directly handled in {@link QueryImpl} class. // These keys are directly handled in {@link QueryImpl} class.
// Declaring a public static final String variable in this class will // Declaring a public static final String variable in this class will
// make it register as a supported hint key // make it register as a supported hint key
@ -103,21 +120,8 @@ public class HintHandler {
public static final String HINT_AGGREGATE_LISTENERS = public static final String HINT_AGGREGATE_LISTENERS =
"openjpa.AggregateListeners"; "openjpa.AggregateListeners";
// JPA Specification 2.0 keys are mapped to equivalent FetchPlan keys
public static Map<String,String> _jpaKeys = new TreeMap<String, String>();
static {
_jpaKeys.put(addPrefix(PREFIX_JPA, "query.timeout"),
addPrefix(PREFIX_FETCHPLAN, "QueryTimeout"));
_jpaKeys.put(addPrefix(PREFIX_JPA, "lock.timeout"),
addPrefix(PREFIX_FETCHPLAN, "LockTimeout"));
}
private static final String DOT = ".";
private static final String BLANK = "";
private static final Localizer _loc = Localizer.forPackage(
HintHandler.class);
HintHandler(QueryImpl impl) { HintHandler(QueryImpl impl) {
super((FetchPlanImpl)impl.getFetchPlan());
owner = impl; owner = impl;
} }
@ -151,9 +155,8 @@ public class HintHandler {
.getLog(OpenJPAConfiguration.LOG_RUNTIME); .getLog(OpenJPAConfiguration.LOG_RUNTIME);
String possible = StringDistance.getClosestLevenshteinDistance(hint, String possible = StringDistance.getClosestLevenshteinDistance(hint,
getSupportedHints()); getSupportedHints());
if (log.isWarnEnabled()) { if (log.isWarnEnabled())
log.warn(_loc.get("bad-query-hint", hint, possible)); log.warn(_loc.get("bad-query-hint", hint, possible));
}
return (isKnownHintPrefix(hint)) ? null : Boolean.FALSE; return (isKnownHintPrefix(hint)) ? null : Boolean.FALSE;
} }
@ -176,7 +179,7 @@ public class HintHandler {
Reflection.getBeanStylePropertyNames( Reflection.getBeanStylePropertyNames(
owner.getFetchPlan().getClass()))); owner.getFetchPlan().getClass())));
_supportedKeys.addAll(_jpaKeys.keySet()); _supportedKeys.addAll(JavaxHintsMap.keySet());
_supportedKeys.addAll(Reflection.getFieldValues( _supportedKeys.addAll(Reflection.getFieldValues(
HintHandler.class, HintHandler.class,
@ -212,157 +215,76 @@ public class HintHandler {
return getSupportedHints().contains(key); return getSupportedHints().contains(key);
} }
/**
* Affirms the given key has a prefix that matches with any of the
* supported prefixes.
*/
private boolean isSupportedPrefix(String key) {
return getKnownPrefixes().contains(getPrefixOf(key));
}
static Set<String> addPrefix(String prefix, Set<String> original) {
Set<String> result = new TreeSet<String>();
String join = prefix.endsWith(DOT) ? BLANK : DOT;
for (String o : original)
result.add(prefix + join + o);
return result;
}
static String addPrefix(String prefix, String original) {
String join = prefix.endsWith(DOT) ? BLANK : DOT;
return prefix + join + original;
}
private static String removePrefix(String key, String prefix) {
if (prefix == null)
return key;
if (!prefix.endsWith(DOT))
prefix = prefix + DOT;
if (key != null && key.startsWith(prefix))
return key.substring(prefix.length());
return key;
}
static String getPrefixOf(String key) {
int index = key == null ? -1 : key.indexOf(DOT);
return (index != -1) ? key.substring(0,index) : key;
}
private boolean isKnownHintPrefix(String key) { private boolean isKnownHintPrefix(String key) {
String prefix = getPrefixOf(key); String prefix = getPrefixOf(key);
return getKnownPrefixes().contains(prefix); return getKnownPrefixes().contains(prefix);
} }
public static boolean hasPrefix(String key, String prefix) {
if (key == null || prefix == null)
return false;
if (!prefix.endsWith(DOT))
prefix = prefix + DOT;
return key.startsWith(prefix);
}
public void setHint(String key, Object value) { public void setHint(String key, Object value) {
owner.lock(); owner.lock();
try { try {
setHintInternal(key, value); Boolean record = record(key, value);
if (record == Boolean.FALSE)
return;
FetchPlan plan = owner.getFetchPlan();
if (record == null) {
plan.setHint(key, value);
return;
}
// request to throw IllegalArgumentException, if needed.
if (setHint(key, value, true))
plan.addHint(key, value);
} finally { } finally {
owner.unlock(); owner.unlock();
} }
} }
private void setHintInternal(String key, Object value) { protected boolean setHintInternal(String key, Object value,
Boolean record = record(key, value); boolean validateThrowException) {
FetchPlan plan = owner.getFetchPlan();
ClassLoader loader = owner.getDelegate().getBroker().getClassLoader(); ClassLoader loader = owner.getDelegate().getBroker().getClassLoader();
if (record == Boolean.FALSE) FetchPlan fPlan = owner.getFetchPlan();
return; boolean objectSet = true;
if (record == null) { if (HINT_SUBCLASSES.equals(key)) {
plan.setHint(key, value); if (value instanceof String)
return; value = Boolean.valueOf((String) value);
} owner.setSubclasses(((Boolean) value).booleanValue());
try { } else if (HINT_FILTER_LISTENER.equals(key))
if (HINT_SUBCLASSES.equals(key)) { owner.addFilterListener(Filters.hintToFilterListener(value,
if (value instanceof String) loader));
value = Boolean.valueOf((String) value); else if (HINT_FILTER_LISTENERS.equals(key)) {
owner.setSubclasses(((Boolean) value).booleanValue()); FilterListener[] arr = Filters.hintToFilterListeners(value, loader);
} else if (HINT_FILTER_LISTENER.equals(key)) for (int i = 0; i < arr.length; i++)
owner.addFilterListener(Filters.hintToFilterListener(value, owner.addFilterListener(arr[i]);
loader)); } else if (HINT_AGGREGATE_LISTENER.equals(key))
else if (HINT_FILTER_LISTENERS.equals(key)) { owner.addAggregateListener(Filters.hintToAggregateListener(value,
FilterListener[] arr = Filters.hintToFilterListeners(value, loader));
loader); else if (HINT_AGGREGATE_LISTENERS.equals(key)) {
for (int i = 0; i < arr.length; i++) AggregateListener[] arr = Filters.hintToAggregateListeners(value,
owner.addFilterListener(arr[i]); loader);
} else if (HINT_AGGREGATE_LISTENER.equals(key)) for (int i = 0; i < arr.length; i++)
owner.addAggregateListener(Filters.hintToAggregateListener( owner.addAggregateListener(arr[i]);
value, loader)); } else if (HINT_RESULT_COUNT.equals(key)) {
else if (HINT_AGGREGATE_LISTENERS.equals(key)) { int v = (Integer) Filters.convert(value, Integer.class);
AggregateListener[] arr = Filters.hintToAggregateListeners( if (v < 0)
value, loader); throw new ArgumentException(_loc.get("bad-query-hint-value",
for (int i = 0; i < arr.length; i++) key, value), null, null, false);
owner.addAggregateListener(arr[i]); fPlan.setHint(key, v);
} else if (isFetchPlanHint(key)) { objectSet = false;
if (requiresTransaction(key)) } else if (HINT_INVALIDATE_PREPARED_QUERY.equals(key)) {
plan.setHint(key, value); fPlan.setHint(key, Filters.convert(value, Boolean.class));
else owner.invalidatePreparedQuery();
hintToSetter(plan, getFetchPlanProperty(key), value); objectSet = false;
} else if (HINT_RESULT_COUNT.equals(key)) { } else if (HINT_IGNORE_PREPARED_QUERY.equals(key)) {
int v = (Integer)Filters.convert(value, Integer.class); fPlan.setHint(key, Filters.convert(value, Boolean.class));
if (v < 0) owner.ignorePreparedQuery();
throw new ArgumentException(_loc.get("bad-query-hint-value", objectSet = false;
key, value), null, null, false); } else { // default
plan.setHint(key, v); fPlan.setHint(key, value);
} else if (HINT_INVALIDATE_PREPARED_QUERY.equals(key)) { objectSet = false;
plan.setHint(key, Filters.convert(value, Boolean.class));
owner.invalidatePreparedQuery();
} else if (HINT_IGNORE_PREPARED_QUERY.equals(key)) {
plan.setHint(key, Filters.convert(value, Boolean.class));
owner.ignorePreparedQuery();
} else { // default
plan.setHint(key, value);
}
return;
} catch (IllegalArgumentException iae) {
throw new ArgumentException(_loc.get("bad-query-hint-value",
key, value), null, null, false);
} catch (ClassCastException ce) {
throw new ArgumentException(_loc.get("bad-query-hint-value",
key, ce.getMessage()), null, null, false);
} catch (Exception e) {
throw PersistenceExceptions.toPersistenceException(e);
} }
return objectSet;
} }
private boolean isFetchPlanHint(String key) {
return key.startsWith(PREFIX_FETCHPLAN)
|| (_jpaKeys.containsKey(key) && isFetchPlanHint(_jpaKeys.get(key)));
}
private boolean requiresTransaction(String key) {
return key.endsWith("LockMode");
}
private String getFetchPlanProperty(String key) {
if (key.startsWith(PREFIX_FETCHPLAN))
return removePrefix(key, PREFIX_FETCHPLAN);
else if (_jpaKeys.containsKey(key))
return getFetchPlanProperty(_jpaKeys.get(key));
else
return key;
}
private void hintToSetter(FetchPlan fetchPlan, String k, Object value) {
if (fetchPlan == null || k == null)
return;
Method setter = Reflection.findSetter(fetchPlan.getClass(), k, true);
Class paramType = setter.getParameterTypes()[0];
if (Enum.class.isAssignableFrom(paramType) && value instanceof String)
value = Enum.valueOf(paramType, (String) value);
Filters.hintToSetter(fetchPlan, k, value);
}
public static class HintKeyComparator implements Comparator<String> { public static class HintKeyComparator implements Comparator<String> {
public int compare(String s1, String s2) { public int compare(String s1, String s2) {
if (getPrefixOf(s1).equals(getPrefixOf(s2))) { if (getPrefixOf(s1).equals(getPrefixOf(s2))) {
@ -372,12 +294,26 @@ public class HintHandler {
} else } else
return s1.compareTo(s2); return s1.compareTo(s2);
} }
public int countDots(String s) { public int countDots(String s) {
if (s == null || s.length() == 0) if (s == null || s.length() == 0)
return 0; return 0;
int index = s.indexOf(DOT); int index = s.indexOf(DOT);
return (index == -1) ? 0 : countDots(s.substring(index+1)) + 1; return (index == -1) ? 0 : countDots(s.substring(index + 1)) + 1;
} }
} }
protected String hintToKey(String key) {
// Let the superclass perform key transformation when fPlan.setHint()
// is called.
return key;
}
private Set<String> addPrefix(String prefix, Set<String> original) {
Set<String> result = new TreeSet<String>();
String join = prefix.endsWith(DOT) ? BLANK : DOT;
for (String o : original)
result.add(prefix + join + o);
return result;
}
} }
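For reference, a caller-side sketch of one hint the reworked HintHandler validates itself: the result-count hint (spelled openjpa.hint.OptimizeResultCount, as the documentation example further below suggests) must be non-negative before it reaches the fetch plan. The JPQL string is a placeholder.

import javax.persistence.EntityManager;
import javax.persistence.Query;

public class ResultCountHintUsage {
    // Hints the expected result size; negative values are rejected by the
    // validation shown above before the hint is stored on the fetch plan.
    public static Query expectTwo(EntityManager em) {
        Query q = em.createQuery("select e from MyEntity e"); // placeholder JPQL
        q.setHint("openjpa.hint.OptimizeResultCount", 2);
        return q;
    }
}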

View File

@ -37,10 +37,10 @@ public class MixedLockLevelsHelper {
if (mode == null || mode == LockModeType.NONE) if (mode == null || mode == LockModeType.NONE)
return MixedLockLevels.LOCK_NONE; return MixedLockLevels.LOCK_NONE;
if (mode == LockModeType.READ || mode == LockModeType.OPTIMISTIC) if (mode == LockModeType.READ || mode == LockModeType.OPTIMISTIC)
return MixedLockLevels.LOCK_READ; return MixedLockLevels.LOCK_OPTIMISTIC;
if (mode == LockModeType.WRITE if (mode == LockModeType.WRITE
|| mode == LockModeType.OPTIMISTIC_FORCE_INCREMENT) || mode == LockModeType.OPTIMISTIC_FORCE_INCREMENT)
return MixedLockLevels.LOCK_WRITE; return MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT;
if (mode == LockModeType.PESSIMISTIC_READ) if (mode == LockModeType.PESSIMISTIC_READ)
return MixedLockLevels.LOCK_PESSIMISTIC_READ; return MixedLockLevels.LOCK_PESSIMISTIC_READ;
if (mode == LockModeType.PESSIMISTIC_WRITE) if (mode == LockModeType.PESSIMISTIC_WRITE)
@ -53,11 +53,11 @@ public class MixedLockLevelsHelper {
*/ */
public static LockModeType fromLockLevel(int level) { public static LockModeType fromLockLevel(int level) {
if (level < MixedLockLevels.LOCK_OPTIMISTIC) if (level < MixedLockLevels.LOCK_OPTIMISTIC)
return null; return LockModeType.NONE;
if (level < MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT) if (level < MixedLockLevels.LOCK_OPTIMISTIC_FORCE_INCREMENT)
return LockModeType.READ; return LockModeType.OPTIMISTIC;
if (level < MixedLockLevels.LOCK_PESSIMISTIC_READ) if (level < MixedLockLevels.LOCK_PESSIMISTIC_READ)
return LockModeType.WRITE; return LockModeType.OPTIMISTIC_FORCE_INCREMENT;
if (level < MixedLockLevels.LOCK_PESSIMISTIC_WRITE) if (level < MixedLockLevels.LOCK_PESSIMISTIC_WRITE)
return LockModeType.PESSIMISTIC_READ; return LockModeType.PESSIMISTIC_READ;
if (level < MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT) if (level < MixedLockLevels.LOCK_PESSIMISTIC_FORCE_INCREMENT)
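A small round-trip check of the remapped conversions above (assuming MixedLockLevelsHelper lives in the org.apache.openjpa.persistence package, as its unqualified use elsewhere in this change suggests):

import javax.persistence.LockModeType;
import org.apache.openjpa.persistence.MixedLockLevelsHelper;

public class LockLevelRoundTrip {
    public static void main(String[] args) {
        // READ/OPTIMISTIC now map to the optimistic level rather than LOCK_READ,
        // and fromLockLevel() converts it back to OPTIMISTIC instead of READ.
        int level = MixedLockLevelsHelper.toLockLevel(LockModeType.OPTIMISTIC);
        System.out.println(MixedLockLevelsHelper.fromLockLevel(level)); // OPTIMISTIC
    }
}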

View File

@ -1,171 +1,175 @@
# Licensed to the Apache Software Foundation (ASF) under one # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file # or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information # distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file # regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the # to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance # "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at # with the License. You may obtain a copy of the License at
# #
# http://www.apache.org/licenses/LICENSE-2.0 # http://www.apache.org/licenses/LICENSE-2.0
# #
# Unless required by applicable law or agreed to in writing, # Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an # software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the # KIND, either express or implied. See the License for the
# specific language governing permissions and limitations # specific language governing permissions and limitations
# under the License. # under the License.
close-invoked: You have closed the EntityManager, though the persistence \ close-invoked: You have closed the EntityManager, though the persistence \
context will remain active until the current transaction commits. context will remain active until the current transaction commits.
no-managed-trans: There is no managed transaction in progress to sync this \ no-managed-trans: There is no managed transaction in progress to sync this \
EntityManager with. EntityManager with.
get-managed-trans: You cannot access the EntityTransaction when using managed \ get-managed-trans: You cannot access the EntityTransaction when using managed \
transactions. transactions.
trans-ctx-notactive: When using a persistence context type of TRANSACTION, you \ trans-ctx-notactive: When using a persistence context type of TRANSACTION, you \
can only perform this operation during an active transaction. can only perform this operation during an active transaction.
not-entity: The class "{0}" is not an entity. not-entity: The class "{0}" is not an entity.
not-managed: Object "{0}" is not managed by this context. not-managed: Object "{0}" is not managed by this context.
parse-class: Parsing class "{0}". parse-class: Parsing class "{0}".
parse-package: Parsing package "{0}". parse-package: Parsing package "{0}".
parse-sequence: Parsing sequence "{0}". parse-sequence: Parsing sequence "{0}".
parse-query: Parsing query "{0}". parse-query: Parsing query "{0}".
parse-native-query: Parsing native query "{0}". parse-native-query: Parsing native query "{0}".
dup-metadata: Found duplicate metadata or mapping for "{0}". Ignoring. dup-metadata: Found duplicate metadata or mapping for "{0}". Ignoring.
dup-sequence: Found duplicate generator "{0}" in "{1}". Ignoring. dup-sequence: Found duplicate generator "{0}" in "{1}". Ignoring.
override-sequence: Found duplicate generator "{0}" in "{1}". Overriding \ override-sequence: Found duplicate generator "{0}" in "{1}". Overriding \
previous definition. previous definition.
dup-query: Ignoring duplicate query "{0}" in "{1}". A query with the same name \ dup-query: Ignoring duplicate query "{0}" in "{1}". A query with the same name \
has already been declared in "{2}". has already been declared in "{2}".
override-query: Found duplicate query "{0}" in "{1}". Overriding previous \ override-query: Found duplicate query "{0}" in "{1}". Overriding previous \
definition. definition.
no-seq-name: The sequence generator in "{0}" must declare a name. no-seq-name: The sequence generator in "{0}" must declare a name.
no-query-name: The named query in "{0}" must declare a name. no-query-name: The named query in "{0}" must declare a name.
no-native-query-name: The named native query in "{0}" must declare a name. no-native-query-name: The named native query in "{0}" must declare a name.
no-query-string: The named query "{0}" in "{1}" must declare a query string. no-query-string: The named query "{0}" in "{1}" must declare a query string.
no-native-query-string: The named native query "{0}" in "{1}" must declare a \ no-native-query-string: The named native query "{0}" in "{1}" must declare a \
query string. query string.
already-pers: Field "{0}" cannot be annotated by two persistence strategy \ already-pers: Field "{0}" cannot be annotated by two persistence strategy \
annotations. annotations.
unsupported: OpenJPA does not yet support "{1}" as used in "{0}". unsupported: OpenJPA does not yet support "{1}" as used in "{0}".
bad-meta-anno: The type of field "{0}" isn''t supported by declared \ bad-meta-anno: The type of field "{0}" isn''t supported by declared \
persistence strategy "{1}". Please choose a different strategy. persistence strategy "{1}". Please choose a different strategy.
no-pers-strat: Fields "{0}" are not a default persistent type, and do not \ no-pers-strat: Fields "{0}" are not a default persistent type, and do not \
have any annotations indicating their persistence strategy. If you do \ have any annotations indicating their persistence strategy. If you do \
not want these fields to be persisted, annotate them with @Transient. not want these fields to be persisted, annotate them with @Transient.
generator-bad-strategy: "{0}" declares generator name "{1}", but uses the AUTO \ generator-bad-strategy: "{0}" declares generator name "{1}", but uses the AUTO \
generation type. The only valid generator names under AUTO are "uuid-hex" \ generation type. The only valid generator names under AUTO are "uuid-hex" \
and "uuid-string". and "uuid-string".
unnamed-fg: "{0}" declares an unnamed fetch group. All fetch groups \ unnamed-fg: "{0}" declares an unnamed fetch group. All fetch groups \
must have names. must have names.
bad-fg-field: Fetch group "{0}" in type "{1}" includes field "{2}", but \ bad-fg-field: Fetch group "{0}" in type "{1}" includes field "{2}", but \
this field is not declared in "{1}", or is not persistent. Currently, \ this field is not declared in "{1}", or is not persistent. Currently, \
OpenJPA only supports declared fields in fetch groups. OpenJPA only supports declared fields in fetch groups.
missing-included-fg:"{0}" declares a fetch group "{1}" includes "{2}". But \ missing-included-fg:"{0}" declares a fetch group "{1}" includes "{2}". But \
the included fetch group "{2}" can not be found in "{0}". Currently, all \ the included fetch group "{2}" can not be found in "{0}". Currently, all \
included fetch groups must be declared within the same entity scope. included fetch groups must be declared within the same entity scope.
not-update-delete-query: Cannot perform an update or delete operation \ not-update-delete-query: Cannot perform an update or delete operation \
on select query: "{0}". on select query: "{0}".
not-select-query: Cannot perform a select on update or delete query: "{0}". not-select-query: Cannot perform a select on update or delete query: "{0}".
no-results: Query did not return any results: "{0}". no-results: Query did not return any results: "{0}".
mult-results: Query returned multiple results: "{0}". mult-results: Query returned multiple results: "{0}".
no-pos-named-params-mix: Cannot mix named and positional parameters in query \ no-pos-named-params-mix: Cannot mix named and positional parameters in query \
"{0}". "{0}".
bad-query-hint: "{0}" is not a supported query hint. Maybe you meant "{1}"? bad-query-hint: "{0}" is not a supported query hint. Maybe you meant "{1}"?
bad-query-hint-value: Invalid value specified for query hint "{0}": {1} bad-query-hint-value: Invalid value specified for query hint "{0}": {1}
detached: Cannot perform this operation on detached entity "{0}". detached: Cannot perform this operation on detached entity "{0}".
removed: Cannot perform this operation on removed entity "{0}". removed: Cannot perform this operation on removed entity "{0}".
bad-alias: There is no known entity class for entity name "{0}". It is \ bad-alias: There is no known entity class for entity name "{0}". It is \
possible that the corresponding class has not yet been registered in the \ possible that the corresponding class has not yet been registered in the \
JVM. JVM.
naming-exception: A NamingException was thrown while obtaining the \ naming-exception: A NamingException was thrown while obtaining the \
factory at "{0}" from JNDI. factory at "{0}" from JNDI.
bad-jar-name: The jar resource "{0}" cannot be loaded. bad-jar-name: The jar resource "{0}" cannot be loaded.
missing-xml-config: The specified XML resource "{0}" for persistence unit \ missing-xml-config: The specified XML resource "{0}" for persistence unit \
"{1}" can''t be found in your class path. "{1}" can''t be found in your class path.
cantload-xml-config: The specified XML resource "{0}" for persistence unit \ cantload-xml-config: The specified XML resource "{0}" for persistence unit \
"{1}" can''t be parsed. "{1}" can''t be parsed.
unknown-provider: Persistence provider "{2}" specified in persistence unit \ unknown-provider: Persistence provider "{2}" specified in persistence unit \
"{1}" in "{0}" is not a recognized provider. "{1}" in "{0}" is not a recognized provider.
illegal-index: The parameter index {0} is invalid. Parameters must be \ illegal-index: The parameter index {0} is invalid. Parameters must be \
integers starting at 1. integers starting at 1.
conf-load: Setting the following properties from "{0}" into configuration: {1} conf-load: Setting the following properties from "{0}" into configuration: {1}
no-named-field: Type "{0}" does not have a managed field named "{1}". no-named-field: Type "{0}" does not have a managed field named "{1}".
unsupported-tag: OpenJPA does not currently support XML element "{0}". Ignoring. unsupported-tag: OpenJPA does not currently support XML element "{0}". Ignoring.
no-class: No class attribute was specified. no-class: No class attribute was specified.
invalid-id-class: Could not load id class "{1}" for type "{0}". invalid-id-class: Could not load id class "{1}" for type "{0}".
invalid-attr: Could not find property/field with the name "{0}" in type "{1}". invalid-attr: Could not find property/field with the name "{0}" in type "{1}".
ser-class: Writing class "{0}". ser-class: Writing class "{0}".
ser-cls-query: Writing query "{1}" in class "{0}". ser-cls-query: Writing query "{1}" in class "{0}".
ser-query: Writing query "{1}". ser-query: Writing query "{1}".
ser-sequence: Writing sequence "{0}". ser-sequence: Writing sequence "{0}".
no-sql: You must provide a SQL string when creating a native query. no-sql: You must provide a SQL string when creating a native query.
no-named-params: Named parameter "{0}" is invalid for native query "{1}". \ no-named-params: Named parameter "{0}" is invalid for native query "{1}". \
Use only 1-based positional parameter in native queries. Use only 1-based positional parameter in native queries.
bad-pos-params: Positional parameter "{0}" is invalid for native query "{1}". \ bad-pos-params: Positional parameter "{0}" is invalid for native query "{1}". \
Use only 1-based positional parameter in native queries. Use only 1-based positional parameter in native queries.
bad-param-type: The parameter "{1}" in query "{0}" is set to a value of type \ bad-param-type: The parameter "{1}" in query "{0}" is set to a value of type \
"{2}", but the parameter binds to a field of type "{3}". "{2}", but the parameter binds to a field of type "{3}".
missing-param-name: The parameter "{1}" in query "{0}" is not found in the \ missing-param-name: The parameter "{1}" in query "{0}" is not found in the \
available list of parameters "{2}". available list of parameters "{2}".
bad-em-prop: Invalid EntityManager property passed to createEntityManager. \ bad-em-prop: Invalid EntityManager property passed to createEntityManager. \
Key: "{0}", Value: "{1}". Key: "{0}", Value: "{1}".
bad-em-props: Invalid EntityManager properties passed to createEntityManager. \ bad-em-props: Invalid EntityManager properties passed to createEntityManager. \
See nested exceptions for details. See nested exceptions for details.
system-listener-err: An error occurred invoking system entity listener \ system-listener-err: An error occurred invoking system entity listener \
callback on instance "{0}". callback on instance "{0}".
no-transaction: Cannot perform operation with no transaction. no-transaction: Cannot perform operation with no transaction.
multiple-methods-on-callback: Class "{0}" declares method "{1}" as well \ multiple-methods-on-callback: Class "{0}" declares method "{1}" as well \
as "{2}" for handling the same "{3}" callback. as "{2}" for handling the same "{3}" callback.
unloadable-provider: WARNING: Unable to load persistence provider "{0}" due \ unloadable-provider: WARNING: Unable to load persistence provider "{0}" due \
to "{1}" to "{1}"
unrecognized-provider: WARNING: Found unrecognized persistence provider "{0}" \ unrecognized-provider: WARNING: Found unrecognized persistence provider "{0}" \
in place of OpenJPA provider. This provider''s properties will not be used. in place of OpenJPA provider. This provider''s properties will not be used.
cant-convert-brokerfactory: Unable to convert EntityManagerFactory of type \ cant-convert-brokerfactory: Unable to convert EntityManagerFactory of type \
"{0}" into a BrokerFactory. "{0}" into a BrokerFactory.
cant-convert-broker: Unable to convert EntityManager of type "{0}" into a \ cant-convert-broker: Unable to convert EntityManager of type "{0}" into a \
Broker. Broker.
map-persistent-type-names: Mapping resource location "{0}" to persistent \ map-persistent-type-names: Mapping resource location "{0}" to persistent \
types "{1}". types "{1}".
map-persistent-types-skipping-non-url: Skipping persistent type location \ map-persistent-types-skipping-non-url: Skipping persistent type location \
association for location "{0}" since it is not a URL. association for location "{0}" since it is not a URL.
map-persistent-types-skipping-class: Skipping persistent type location \ map-persistent-types-skipping-class: Skipping persistent type location \
association for location "{0}" since it is a class, and will not \ association for location "{0}" since it is a class, and will not \
need to be re-parsed later. need to be re-parsed later.
no-setter-for-getter: No setter was found for method {0} in type {1} while \ no-setter-for-getter: No setter was found for method {0} in type {1} while \
searching for persistent properties. This method will be ignored. If you \ searching for persistent properties. This method will be ignored. If you \
intended for this to be persistent, please add a corresponding setter, \ intended for this to be persistent, please add a corresponding setter, \
or switch to field access for this type hierarchy. or switch to field access for this type hierarchy.
transformer-registration-error: An error occurred while registering a \ transformer-registration-error: An error occurred while registering a \
ClassTransformer with {0}. The error has been consumed. To see it, set \ ClassTransformer with {0}. The error has been consumed. To see it, set \
your openjpa.Runtime log level to TRACE. Load-time class transformation \ your openjpa.Runtime log level to TRACE. Load-time class transformation \
will not be available. will not be available.
transformer-registration-error-ex: An error occurred while registering a \ transformer-registration-error-ex: An error occurred while registering a \
ClassTransformer with {0}. The error is logged along with this warning. \ ClassTransformer with {0}. The error is logged along with this warning. \
Load-time class transformation will not be available. Load-time class transformation will not be available.
EntityManagerFactory-name: EntityManagerFactory implementation EntityManagerFactory-name: EntityManagerFactory implementation
EntityManagerFactory-desc: Allows extension of standard \ EntityManagerFactory-desc: Allows extension of standard \
org.apache.openjpa.persistence.EntityManagerFactoryImpl for custom behavior. org.apache.openjpa.persistence.EntityManagerFactoryImpl for custom behavior.
EntityManagerFactory-type: General EntityManagerFactory-type: General
EntityManagerFactory-cat: Persistence.Advanced EntityManagerFactory-cat: Persistence.Advanced
EntityManagerFactory-displayorder: 50 EntityManagerFactory-displayorder: 50
EntityManagerFactory-expert: true EntityManagerFactory-expert: true
EntityManagerFactory-interface: org.apache.openjpa.persistence.EntityManagerFactoryImpl EntityManagerFactory-interface: org.apache.openjpa.persistence.EntityManagerFactoryImpl
param-style-mismatch: Query "{0}" is declared with named parameters "{1}" but \ param-style-mismatch: Query "{0}" is declared with named parameters "{1}" but \
actual parameters "{2}" are bound by position. actual parameters "{2}" are bound by position.
param-missing: Parameter "{0}" declared in "{1}" but is missing from the bound \ param-missing: Parameter "{0}" declared in "{1}" but is missing from the bound \
parameters "{2}". parameters "{2}".
param-extra: Parameter "{0}" is bound to "{1}" but is missing from the \ param-extra: Parameter "{0}" is bound to "{1}" but is missing from the \
declared parameters "{2}". declared parameters "{2}".
param-type-mismatch: Parameter "{0}" declared in "{1}" is set to value of \ param-type-mismatch: Parameter "{0}" declared in "{1}" is set to value of \
"{2}" of type "{3}", but this parameter is bound to a field of type "{4}". "{2}" of type "{3}", but this parameter is bound to a field of type "{4}".
param-type-null: Parameter "{0}" declared in "{1}" is set to null, \ param-type-null: Parameter "{0}" declared in "{1}" is set to null, \
but this parameter is bound to a field of primitive type "{2}". but this parameter is bound to a field of primitive type "{2}".
version-check-error: An error occurred while attempting to determine the \ version-check-error: An error occurred while attempting to determine the \
version of "{0}". version of "{0}".
no-result: Query "{0}" selected no result, but expected unique result. no-result: Query "{0}" selected no result, but expected unique result.
non-unique-result: Query "{0}" selected {1} results, but expected unique result. non-unique-result: Query "{0}" selected {1} results, but expected unique result.
unwrap-em-invalid: EntityManager can not be unwrapped to an instance of "{0}". unwrap-em-invalid: EntityManager can not be unwrapped to an instance of "{0}".
unwrap-query-invalid: Query can not be unwrapped to an instance of "{0}". unwrap-query-invalid: Query can not be unwrapped to an instance of "{0}".
invalid_entity_argument: Object being locked must be an valid and not detached \ invalid_entity_argument: Object being locked must be an valid and not detached \
entity. entity.
bad-lock-level: Invalid lock mode/level. Valid values are \
"none"(0), "optimistic"(10), "optimistic-force-increment"(20), \
"pessimistic-read"(30), "pessimistic-write"(40) or \
"pessimistic-force-increment"(50). Specified value: {0}.

View File

@ -814,6 +814,40 @@ hints={ @QueryHint (name="openjpa.hint.OptimizeResultCount", value="2"),
</programlisting> </programlisting>
</example> </example>
</section> </section>
<section id="multi-hints-handling">
<title>
Handling of Multiple Similar Query Hints
</title>
<para>
When similar hints in different prefix scopes are specified in a query,
the following prefix precedence order, from highest to lowest, is used to select the effective hint:
<itemizedlist>
<listitem>
javax.persistence.*
</listitem>
<listitem>
openjpa.FetchPlan.*
</listitem>
<listitem>
openjpa.jdbc.*
</listitem>
<listitem>
openjpa.*
</listitem>
</itemizedlist>
<example id="multi-hints-example">
<programlisting>
...
Query q = em.createQuery(.....);
q.setHint("openjpa.FetchPlan.LockTimeout", 1000);
q.setHint("javax.persistence.lock.timeout", 2000);
q.setHint("openjpa.LockTimeout", 3000);
// A lock timeout of 2000 ms is in effect for query q
...
</programlisting>
</example>
</para>
</section>
</section> </section>
<section id="jpa_overview_query_ordering"> <section id="jpa_overview_query_ordering">
<title> <title>

File diff suppressed because it is too large