code format and simple improvement

commit a512ede38b
parent 13c9dbfc21
@@ -94,8 +94,8 @@ public enum LockMode {
     OPTIMISTIC( 6 ),
 
     /**
-     * Optimisticly assume that transaction will not experience contention for entities.
-     * The entity version will be verified and incremented near the transaction end.
+     * Optimisticly assume that transaction will not experience contention for
+     * entities. The entity version will be verified and incremented near the transaction end.
     */
     OPTIMISTIC_FORCE_INCREMENT( 7 ),
 
@@ -93,23 +93,24 @@ public class StandardQueryCache implements QueryCache {
             List result,
             boolean isNaturalKeyLookup,
             SessionImplementor session) throws HibernateException {
-        if ( isNaturalKeyLookup && result.size() == 0 ) {
+        if ( isNaturalKeyLookup && result.isEmpty() ) {
             return false;
         }
-        Long ts = session.getFactory().getSettings().getRegionFactory().nextTimestamp();
+        long ts = cacheRegion.nextTimestamp();
 
         LOG.debugf( "Caching query results in region: %s; timestamp=%s", cacheRegion.getName(), ts );
 
         List cacheable = new ArrayList( result.size() + 1 );
         logCachedResultDetails( key, null, returnTypes, cacheable );
         cacheable.add( ts );
+        final boolean singleResult = returnTypes.length == 1;
         for ( Object aResult : result ) {
-            if ( returnTypes.length == 1 ) {
-                cacheable.add( returnTypes[0].disassemble( aResult, session, null ) );
-            }
-            else {
-                cacheable.add( TypeHelper.disassemble( (Object[]) aResult, returnTypes, null, session, null ) );
-            }
+            Serializable cacheItem = singleResult ? returnTypes[0].disassemble(
+                    aResult,
+                    session,
+                    null
+            ) : TypeHelper.disassemble( (Object[]) aResult, returnTypes, null, session, null );
+            cacheable.add( cacheItem );
             logCachedResultRowDetails( returnTypes, aResult );
         }
 
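The put() hunk above bundles three small changes: result.isEmpty() instead of result.size() == 0, a primitive long timestamp taken from the query's own cacheRegion rather than re-resolving the RegionFactory through the session, and the returnTypes.length == 1 test hoisted out of the row loop into a singleResult flag so both branches feed the same cacheable.add( cacheItem ) call. A minimal stand-alone sketch of that loop-invariant-plus-conditional pattern; disassembleOne, disassembleRow and the class around them are made-up stand-ins, not Hibernate API:

import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

class CachePutSketch {
    // Stand-ins for Type#disassemble and TypeHelper#disassemble.
    static Serializable disassembleOne(Object value) {
        return String.valueOf( value );
    }

    static Serializable disassembleRow(Object[] row) {
        return Arrays.toString( row );
    }

    static List<Serializable> toCacheable(List<?> result, int returnTypeCount, long timestamp) {
        List<Serializable> cacheable = new ArrayList<>( result.size() + 1 );
        cacheable.add( timestamp );                        // first slot holds the timestamp, as in put()
        final boolean singleResult = returnTypeCount == 1; // loop-invariant, computed once
        for ( Object row : result ) {
            Serializable cacheItem = singleResult
                    ? disassembleOne( row )
                    : disassembleRow( (Object[]) row );
            cacheable.add( cacheItem );
        }
        return cacheable;
    }
}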
@@ -141,8 +142,9 @@ public class StandardQueryCache implements QueryCache {
         }
 
         LOG.debug( "Returning cached query results" );
+        final boolean singleResult = returnTypes.length == 1;
         for ( int i = 1; i < cacheable.size(); i++ ) {
-            if ( returnTypes.length == 1 ) {
+            if ( singleResult ) {
                 returnTypes[0].beforeAssemble( (Serializable) cacheable.get( i ), session );
             }
             else {
@@ -152,7 +154,7 @@ public class StandardQueryCache implements QueryCache {
         List result = new ArrayList( cacheable.size() - 1 );
         for ( int i = 1; i < cacheable.size(); i++ ) {
             try {
-                if ( returnTypes.length == 1 ) {
+                if ( singleResult ) {
                     result.add( returnTypes[0].assemble( (Serializable) cacheable.get( i ), session, null ) );
                 }
                 else {
@@ -75,7 +75,7 @@ public class UpdateTimestampsCache {
         try {
             Long ts = region.nextTimestamp() + region.getTimeout();
             for ( Serializable space : spaces ) {
-                LOG.debugf( "Pre-invalidating space [%s]", space );
+                LOG.debugf( "Pre-invalidating space [%s], timestamp: %s", space, ts );
                 //put() has nowait semantics, is this really appropriate?
                 //note that it needs to be async replication, never local or sync
                 region.put( space, ts );
@@ -27,8 +27,6 @@ package org.hibernate.internal.util.collections;
 import java.io.IOException;
 import java.io.Serializable;
 
-import org.apache.commons.collections.map.LRUMap;
-
 /**
  * Cache following a "Most Recently Used" (MRU) algorithm for maintaining a
  * bounded in-memory size; the "Least Recently Used" (LRU) entry is the first
@@ -238,7 +238,7 @@ public abstract class Loader {
             return sql;
         }
         else {
-            return new StringBuffer( comment.length() + sql.length() + 5 )
+            return new StringBuilder( comment.length() + sql.length() + 5 )
                     .append( "/* " )
                     .append( comment )
                     .append( " */ " )
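StringBuffer synchronizes every append; the builder here is method-local and never shared across threads, so StringBuilder is the unsynchronized drop-in replacement. A self-contained sketch of the same comment-prefixing idea; the method name and the trailing append of the SQL are assumptions, since the hunk cuts off before them:

class SqlCommentSketch {
    // Pre-sizing the builder to roughly the final length avoids an internal resize;
    // StringBuilder simply grows if the estimate is a little short.
    static String prependComment(String sql, String comment) {
        if ( comment == null || comment.isEmpty() ) {
            return sql;
        }
        return new StringBuilder( comment.length() + sql.length() + 5 )
                .append( "/* " )
                .append( comment )
                .append( " */ " )
                .append( sql )
                .toString();
    }
}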
@@ -819,7 +819,7 @@ public abstract class Loader {
 
         final RowSelection selection = queryParameters.getRowSelection();
         final int maxRows = hasMaxRows( selection ) ?
-                selection.getMaxRows().intValue() :
+                selection.getMaxRows() :
                 Integer.MAX_VALUE;
 
         final int entitySpan = getEntityPersisters().length;
@@ -841,18 +841,12 @@ public abstract class Loader {
         final List results = new ArrayList();
 
         try {
-
             handleEmptyCollections( queryParameters.getCollectionKeys(), rs, session );
-
             EntityKey[] keys = new EntityKey[entitySpan]; //we can reuse it for each row
-
             LOG.trace( "Processing result set" );
-
             int count;
             for ( count = 0; count < maxRows && rs.next(); count++ ) {
-
                 LOG.debugf( "Result set row: %s", count );
-
                 Object result = getRowFromResultSet(
                         rs,
                         session,
@@ -865,12 +859,10 @@ public abstract class Loader {
                         forcedResultTransformer
                 );
                 results.add( result );
-
                 if ( createSubselects ) {
                     subselectResultKeys.add(keys);
                     keys = new EntityKey[entitySpan]; //can't reuse in this case
                 }
-
             }
 
             LOG.tracev( "Done processing result set ({0} rows)", count );
@@ -879,12 +871,9 @@ public abstract class Loader {
         finally {
             st.close();
         }
-
         initializeEntitiesAndCollections( hydratedObjects, rs, session, queryParameters.isReadOnly( session ) );
-
         if ( createSubselects ) createSubselects( subselectResultKeys, queryParameters, session );
-
-        return results; //getResultList(results);
+        return results;
 
     }
 
@@ -1662,12 +1651,7 @@ public abstract class Loader {
     }
 
     private static int getFirstRow(RowSelection selection) {
-        if ( selection == null || selection.getFirstRow() == null ) {
-            return 0;
-        }
-        else {
-            return selection.getFirstRow().intValue();
-        }
+        return ( selection == null || selection.getFirstRow() == null ) ? 0 : selection.getFirstRow();
     }
 
     private int interpretFirstRow(int zeroBasedFirstResult) {
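The old getFirstRow() spelled out the null check and the explicit Integer.intValue() call; the new version relies on the conditional expression to unbox the Integer branch, which is safe precisely because the condition already rules out null. The same reliance on auto-unboxing shows up in the setQueryTimeout, setFetchSize, setMaxRows and getMaxOrLimit hunks below. A small illustration with a stubbed RowSelection, not the real Hibernate class:

class FirstRowSketch {
    // Stub with the same shape as the value being read in the hunk above.
    static class RowSelection {
        private Integer firstRow;

        Integer getFirstRow() {
            return firstRow;
        }
    }

    // Mixing an int and an Integer in a conditional expression unboxes the Integer
    // branch; the null check in the condition is what keeps that unboxing safe.
    static int getFirstRow(RowSelection selection) {
        return ( selection == null || selection.getFirstRow() == null ) ? 0 : selection.getFirstRow();
    }

    public static void main(String[] args) {
        System.out.println( getFirstRow( null ) );               // 0
        System.out.println( getFirstRow( new RowSelection() ) ); // 0 (firstRow is null)
    }
}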
@@ -1733,10 +1717,7 @@ public abstract class Loader {
 
         sql = preprocessSQL( sql, queryParameters, dialect );
 
-        PreparedStatement st = null;
-
-
-        st = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareQueryStatement(
+        PreparedStatement st = session.getTransactionCoordinator().getJdbcCoordinator().getStatementPreparer().prepareQueryStatement(
                 sql,
                 callable,
                 scrollMode
@@ -1765,10 +1746,10 @@ public abstract class Loader {
 
         if ( selection != null ) {
             if ( selection.getTimeout() != null ) {
-                st.setQueryTimeout( selection.getTimeout().intValue() );
+                st.setQueryTimeout( selection.getTimeout() );
             }
             if ( selection.getFetchSize() != null ) {
-                st.setFetchSize( selection.getFetchSize().intValue() );
+                st.setFetchSize( selection.getFetchSize() );
             }
         }
 
@@ -1776,9 +1757,17 @@ public abstract class Loader {
         LockOptions lockOptions = queryParameters.getLockOptions();
         if ( lockOptions != null ) {
             if ( lockOptions.getTimeOut() != LockOptions.WAIT_FOREVER ) {
-                if (!dialect.supportsLockTimeouts()) LOG.debugf("Lock timeout [%s] requested but dialect reported to not support lock timeouts",
-                        lockOptions.getTimeOut());
-                else if (dialect.isLockTimeoutParameterized()) st.setInt(col++, lockOptions.getTimeOut());
+                if ( !dialect.supportsLockTimeouts() ) {
+                    if ( LOG.isDebugEnabled() ) {
+                        LOG.debugf(
+                                "Lock timeout [%s] requested but dialect reported to not support lock timeouts",
+                                lockOptions.getTimeOut()
+                        );
+                    }
+                }
+                else if ( dialect.isLockTimeoutParameterized() ) {
+                    st.setInt( col++, lockOptions.getTimeOut() );
+                }
             }
         }
 
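Besides replacing the braceless if/else-if with braced blocks, the reworked hunk wraps the debugf() call in LOG.isDebugEnabled(). With format-string overloads, the int timeout is boxed (and, for multiple arguments, an argument array may be built) even when debug logging is off, so the guard keeps that work off the common path. A stand-alone sketch of the same guard using java.util.logging, so it compiles without Hibernate or JBoss Logging:

import java.util.logging.Level;
import java.util.logging.Logger;

class DebugGuardSketch {
    private static final Logger LOG = Logger.getLogger( DebugGuardSketch.class.getName() );

    // The argument is only boxed and the message only formatted when FINE is enabled,
    // mirroring the isDebugEnabled() guard added in the hunk above.
    static void reportUnsupportedLockTimeout(int timeout, boolean dialectSupportsTimeouts) {
        if ( !dialectSupportsTimeouts && LOG.isLoggable( Level.FINE ) ) {
            LOG.log( Level.FINE, "Lock timeout [{0}] requested but dialect does not support lock timeouts", timeout );
        }
    }
}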
@@ -1807,13 +1796,8 @@ public abstract class Loader {
     */
    private static int getMaxOrLimit(final RowSelection selection, final Dialect dialect) {
-        final int lastRow = selection.getMaxRows().intValue();
-        if ( dialect.useMaxForLimit() ) {
-            return lastRow + firstRow;
-        }
-        else {
-            return lastRow;
-        }
+        final int lastRow = selection.getMaxRows();
+        return dialect.useMaxForLimit() ? lastRow + firstRow : lastRow;
     }
 
     /**
@@ -1854,7 +1838,7 @@ public abstract class Loader {
             final PreparedStatement st,
             final RowSelection selection) throws SQLException {
         if ( hasMaxRows( selection ) ) {
-            st.setMaxRows( selection.getMaxRows().intValue() + interpretFirstRow( getFirstRow( selection ) ) );
+            st.setMaxRows( selection.getMaxRows() + interpretFirstRow( getFirstRow( selection ) ) );
         }
     }
 
@@ -292,11 +292,6 @@ public abstract class AbstractPropertyMapping implements PropertyMapping {
     }
 
     private static String extendPath(String path, String property) {
-        if ( path==null || "".equals(path) ) {
-            return property;
-        }
-        else {
-            return StringHelper.qualify(path, property);
-        }
+        return StringHelper.isEmpty( path ) ? property : StringHelper.qualify( path, property );
     }
 }
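extendPath() collapses to a single conditional expression: StringHelper.isEmpty( path ) is expected to cover the old path == null || "".equals( path ) test, and StringHelper.qualify joins the two parts into a dotted property path. A plain-Java stand-in; the helper bodies below are illustrative, not the actual StringHelper implementation:

class ExtendPathSketch {
    // Illustrative stand-ins for StringHelper.isEmpty and StringHelper.qualify.
    static boolean isEmpty(String s) {
        return s == null || s.isEmpty();
    }

    static String qualify(String prefix, String name) {
        return prefix + '.' + name;
    }

    static String extendPath(String path, String property) {
        return isEmpty( path ) ? property : qualify( path, property );
    }

    public static void main(String[] args) {
        System.out.println( extendPath( null, "name" ) );    // name
        System.out.println( extendPath( "owner", "name" ) ); // owner.name
    }
}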