Loader

public abstract class Loader extends Object

Abstract superclass of object loading (and querying) strategies. This class implements
useful common functionality that concrete loaders delegate to. It is not intended that this
functionality would be directly accessed by client code. (Hence, all methods of this class
are declared protected or private.) This class relies heavily upon the
Loadable interface, which is the contract between this class and
EntityPersisters that may be loaded by it.

The present implementation is able to load any number of columns of entities and at most
one collection role per query.
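The abstract methods listed below (getEntityPersisters(), getEntityAliases(), getCollectionAliases(), getLockModes() and getSQLString()) form the contract a concrete strategy must fulfil. As a rough orientation only, a minimal subclass might look like the following sketch; the class and field names are hypothetical, the Loader(SessionFactoryImplementor) constructor is assumed from the factory field below, and DefaultEntityAliases is assumed to be an adequate EntityAliases implementation.

// Rough sketch only: a minimal single-entity loading strategy built on Loader.
import java.util.Map;

import org.hibernate.LockMode;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.loader.CollectionAliases;
import org.hibernate.loader.DefaultEntityAliases;
import org.hibernate.loader.EntityAliases;
import org.hibernate.loader.Loader;
import org.hibernate.persister.entity.Loadable;

public class SimpleEntityLoader extends Loader {

    private final Loadable persister;
    private final EntityAliases[] entityAliases;
    private final String sql;

    public SimpleEntityLoader(Loadable persister, String sql, SessionFactoryImplementor factory) {
        super( factory ); // constructor signature assumed from the 'factory' field below
        this.persister = persister;
        this.sql = sql;
        // one entity per result row, its columns aliased with the suffix "0_"
        this.entityAliases = new EntityAliases[] { new DefaultEntityAliases( persister, "0_" ) };
        postInstantiate(); // "must be called by subclasses after instantiation" (see below)
    }

    protected Loadable[] getEntityPersisters() {
        return new Loadable[] { persister };
    }

    protected EntityAliases[] getEntityAliases() {
        return entityAliases;
    }

    protected CollectionAliases[] getCollectionAliases() {
        return null; // no collection role is fetched by this loader
    }

    protected String getSQLString() {
        return sql;
    }

    protected LockMode[] getLockModes(Map lockModes) {
        return new LockMode[] { LockMode.NONE };
    }
}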
Fields Summary
---
private static final Log | log
private final org.hibernate.engine.SessionFactoryImplementor | factory
private org.hibernate.jdbc.ColumnNameCache | columnNameCache

Methods Summary
---
private void | advance(java.sql.ResultSet rs, org.hibernate.engine.RowSelection selection)Advance the cursor to the first required row of the ResultSet
final int firstRow = getFirstRow( selection );
if ( firstRow != 0 ) {
if ( getFactory().getSettings().isScrollableResultSetsEnabled() ) {
// we can go straight to the first required row
rs.absolute( firstRow );
}
else {
// we need to step through the rows one row at a time (slow)
for ( int m = 0; m < firstRow; m++ ) rs.next();
}
}
| protected java.lang.String | applyLocks(java.lang.String sql, java.util.Map lockModes, org.hibernate.dialect.Dialect dialect)Append FOR UPDATE OF clause, if necessary. This
empty superclass implementation merely returns its first
argument.
return sql;
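Subclasses that lock the selected rows override this. A rough sketch of such an override, under the assumption that the dialect's generic getForUpdateString() suffix is sufficient; real loaders typically build alias-specific FOR UPDATE OF clauses from the lockModes map instead, and the class name here is hypothetical.

// Sketch of a lock-applying override in a hypothetical subclass: appends the
// dialect's generic FOR UPDATE suffix when any alias requests more than LockMode.READ.
import java.util.Iterator;
import java.util.Map;

import org.hibernate.LockMode;
import org.hibernate.dialect.Dialect;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.loader.Loader;

public abstract class LockingLoaderSketch extends Loader {

    protected LockingLoaderSketch(SessionFactoryImplementor factory) {
        super( factory );
    }

    protected String applyLocks(String sql, Map lockModes, Dialect dialect) {
        if ( lockModes == null || lockModes.isEmpty() ) {
            return sql;
        }
        for ( Iterator it = lockModes.values().iterator(); it.hasNext(); ) {
            LockMode mode = (LockMode) it.next();
            if ( mode.greaterThan( LockMode.READ ) ) {
                return sql + dialect.getForUpdateString();
            }
        }
        return sql;
    }
}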
| protected void | autoDiscoverTypes(java.sql.ResultSet rs)
throw new AssertionFailure("Auto discover types not supported in this loader");
| private int | bindLimitParameters(java.sql.PreparedStatement statement, int index, org.hibernate.engine.RowSelection selection)Bind parameter values needed by the dialect-specific LIMIT clause.
Dialect dialect = getFactory().getDialect();
if ( !dialect.supportsVariableLimit() ) {
return 0;
}
if ( !hasMaxRows( selection ) ) {
throw new AssertionFailure( "no max results set" );
}
int firstRow = getFirstRow( selection );
int lastRow = getMaxOrLimit( selection, dialect );
boolean hasFirstRow = firstRow > 0 && dialect.supportsLimitOffset();
boolean reverse = dialect.bindLimitParametersInReverseOrder();
if ( hasFirstRow ) {
statement.setInt( index + ( reverse ? 1 : 0 ), firstRow );
}
statement.setInt( index + ( reverse || !hasFirstRow ? 0 : 1 ), lastRow );
return hasFirstRow ? 2 : 1;
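The index arithmetic above is easy to misread, so here is a standalone sketch that reproduces it with plain booleans standing in for the two Dialect flags (supportsLimitOffset() and bindLimitParametersInReverseOrder()); the lastRow value corresponds to what getMaxOrLimit() computes, and all names are illustrative.

// Standalone illustration of the binding positions chosen above.
public class LimitBindingDemo {

    static String describe(int index, int firstRow, int lastRow,
                           boolean supportsLimitOffset, boolean reverse) {
        boolean hasFirstRow = firstRow > 0 && supportsLimitOffset;
        StringBuffer out = new StringBuffer();
        if ( hasFirstRow ) {
            out.append( "offset " ).append( firstRow )
               .append( " -> position " ).append( index + ( reverse ? 1 : 0 ) )
               .append( "; " );
        }
        out.append( "limit " ).append( lastRow )
           .append( " -> position " ).append( index + ( reverse || !hasFirstRow ? 0 : 1 ) );
        return out.toString();
    }

    public static void main(String[] args) {
        // reverse order: the limit value is bound before the offset
        System.out.println( describe( 1, 20, 10, true, true ) );  // offset 20 -> position 2; limit 10 -> position 1
        System.out.println( describe( 1, 20, 10, true, false ) ); // offset 20 -> position 1; limit 10 -> position 2
        System.out.println( describe( 1, 0, 10, true, false ) );  // limit 10 -> position 1 (no offset bound)
    }
}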
| protected int | bindNamedParameters(java.sql.PreparedStatement statement, java.util.Map namedParams, int startIndex, org.hibernate.engine.SessionImplementor session)Bind named parameters to the JDBC prepared statement.
This is a generic implementation; in the general case we do not know enough
about the named parameters to bind them completely here. Subclasses that
support named parameters therefore generally override this method to apply
their specific behavior. The most common limitation of this generic
implementation is that it must assume the type span is always one...
if ( namedParams != null ) {
// assumes that types are all of span 1
Iterator iter = namedParams.entrySet().iterator();
int result = 0;
while ( iter.hasNext() ) {
Map.Entry e = ( Map.Entry ) iter.next();
String name = ( String ) e.getKey();
TypedValue typedval = ( TypedValue ) e.getValue();
int[] locs = getNamedParameterLocs( name );
for ( int i = 0; i < locs.length; i++ ) {
if ( log.isDebugEnabled() ) {
log.debug(
"bindNamedParameters() " +
typedval.getValue() + " -> " + name +
" [" + ( locs[i] + startIndex ) + "]"
);
}
typedval.getType().nullSafeSet( statement, typedval.getValue(), locs[i] + startIndex, session );
}
result += locs.length;
}
return result;
}
else {
return 0;
}
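Since this generic implementation can only discover positions through getNamedParameterLocs(), a subclass that supports named parameters typically records each name's ordinal position(s) while generating its SQL and overrides that method. A hedged sketch; the namedParameterLocations map and the class name are hypothetical, and the real HQL loader obtains the equivalent information from its query translator.

// Sketch of the override a named-parameter-aware subclass would supply.
// 'namedParameterLocations' is a hypothetical Map of parameter name -> int[] of
// zero-based positions among the '?' markers, filled while the SQL was generated.
import java.util.Map;

import org.hibernate.QueryException;
import org.hibernate.engine.SessionFactoryImplementor;
import org.hibernate.loader.Loader;

public abstract class NamedParameterAwareLoader extends Loader {

    private final Map namedParameterLocations;

    protected NamedParameterAwareLoader(SessionFactoryImplementor factory, Map namedParameterLocations) {
        super( factory );
        this.namedParameterLocations = namedParameterLocations;
    }

    public int[] getNamedParameterLocs(String name) {
        int[] locs = (int[]) namedParameterLocations.get( name );
        if ( locs == null ) {
            throw new QueryException( "named parameter does not appear in query: " + name );
        }
        return locs;
    }
}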
| protected int | bindParameterValues(java.sql.PreparedStatement statement, org.hibernate.engine.QueryParameters queryParameters, int startIndex, org.hibernate.engine.SessionImplementor session)Bind all parameter values into the prepared statement in preparation
for execution.
int span = 0;
span += bindPositionalParameters( statement, queryParameters, startIndex, session );
span += bindNamedParameters( statement, queryParameters.getNamedParameters(), startIndex + span, session );
return span;
| protected int | bindPositionalParameters(java.sql.PreparedStatement statement, org.hibernate.engine.QueryParameters queryParameters, int startIndex, org.hibernate.engine.SessionImplementor session)Bind positional parameter values to the JDBC prepared statement.
Positional parameters are those specified by JDBC-style ? parameters
in the source query. It is (currently) expected that these come
before any named parameters in the source query.
final Object[] values = queryParameters.getFilteredPositionalParameterValues();
final Type[] types = queryParameters.getFilteredPositionalParameterTypes();
int span = 0;
for ( int i = 0; i < values.length; i++ ) {
types[i].nullSafeSet( statement, values[i], startIndex + span, session );
span += types[i].getColumnSpan( getFactory() );
}
return span;
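A single positional value may occupy more than one JDBC position when its Hibernate type spans several columns (a composite type, for instance), which is why the span is accumulated instead of assuming one column per value. A tiny standalone illustration with hypothetical spans:

// Mirrors the accumulation in the loop above: the second value spans two
// columns, so the third value is bound starting at JDBC position 4.
public class PositionalSpanDemo {
    public static void main(String[] args) {
        int[] columnSpans = { 1, 2, 1 }; // hypothetical column spans of three parameter types
        int startIndex = 1;
        int span = 0;
        for ( int i = 0; i < columnSpans.length; i++ ) {
            System.out.println( "value " + i + " bound starting at position " + ( startIndex + span ) );
            span += columnSpans[i];
        }
        // prints positions 1, 2 and 4
    }
}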
| private java.util.Map | buildNamedParameterLocMap(org.hibernate.engine.QueryParameters queryParameters)
if ( queryParameters.getNamedParameters()!=null ) {
final Map namedParameterLocMap = new HashMap();
Iterator piter = queryParameters.getNamedParameters().keySet().iterator();
while ( piter.hasNext() ) {
String name = (String) piter.next();
namedParameterLocMap.put(
name,
getNamedParameterLocs(name)
);
}
return namedParameterLocMap;
}
else {
return null;
}
| protected void | checkScrollability()Check whether the current loader can support returning ScrollableResults.
// Allows various loaders (ok mainly the QueryLoader :) to check
// whether scrolling of their result set should be allowed.
//
// By default it is allowed.
return;
| private void | checkVersion(int i, org.hibernate.persister.entity.Loadable persister, java.io.Serializable id, java.lang.Object entity, java.sql.ResultSet rs, org.hibernate.engine.SessionImplementor session)Check the version of the object in the ResultSet against
the object version in the session cache, throwing an exception
if the version numbers are different
Object version = session.getPersistenceContext().getEntry( entity ).getVersion();
if ( version != null ) { //null version means the object is in the process of being loaded somewhere else in the ResultSet
VersionType versionType = persister.getVersionType();
Object currentVersion = versionType.nullSafeGet(
rs,
getEntityAliases()[i].getSuffixedVersionAliases(),
session,
null
);
if ( !versionType.isEqual(version, currentVersion) ) {
if ( session.getFactory().getStatistics().isStatisticsEnabled() ) {
session.getFactory().getStatisticsImplementor()
.optimisticFailure( persister.getEntityName() );
}
throw new StaleObjectStateException( persister.getEntityName(), id );
}
}
| private void | createSubselects(java.util.List keys, org.hibernate.engine.QueryParameters queryParameters, org.hibernate.engine.SessionImplementor session)
if ( keys.size() > 1 ) { //if we only returned one entity, query by key is more efficient
Set[] keySets = transpose(keys);
Map namedParameterLocMap = buildNamedParameterLocMap( queryParameters );
final Loadable[] loadables = getEntityPersisters();
final String[] aliases = getAliases();
final Iterator iter = keys.iterator();
while ( iter.hasNext() ) {
final EntityKey[] rowKeys = (EntityKey[]) iter.next();
for ( int i=0; i<rowKeys.length; i++ ) {
if ( rowKeys[i]!=null && loadables[i].hasSubselectLoadableCollections() ) {
SubselectFetch subselectFetch = new SubselectFetch(
//getSQLString(),
aliases[i],
loadables[i],
queryParameters,
keySets[i],
namedParameterLocMap
);
session.getPersistenceContext()
.getBatchFetchQueue()
.addSubselect( rowKeys[i], subselectFetch );
}
}
}
}
| protected java.util.List | doList(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters)Actually execute a query, ignoring the query cache
final boolean stats = getFactory().getStatistics().isStatisticsEnabled();
long startTime = 0;
if ( stats ) startTime = System.currentTimeMillis();
List result;
try {
result = doQueryAndInitializeNonLazyCollections( session, queryParameters, true );
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not execute query",
getSQLString()
);
}
if ( stats ) {
getFactory().getStatisticsImplementor().queryExecuted(
getQueryIdentifier(),
result.size(),
System.currentTimeMillis() - startTime
);
}
return result;
| private java.util.List | doQuery(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies)
final RowSelection selection = queryParameters.getRowSelection();
final int maxRows = hasMaxRows( selection ) ?
selection.getMaxRows().intValue() :
Integer.MAX_VALUE;
final int entitySpan = getEntityPersisters().length;
final ArrayList hydratedObjects = entitySpan == 0 ? null : new ArrayList( entitySpan * 10 );
final PreparedStatement st = prepareQueryStatement( queryParameters, false, session );
final ResultSet rs = getResultSet( st, queryParameters.hasAutoDiscoverScalarTypes(), queryParameters.isCallable(), selection, session );
// would be great to move all this below here into another method that could also be used
// from the new scrolling stuff.
//
// Would need to change the way the max-row stuff is handled (i.e. behind an interface) so
// that I could do the control breaking at the means to know when to stop
final LockMode[] lockModeArray = getLockModes( queryParameters.getLockModes() );
final EntityKey optionalObjectKey = getOptionalObjectKey( queryParameters, session );
final boolean createSubselects = isSubselectLoadingEnabled();
final List subselectResultKeys = createSubselects ? new ArrayList() : null;
final List results = new ArrayList();
try {
handleEmptyCollections( queryParameters.getCollectionKeys(), rs, session );
EntityKey[] keys = new EntityKey[entitySpan]; //we can reuse it for each row
if ( log.isTraceEnabled() ) log.trace( "processing result set" );
int count;
for ( count = 0; count < maxRows && rs.next(); count++ ) {
if ( log.isTraceEnabled() ) log.debug("result set row: " + count);
Object result = getRowFromResultSet(
rs,
session,
queryParameters,
lockModeArray,
optionalObjectKey,
hydratedObjects,
keys,
returnProxies
);
results.add( result );
if ( createSubselects ) {
subselectResultKeys.add(keys);
keys = new EntityKey[entitySpan]; //can't reuse in this case
}
}
if ( log.isTraceEnabled() ) {
log.trace( "done processing result set (" + count + " rows)" );
}
}
finally {
session.getBatcher().closeQueryStatement( st, rs );
}
initializeEntitiesAndCollections( hydratedObjects, rs, session, queryParameters.isReadOnly() );
if ( createSubselects ) createSubselects( subselectResultKeys, queryParameters, session );
return results; //getResultList(results);
| private java.util.List | doQueryAndInitializeNonLazyCollections(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies)Execute an SQL query and attempt to instantiate instances of the class mapped by the given
persister from each row of the ResultSet. If an object is supplied, will attempt to
initialize that object. If a collection is supplied, attempt to initialize that collection.
final PersistenceContext persistenceContext = session.getPersistenceContext();
persistenceContext.beforeLoad();
List result;
try {
result = doQuery( session, queryParameters, returnProxies );
}
finally {
persistenceContext.afterLoad();
}
persistenceContext.initializeNonLazyCollections();
return result;
| private void | endCollectionLoad(java.lang.Object resultSetId, org.hibernate.engine.SessionImplementor session, org.hibernate.persister.collection.CollectionPersister collectionPersister)
//this is a query and we are loading multiple instances of the same collection role
session.getPersistenceContext()
.getLoadContexts()
.getCollectionLoadContext( ( ResultSet ) resultSetId )
.endLoadingCollections( collectionPersister );
| protected java.lang.String[] | getAliases()Get the SQL table aliases of entities whose
associations are subselect-loadable, returning
null if this loader does not support subselect
loading
return null;
| protected abstract CollectionAliases[] | getCollectionAliases()
| protected int[] | getCollectionOwners()Get the index of the entity that owns the collection, or -1
if there is no owner in the query results (ie. in the case of a
collection initializer) or no collection.
return null;
| protected org.hibernate.persister.collection.CollectionPersister[] | getCollectionPersisters()An (optional) persister for a collection to be initialized; only
collection loaders return a non-null value
return null;
| protected abstract EntityAliases[] | getEntityAliases()Get the result set descriptor
| protected boolean[] | getEntityEagerPropertyFetches()An array indicating whether the entities have eager property fetching
enabled.
return null;
| protected abstract org.hibernate.persister.entity.Loadable[] | getEntityPersisters()An array of persisters of entity classes contained in each row of results;
implemented by all subclasses
| public final org.hibernate.engine.SessionFactoryImplementor | getFactory()
return factory;
| private static int | getFirstRow(org.hibernate.engine.RowSelection selection)
if ( selection == null || selection.getFirstRow() == null ) {
return 0;
}
else {
return selection.getFirstRow().intValue();
}
| private java.lang.String | getInstanceClass(java.sql.ResultSet rs, int i, org.hibernate.persister.entity.Loadable persister, java.io.Serializable id, org.hibernate.engine.SessionImplementor session)Determine the concrete class of an instance in the ResultSet
if ( persister.hasSubclasses() ) {
// Code to handle subclasses of topClass
Object discriminatorValue = persister.getDiscriminatorType().nullSafeGet(
rs,
getEntityAliases()[i].getSuffixedDiscriminatorAlias(),
session,
null
);
final String result = persister.getSubclassForDiscriminatorValue( discriminatorValue );
if ( result == null ) {
//woops we got an instance of another class hierarchy branch
throw new WrongClassException(
"Discriminator: " + discriminatorValue,
id,
persister.getEntityName()
);
}
return result;
}
else {
return persister.getEntityName();
}
| private org.hibernate.engine.EntityKey | getKeyFromResultSet(int i, org.hibernate.persister.entity.Loadable persister, java.io.Serializable id, java.sql.ResultSet rs, org.hibernate.engine.SessionImplementor session)Read a row of Keys from the ResultSet into the given array.
Warning: this method is side-effecty.
If an id is given, don't bother going to the ResultSet.
Serializable resultId;
// if we know there is exactly 1 row, we can skip.
// it would be great if we could _always_ skip this;
// it is a problem for <key-many-to-one>
if ( isSingleRowLoader() && id != null ) {
resultId = id;
}
else {
Type idType = persister.getIdentifierType();
resultId = (Serializable) idType.nullSafeGet(
rs,
getEntityAliases()[i].getSuffixedKeyAliases(),
session,
null //problematic for <key-many-to-one>!
);
final boolean idIsResultId = id != null &&
resultId != null &&
idType.isEqual( id, resultId, session.getEntityMode(), factory );
if ( idIsResultId ) resultId = id; //use the id passed in
}
return resultId == null ?
null :
new EntityKey( resultId, persister, session.getEntityMode() );
| protected abstract org.hibernate.LockMode[] | getLockModes(java.util.Map lockModes)What lock mode does this load entities with?
| private static int | getMaxOrLimit(org.hibernate.engine.RowSelection selection, org.hibernate.dialect.Dialect dialect)Some dialect-specific LIMIT clauses require the maximum last row number
(i.e. first_row_number + total_row_count), while others require the maximum
returned row count (the total maximum number of rows to return). For example, with a first row of 20
and a maximum of 10 rows, a useMaxForLimit() dialect is passed 30, while other dialects are passed 10.
final int firstRow = getFirstRow( selection );
final int lastRow = selection.getMaxRows().intValue();
if ( dialect.useMaxForLimit() ) {
return lastRow + firstRow;
}
else {
return lastRow;
}
| public int[] | getNamedParameterLocs(java.lang.String name)
throw new AssertionFailure("no named parameters");
| private static org.hibernate.engine.EntityKey | getOptionalObjectKey(org.hibernate.engine.QueryParameters queryParameters, org.hibernate.engine.SessionImplementor session)
final Object optionalObject = queryParameters.getOptionalObject();
final Serializable optionalId = queryParameters.getOptionalId();
final String optionalEntityName = queryParameters.getOptionalEntityName();
if ( optionalObject != null && optionalEntityName != null ) {
return new EntityKey(
optionalId,
session.getEntityPersister( optionalEntityName, optionalObject ),
session.getEntityMode()
);
}
else {
return null;
}
| protected org.hibernate.type.EntityType[] | getOwnerAssociationTypes()An array of the owner types corresponding to the {@link #getOwners()}
returns. Indices indicating no owner would be null here.
return null;
| protected int[] | getOwners()An array of indexes of the entity that owns a one-to-one association
to the entity at the given index (-1 if there is no "owner"). The
indexes contained here are relative to the result of
{@link #getEntityPersisters}.
return null;
| protected java.lang.String | getQueryIdentifier()Identifies the query for statistics reporting; if null,
no statistics will be reported
return null;
| protected java.lang.Object | getResultColumnOrRow(java.lang.Object[] row, org.hibernate.transform.ResultTransformer transformer, java.sql.ResultSet rs, org.hibernate.engine.SessionImplementor session)Get the actual object that is returned in the user-visible result list.
This empty implementation merely returns its first argument. This is
overridden by some subclasses.
return row;
| private java.util.List | getResultFromQueryCache(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, java.util.Set querySpaces, org.hibernate.type.Type[] resultTypes, org.hibernate.cache.QueryCache queryCache, org.hibernate.cache.QueryKey key)
List result = null;
if ( session.getCacheMode().isGetEnabled() ) {
result = queryCache.get(
key,
resultTypes,
queryParameters.isNaturalKeyLookup(),
querySpaces,
session
);
if ( factory.getStatistics().isStatisticsEnabled() ) {
if (result==null) {
factory.getStatisticsImplementor()
.queryCacheMiss( getQueryIdentifier(), queryCache.getRegionName() );
}
else {
factory.getStatisticsImplementor()
.queryCacheHit( getQueryIdentifier(), queryCache.getRegionName() );
}
}
}
return result;
| protected java.util.List | getResultList(java.util.List results, org.hibernate.transform.ResultTransformer resultTransformer)
return results;
| protected final java.sql.ResultSet | getResultSet(java.sql.PreparedStatement st, boolean autodiscovertypes, boolean callable, org.hibernate.engine.RowSelection selection, org.hibernate.engine.SessionImplementor session)Fetch a PreparedStatement, call setMaxRows and then execute it,
advance to the first result and return an SQL ResultSet
ResultSet rs = null;
try {
Dialect dialect = getFactory().getDialect();
if (callable) {
rs = session.getBatcher().getResultSet( (CallableStatement) st, dialect );
}
else {
rs = session.getBatcher().getResultSet( st );
}
rs = wrapResultSetIfEnabled( rs , session );
if ( !dialect.supportsLimitOffset() || !useLimit( selection, dialect ) ) {
advance( rs, selection );
}
if ( autodiscovertypes ) {
autoDiscoverTypes( rs );
}
return rs;
}
catch ( SQLException sqle ) {
session.getBatcher().closeQueryStatement( st, rs );
throw sqle;
}
| private java.lang.Object[] | getRow(java.sql.ResultSet rs, org.hibernate.persister.entity.Loadable[] persisters, org.hibernate.engine.EntityKey[] keys, java.lang.Object optionalObject, org.hibernate.engine.EntityKey optionalObjectKey, org.hibernate.LockMode[] lockModes, java.util.List hydratedObjects, org.hibernate.engine.SessionImplementor session)Resolve any ids for currently loaded objects, duplications within the
ResultSet, etc. Instantiate empty objects to be initialized from the
ResultSet. Return an array of objects (a row of results) and an
array of booleans (by side-effect) that determine whether the corresponding
object should be initialized.
final int cols = persisters.length;
final EntityAliases[] descriptors = getEntityAliases();
if ( log.isDebugEnabled() ) {
log.debug(
"result row: " +
StringHelper.toString( keys )
);
}
final Object[] rowResults = new Object[cols];
for ( int i = 0; i < cols; i++ ) {
Object object = null;
EntityKey key = keys[i];
if ( keys[i] == null ) {
//do nothing
}
else {
//If the object is already loaded, return the loaded one
object = session.getEntityUsingInterceptor( key );
if ( object != null ) {
//its already loaded so don't need to hydrate it
instanceAlreadyLoaded(
rs,
i,
persisters[i],
key,
object,
lockModes[i],
session
);
}
else {
object = instanceNotYetLoaded(
rs,
i,
persisters[i],
descriptors[i].getRowIdAlias(),
key,
lockModes[i],
optionalObjectKey,
optionalObject,
hydratedObjects,
session
);
}
}
rowResults[i] = object;
}
return rowResults;
| private java.lang.Object | getRowFromResultSet(java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, org.hibernate.LockMode[] lockModeArray, org.hibernate.engine.EntityKey optionalObjectKey, java.util.List hydratedObjects, org.hibernate.engine.EntityKey[] keys, boolean returnProxies)
final Loadable[] persisters = getEntityPersisters();
final int entitySpan = persisters.length;
for ( int i = 0; i < entitySpan; i++ ) {
keys[i] = getKeyFromResultSet(
i,
persisters[i],
i == entitySpan - 1 ?
queryParameters.getOptionalId() :
null,
resultSet,
session
);
//TODO: the i==entitySpan-1 bit depends upon subclass implementation (very bad)
}
registerNonExists( keys, persisters, session );
// this call is side-effecty
Object[] row = getRow(
resultSet,
persisters,
keys,
queryParameters.getOptionalObject(),
optionalObjectKey,
lockModeArray,
hydratedObjects,
session
);
readCollectionElements( row, resultSet, session );
if ( returnProxies ) {
// now get an existing proxy for each row element (if there is one)
for ( int i = 0; i < entitySpan; i++ ) {
Object entity = row[i];
Object proxy = session.getPersistenceContext().proxyFor( persisters[i], keys[i], entity );
if ( entity != proxy ) {
// force the proxy to resolve itself
( (HibernateProxy) proxy ).getHibernateLazyInitializer().setImplementation(entity);
row[i] = proxy;
}
}
}
return getResultColumnOrRow( row, queryParameters.getResultTransformer(), resultSet, session );
| protected abstract java.lang.String | getSQLString()The SQL query string to be called; implemented by all subclasses
| private void | handleEmptyCollections(java.io.Serializable[] keys, java.lang.Object resultSetId, org.hibernate.engine.SessionImplementor session)If this is a collection initializer, we need to tell the session that a collection
is being initialized, to account for the possibility of the collection having
no elements (hence no rows in the result set).
if ( keys != null ) {
// this is a collection initializer, so we must create a collection
// for each of the passed-in keys, to account for the possibility
// that the collection is empty and has no rows in the result set
CollectionPersister[] collectionPersisters = getCollectionPersisters();
for ( int j=0; j<collectionPersisters.length; j++ ) {
for ( int i = 0; i < keys.length; i++ ) {
//handle empty collections
if ( log.isDebugEnabled() ) {
log.debug(
"result set contains (possibly empty) collection: " +
MessageHelper.collectionInfoString( collectionPersisters[j], keys[i], getFactory() )
);
}
session.getPersistenceContext()
.getLoadContexts()
.getCollectionLoadContext( ( ResultSet ) resultSetId )
.getLoadingCollection( collectionPersisters[j], keys[i] );
}
}
}
// else this is not a collection initializer (and empty collections will
// be detected by looking for the owner's identifier in the result set)
| private static boolean | hasMaxRows(org.hibernate.engine.RowSelection selection)
return selection != null && selection.getMaxRows() != null;
| protected boolean | hasSubselectLoadableCollections()
final Loadable[] loadables = getEntityPersisters();
for (int i=0; i<loadables.length; i++ ) {
if ( loadables[i].hasSubselectLoadableCollections() ) return true;
}
return false;
| private void | initializeEntitiesAndCollections(java.util.List hydratedObjects, java.lang.Object resultSetId, org.hibernate.engine.SessionImplementor session, boolean readOnly)
final CollectionPersister[] collectionPersisters = getCollectionPersisters();
if ( collectionPersisters != null ) {
for ( int i=0; i<collectionPersisters.length; i++ ) {
if ( collectionPersisters[i].isArray() ) {
//for arrays, we should end the collection load before resolving
//the entities, since the actual array instances are not instantiated
//during loading
//TODO: or we could do this polymorphically, and have two
// different operations implemented differently for arrays
endCollectionLoad( resultSetId, session, collectionPersisters[i] );
}
}
}
//important: reuse the same event instances for performance!
final PreLoadEvent pre;
final PostLoadEvent post;
if ( session.isEventSource() ) {
pre = new PreLoadEvent( (EventSource) session );
post = new PostLoadEvent( (EventSource) session );
}
else {
pre = null;
post = null;
}
if ( hydratedObjects!=null ) {
int hydratedObjectsSize = hydratedObjects.size();
if ( log.isTraceEnabled() ) {
log.trace( "total objects hydrated: " + hydratedObjectsSize );
}
for ( int i = 0; i < hydratedObjectsSize; i++ ) {
TwoPhaseLoad.initializeEntity( hydratedObjects.get(i), readOnly, session, pre, post );
}
}
if ( collectionPersisters != null ) {
for ( int i=0; i<collectionPersisters.length; i++ ) {
if ( !collectionPersisters[i].isArray() ) {
//for sets, we should end the collection load after resolving
//the entities, since we might call hashCode() on the elements
//TODO: or we could do this polymorphically, and have two
// different operations implemented differently for arrays
endCollectionLoad( resultSetId, session, collectionPersisters[i] );
}
}
}
| private void | instanceAlreadyLoaded(java.sql.ResultSet rs, int i, org.hibernate.persister.entity.Loadable persister, org.hibernate.engine.EntityKey key, java.lang.Object object, org.hibernate.LockMode lockMode, org.hibernate.engine.SessionImplementor session)The entity instance is already in the session cache
if ( !persister.isInstance( object, session.getEntityMode() ) ) {
throw new WrongClassException(
"loaded object was of wrong class " + object.getClass(),
key.getIdentifier(),
persister.getEntityName()
);
}
if ( LockMode.NONE != lockMode && upgradeLocks() ) { //no point doing this if NONE was requested
final boolean isVersionCheckNeeded = persister.isVersioned() &&
session.getPersistenceContext().getEntry(object)
.getLockMode().lessThan( lockMode );
// we don't need to worry about existing version being uninitialized
// because this block isn't called by a re-entrant load (re-entrant
// loads _always_ have lock mode NONE)
if (isVersionCheckNeeded) {
//we only check the version when _upgrading_ lock modes
checkVersion( i, persister, key.getIdentifier(), object, rs, session );
//we need to upgrade the lock mode to the mode requested
session.getPersistenceContext().getEntry(object)
.setLockMode(lockMode);
}
}
| private java.lang.Object | instanceNotYetLoaded(java.sql.ResultSet rs, int i, org.hibernate.persister.entity.Loadable persister, java.lang.String rowIdAlias, org.hibernate.engine.EntityKey key, org.hibernate.LockMode lockMode, org.hibernate.engine.EntityKey optionalObjectKey, java.lang.Object optionalObject, java.util.List hydratedObjects, org.hibernate.engine.SessionImplementor session)The entity instance is not in the session cache
final String instanceClass = getInstanceClass(
rs,
i,
persister,
key.getIdentifier(),
session
);
final Object object;
if ( optionalObjectKey != null && key.equals( optionalObjectKey ) ) {
//its the given optional object
object = optionalObject;
}
else {
// instantiate a new instance
object = session.instantiate( instanceClass, key.getIdentifier() );
}
//need to hydrate it.
// grab its state from the ResultSet and keep it in the Session
// (but don't yet initialize the object itself)
// note that we acquire LockMode.READ even if it was not requested
LockMode acquiredLockMode = lockMode == LockMode.NONE ? LockMode.READ : lockMode;
loadFromResultSet(
rs,
i,
object,
instanceClass,
key,
rowIdAlias,
acquiredLockMode,
persister,
session
);
//materialize associations (and initialize the object) later
hydratedObjects.add( object );
return object;
| private boolean | isEagerPropertyFetchEnabled(int i)
boolean[] array = getEntityEagerPropertyFetches();
return array!=null && array[i];
| protected boolean | isSingleRowLoader()Return false if this loader is a batch entity loader
return false;
| protected boolean | isSubselectLoadingEnabled()
return false;
| protected java.util.List | list(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, java.util.Set querySpaces, org.hibernate.type.Type[] resultTypes)Return the query results, using the query cache, called
by subclasses that implement cacheable queries
final boolean cacheable = factory.getSettings().isQueryCacheEnabled() &&
queryParameters.isCacheable();
if ( cacheable ) {
return listUsingQueryCache( session, queryParameters, querySpaces, resultTypes );
}
else {
return listIgnoreQueryCache( session, queryParameters );
}
| private java.util.List | listIgnoreQueryCache(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters)
return getResultList( doList( session, queryParameters ), queryParameters.getResultTransformer() );
| private java.util.List | listUsingQueryCache(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, java.util.Set querySpaces, org.hibernate.type.Type[] resultTypes)
QueryCache queryCache = factory.getQueryCache( queryParameters.getCacheRegion() );
Set filterKeys = FilterKey.createFilterKeys(
session.getEnabledFilters(),
session.getEntityMode()
);
QueryKey key = new QueryKey(
getSQLString(),
queryParameters,
filterKeys,
session.getEntityMode()
);
List result = getResultFromQueryCache(
session,
queryParameters,
querySpaces,
resultTypes,
queryCache,
key
);
if ( result == null ) {
result = doList( session, queryParameters );
putResultInQueryCache(
session,
queryParameters,
resultTypes,
queryCache,
key,
result
);
}
return getResultList( result, queryParameters.getResultTransformer() );
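From user code, this path is only reached when the query cache is enabled (hibernate.cache.use_query_cache=true, checked via isQueryCacheEnabled() in list() above) and the individual query is flagged as cacheable. A minimal usage sketch; the entity, parameter and region names are illustrative.

// Illustrative only: requires hibernate.cache.use_query_cache=true in the
// configuration; the Person entity and the region name are hypothetical.
import java.util.List;

import org.hibernate.Query;
import org.hibernate.Session;

public class CacheableQueryExample {
    public static List findAdults(Session session) {
        Query q = session.createQuery( "from Person p where p.age >= :age" );
        q.setInteger( "age", 18 );
        q.setCacheable( true );             // makes QueryParameters.isCacheable() return true
        q.setCacheRegion( "query.person" ); // optional: selects the QueryCache region
        return q.list();                    // eventually reaches Loader.list() / listUsingQueryCache()
    }
}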
| public final void | loadCollection(org.hibernate.engine.SessionImplementor session, java.io.Serializable id, org.hibernate.type.Type type)Called by subclasses that initialize collections
if ( log.isDebugEnabled() ) {
log.debug(
"loading collection: "+
MessageHelper.collectionInfoString( getCollectionPersisters()[0], id, getFactory() )
);
}
Serializable[] ids = new Serializable[]{id};
try {
doQueryAndInitializeNonLazyCollections(
session,
new QueryParameters( new Type[]{type}, ids, ids ),
true
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not initialize a collection: " +
MessageHelper.collectionInfoString( getCollectionPersisters()[0], id, getFactory() ),
getSQLString()
);
}
log.debug("done loading collection");
| public final void | loadCollectionBatch(org.hibernate.engine.SessionImplementor session, java.io.Serializable[] ids, org.hibernate.type.Type type)Called by wrappers that batch initialize collections
if ( log.isDebugEnabled() ) {
log.debug(
"batch loading collection: "+
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() )
);
}
Type[] idTypes = new Type[ids.length];
Arrays.fill( idTypes, type );
try {
doQueryAndInitializeNonLazyCollections(
session,
new QueryParameters( idTypes, ids, ids ),
true
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not initialize a collection batch: " +
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
getSQLString()
);
}
log.debug("done batch load");
| protected final void | loadCollectionSubselect(org.hibernate.engine.SessionImplementor session, java.io.Serializable[] ids, java.lang.Object[] parameterValues, org.hibernate.type.Type[] parameterTypes, java.util.Map namedParameters, org.hibernate.type.Type type)Called by subclasses that batch initialize collections
Type[] idTypes = new Type[ids.length];
Arrays.fill( idTypes, type );
try {
doQueryAndInitializeNonLazyCollections( session,
new QueryParameters( parameterTypes, parameterValues, namedParameters, ids ),
true
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not load collection by subselect: " +
MessageHelper.collectionInfoString( getCollectionPersisters()[0], ids, getFactory() ),
getSQLString()
);
}
| protected final java.util.List | loadEntity(org.hibernate.engine.SessionImplementor session, java.lang.Object id, org.hibernate.type.Type identifierType, java.lang.Object optionalObject, java.lang.String optionalEntityName, java.io.Serializable optionalIdentifier, org.hibernate.persister.entity.EntityPersister persister)Called by subclasses that load entities
if ( log.isDebugEnabled() ) {
log.debug(
"loading entity: " +
MessageHelper.infoString( persister, id, identifierType, getFactory() )
);
}
List result;
try {
result = doQueryAndInitializeNonLazyCollections(
session,
new QueryParameters(
new Type[] { identifierType },
new Object[] { id },
optionalObject,
optionalEntityName,
optionalIdentifier
),
false
);
}
catch ( SQLException sqle ) {
final Loadable[] persisters = getEntityPersisters();
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not load an entity: " +
MessageHelper.infoString( persisters[persisters.length-1], id, identifierType, getFactory() ),
getSQLString()
);
}
log.debug("done entity load");
return result;
| protected final java.util.List | loadEntity(org.hibernate.engine.SessionImplementor session, java.lang.Object key, java.lang.Object index, org.hibernate.type.Type keyType, org.hibernate.type.Type indexType, org.hibernate.persister.entity.EntityPersister persister)Called by subclasses that load entities
if ( log.isDebugEnabled() ) {
log.debug( "loading collection element by index" );
}
List result;
try {
result = doQueryAndInitializeNonLazyCollections(
session,
new QueryParameters(
new Type[] { keyType, indexType },
new Object[] { key, index }
),
false
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not collection element by index",
getSQLString()
);
}
log.debug("done entity load");
return result;
| public final java.util.List | loadEntityBatch(org.hibernate.engine.SessionImplementor session, java.io.Serializable[] ids, org.hibernate.type.Type idType, java.lang.Object optionalObject, java.lang.String optionalEntityName, java.io.Serializable optionalId, org.hibernate.persister.entity.EntityPersister persister)Called by wrappers that batch load entities
if ( log.isDebugEnabled() ) {
log.debug(
"batch loading entity: " +
MessageHelper.infoString(persister, ids, getFactory() )
);
}
Type[] types = new Type[ids.length];
Arrays.fill( types, idType );
List result;
try {
result = doQueryAndInitializeNonLazyCollections(
session,
new QueryParameters( types, ids, optionalObject, optionalEntityName, optionalId ),
false
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not load an entity batch: " +
MessageHelper.infoString( getEntityPersisters()[0], ids, getFactory() ),
getSQLString()
);
}
log.debug("done entity batch load");
return result;
| private void | loadFromResultSet(java.sql.ResultSet rs, int i, java.lang.Object object, java.lang.String instanceEntityName, org.hibernate.engine.EntityKey key, java.lang.String rowIdAlias, org.hibernate.LockMode lockMode, org.hibernate.persister.entity.Loadable rootPersister, org.hibernate.engine.SessionImplementor session)Hydrate the state of an object from the SQL ResultSet, into
an array of "hydrated" values (do not resolve associations yet),
and pass the hydrated state to the session.
final Serializable id = key.getIdentifier();
// Get the persister for the _subclass_
final Loadable persister = (Loadable) getFactory().getEntityPersister( instanceEntityName );
if ( log.isTraceEnabled() ) {
log.trace(
"Initializing object from ResultSet: " +
MessageHelper.infoString( persister, id, getFactory() )
);
}
boolean eagerPropertyFetch = isEagerPropertyFetchEnabled(i);
// add temp entry so that the next step is circular-reference
// safe - only needed because some types don't take proper
// advantage of two-phase-load (esp. components)
TwoPhaseLoad.addUninitializedEntity(
key,
object,
persister,
lockMode,
!eagerPropertyFetch,
session
);
//This is not very nice (and quite slow):
final String[][] cols = persister == rootPersister ?
getEntityAliases()[i].getSuffixedPropertyAliases() :
getEntityAliases()[i].getSuffixedPropertyAliases(persister);
final Object[] values = persister.hydrate(
rs,
id,
object,
rootPersister,
cols,
eagerPropertyFetch,
session
);
final Object rowId = persister.hasRowId() ? rs.getObject(rowIdAlias) : null;
final AssociationType[] ownerAssociationTypes = getOwnerAssociationTypes();
if ( ownerAssociationTypes != null && ownerAssociationTypes[i] != null ) {
String ukName = ownerAssociationTypes[i].getRHSUniqueKeyPropertyName();
if (ukName!=null) {
final int index = ( (UniqueKeyLoadable) persister ).getPropertyIndex(ukName);
final Type type = persister.getPropertyTypes()[index];
// polymorphism not really handled completely correctly,
// perhaps...well, actually its ok, assuming that the
// entity name used in the lookup is the same as the
// the one used here, which it will be
EntityUniqueKey euk = new EntityUniqueKey(
rootPersister.getEntityName(), //polymorphism comment above
ukName,
type.semiResolve( values[index], session, object ),
type,
session.getEntityMode(), session.getFactory()
);
session.getPersistenceContext().addEntity( euk, object );
}
}
TwoPhaseLoad.postHydrate(
persister,
id,
values,
rowId,
object,
lockMode,
!eagerPropertyFetch,
session
);
| public java.lang.Object | loadSequentialRowsForward(java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies)Loads a single logical row from the result set moving forward. This is the
processing used from the ScrollableResults where there were collection fetches
encountered; thus a single logical row may have multiple rows in the underlying
result set.
// note that for sequential scrolling, we make the assumption that
// the first persister element is the "root entity"
try {
if ( resultSet.isAfterLast() ) {
// don't even bother trying to read further
return null;
}
if ( resultSet.isBeforeFirst() ) {
resultSet.next();
}
// We call getKeyFromResultSet() here so that we can know the
// key value upon which to perform the breaking logic. However,
// it is also then called from getRowFromResultSet() which is certainly
// not the most efficient. But the call here is needed, and there
// currently is no other way without refactoring of the doQuery()/getRowFromResultSet()
// methods
final EntityKey currentKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
return sequentialLoad( resultSet, session, queryParameters, returnProxies, currentKey );
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not perform sequential read of results (forward)",
getSQLString()
);
}
| public java.lang.Object | loadSequentialRowsReverse(java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies, boolean isLogicallyAfterLast)Loads a single logical row from the result set, moving backward through the
underlying rows. This is the processing used from the ScrollableResults where there were collection fetches
encountered; thus a single logical row may have multiple rows in the underlying
result set.
// note that for sequential scrolling, we make the assumption that
// the first persister element is the "root entity"
try {
if ( resultSet.isFirst() ) {
// don't even bother trying to read any further
return null;
}
EntityKey keyToRead = null;
// This check is needed since processing leaves the cursor
// after the last physical row for the current logical row;
// thus if we are after the last physical row, this might be
// caused by either:
// 1) scrolling to the last logical row
// 2) scrolling past the last logical row
// In the latter scenario, the previous logical row
// really is the last logical row.
//
// In all other cases, we should process back two
// logical records (the current logic row, plus the
// previous logical row).
if ( resultSet.isAfterLast() && isLogicallyAfterLast ) {
// position cursor to the last row
resultSet.last();
keyToRead = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
}
else {
// Since the result set cursor is always left at the first
// physical row after the "last processed", we need to jump
// back one position to get the key value we are interested
// in skipping
resultSet.previous();
// sequentially read the result set in reverse until we recognize
// a change in the key value. At that point, we are pointed at
// the last physical sequential row for the logical row in which
// we are interested in processing
boolean firstPass = true;
final EntityKey lastKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
while ( resultSet.previous() ) {
EntityKey checkKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
if ( firstPass ) {
firstPass = false;
keyToRead = checkKey;
}
if ( !lastKey.equals( checkKey ) ) {
break;
}
}
}
// Read backwards until we read past the first physical sequential
// row with the key we are interested in loading
while ( resultSet.previous() ) {
EntityKey checkKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
if ( !keyToRead.equals( checkKey ) ) {
break;
}
}
// Finally, read ahead one row to position result set cursor
// at the first physical row we are interested in loading
resultSet.next();
// and perform the load
return sequentialLoad( resultSet, session, queryParameters, returnProxies, keyToRead );
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not perform sequential read of results (forward)",
getSQLString()
);
}
| public java.lang.Object | loadSingleRow(java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies)Loads a single row from the result set. This is the processing used from the
ScrollableResults where no collection fetches were encountered.
final int entitySpan = getEntityPersisters().length;
final List hydratedObjects = entitySpan == 0 ?
null : new ArrayList( entitySpan );
final Object result;
try {
result = getRowFromResultSet(
resultSet,
session,
queryParameters,
getLockModes( queryParameters.getLockModes() ),
null,
hydratedObjects,
new EntityKey[entitySpan],
returnProxies
);
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not read next row of results",
getSQLString()
);
}
initializeEntitiesAndCollections(
hydratedObjects,
resultSet,
session,
queryParameters.isReadOnly()
);
session.getPersistenceContext().initializeNonLazyCollections();
return result;
| protected boolean | needsFetchingScroll()Does the result set to be scrolled contain collection fetches?
return false;
| protected void | postInstantiate()Calculate and cache select-clause suffixes. Must be
called by subclasses after instantiation.
| protected final java.sql.PreparedStatement | prepareQueryStatement(org.hibernate.engine.QueryParameters queryParameters, boolean scroll, org.hibernate.engine.SessionImplementor session)Obtain a PreparedStatement with all parameters pre-bound.
Bind JDBC-style ? parameters, named parameters, and
limit parameters.
String sql = processFilters( queryParameters, session );
final Dialect dialect = getFactory().getDialect();
final RowSelection selection = queryParameters.getRowSelection();
boolean useLimit = useLimit( selection, dialect );
boolean hasFirstRow = getFirstRow( selection ) > 0;
boolean useOffset = hasFirstRow && useLimit && dialect.supportsLimitOffset();
boolean callable = queryParameters.isCallable();
boolean useScrollableResultSetToSkip = hasFirstRow &&
!useOffset &&
getFactory().getSettings().isScrollableResultSetsEnabled();
ScrollMode scrollMode = scroll ? queryParameters.getScrollMode() : ScrollMode.SCROLL_INSENSITIVE;
if ( useLimit ) {
sql = dialect.getLimitString(
sql.trim(), //use of trim() here is ugly?
useOffset ? getFirstRow(selection) : 0,
getMaxOrLimit(selection, dialect)
);
}
sql = preprocessSQL( sql, queryParameters, dialect );
PreparedStatement st = null;
if (callable) {
st = session.getBatcher()
.prepareCallableQueryStatement( sql, scroll || useScrollableResultSetToSkip, scrollMode );
}
else {
st = session.getBatcher()
.prepareQueryStatement( sql, scroll || useScrollableResultSetToSkip, scrollMode );
}
try {
int col = 1;
//TODO: can we limit stored procedures ?!
if ( useLimit && dialect.bindLimitParametersFirst() ) {
col += bindLimitParameters( st, col, selection );
}
if (callable) {
col = dialect.registerResultSetOutParameter( (CallableStatement)st, col );
}
col += bindParameterValues( st, queryParameters, col, session );
if ( useLimit && !dialect.bindLimitParametersFirst() ) {
col += bindLimitParameters( st, col, selection );
}
if ( !useLimit ) {
setMaxRows( st, selection );
}
if ( selection != null ) {
if ( selection.getTimeout() != null ) {
st.setQueryTimeout( selection.getTimeout().intValue() );
}
if ( selection.getFetchSize() != null ) {
st.setFetchSize( selection.getFetchSize().intValue() );
}
}
}
catch ( SQLException sqle ) {
session.getBatcher().closeQueryStatement( st, null );
throw sqle;
}
catch ( HibernateException he ) {
session.getBatcher().closeQueryStatement( st, null );
throw he;
}
return st;
| private java.lang.String | prependComment(java.lang.String sql, org.hibernate.engine.QueryParameters parameters)
String comment = parameters.getComment();
if ( comment == null ) {
return sql;
}
else {
return new StringBuffer( comment.length() + sql.length() + 5 )
.append( "/* " )
.append( comment )
.append( " */ " )
.append( sql )
.toString();
}
| protected java.lang.String | preprocessSQL(java.lang.String sql, org.hibernate.engine.QueryParameters parameters, org.hibernate.dialect.Dialect dialect)Modify the SQL, adding lock hints and comments, if necessary
sql = applyLocks( sql, parameters.getLockModes(), dialect );
return getFactory().getSettings().isCommentsEnabled() ?
prependComment( sql, parameters ) : sql;
| protected java.lang.String | processFilters(org.hibernate.engine.QueryParameters queryParameters, org.hibernate.engine.SessionImplementor session)
queryParameters.processFilters( getSQLString(), session );
return queryParameters.getFilteredSQL();
| private void | putResultInQueryCache(org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, org.hibernate.type.Type[] resultTypes, org.hibernate.cache.QueryCache queryCache, org.hibernate.cache.QueryKey key, java.util.List result)
if ( session.getCacheMode().isPutEnabled() ) {
boolean put = queryCache.put(
key,
resultTypes,
result,
queryParameters.isNaturalKeyLookup(),
session
);
if ( put && factory.getStatistics().isStatisticsEnabled() ) {
factory.getStatisticsImplementor()
.queryCachePut( getQueryIdentifier(), queryCache.getRegionName() );
}
}
| private void | readCollectionElement(java.lang.Object optionalOwner, java.io.Serializable optionalKey, org.hibernate.persister.collection.CollectionPersister persister, CollectionAliases descriptor, java.sql.ResultSet rs, org.hibernate.engine.SessionImplementor session)Read one collection element from the current row of the JDBC result set
final PersistenceContext persistenceContext = session.getPersistenceContext();
final Serializable collectionRowKey = (Serializable) persister.readKey(
rs,
descriptor.getSuffixedKeyAliases(),
session
);
if ( collectionRowKey != null ) {
// we found a collection element in the result set
if ( log.isDebugEnabled() ) {
log.debug(
"found row of collection: " +
MessageHelper.collectionInfoString( persister, collectionRowKey, getFactory() )
);
}
Object owner = optionalOwner;
if ( owner == null ) {
owner = persistenceContext.getCollectionOwner( collectionRowKey, persister );
if ( owner == null ) {
//TODO: This assertion is disabled because there is a bug that means the
// original owner of a transient, uninitialized collection is not known
// if the collection is re-referenced by a different object associated
// with the current Session
//throw new AssertionFailure("bug loading unowned collection");
}
}
PersistentCollection rowCollection = persistenceContext.getLoadContexts()
.getCollectionLoadContext( rs )
.getLoadingCollection( persister, collectionRowKey );
if ( rowCollection != null ) {
rowCollection.readFrom( rs, persister, descriptor, owner );
}
}
else if ( optionalKey != null ) {
// we did not find a collection element in the result set, so we
// ensure that a collection is created with the owner's identifier,
// since what we have is an empty collection
if ( log.isDebugEnabled() ) {
log.debug(
"result set contains (possibly empty) collection: " +
MessageHelper.collectionInfoString( persister, optionalKey, getFactory() )
);
}
persistenceContext.getLoadContexts()
.getCollectionLoadContext( rs )
.getLoadingCollection( persister, optionalKey ); // handle empty collection
}
// else no collection element, but also no owner
| private void | readCollectionElements(java.lang.Object[] row, java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session)Read any collection elements contained in a single row of the result set
//TODO: make this handle multiple collection roles!
final CollectionPersister[] collectionPersisters = getCollectionPersisters();
if ( collectionPersisters != null ) {
final CollectionAliases[] descriptors = getCollectionAliases();
final int[] collectionOwners = getCollectionOwners();
for ( int i=0; i<collectionPersisters.length; i++ ) {
final boolean hasCollectionOwners = collectionOwners !=null &&
collectionOwners[i] > -1;
//true if this is a query and we are loading multiple instances of the same collection role
//otherwise this is a CollectionInitializer and we are loading up a single collection or batch
final Object owner = hasCollectionOwners ?
row[ collectionOwners[i] ] :
null; //if null, owner will be retrieved from session
final CollectionPersister collectionPersister = collectionPersisters[i];
final Serializable key;
if ( owner == null ) {
key = null;
}
else {
key = collectionPersister.getCollectionType().getKeyOfOwner( owner, session );
//TODO: old version did not require hashmap lookup:
//keys[collectionOwner].getIdentifier()
}
readCollectionElement(
owner,
key,
collectionPersister,
descriptors[i],
resultSet,
session
);
}
}
| private void | registerNonExists(org.hibernate.engine.EntityKey[] keys, org.hibernate.persister.entity.Loadable[] persisters, org.hibernate.engine.SessionImplementor session)For missing objects associated by one-to-one with another object in the
result set, register the fact that the object is missing with the
session.
final int[] owners = getOwners();
if ( owners != null ) {
EntityType[] ownerAssociationTypes = getOwnerAssociationTypes();
for ( int i = 0; i < keys.length; i++ ) {
int owner = owners[i];
if ( owner > -1 ) {
EntityKey ownerKey = keys[owner];
if ( keys[i] == null && ownerKey != null ) {
final PersistenceContext persistenceContext = session.getPersistenceContext();
/*final boolean isPrimaryKey;
final boolean isSpecialOneToOne;
if ( ownerAssociationTypes == null || ownerAssociationTypes[i] == null ) {
isPrimaryKey = true;
isSpecialOneToOne = false;
}
else {
isPrimaryKey = ownerAssociationTypes[i].getRHSUniqueKeyPropertyName()==null;
isSpecialOneToOne = ownerAssociationTypes[i].getLHSPropertyName()!=null;
}*/
//TODO: can we *always* use the "null property" approach for everything?
/*if ( isPrimaryKey && !isSpecialOneToOne ) {
persistenceContext.addNonExistantEntityKey(
new EntityKey( ownerKey.getIdentifier(), persisters[i], session.getEntityMode() )
);
}
else if ( isSpecialOneToOne ) {*/
boolean isOneToOneAssociation = ownerAssociationTypes!=null &&
ownerAssociationTypes[i]!=null &&
ownerAssociationTypes[i].isOneToOne();
if ( isOneToOneAssociation ) {
persistenceContext.addNullProperty( ownerKey,
ownerAssociationTypes[i].getPropertyName() );
}
/*}
else {
persistenceContext.addNonExistantEntityUniqueKey( new EntityUniqueKey(
persisters[i].getEntityName(),
ownerAssociationTypes[i].getRHSUniqueKeyPropertyName(),
ownerKey.getIdentifier(),
persisters[owner].getIdentifierType(),
session.getEntityMode()
) );
}*/
}
}
}
}
| private org.hibernate.jdbc.ColumnNameCache | retreiveColumnNameToIndexCache(java.sql.ResultSet rs)
if ( columnNameCache == null ) {
log.trace("Building columnName->columnIndex cache");
columnNameCache = new ColumnNameCache( rs.getMetaData().getColumnCount() );
}
return columnNameCache;
| protected org.hibernate.ScrollableResults | scroll(org.hibernate.engine.QueryParameters queryParameters, org.hibernate.type.Type[] returnTypes, org.hibernate.hql.HolderInstantiator holderInstantiator, org.hibernate.engine.SessionImplementor session)Return the query results, as an instance of ScrollableResults
checkScrollability();
final boolean stats = getQueryIdentifier() != null &&
getFactory().getStatistics().isStatisticsEnabled();
long startTime = 0;
if ( stats ) startTime = System.currentTimeMillis();
try {
PreparedStatement st = prepareQueryStatement( queryParameters, true, session );
ResultSet rs = getResultSet(st, queryParameters.hasAutoDiscoverScalarTypes(), queryParameters.isCallable(), queryParameters.getRowSelection(), session);
if ( stats ) {
getFactory().getStatisticsImplementor().queryExecuted(
getQueryIdentifier(),
0,
System.currentTimeMillis() - startTime
);
}
if ( needsFetchingScroll() ) {
return new FetchingScrollableResultsImpl(
rs,
st,
session,
this,
queryParameters,
returnTypes,
holderInstantiator
);
}
else {
return new ScrollableResultsImpl(
rs,
st,
session,
this,
queryParameters,
returnTypes,
holderInstantiator
);
}
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not execute query using scroll",
getSQLString()
);
}
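The ScrollableResults built here is what user code obtains from Query.scroll(); needsFetchingScroll() decides whether the fetching variant is required. A minimal consumption sketch, with a hypothetical entity name:

// Illustrative only; the Item entity is hypothetical.
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public class ScrollExample {
    public static void process(Session session) {
        ScrollableResults rows = session
                .createQuery( "from Item i order by i.id" )
                .scroll( ScrollMode.FORWARD_ONLY ); // ultimately serviced by Loader.scroll(...)
        try {
            while ( rows.next() ) {
                Object item = rows.get( 0 ); // first projection of the current logical row
                // ... process item ...
            }
        }
        finally {
            rows.close(); // releases the underlying PreparedStatement and ResultSet
        }
    }
}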
| private java.lang.Object | sequentialLoad(java.sql.ResultSet resultSet, org.hibernate.engine.SessionImplementor session, org.hibernate.engine.QueryParameters queryParameters, boolean returnProxies, org.hibernate.engine.EntityKey keyToRead)
final int entitySpan = getEntityPersisters().length;
final List hydratedObjects = entitySpan == 0 ?
null : new ArrayList( entitySpan );
Object result = null;
final EntityKey[] loadedKeys = new EntityKey[entitySpan];
try {
do {
Object loaded = getRowFromResultSet(
resultSet,
session,
queryParameters,
getLockModes( queryParameters.getLockModes() ),
null,
hydratedObjects,
loadedKeys,
returnProxies
);
if ( result == null ) {
result = loaded;
}
}
while ( keyToRead.equals( loadedKeys[0] ) && resultSet.next() );
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not perform sequential read of results (forward)",
getSQLString()
);
}
initializeEntitiesAndCollections(
hydratedObjects,
resultSet,
session,
queryParameters.isReadOnly()
);
session.getPersistenceContext().initializeNonLazyCollections();
return result;
| private void | setMaxRows(java.sql.PreparedStatement st, org.hibernate.engine.RowSelection selection)Use JDBC API to limit the number of rows returned by the SQL query if necessary
if ( hasMaxRows( selection ) ) {
st.setMaxRows( selection.getMaxRows().intValue() + getFirstRow( selection ) );
}
| public java.lang.String | toString()
return getClass().getName() + '(' + getSQLString() + ')';
| private static java.util.Set[] | transpose(java.util.List keys)
Set[] result = new Set[ ( ( EntityKey[] ) keys.get(0) ).length ];
for ( int j=0; j<result.length; j++ ) {
result[j] = new HashSet( keys.size() );
for ( int i=0; i<keys.size(); i++ ) {
result[j].add( ( ( EntityKey[] ) keys.get(i) ) [j] );
}
}
return result;
| protected boolean | upgradeLocks()Does this query return objects that might be already cached
by the session, whose lock mode may need upgrading
return false;
| private static boolean | useLimit(org.hibernate.engine.RowSelection selection, org.hibernate.dialect.Dialect dialect)Should we pre-process the SQL string, adding a dialect-specific
LIMIT clause.
return dialect.supportsLimit() && hasMaxRows( selection );
| private synchronized java.sql.ResultSet | wrapResultSetIfEnabled(java.sql.ResultSet rs, org.hibernate.engine.SessionImplementor session)
// synchronized to avoid multi-thread access issues; defined as method synch to avoid
// potential deadlock issues due to nature of code.
if ( session.getFactory().getSettings().isWrapResultSetsEnabled() ) {
try {
log.debug("Wrapping result set [" + rs + "]");
return new ResultSetWrapper( rs, retreiveColumnNameToIndexCache( rs ) );
}
catch(SQLException e) {
log.info("Error wrapping result set", e);
return rs;
}
}
else {
return rs;
}