/**
 * Create a close-suppressing proxy for the given Hibernate Session.
 * The proxy also prepares returned Query and Criteria objects.
 * @param session the Hibernate Session to create a proxy for
 * @return the Session proxy
 * @see org.hibernate.Session#close()
 * @see #prepareQuery
 * @see #prepareCriteria
 */
protected Session createSessionProxy(Session session) {
    Class<?>[] sessionIfcs;
    Class<?> mainIfc = (session instanceof org.hibernate.classic.Session ?
            org.hibernate.classic.Session.class : Session.class);
    if (session instanceof EventSource) {
        sessionIfcs = new Class<?>[] {mainIfc, EventSource.class};
    }
    else if (session instanceof SessionImplementor) {
        sessionIfcs = new Class<?>[] {mainIfc, SessionImplementor.class};
    }
    else {
        sessionIfcs = new Class<?>[] {mainIfc};
    }
    return (Session) Proxy.newProxyInstance(
            session.getClass().getClassLoader(), sessionIfcs,
            new CloseSuppressingInvocationHandler(session));
}
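// Minimal sketch of what a close-suppressing handler can look like, for illustration
// only: the actual CloseSuppressingInvocationHandler referenced above is defined
// elsewhere and additionally prepares returned Query and Criteria objects. The class
// name below is hypothetical; only java.lang.reflect and org.hibernate.Session are assumed.
class CloseSuppressingHandlerSketch implements java.lang.reflect.InvocationHandler {

    private final Session target;

    CloseSuppressingHandlerSketch(Session target) {
        this.target = target;
    }

    public Object invoke(Object proxy, java.lang.reflect.Method method, Object[] args) throws Throwable {
        if ( "equals".equals( method.getName() ) ) {
            // only consider the proxy itself as equal
            return ( proxy == args[0] ? Boolean.TRUE : Boolean.FALSE );
        }
        if ( "hashCode".equals( method.getName() ) ) {
            return new Integer( System.identityHashCode( proxy ) );
        }
        if ( "close".equals( method.getName() ) ) {
            // suppress Session.close(): whoever owns the underlying Session closes it
            return null;
        }
        try {
            // delegate every other call to the target Session
            return method.invoke( target, args );
        }
        catch ( java.lang.reflect.InvocationTargetException ex ) {
            throw ex.getTargetException();
        }
    }
}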
public Iterator performIterate(
        QueryParameters queryParameters,
        EventSource session) throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "iterate: " + getSourceQuery() );
        queryParameters.traceParameters( session.getFactory() );
    }
    if ( translators.length == 0 ) {
        return EmptyIterator.INSTANCE;
    }

    Iterator[] results = null;
    boolean many = translators.length > 1;
    if (many) {
        results = new Iterator[translators.length];
    }

    Iterator result = null;
    for ( int i = 0; i < translators.length; i++ ) {
        result = translators[i].iterate( queryParameters, session );
        if (many) results[i] = result;
    }

    return many ? new JoinedIterator(results) : result;
}
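// Hedged sketch of the joined-iterator technique used above. The real implementation
// is org.hibernate.util.JoinedIterator (not shown here); this hypothetical class only
// illustrates the idea: present several delegate Iterators as one continuous Iterator.
class JoinedIteratorSketch implements java.util.Iterator {

    private final java.util.Iterator[] iterators;
    private int current = 0;

    JoinedIteratorSketch(java.util.Iterator[] iterators) {
        this.iterators = iterators;
    }

    public boolean hasNext() {
        // advance past exhausted delegates
        while ( current < iterators.length && !iterators[current].hasNext() ) {
            current++;
        }
        return current < iterators.length;
    }

    public Object next() {
        if ( !hasNext() ) {
            throw new java.util.NoSuchElementException();
        }
        return iterators[current].next();
    }

    public void remove() {
        // kept unsupported in this sketch; the real class delegates removal
        throw new UnsupportedOperationException("remove() not supported in this sketch");
    }
}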
private void postInsert() {
    if ( isDelayed ) {
        getSession().getPersistenceContext().replaceDelayedEntityIdentityInsertKeys( delayedEntityKey, generatedId );
    }
    PostInsertEventListener[] postListeners = getSession().getListeners()
            .getPostInsertEventListeners();
    if ( postListeners.length > 0 ) {
        PostInsertEvent postEvent = new PostInsertEvent(
                getInstance(), generatedId, state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostInsert(postEvent);
        }
    }
}
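// Hedged illustration of the listener side of the notification loop above: a
// hypothetical PostInsertEventListener implementation. Registration through the
// usual event configuration is assumed and not shown; the accessors used here
// (getEntity(), getId()) are assumed to mirror the PostInsertEvent constructor arguments.
public class LoggingPostInsertListenerSketch implements PostInsertEventListener {

    public void onPostInsert(PostInsertEvent event) {
        // react to the completed insert, e.g. write an audit trail entry
        System.out.println(
                "inserted " + event.getEntity().getClass().getName() +
                " with id " + event.getId()
        );
    }
}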
protected void doEvict(
        final Object object,
        final EntityKey key,
        final EntityPersister persister,
        final EventSource session) throws HibernateException {

    if ( log.isTraceEnabled() ) {
        log.trace( "evicting " + MessageHelper.infoString(persister) );
    }

    // remove all collections for the entity from the session-level cache
    if ( persister.hasCollections() ) {
        new EvictVisitor( session ).process( object, persister );
    }

    new Cascade( CascadingAction.EVICT, Cascade.AFTER_EVICT, session )
            .cascade( persister, object );
}
/**
 * We encountered a delete request on a transient instance.
 * <p/>
 * This is a deviation from historical Hibernate (pre-3.2) behavior to
 * align with the JPA spec, which states that transient entities can be
 * passed to the remove operation, in which case cascades still need to be
 * performed.
 *
 * @param session The session which is the source of the event
 * @param entity The entity being deleted
 * @param cascadeDeleteEnabled Is cascading of deletes enabled
 * @param persister The entity persister
 * @param transientEntities A cache of already visited transient entities
 * (to avoid infinite recursion).
 */
protected void deleteTransientEntity(
        EventSource session,
        Object entity,
        boolean cascadeDeleteEnabled,
        EntityPersister persister,
        Set transientEntities) {
    log.info( "handling transient entity in delete processing" );
    if ( transientEntities.contains( entity ) ) {
        log.trace( "already handled transient entity; skipping" );
        return;
    }
    transientEntities.add( entity );
    cascadeBeforeDelete( session, persister, entity, null, transientEntities );
    cascadeAfterDelete( session, persister, entity, transientEntities );
}
private void postInsert() {
    PostInsertEventListener[] postListeners = getSession().getListeners()
            .getPostInsertEventListeners();
    if ( postListeners.length > 0 ) {
        PostInsertEvent postEvent = new PostInsertEvent(
                getInstance(), getId(), state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostInsert(postEvent);
        }
    }
}
protected void cascadeBeforeDelete(
        EventSource session,
        EntityPersister persister,
        Object entity,
        EntityEntry entityEntry,
        Set transientEntities) throws HibernateException {

    CacheMode cacheMode = session.getCacheMode();
    session.setCacheMode( CacheMode.GET );
    session.getPersistenceContext().incrementCascadeLevel();
    try {
        // cascade-delete to collections BEFORE the collection owner is deleted
        new Cascade( CascadingAction.DELETE, Cascade.AFTER_INSERT_BEFORE_DELETE, session )
                .cascade( persister, entity, transientEntities );
    }
    finally {
        session.getPersistenceContext().decrementCascadeLevel();
        session.setCacheMode( cacheMode );
    }
}
protected void cascadeAfterDelete(
        EventSource session,
        EntityPersister persister,
        Object entity,
        Set transientEntities) throws HibernateException {

    CacheMode cacheMode = session.getCacheMode();
    session.setCacheMode( CacheMode.GET );
    session.getPersistenceContext().incrementCascadeLevel();
    try {
        // cascade-delete to many-to-one AFTER the parent was deleted
        new Cascade( CascadingAction.DELETE, Cascade.BEFORE_INSERT_AFTER_DELETE, session )
                .cascade( persister, entity, transientEntities );
    }
    finally {
        session.getPersistenceContext().decrementCascadeLevel();
        session.setCacheMode( cacheMode );
    }
}
private void postCommitDelete() {
    PostDeleteEventListener[] postListeners = getSession().getListeners()
            .getPostCommitDeleteEventListeners();
    if ( postListeners.length > 0 ) {
        PostDeleteEvent postEvent = new PostDeleteEvent(
                getInstance(), getId(), state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostDelete(postEvent);
        }
    }
}
private boolean wrapCollections(
        EventSource session,
        EntityPersister persister,
        Type[] types,
        Object[] values
) {
    if ( persister.hasCollections() ) {

        // wrap up any new collections directly referenced by the object
        // or its components

        // NOTE: we need to do the wrap here even if it's not "dirty",
        // because collections need wrapping but changes to _them_
        // don't dirty the container. Also, for versioned data, we
        // need to wrap before calling searchForDirtyCollections

        WrapVisitor visitor = new WrapVisitor(session);
        // substitutes into values by side-effect
        visitor.processEntityPropertyValues(values, types);
        return visitor.isSubstitutionRequired();
    }
    else {
        return false;
    }
}
protected void entityIsPersistent(PersistEvent event, Map createCache) {
    log.trace("ignoring persistent instance");
    final EventSource source = event.getSession();

    //TODO: check that entry.getIdentifier().equals(requestedId)

    final Object entity = source.getPersistenceContext().unproxy( event.getObject() );
    final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity );

    if ( createCache.put(entity, entity) == null ) {
        //TODO: merge into one method!
        cascadeBeforeSave(source, persister, entity, createCache);
        cascadeAfterSave(source, persister, entity, createCache);
    }
}
/**
 * The given save-update event named a transient entity.
 * <p/>
 * Here, we will perform the save processing.
 *
 * @param event The save event to be handled.
 *
 * @return The entity's identifier after saving.
 */
protected Serializable entityIsTransient(SaveOrUpdateEvent event) {
    log.trace( "saving transient instance" );

    final EventSource source = event.getSession();

    EntityEntry entityEntry = event.getEntry();
    if ( entityEntry != null ) {
        if ( entityEntry.getStatus() == Status.DELETED ) {
            source.forceFlush( entityEntry );
        }
        else {
            throw new AssertionFailure( "entity was persistent" );
        }
    }

    Serializable id = saveWithGeneratedOrRequestedId( event );

    source.getPersistenceContext().reassociateProxy( event.getObject(), id );

    return id;
}
private void postCommitInsert() {
    PostInsertEventListener[] postListeners = getSession().getListeners()
            .getPostCommitInsertEventListeners();
    if ( postListeners.length > 0 ) {
        PostInsertEvent postEvent = new PostInsertEvent(
                getInstance(), generatedId, state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostInsert(postEvent);
        }
    }
}
private void postUpdate() {
    PostUpdateEventListener[] postListeners = getSession().getListeners()
            .getPostUpdateEventListeners();
    if ( postListeners.length > 0 ) {
        PostUpdateEvent postEvent = new PostUpdateEvent(
                getInstance(), getId(), state, previousState, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostUpdate(postEvent);
        }
    }
}
/**
 * process cascade save/update at the start of a flush to discover
 * any newly referenced entity that must be passed to saveOrUpdate(),
 * and also apply orphan delete
 */
private void prepareEntityFlushes(EventSource session) throws HibernateException {

    log.debug("processing flush-time cascades");

    final Map.Entry[] list = IdentityMap.concurrentEntries( session.getPersistenceContext().getEntityEntries() );
    //safe from concurrent modification because of how entryList() is implemented on IdentityMap
    final int size = list.length;
    final Object anything = getAnything();
    for ( int i = 0; i < size; i++ ) {
        Map.Entry me = list[i];
        EntityEntry entry = (EntityEntry) me.getValue();
        Status status = entry.getStatus();
        if ( status == Status.MANAGED || status == Status.SAVING ) {
            cascadeOnFlush( session, entry.getPersister(), me.getKey(), anything );
        }
    }
}
/**
 * Execute all SQL and second-level cache updates, in a
 * special order so that foreign-key constraints cannot
 * be violated:
 * <ol>
 * <li> Inserts, in the order they were performed
 * <li> Updates
 * <li> Deletion of collection elements
 * <li> Insertion of collection elements
 * <li> Deletes, in the order they were performed
 * </ol>
 */
protected void performExecutions(EventSource session) throws HibernateException {

    log.trace("executing flush");

    try {
        session.getJDBCContext().getConnectionManager().flushBeginning();
        // we need to lock the collection caches before
        // executing entity inserts/updates in order to
        // account for bidi associations
        session.getActionQueue().prepareActions();
        session.getActionQueue().executeActions();
    }
    catch (HibernateException he) {
        log.error("Could not synchronize database state with session", he);
        throw he;
    }
    finally {
        session.getJDBCContext().getConnectionManager().flushEnding();
    }
}
private void postCommitUpdate() {
    PostUpdateEventListener[] postListeners = getSession().getListeners()
            .getPostCommitUpdateEventListeners();
    if ( postListeners.length > 0 ) {
        PostUpdateEvent postEvent = new PostUpdateEvent(
                getInstance(), getId(), state, previousState, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostUpdate(postEvent);
        }
    }
}
private boolean isVersionChanged(Object entity, EventSource source, EntityPersister persister, Object target) {
    if ( ! persister.isVersioned() ) {
        return false;
    }
    // for merging of versioned entities, we consider the version having
    // been changed only when:
    // 1) the two version values are different;
    //      *AND*
    // 2) The target actually represents database state!
    //
    // This second condition is a special case which allows
    // an entity to be merged during the same transaction
    // (though during a separate operation) in which it was
    // originally persisted/saved
    boolean changed = ! persister.getVersionType().isSame(
            persister.getVersion( target, source.getEntityMode() ),
            persister.getVersion( entity, source.getEntityMode() ),
            source.getEntityMode()
    );

    // TODO : perhaps we should additionally require that the incoming entity
    // version be equivalent to the defined unsaved-value?
    return changed && existsInDatabase( target, source, persister );
}
private boolean existsInDatabase(Object entity, EventSource source, EntityPersister persister) {
    EntityEntry entry = source.getPersistenceContext().getEntry( entity );
    if ( entry == null ) {
        Serializable id = persister.getIdentifier( entity, source.getEntityMode() );
        if ( id != null ) {
            EntityKey key = new EntityKey( id, persister, source.getEntityMode() );
            Object managedEntity = source.getPersistenceContext().getEntity( key );
            entry = source.getPersistenceContext().getEntry( managedEntity );
        }
    }

    if ( entry == null ) {
        // perhaps this should be an exception since it is only ever used
        // in the above method?
        return false;
    }
    else {
        return entry.isExistsInDatabase();
    }
}
private void postCommitInsert() {
    PostInsertEventListener[] postListeners = getSession().getListeners()
            .getPostCommitInsertEventListeners();
    if ( postListeners.length > 0 ) {
        PostInsertEvent postEvent = new PostInsertEvent(
                getInstance(), getId(), state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostInsert(postEvent);
        }
    }
}
/**
 * Prepares the save call using the given requested id.
 *
 * @param entity The entity to be saved.
 * @param requestedId The id to which to associate the entity.
 * @param entityName The name of the entity being saved.
 * @param anything Generally cascade-specific information.
 * @param source The session which is the source of this save event.
 *
 * @return The id used to save the entity.
 */
protected Serializable saveWithRequestedId(
        Object entity,
        Serializable requestedId,
        String entityName,
        Object anything,
        EventSource source) {
    return performSave(
            entity,
            requestedId,
            source.getEntityPersister( entityName, entity ),
            false,
            anything,
            source,
            true
    );
}
/**
 * Handles the calls needed to perform pre-save cascades for the given entity.
 *
 * @param source The session from which the save event originated.
 * @param persister The entity's persister instance.
 * @param entity The entity to be saved.
 * @param anything Generally cascade-specific data
 */
protected void cascadeBeforeSave(
        EventSource source,
        EntityPersister persister,
        Object entity,
        Object anything) {

    // cascade-save to many-to-one BEFORE the parent is saved
    source.getPersistenceContext().incrementCascadeLevel();
    try {
        new Cascade( getCascadeAction(), Cascade.BEFORE_INSERT_AFTER_DELETE, source )
                .cascade( persister, entity, anything );
    }
    finally {
        source.getPersistenceContext().decrementCascadeLevel();
    }
}
/**
 * Handles the calls needed to perform post-save cascades.
 *
 * @param source The session from which the event originated.
 * @param persister The entity's persister instance.
 * @param entity The entity being saved.
 * @param anything Generally cascade-specific data
 */
protected void cascadeAfterSave(
        EventSource source,
        EntityPersister persister,
        Object entity,
        Object anything) {

    // cascade-save to collections AFTER the collection owner was saved
    source.getPersistenceContext().incrementCascadeLevel();
    try {
        new Cascade( getCascadeAction(), Cascade.AFTER_INSERT_BEFORE_DELETE, source )
                .cascade( persister, entity, anything );
    }
    finally {
        source.getPersistenceContext().decrementCascadeLevel();
    }
}
public IteratorImpl(
        ResultSet rs,
        PreparedStatement ps,
        EventSource sess,
        Type[] types,
        String[][] columnNames,
        HolderInstantiator holderInstantiator) throws HibernateException, SQLException {

    this.rs = rs;
    this.ps = ps;
    this.session = sess;
    this.types = types;
    this.names = columnNames;
    this.holderInstantiator = holderInstantiator;

    single = types.length == 1;

    postNext();
}
private void postDelete() {
    PostDeleteEventListener[] postListeners = getSession().getListeners()
            .getPostDeleteEventListeners();
    if ( postListeners.length > 0 ) {
        PostDeleteEvent postEvent = new PostDeleteEvent(
                getInstance(), getId(), state, getPersister(), (EventSource) getSession()
        );
        for ( int i = 0; i < postListeners.length; i++ ) {
            postListeners[i].onPostDelete(postEvent);
        }
    }
}
protected void coordinateSharedCacheCleanup(SessionImplementor session) {
    BulkOperationCleanupAction action = new BulkOperationCleanupAction( session, getCustomQuery().getQuerySpaces() );
    action.init();
    if ( session.isEventSource() ) {
        ( ( EventSource ) session ).getActionQueue().addAction( action );
    }
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to delete: " + entityName );
    }
    session.delete( entityName, child, isCascadeDeleteEnabled, ( Set ) anything );
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to lock: " + entityName );
    }
    session.lock( entityName, child, LockMode.NONE/*(LockMode) anything*/ );
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to refresh: " + entityName );
    }
    session.refresh( child, (Map) anything );
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to evict: " + entityName );
    }
    session.evict(child);
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to saveOrUpdate: " + entityName );
    }
    session.saveOrUpdate(entityName, child);
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to merge: " + entityName );
    }
    session.merge( entityName, child, (Map) anything );
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to saveOrUpdateCopy: " + entityName );
    }
    session.saveOrUpdateCopy( entityName, child, (Map) anything );
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to persist: " + entityName );
    }
    session.persist( entityName, child, (Map) anything );
}
public void noCascade(
        EventSource session,
        Object child,
        Object parent,
        EntityPersister persister,
        int propertyIndex) {
    if ( child == null ) {
        return;
    }
    Type type = persister.getPropertyTypes()[propertyIndex];
    if ( type.isEntityType() ) {
        String childEntityName = ( ( EntityType ) type ).getAssociatedEntityName( session.getFactory() );

        if ( ! isInManagedState( child, session )
                && ! ( child instanceof HibernateProxy ) //a proxy cannot be transient and it breaks ForeignKeys.isTransient
                && ForeignKeys.isTransient( childEntityName, child, null, session ) ) {
            String parentEntityName = persister.getEntityName();
            String propertyName = persister.getPropertyNames()[propertyIndex];
            throw new TransientObjectException(
                    "object references an unsaved transient instance - " +
                    "save the transient instance before flushing: " +
                    parentEntityName + "." + propertyName + " -> " + childEntityName
            );
        }
    }
}
public void cascade(EventSource session, Object child, String entityName, Object anything, boolean isCascadeDeleteEnabled)
throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace( "cascading to replicate: " + entityName );
    }
    session.replicate( entityName, child, (ReplicationMode) anything );
}
private static Object[] assemble(
        final Serializable[] values,
        final Object result,
        final Serializable id,
        final EntityPersister persister,
        final Interceptor interceptor,
        final EventSource session) throws HibernateException {

    //assembled state gets put in a new array (we read from cache by value!)
    Object[] assembledProps = TypeFactory.assemble(
            values,
            persister.getPropertyTypes(),
            session, result
    );

    //persister.setIdentifier(result, id); //before calling interceptor, for consistency with normal load

    //TODO: reuse the PreLoadEvent
    PreLoadEvent preLoadEvent = new PreLoadEvent( session )
            .setEntity(result)
            .setState(assembledProps)
            .setId(id)
            .setPersister(persister);

    PreLoadEventListener[] listeners = session.getListeners().getPreLoadEventListeners();
    for ( int i = 0; i < listeners.length; i++ ) {
        listeners[i].onPreLoad(preLoadEvent);
    }

    persister.setPropertyValues( result, assembledProps, session.getEntityMode() );

    return assembledProps;
}
/**
 * {@inheritDoc}
 */
Object processCollection(Object collection, CollectionType type) throws HibernateException {
    if ( collection == CollectionType.UNFETCHED_COLLECTION ) {
        return null;
    }

    EventSource session = getSession();
    CollectionPersister persister = session.getFactory().getCollectionPersister( type.getRole() );

    final Serializable collectionKey = extractCollectionKeyFromOwner( persister );

    if ( collection != null && ( collection instanceof PersistentCollection ) ) {
        PersistentCollection wrapper = (PersistentCollection) collection;

        if ( wrapper.setCurrentSession(session) ) {
            //a "detached" collection!
            if ( !isOwnerUnchanged( wrapper, persister, collectionKey ) ) {
                // if the collection belonged to a different entity,
                // clean up the existing state of the collection
                removeCollection( persister, collectionKey, session );
            }
            reattachCollection(wrapper, type);
        }
        else {
            // a collection loaded in the current session
            // cannot possibly be the collection belonging
            // to the entity passed to update()
            removeCollection(persister, collectionKey, session);
        }
    }
    else {
        // null or brand new collection
        // this will also (inefficiently) handle arrays, which have
        // no snapshot, so we can't do any better
        removeCollection(persister, collectionKey, session);
    }

    return null;
}
/**
 * Schedules a collection for deletion.
 *
 * @param role The persister representing the collection to be removed.
 * @param collectionKey The collection key (differs from owner-id in the case of property-refs).
 * @param source The session from which the request originated.
 * @throws HibernateException
 */
void removeCollection(CollectionPersister role, Serializable collectionKey, EventSource source) throws HibernateException {
    if ( log.isTraceEnabled() ) {
        log.trace(
                "collection dereferenced while transient " +
                MessageHelper.collectionInfoString( role, ownerIdentifier, source.getFactory() )
        );
    }
    source.getActionQueue().addAction( new CollectionRemoveAction( null, role, collectionKey, false, source ) );
}