/**
 * Execute schema drop script, determined by the Configuration object
 * used for creating the SessionFactory. A replacement for Hibernate's
 * SchemaExport class, to be invoked on application setup.
 * <p>Fetch the LocalSessionFactoryBean itself rather than the exposed
 * SessionFactory to be able to invoke this method, e.g. via
 * {@code LocalSessionFactoryBean lsfb = (LocalSessionFactoryBean) ctx.getBean("&mySessionFactory");}.
 * <p>Uses the SessionFactory that this bean generates for accessing a
 * JDBC connection to perform the script.
 * @throws org.springframework.dao.DataAccessException in case of script execution errors
 * @see org.hibernate.cfg.Configuration#generateDropSchemaScript
 * @see org.hibernate.tool.hbm2ddl.SchemaExport#drop
 */
public void dropDatabaseSchema() throws DataAccessException {
    logger.info("Dropping database schema for Hibernate SessionFactory");
    // NOTE(review): unlike updateDatabaseSchema/validateDatabaseSchema/createDatabaseSchema,
    // this method does not bind the DataSource to configTimeDataSourceHolder — presumably
    // because generateDropSchemaScript needs no live metadata; confirm this is intentional.
    SessionFactory sessionFactory = getSessionFactory();
    final Dialect dialect = ((SessionFactoryImplementor) sessionFactory).getDialect();
    HibernateTemplate hibernateTemplate = new HibernateTemplate(sessionFactory);
    hibernateTemplate.execute(
        new HibernateCallback<Object>() {
            @Override
            public Object doInHibernate(Session session) throws HibernateException, SQLException {
                // session.connection() is deprecated but needed to run raw DDL.
                @SuppressWarnings("deprecation")
                Connection con = session.connection();
                String[] sql = getConfiguration().generateDropSchemaScript(dialect);
                executeSchemaScript(con, sql);
                return null;
            }
        }
    );
}
/** * {@inheritDoc} */ @Override public String render(Type firstArgumentType, List arguments, SessionFactoryImplementor factory) { // 1 arguments is the date field to use if (arguments.size() != 2) { throw new QueryException( "Need exactly two arguments for day of date function, but got only " + arguments.size() + " args=" + arguments); } DateField dateField; try { dateField = DateField.valueOf(arguments.get(0).toString()); } catch (Exception e) { throw new QueryException( "Invalid datefield used: " + arguments.get(0) + ". " + e.getMessage(), e); } String sql = "date_part('" + dateField + "'," + arguments.get(1) + ")"; return sql; }
/** * {@inheritDoc} */ @Override public String render(Type firstArgumentType, List arguments, SessionFactoryImplementor factory) { // 2 arguments without locale, 3 with locale; however locale not used if (arguments.size() < 2 || arguments.size() > 3) { throw new QueryException("Need 2 or 3 arguments for fulltext function, but got only " + arguments.size() + " args=" + arguments); } // see KENMEI-4643 for details why this way complicated // see KENMEI-4678 why to uses simple as default String sql = "to_tsvector(" + textSearchConfigurationNameQueryPart + ", " + arguments.get(0) + ") @@ tsquery(plainto_tsquery(" + textSearchConfigurationNameQueryPart + ", " + arguments.get(1) + ") :: varchar || ':*')"; return sql; }
/**
 * Resolves the queryable collection persister for the collection-valued
 * property {@code entityName.propertyName}.
 *
 * @param entityName the owning entity name
 * @param propertyName the collection property on that entity
 * @param factory the session factory used to look up persisters
 * @return the collection persister, cast to {@link QueryableCollection}
 * @throws HibernateException if the path is not a collection, the role is not
 *         queryable, or the persister cannot be found
 */
protected QueryableCollection getQueryableCollection(String entityName, String propertyName, SessionFactoryImplementor factory)
        throws HibernateException {
    PropertyMapping ownerMapping = (PropertyMapping) factory.getEntityPersister(entityName);
    Type type = ownerMapping.toType(propertyName);
    if (!type.isCollectionType()) {
        throw new MappingException("Property path [" + entityName + "." + propertyName + "] does not reference a collection");
    }
    String role = ((CollectionType) type).getRole();
    try {
        return (QueryableCollection) factory.getCollectionPersister(role);
    }
    catch (ClassCastException cce) {
        // Preserve the cause instead of discarding it, so diagnostics keep the original failure.
        throw new QueryException("collection role is not queryable: " + role, cce);
    }
    catch (Exception e) {
        throw new QueryException("collection role not found: " + role, e);
    }
}
/**
 * Generates the signature list ("lista de ata de assinatura") report for the
 * currently selected class ({@code turma}) and returns it as a PDF download.
 *
 * @return the PDF as a {@link StreamedContent}, or {@code null} when no class
 *         is selected (warning shown) or generation fails (error shown)
 */
public StreamedContent getListaFile() {
    try {
        if (turma == null) {
            addMessage(getSeverityWarn(), "Por favor selecione a turma!");
            return null;
        }
        InputStream inputStream = getClass().getResourceAsStream("/br/edu/ifnmg/ifad/report/lista_ata_assinatura.jrxml");
        HashMap<String, Object> map = new HashMap<String, Object>();
        map.put("COD_TURMA", turma.getId());
        SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) HibernateUtil.getSessionFactory();
        // NOTE(review): this JDBC connection is handed to the report and never closed
        // here — confirm the reporting layer releases it, otherwise it leaks per call.
        map.put("REPORT_CONNECTION", factoryImplementor.getConnectionProvider().getConnection());
        StringBuilder nome = new StringBuilder("lista_ata_");
        nome.append(turma.getNome().replaceAll(" ", "_"));
        nome.append(".pdf");
        StreamedContent streamedContent = new DefaultStreamedContent(
                new ByteArrayInputStream(ReportUtil.reportToPDF(null, inputStream, map)),
                "application/pdf", nome.toString());
        return streamedContent;
    } catch (Exception ex) {
        // NOTE(review): message mentions "senhas" — looks copy-pasted from getSenhasFile.
        addMessage(getSeverityError(), "Erro ao emitir arquivo com senhas! Detalhes: " + ex.getMessage());
        Logger.getLogger(EmissaoReportsBean.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
/**
 * Returns the entity name of this collection's element type.
 *
 * @param factory the session factory used to look up the collection persister
 * @return the associated entity name
 * @throws MappingException if the collection is not an association or its
 *         role is not queryable
 */
public String getAssociatedEntityName(SessionFactoryImplementor factory) throws MappingException {
    try {
        QueryableCollection collectionPersister = (QueryableCollection) factory.getCollectionPersister(role);
        if (!collectionPersister.getElementType().isEntityType()) {
            throw new MappingException("collection was not an association: " + collectionPersister.getRole());
        }
        return collectionPersister.getElementPersister().getEntityName();
    }
    catch (ClassCastException cce) {
        // Preserve the cause: the persister exists but is not a QueryableCollection.
        throw new MappingException("collection role is not queryable " + role, cce);
    }
}
/**
 * Writes a property value into the dom4j XML representation of the owning
 * entity: the value is stored in attribute {@code attributeName} of the child
 * element {@code elementName} under the owner element.
 *
 * @param target the owner node, expected to be a dom4j {@link Element}
 * @param value the property value; {@code null} removes the child element entirely
 * @param factory the session factory passed through to the property type
 * @throws HibernateException propagated from the underlying property type
 */
public void set(Object target, Object value, SessionFactoryImplementor factory) throws HibernateException {
    Element owner = (Element) target;
    Element element = owner.element(elementName);
    if (value == null) {
        // Null value: detach the whole child element if it exists.
        if (element != null) element.detach();
    }
    else {
        Attribute attribute;
        if (element == null) {
            // Child element missing: create it; no attribute can exist yet.
            element = owner.addElement(elementName);
            attribute = null;
        }
        else {
            attribute = element.attribute(attributeName);
        }
        if (attribute == null) {
            // Create the attribute with a placeholder value so it can be
            // re-fetched and then overwritten by setToXMLNode below.
            element.addAttribute(attributeName, "null");
            attribute = element.attribute(attributeName);
        }
        super.propertyType.setToXMLNode(attribute, value, factory);
    }
}
/**
 * Generates the teacher-evaluation report ("ficha de avaliação docente"),
 * including its averages sub-report, and returns it as a PDF download whose
 * file name carries the current date.
 *
 * @return the PDF as a {@link StreamedContent}, or {@code null} on failure
 *         (an error message is shown and the exception is logged)
 */
public StreamedContent getFichaAvaliacaoDocenteFile() {
    try {
        InputStream inputStream = getClass().getResourceAsStream("/br/edu/ifnmg/ifad/report/ficha_avaliacao_docente.jrxml");
        HashMap<String, Object> map = new HashMap<String, Object>();
        InputStream isSubReport = getClass().getResourceAsStream("/br/edu/ifnmg/ifad/report/media_respostas_avaliadas_subreport.jrxml");
        map.put("SUBREPORT_DIR", ReportUtil.compileReport(isSubReport));
        SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) HibernateUtil.getSessionFactory();
        // NOTE(review): this JDBC connection is never closed here — confirm the
        // reporting layer releases it, otherwise a pooled connection leaks per call.
        map.put("REPORT_CONNECTION", factoryImplementor.getConnectionProvider().getConnection());
        StringBuilder nome = new StringBuilder("ficha_avaliacao_docente_");
        nome.append(new SimpleDateFormat("yyyy_MM_dd").format(new Date()));
        nome.append(".pdf");
        return new DefaultStreamedContent(
                new ByteArrayInputStream(ReportUtil.reportToPDF(null, inputStream, map)),
                "application/pdf", nome.toString());
    } catch (Exception ex) {
        // Fixed copy-pasted message: it previously referred to the password-list report.
        addMessage(getSeverityError(), "Erro ao emitir ficha de avaliação docente! Detalhes: " + ex.getMessage());
        Logger.getLogger(EmissaoReportsBean.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
/**
 * Convenience constructor that derives the key columns and identifier type
 * from the entity persister and delegates to the main constructor.
 *
 * @param persister the outer-join-loadable entity persister to load
 * @param batchSize number of entities to load per batch
 * @param lockMode lock mode to apply to the loaded rows
 * @param factory the session factory
 * @param loadQueryInfluencers filters/fetch profiles influencing the query
 * @param joins additional join fragments to include in the load query
 * @throws MappingException if the load query cannot be constructed
 */
JoinEntityLoader(
        org.hibernate.persister.entity.OuterJoinLoadable persister,
        int batchSize,
        LockMode lockMode,
        SessionFactoryImplementor factory,
        LoadQueryInfluencers loadQueryInfluencers,
        java.util.List<String> joins) throws MappingException {
    this(
            persister,
            persister.getIdentifierColumnNames(),
            persister.getIdentifierType(),
            batchSize,
            lockMode,
            factory,
            loadQueryInfluencers,
            joins
    );
}
/**
 * Creates a loader for a one-to-many collection, building its static SQL via
 * a {@link OneToManyJoinWalker}.
 *
 * @param oneToManyPersister the collection persister to load
 * @param batchSize number of collections to initialize per batch
 * @param subquery optional subselect restricting the loaded keys
 * @param factory the session factory
 * @param enabledFilters currently enabled filters influencing the SQL
 * @throws MappingException if the load query cannot be constructed
 */
public OneToManyLoader(
        QueryableCollection oneToManyPersister,
        int batchSize,
        String subquery,
        SessionFactoryImplementor factory,
        Map enabledFilters) throws MappingException {
    super(oneToManyPersister, factory, enabledFilters);
    JoinWalker walker = new OneToManyJoinWalker(oneToManyPersister, batchSize, subquery, factory, enabledFilters);
    initFromWalker(walker);
    postInstantiate();
    // Guard the concatenation so the SQL string is only built when debug logging is on.
    if (log.isDebugEnabled()) {
        log.debug("Static select for one-to-many " + oneToManyPersister.getRole() + ": " + getSQLString());
    }
}
/**
 * Compares two component instances property by property, delegating to each
 * property type's own equality check.
 *
 * @return {@code true} when both references are identical, or every property
 *         value compares equal; {@code false} if either side is {@code null}
 *         (and not both) or any property differs
 */
public boolean isEqual(Object x, Object y, EntityMode entityMode, SessionFactoryImplementor factory)
        throws HibernateException {
    // Identity (covers both-null) short-circuits the property walk.
    if (x == y) {
        return true;
    }
    // After the identity check, a single null means inequality.
    if (x == null || y == null) {
        return false;
    }
    Object[] leftValues = getPropertyValues(x, entityMode);
    Object[] rightValues = getPropertyValues(y, entityMode);
    for (int index = 0; index < propertySpan; index++) {
        boolean propertyEqual = propertyTypes[index].isEqual(leftValues[index], rightValues[index], entityMode, factory);
        if (!propertyEqual) {
            return false;
        }
    }
    return true;
}
/**
 * Compiles the given HQL with the classic (pre-AST) query translator and
 * prints both the HQL and the generated SQL; compilation failures are
 * printed and rethrown.
 */
protected void runClassicTranslator(String hql) throws Exception {
    SessionFactoryImplementor sessionFactory = getSessionFactoryImplementor();
    Map filterReplacements = new HashMap();
    QueryTranslator classicTranslator = null;
    try {
        QueryTranslatorFactory translatorFactory = new ClassicQueryTranslatorFactory();
        classicTranslator = translatorFactory.createQueryTranslator(hql, hql, Collections.EMPTY_MAP, sessionFactory);
        classicTranslator.compile(filterReplacements, false);
    }
    catch (Exception e) {
        e.printStackTrace();
        throw e;
    }
    String generatedSql = classicTranslator.getSQLString();
    System.out.println("HQL : " + hql);
    System.out.println("OLD SQL: " + generatedSql);
}
/**
 * Builds a one-to-many collection initializer: a plain loader when no
 * batching is requested, otherwise one loader per derived batch size wrapped
 * in a batching initializer.
 */
public static CollectionInitializer createBatchingOneToManyInitializer(
        final QueryableCollection persister,
        final int maxBatchSize,
        final SessionFactoryImplementor factory,
        final Map enabledFilters) throws MappingException {
    if (maxBatchSize <= 1) {
        // No batching requested: a single plain loader suffices.
        return new OneToManyLoader(persister, factory, enabledFilters);
    }
    int[] batchSizes = ArrayHelper.getBatchSizes(maxBatchSize);
    Loader[] loaders = new Loader[batchSizes.length];
    for (int index = 0; index < batchSizes.length; index++) {
        loaders[index] = new OneToManyLoader(persister, batchSizes[index], factory, enabledFilters);
    }
    return new BatchingCollectionInitializer(persister, batchSizes, loaders);
}
/**
 * Builds a basic-collection initializer: a plain loader when no batching is
 * requested, otherwise one loader per derived batch size wrapped in a
 * batching initializer.
 */
public static CollectionInitializer createBatchingCollectionInitializer(
        final QueryableCollection persister,
        final int maxBatchSize,
        final SessionFactoryImplementor factory,
        final Map enabledFilters) throws MappingException {
    if (maxBatchSize <= 1) {
        // No batching requested: a single plain loader suffices.
        return new BasicCollectionLoader(persister, factory, enabledFilters);
    }
    int[] batchSizes = ArrayHelper.getBatchSizes(maxBatchSize);
    Loader[] loaders = new Loader[batchSizes.length];
    for (int index = 0; index < batchSizes.length; index++) {
        loaders[index] = new BasicCollectionLoader(persister, batchSizes[index], factory, enabledFilters);
    }
    return new BatchingCollectionInitializer(persister, batchSizes, loaders);
}
/**
 * Test fixture hook: after the session factory is built, creates the extra
 * schemas (sb, sa) and the quoted SA."Team" table the tests rely on. The
 * session used for the raw DDL is always closed, even on failure.
 */
public void afterSessionFactoryBuilt(SessionFactoryImplementor sfi) {
    super.afterSessionFactoryBuilt(sfi);
    Session session = null;
    try {
        session = sfi.openSession();
        Statement stat = session.connection().createStatement();
        stat.execute("CREATE SCHEMA sb AUTHORIZATION DBA ");
        stat.execute(" CREATE SCHEMA sa AUTHORIZATION DBA ");
        stat.execute(" CREATE TABLE \"SA\".\"Team\" (test INTEGER) ");
        stat.close();
    }
    catch (SQLException e) {
        // Preserve the SQLException as the cause instead of swallowing it.
        throw new RuntimeException("could not prepare additional schemas", e);
    }
    finally {
        if (session != null) {
            try {
                session.close();
            }
            catch (Throwable ignore) {
                // Best-effort cleanup; a close failure must not mask the real error.
            }
        }
    }
}
public EntityPersister getSubclassEntityPersister(Object instance, SessionFactoryImplementor factory, EntityMode entityMode) { if ( !hasSubclasses() ) { return this; } else { // TODO : really need a way to do something like : // getTuplizer(entityMode).determineConcreteSubclassEntityName(instance) Class clazz = instance.getClass(); if ( clazz == getMappedClass( entityMode ) ) { return this; } else { String subclassEntityName = getSubclassEntityName( clazz ); if ( subclassEntityName == null ) { throw new HibernateException( "instance not of expected entity type: " + clazz.getName() + " is not a: " + getEntityName() ); } else { return factory.getEntityPersister( subclassEntityName ); } } } }
/**
 * Verifies that with the statement-cache-enabled pool the prepared SELECT is
 * cached on first use (miss + insert) and reused (hit) when a second session
 * runs the same query over the same pooled connection.
 */
@Test
public void testSelectStatementWithStatementsCache() throws SQLException {
    Session session = HibernateTestUtils.getSessionFactoryWithStmtCache().openSession();
    ConnectionProvider cp = ((SessionFactoryImplementor) session.getSessionFactory()).getConnectionProvider();
    ViburDBCPDataSource ds = ((ViburDBCPConnectionProvider) cp).getDataSource();
    ConcurrentMap<StatementMethod, StatementHolder> mockedStatementCache = mockStatementCache(ds);

    executeAndVerifySelectInSession(session);
    // resources/hibernate-with-stmt-cache.cfg.xml defines pool with 1 connection only, that's why
    // the second session will get and use the same underlying connection.
    // NOTE(review): the first session is overwritten without an explicit close —
    // presumably executeAndVerifySelectInSession closes it; confirm to avoid a leak.
    session = HibernateTestUtils.getSessionFactoryWithStmtCache().openSession();
    executeAndVerifySelectInSession(session);

    // Expected cache interaction order: lookup miss, insert, then lookup hit.
    InOrder inOrder = inOrder(mockedStatementCache);
    inOrder.verify(mockedStatementCache).get(key1.capture());
    inOrder.verify(mockedStatementCache).putIfAbsent(same(key1.getValue()), val1.capture());
    inOrder.verify(mockedStatementCache).get(key2.capture());

    // One cached entry, both lookups share the key, and the cached statement
    // is back in AVAILABLE state after use.
    assertEquals(1, mockedStatementCache.size());
    assertTrue(mockedStatementCache.containsKey(key1.getValue()));
    assertEquals(key1.getValue(), key2.getValue());
    assertEquals(AVAILABLE, val1.getValue().state().get());
}
/**
 * Private constructor used exclusively from custom serialization.
 *
 * @param factory the owning session factory
 * @param callback connection life-cycle callback
 * @param releaseMode requested release mode; ignored when the connection was
 *        user-supplied (see field assignment below)
 * @param interceptor session interceptor, also handed to the new batcher
 * @param wasConnectionSupplied whether the user supplied the JDBC connection
 * @param isClosed whether the manager was already closed when serialized
 */
private ConnectionManager(
        SessionFactoryImplementor factory,
        Callback callback,
        ConnectionReleaseMode releaseMode,
        Interceptor interceptor,
        boolean wasConnectionSupplied,
        boolean isClosed) {
    this.factory = factory;
    this.callback = callback;
    this.interceptor = interceptor;
    this.batcher = factory.getSettings().getBatcherFactory().createBatcher(this, interceptor);
    this.wasConnectionSupplied = wasConnectionSupplied;
    this.isClosed = isClosed;
    // A user-supplied connection is never released by Hibernate before close,
    // so the requested release mode is overridden with ON_CLOSE in that case.
    this.releaseMode = wasConnectionSupplied ? ConnectionReleaseMode.ON_CLOSE : releaseMode;
}
public StreamedContent getSenhasFile() { try { // if(turma == null){ // addMessage(getSeverityWarn(),"Por favor selecione a turma!"); // return null; // } InputStream inputStream = getClass().getResourceAsStream("/br/edu/ifnmg/ifad/report/lista_cpf_senha.jrxml"); HashMap<String, Object> map = new HashMap<String, Object>(); StringBuilder nome = new StringBuilder("senhas_"); if(turma != null){ map.put("COD_TURMA", turma.getId()); nome.append(turma.getNome().replaceAll(" ", "_")); } SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) HibernateUtil.getSessionFactory(); map.put("REPORT_CONNECTION",factoryImplementor.getConnectionProvider().getConnection()); nome.append(".pdf"); StreamedContent streamedContent = new DefaultStreamedContent(new ByteArrayInputStream(ReportUtil.reportToPDF(null, inputStream, map)), "application/pdf", nome.toString()); return streamedContent; } catch (Exception ex) { addMessage(getSeverityError(),"Erro ao emitir arquivo com senhas! Detalhes: "+ex.getMessage()); Logger.getLogger(EmissaoReportsBean.class.getName()).log(Level.SEVERE, null, ex); return null; } }
/**
 * Execute schema update script, determined by the Configuration object
 * used for creating the SessionFactory. A replacement for Hibernate's
 * SchemaUpdate class, for automatically executing schema update scripts
 * on application startup. Can also be invoked manually.
 * <p>Fetch the LocalSessionFactoryBean itself rather than the exposed
 * SessionFactory to be able to invoke this method, e.g. via
 * {@code LocalSessionFactoryBean lsfb = (LocalSessionFactoryBean) ctx.getBean("&mySessionFactory");}.
 * <p>Uses the SessionFactory that this bean generates for accessing a
 * JDBC connection to perform the script.
 * @throws DataAccessException in case of script execution errors
 * @see #setSchemaUpdate
 * @see org.hibernate.cfg.Configuration#generateSchemaUpdateScript
 * @see org.hibernate.tool.hbm2ddl.SchemaUpdate
 */
public void updateDatabaseSchema() throws DataAccessException {
    logger.info("Updating database schema for Hibernate SessionFactory");
    DataSource dataSource = getDataSource();
    if (dataSource != null) {
        // Make given DataSource available for the schema update.
        configTimeDataSourceHolder.set(dataSource);
    }
    try {
        SessionFactory sessionFactory = getSessionFactory();
        final Dialect dialect = ((SessionFactoryImplementor) sessionFactory).getDialect();
        HibernateTemplate hibernateTemplate = new HibernateTemplate(sessionFactory);
        // Never flush: the session is used purely for DDL, not entity state.
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
            new HibernateCallback<Object>() {
                @Override
                public Object doInHibernate(Session session) throws HibernateException, SQLException {
                    // session.connection() is deprecated but needed to run raw DDL.
                    @SuppressWarnings("deprecation")
                    Connection con = session.connection();
                    // Diff the live database against the mapping metadata ...
                    DatabaseMetadata metadata = new DatabaseMetadata(con, dialect);
                    String[] sql = getConfiguration().generateSchemaUpdateScript(dialect, metadata);
                    // ... and apply only the statements needed to catch up.
                    executeSchemaScript(con, sql);
                    return null;
                }
            }
        );
    }
    finally {
        if (dataSource != null) {
            // Always unbind the thread-local DataSource again.
            configTimeDataSourceHolder.remove();
        }
    }
}
/**
 * Validate the database schema against the Configuration object used for
 * creating the SessionFactory. A replacement for Hibernate's
 * SchemaValidator class, to be invoked after application startup.
 * <p>Fetch the LocalSessionFactoryBean itself rather than the exposed
 * SessionFactory to be able to invoke this method, e.g. via
 * {@code LocalSessionFactoryBean lsfb = (LocalSessionFactoryBean) ctx.getBean("&mySessionFactory");}.
 * <p>Uses the SessionFactory that this bean generates for accessing a
 * JDBC connection to perform the validation.
 * @throws DataAccessException in case of validation errors
 * @see org.hibernate.cfg.Configuration#validateSchema
 * @see org.hibernate.tool.hbm2ddl.SchemaValidator
 */
public void validateDatabaseSchema() throws DataAccessException {
    logger.info("Validating database schema for Hibernate SessionFactory");
    DataSource dataSource = getDataSource();
    if (dataSource != null) {
        // Make given DataSource available for the schema validation.
        configTimeDataSourceHolder.set(dataSource);
    }
    try {
        SessionFactory sessionFactory = getSessionFactory();
        final Dialect dialect = ((SessionFactoryImplementor) sessionFactory).getDialect();
        HibernateTemplate hibernateTemplate = new HibernateTemplate(sessionFactory);
        // Never flush: the session is used purely for metadata inspection.
        hibernateTemplate.setFlushMode(HibernateTemplate.FLUSH_NEVER);
        hibernateTemplate.execute(
            new HibernateCallback<Object>() {
                @Override
                public Object doInHibernate(Session session) throws HibernateException, SQLException {
                    // session.connection() is deprecated but needed for raw metadata access.
                    @SuppressWarnings("deprecation")
                    Connection con = session.connection();
                    // 'false' = do not extract extra metadata needed only for updates.
                    DatabaseMetadata metadata = new DatabaseMetadata(con, dialect, false);
                    getConfiguration().validateSchema(dialect, metadata);
                    return null;
                }
            }
        );
    }
    finally {
        if (dataSource != null) {
            // Always unbind the thread-local DataSource again.
            configTimeDataSourceHolder.remove();
        }
    }
}
/**
 * Execute schema creation script, determined by the Configuration object
 * used for creating the SessionFactory. A replacement for Hibernate's
 * SchemaExport class, to be invoked on application setup.
 * <p>Fetch the LocalSessionFactoryBean itself rather than the exposed
 * SessionFactory to be able to invoke this method, e.g. via
 * {@code LocalSessionFactoryBean lsfb = (LocalSessionFactoryBean) ctx.getBean("&mySessionFactory");}.
 * <p>Uses the SessionFactory that this bean generates for accessing a
 * JDBC connection to perform the script.
 * @throws DataAccessException in case of script execution errors
 * @see org.hibernate.cfg.Configuration#generateSchemaCreationScript
 * @see org.hibernate.tool.hbm2ddl.SchemaExport#create
 */
public void createDatabaseSchema() throws DataAccessException {
    logger.info("Creating database schema for Hibernate SessionFactory");
    DataSource dataSource = getDataSource();
    if (dataSource != null) {
        // Make given DataSource available for the schema creation.
        configTimeDataSourceHolder.set(dataSource);
    }
    try {
        SessionFactory sessionFactory = getSessionFactory();
        final Dialect dialect = ((SessionFactoryImplementor) sessionFactory).getDialect();
        HibernateTemplate hibernateTemplate = new HibernateTemplate(sessionFactory);
        hibernateTemplate.execute(
            new HibernateCallback<Object>() {
                @Override
                public Object doInHibernate(Session session) throws HibernateException, SQLException {
                    // session.connection() is deprecated but needed to run raw DDL.
                    @SuppressWarnings("deprecation")
                    Connection con = session.connection();
                    String[] sql = getConfiguration().generateSchemaCreationScript(dialect);
                    executeSchemaScript(con, sql);
                    return null;
                }
            }
        );
    }
    finally {
        if (dataSource != null) {
            // Always unbind the thread-local DataSource again.
            configTimeDataSourceHolder.remove();
        }
    }
}
/**
 * Determine the DataSource of the given SessionFactory.
 * @param sessionFactory the SessionFactory to check
 * @return the DataSource, or {@code null} if none found
 * @see org.hibernate.engine.SessionFactoryImplementor#getConnectionProvider
 * @see LocalDataSourceConnectionProvider
 */
public static DataSource getDataSource(SessionFactory sessionFactory) {
    if (!(sessionFactory instanceof SessionFactoryImplementor)) {
        return null;
    }
    ConnectionProvider provider = ((SessionFactoryImplementor) sessionFactory).getConnectionProvider();
    if (provider instanceof LocalDataSourceConnectionProvider) {
        return ((LocalDataSourceConnectionProvider) provider).getDataSource();
    }
    return null;
}
/**
 * Translates an HQL query string into the SQL that Hibernate would generate
 * for it.
 *
 * @return the generated SQL, or {@code null} when the query is null/blank or
 *         no session factory is available
 */
private static String hqlToSql(String hqlQueryText, SessionFactory sessionFactory) {
    if (hqlQueryText == null || sessionFactory == null) {
        return null;
    }
    if (hqlQueryText.trim().length() == 0) {
        return null;
    }
    QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
    SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) sessionFactory;
    QueryTranslator queryTranslator = translatorFactory.createQueryTranslator(
            hqlQueryText, hqlQueryText, Collections.EMPTY_MAP, factoryImplementor);
    queryTranslator.compile(Collections.EMPTY_MAP, false);
    return queryTranslator.getSQLString();
}
@SuppressWarnings( "unchecked" ) public EntityManagerFactoryImpl( SessionFactory sessionFactory, PersistenceUnitTransactionType transactionType, boolean discardOnClose, Class<?> sessionInterceptorClass, Configuration cfg) { this.sessionFactory = sessionFactory; this.transactionType = transactionType; this.discardOnClose = discardOnClose; this.sessionInterceptorClass = sessionInterceptorClass; final Iterator<PersistentClass> classes = cfg.getClassMappings(); List<PersistentClass> persistentClasses = new ArrayList<PersistentClass>(); while (classes.hasNext()) { PersistentClass persistentClass = classes.next(); // Hardcode jBPM classes for now, but make tidy with a property like "hibernate.ejb.metamodel.excluded.pkgs" if (persistentClass.getClassName().startsWith("org.jbpm")) { continue; } else { persistentClasses.add(persistentClass); } } //a safe guard till we are confident that metamodel is wll tested if ( !"disabled".equalsIgnoreCase( cfg.getProperty( "hibernate.ejb.metamodel.generation" ) ) ) { this.metamodel = MetamodelImpl.buildMetamodel( persistentClasses.iterator(), ( SessionFactoryImplementor ) sessionFactory ); } else { this.metamodel = null; } this.criteriaBuilder = new CriteriaBuilderImpl( this ); this.util = new HibernatePersistenceUnitUtil( this ); HashMap<String,Object> props = new HashMap<String, Object>(); addAll( props, ( (SessionFactoryImplementor) sessionFactory ).getProperties() ); addAll( props, cfg.getProperties() ); this.properties = Collections.unmodifiableMap( props ); }
@Test @SuppressWarnings("rawtypes") public void testJtaSessionSynchronization() throws Exception { TransactionManager tm = mock(TransactionManager.class); MockJtaTransaction transaction = new MockJtaTransaction(); given(tm.getTransaction()).willReturn(transaction); final SessionFactoryImplementor sf = mock(SessionFactoryImplementor.class); final Session session = mock(Session.class); given(sf.openSession()).willReturn(session); given(sf.getTransactionManager()).willReturn(tm); given(session.isOpen()).willReturn(true); given(session.getFlushMode()).willReturn(FlushMode.AUTO); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); for (int i = 0; i < 5; i++) { ht.executeFind(new HibernateCallback() { @Override public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); } Synchronization synchronization = transaction.getSynchronization(); assertTrue("JTA synchronization registered", synchronization != null); synchronization.beforeCompletion(); synchronization.afterCompletion(Status.STATUS_COMMITTED); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); verify(session).flush(); verify(session).close(); }
@Test @SuppressWarnings("rawtypes") public void testJtaSessionSynchronizationWithRollback() throws Exception { TransactionManager tm = mock(TransactionManager.class); MockJtaTransaction transaction = new MockJtaTransaction(); given(tm.getTransaction()).willReturn(transaction); final SessionFactoryImplementor sf = mock(SessionFactoryImplementor.class); final Session session = mock(Session.class); given(sf.openSession()).willReturn(session); given(sf.getTransactionManager()).willReturn(tm); given(session.isOpen()).willReturn(true); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); HibernateTemplate ht = new HibernateTemplate(sf); ht.setExposeNativeSession(true); for (int i = 0; i < 5; i++) { ht.executeFind(new HibernateCallback() { @Override public Object doInHibernate(org.hibernate.Session sess) { assertTrue("Has thread session", TransactionSynchronizationManager.hasResource(sf)); assertEquals(session, sess); return null; } }); } Synchronization synchronization = transaction.getSynchronization(); assertTrue("JTA synchronization registered", synchronization != null); synchronization.afterCompletion(Status.STATUS_ROLLEDBACK); assertTrue("Hasn't thread session", !TransactionSynchronizationManager.hasResource(sf)); assertTrue("JTA synchronizations not active", !TransactionSynchronizationManager.isSynchronizationActive()); verify(session).close(); }
/**
 * HQL to SQL translator.
 *
 * @param hql the HQL query text; may be {@code null} or blank
 * @return the SQL Hibernate would generate, or {@code null} for null/blank input
 */
public static String toSql(String hql) {
    if (hql == null) {
        return null;
    }
    if (hql.trim().length() == 0) {
        return null;
    }
    QueryTranslatorFactory translatorFactory = new ASTQueryTranslatorFactory();
    SessionFactoryImplementor factoryImplementor = (SessionFactoryImplementor) sessionFactory;
    QueryTranslator queryTranslator = translatorFactory.createQueryTranslator(
            hql, hql, Collections.EMPTY_MAP, factoryImplementor);
    queryTranslator.compile(Collections.EMPTY_MAP, false);
    return queryTranslator.getSQLString();
}
/**
 * Renders a CONTAINS-based full-text predicate for the given arguments.
 * Accepts 2 arguments (column and search expression) or 3 (third argument,
 * presumably a locale, is ignored — TODO confirm).
 *
 * @throws QueryException if fewer than 2 or more than 3 arguments are given
 */
@Override
public String render(Type firstArgumentType, List args, SessionFactoryImplementor factory) throws QueryException {
    if (args.size() < 2 || args.size() > 3) {
        // Message now matches the actual contract: 2 or 3 arguments are accepted.
        throw new QueryException("Need 2 or 3 arguments for fulltext function, but got "
                + args.size() + " args=" + args);
    }
    return "CONTAINS(" + args.get(0) + "," + args.get(1) + ") AND 1 ";
}
/**
 * Renders a CONTAINS-score full-text predicate ({@code CONTAINS(...) > 0})
 * for the given arguments. Accepts 2 arguments (column and search expression)
 * or 3 (third argument, presumably a locale, is ignored — TODO confirm).
 *
 * @throws QueryException if fewer than 2 or more than 3 arguments are given
 */
@Override
public String render(Type firstArgumentType, List args, SessionFactoryImplementor factory) throws QueryException {
    if (args.size() < 2 || args.size() > 3) {
        // Message now matches the actual contract: 2 or 3 arguments are accepted.
        throw new QueryException("Need 2 or 3 arguments for fulltext function, but got "
                + args.size() + " args=" + args);
    }
    return " (CONTAINS(" + args.get(0) + "," + args.get(1) + ",1) >0) and 1 ";
}
/**
 * Renders a case-insensitive LIKE fallback for the full-text function.
 * Accepts 2 arguments (column and pattern) or 3 (third argument, presumably
 * a locale, is ignored — TODO confirm); appends the dialect's TRUE literal.
 *
 * @throws QueryException if fewer than 2 or more than 3 arguments are given
 */
@Override
public String render(Type firstArgumentType, List arguments, SessionFactoryImplementor factory) throws QueryException {
    if (arguments.size() < 2 || arguments.size() > 3) {
        // Message now matches the actual contract: 2 or 3 arguments are accepted.
        throw new QueryException("Need 2 or 3 arguments for like fulltext function, but got "
                + arguments.size() + " args=" + arguments);
    }
    return "lower(" + arguments.get(0) + ") like lower(" + arguments.get(1) + ") AND "
            + factory.getDialect().toBooleanValueString(true);
}
/**
 * Tries to extract <code>HazelcastInstance</code> from <code>SessionFactory</code>.
 *
 * @param sessionFactory Hibernate <code>SessionFactory</code> to extract settings from
 * @return Currently used <code>HazelcastInstance</code> or null if an error occurs.
 */
public static HazelcastInstance getHazelcastInstance(final SessionFactory sessionFactory) {
    // Delegate to the implementor-typed overload when possible.
    if (sessionFactory instanceof SessionFactoryImplementor) {
        return getHazelcastInstance((SessionFactoryImplementor) sessionFactory);
    }
    LOGGER.warning("SessionFactory is expected to be instance of SessionFactoryImplementor.");
    return null;
}
/**
 * Tries to extract <code>HazelcastInstance</code> from <code>SessionFactoryImplementor</code>.
 *
 * @param sessionFactory Hibernate <code>SessionFactory</code> to extract settings from
 * @return currently used <code>HazelcastInstance</code> or null if an error occurs.
 */
public static HazelcastInstance getHazelcastInstance(final SessionFactoryImplementor sessionFactory) {
    final RegionFactory regionFactory = sessionFactory.getSettings().getRegionFactory();
    if (!(regionFactory instanceof AbstractHazelcastCacheRegionFactory)) {
        LOGGER.warning("Current 2nd level cache implementation is not HazelcastCacheRegionFactory!");
        return null;
    }
    return ((AbstractHazelcastCacheRegionFactory) regionFactory).getHazelcastInstance();
}
/**
 * Creates eviction rules for the given session factory.
 * <p>The implementor reference is only populated when the factory actually is
 * a {@link SessionFactoryImplementor}; otherwise it stays {@code null} and
 * callers must handle that. (A commented-out unwrap of a Spring proxy
 * previously lived in the else-branch; removed as dead code.)
 *
 * @param factory the session factory the eviction rules operate on
 */
public CacheEvictionRules(SessionFactory factory) {
    this.sessionFactory = factory;
    if (factory instanceof SessionFactoryImplementor) {
        this.factoryImplementor = (SessionFactoryImplementor) factory;
    }
}
/**
 * To get all columns for table.
 *
 * @param table String — table name passed to {@code DatabaseMetaData.getColumns}
 * @return List<ColumnDefinition> — one entry per column (name plus mapped Java
 *         class name); empty when the session factory is not a SessionFactoryImplementor
 * @throws SQLException in case of error
 */
public List<ColumnDefinition> getAllColumnsForTable(String table) throws SQLException {
    List<ColumnDefinition> columnDefinitions = new LinkedList<ColumnDefinition>();
    if (sessionFactory instanceof SessionFactoryImplementor) {
        // Best-effort cleanup of the connection/provider left over from a previous
        // call; failures are logged and ignored so a stale handle cannot block us.
        try {
            if (connection != null) {
                connection.close();
            }
            if (connectionProvider != null) {
                connectionProvider.close();
            }
        } catch (Exception e) {
            LOG.error("Could not close connections ", e);
        }
        connectionProvider = ((SessionFactoryImplementor) sessionFactory).getConnectionProvider();
        connection = connectionProvider.getConnection();
        // NOTE(review): the fresh connection/provider are intentionally kept open in
        // fields after this method returns — presumably closed on the next call;
        // confirm there is also a final cleanup path (e.g. on bean destruction).
        DatabaseMetaData databaseMetaData = connection.getMetaData();
        ResultSet rSet = null;
        try {
            rSet = databaseMetaData.getColumns(null, null, table, null);
            while (rSet.next()) {
                columnDefinitions.add(new ColumnDefinition(rSet.getString("COLUMN_NAME"),
                        getColumnClassName(rSet.getInt("DATA_TYPE"))));
            }
        } finally {
            if (rSet != null) {
                rSet.close();
            }
        }
    }
    return columnDefinitions;
}