@Override
public void prepare(SQLWarning warning) {
	// Log the configured intro message at debug level; the warning is passed
	// through to the logger as the message parameter/attachment.
	LOG.debug( introMessage, warning );
}
@Override public void release(NClob nclob) { // todo : just store them in clobs? if ( nclobs == null ) { log.debug( "Request to release NClob, but appears no NClobs have ever been registered" ); return; } nclobs.remove( nclob ); }
@Override
public void clearSynchronizations() {
	// Discard any locally-registered Synchronizations; the collection is
	// lazily created, so it may still be null here.
	log.debug( "Clearing local Synchronizations" );
	if ( synchronizations == null ) {
		return;
	}
	synchronizations.clear();
}
}
@Override
public void release(Clob clob) {
	// Drop the Clob from the tracked set; the backing collection is lazily
	// created, so null means no Clob was ever registered.
	if ( clobs != null ) {
		clobs.remove( clob );
	}
	else {
		log.debug( "Request to release Clob, but appears no Clobs have ever been registered" );
	}
}
@Override
protected void doExecuteBatch() {
	// Execute only when statements have actually been batched; otherwise,
	// note the empty batch (but only if nothing was ever executed).
	if ( batchPosition != 0 ) {
		performExecution();
	}
	else if ( !batchExecuted ) {
		LOG.debug( "No batched statements to execute" );
	}
}
/**
 * Invoke the {@link Lifecycle#onUpdate} callback when the entity implements
 * {@link Lifecycle}, allowing it to veto the update.
 *
 * @return {@code true} if the update was vetoed by {@code onUpdate()}
 */
protected boolean invokeUpdateLifecycle(Object entity, EntityPersister persister, EventSource source) {
	if ( !persister.implementsLifecycle() ) {
		return false;
	}
	LOG.debug( "Calling onUpdate()" );
	if ( ( (Lifecycle) entity ).onUpdate( source ) ) {
		LOG.debug( "Update vetoed by onUpdate()" );
		return true;
	}
	return false;
}
@Override
public Object load(Serializable id, Object optionalObject, SharedSessionContractImplementor session, LockOptions lockOptions) {
	// Named query loaders cannot honor lock options; warn (at debug) and
	// delegate to the lock-free overload.
	if ( lockOptions != null ) {
		LOG.debug( "Ignoring lock-options passed to named query loader" );
	}
	return load( id, optionalObject, session );
}
/**
 * Register an HQL import (alias) for an entity name.
 *
 * @param importName the short/import name to register; must not be null
 * @param entityName the fully-qualified entity name it maps to; must not be null
 *
 * @throws IllegalArgumentException if either argument is null
 */
@Override
public void addImport(String importName, String entityName) {
	if ( importName == null || entityName == null ) {
		throw new IllegalArgumentException( "Import name or entity name is null" );
	}
	log.tracev( "Import: {0} -> {1}", importName, entityName );
	String old = imports.put( importName, entityName );
	if ( old != null ) {
		// Fixed: the previous message contained stray '{'/'}' characters left
		// over from a parameterized-logging placeholder, rendering "[{old}]".
		log.debug( "import name [" + importName + "] overrode previous [" + old + "]" );
	}
}
/**
 * Resolve all delayed association property references, then clear the
 * pending-handler list. No-op when no handlers were ever registered.
 */
private void processPropertyReferences() {
	if ( delayedPropertyReferenceHandlers == null ) {
		return;
	}
	log.debug( "Processing association property references" );
	delayedPropertyReferenceHandlers.forEach( handler -> handler.process( this ) );
	delayedPropertyReferenceHandlers.clear();
}
@Override public Reference getReference() { // from javax.naming.Referenceable LOG.debug( "Returning a Reference to the SessionFactory" ); return new Reference( SessionFactoryImpl.class.getName(), new StringRefAddr("uuid", getUuid()), SessionFactoryRegistry.ObjectFactoryImpl.class.getName(), null ); }
protected boolean invokeSaveLifecycle(Object entity, EntityPersister persister, EventSource source) { // Sub-insertions should occur before containing insertion so // Try to do the callback now if ( persister.implementsLifecycle() ) { LOG.debug( "Calling onSave()" ); if ( ((Lifecycle) entity).onSave( source ) ) { LOG.debug( "Insertion vetoed by onSave()" ); return true; } } return false; }
/**
 * Fire the pre-remove callbacks, then invoke {@link Lifecycle#onDelete} when
 * the entity implements {@link Lifecycle}, allowing it to veto the deletion.
 *
 * @return {@code true} if the deletion was vetoed by {@code onDelete()}
 */
protected boolean invokeDeleteLifecycle(EventSource session, Object entity, EntityPersister persister) {
	// preRemove must fire unconditionally, before any Lifecycle handling.
	callbackRegistry.preRemove( entity );
	if ( !persister.implementsLifecycle() ) {
		return false;
	}
	LOG.debug( "Calling onDelete()" );
	if ( ( (Lifecycle) entity ).onDelete( session ) ) {
		LOG.debug( "Deletion vetoed by onDelete()" );
		return true;
	}
	return false;
}
/** * Initialize the flags of the CollectionEntry, including the * dirty check. */ private void prepareCollectionFlushes(PersistenceContext persistenceContext) throws HibernateException { // Initialize dirty flags for arrays + collections with composite elements // and reset reached, doupdate, etc. LOG.debug( "Dirty checking collections" ); for ( Map.Entry<PersistentCollection,CollectionEntry> entry : IdentityMap.concurrentEntries( (Map<PersistentCollection,CollectionEntry>) persistenceContext.getCollectionEntries() )) { entry.getValue().preFlush( entry.getKey() ); } }
void showHqlAst(AST hqlAst) {
	// Guard on the log level so the (potentially expensive) AST rendering
	// is skipped entirely when debug logging is disabled.
	if ( !LOG.isDebugEnabled() ) {
		return;
	}
	LOG.debug( TokenPrinters.HQL_TOKEN_PRINTER.showAsString( hqlAst, "--- HQL AST ---" ) );
}
@Override
public void evictQueryRegions() {
	// Evict the default query-results region first, then every named region.
	if ( LOG.isDebugEnabled() ) {
		LOG.debug( "Evicting cache of all query regions." );
	}
	evictQueryResultRegion( defaultQueryResultsCache );
	namedQueryResultsCacheMap.values().forEach( this::evictQueryResultRegion );
}
@Override
public void throwQueryException() throws QueryException {
	// Happy path: no errors were recorded during parsing.
	if ( getErrorCount() == 0 ) {
		LOG.debug( "throwQueryException() : no errors" );
		return;
	}
	// Prefer the first recognition exception (carries position info);
	// otherwise fall back to the aggregated error string.
	if ( !recognitionExceptions.isEmpty() ) {
		throw QuerySyntaxException.convert( recognitionExceptions.get( 0 ), hql );
	}
	throw new QueryException( getErrorString(), hql );
}
}
/**
 * Synthesize a derived SELECT clause for a query that omitted one, splicing
 * the new SELECT_CLAUSE node in as the first child ahead of the FROM clause.
 * Statement order below is significant: the node must be linked into the
 * tree before the derived clause is initialized.
 */
private void createSelectClauseFromFromClause(QueryNode qn) throws SemanticException {
	final AST derivedSelect = astFactory.create( SELECT_CLAUSE, "{derived select clause}" );
	final AST fromClauseNode = qn.getFromClause();
	qn.setFirstChild( derivedSelect );
	derivedSelect.setNextSibling( fromClauseNode );
	selectClause = (SelectClause) derivedSelect;
	selectClause.initializeDerivedSelectClause( currentFromClause );
	LOG.debug( "Derived SELECT clause created." );
}
private HqlSqlWalker analyze(HqlParser parser, String collectionRole) throws QueryException, RecognitionException { final HqlSqlWalker w = new HqlSqlWalker( this, factory, parser, tokenReplacements, collectionRole ); final AST hqlAst = parser.getAST(); // Transform the tree. w.statement( hqlAst ); if ( LOG.isDebugEnabled() ) { LOG.debug( TokenPrinters.SQL_TOKEN_PRINTER.showAsString( w.getAST(), "--- SQL AST ---" ) ); } w.getParseErrorHandler().throwQueryException(); return w; }