/**
 * Creates a new LazyInitializationException carrying the supplied detail message.
 * The new exception is immediately logged at trace level (with itself as the
 * throwable) so the point of failure can be located from the log.
 *
 * @param msg A message explaining the exception condition
 */
public LazyInitializationException(String msg) {
	super( msg );
	// Pass `this` so the trace entry captures the construction stack trace.
	LOG.trace( msg, this );
}
/**
 * Returns the shared column-name-to-index cache, lazily building it on first
 * use from the result set's metadata.
 * <p>
 * NOTE(review): the method name's "retreive" spelling is kept as-is — it is
 * part of the existing interface.
 *
 * @param rs the result set whose metadata sizes the cache when first built
 * @return the (possibly just-created) {@code ColumnNameCache}
 * @throws SQLException if reading the result set metadata fails
 */
private ColumnNameCache retreiveColumnNameToIndexCache(ResultSet rs) throws SQLException {
	if ( columnNameCache != null ) {
		return columnNameCache;
	}
	log.trace( "Building columnName->columnIndex cache" );
	columnNameCache = new ColumnNameCache( rs.getMetaData().getColumnCount() );
	return columnNameCache;
}
/**
 * Resolves the named JPA ORM mapping resource on the classpath under the
 * {@code org/hibernate/jpa} package.
 *
 * @param fileName the resource file name to look up
 * @return the stream resolved via {@code resolveInHibernateNamespace}
 */
private InputStream getStreamFromClasspath(String fileName) {
	LOG.trace( "Recognized JPA ORM namespace; attempting to resolve on classpath under org/hibernate/jpa" );
	final String classpathResourceName = "org/hibernate/jpa/" + fileName;
	return resolveInHibernateNamespace( classpathResourceName );
}
}
/**
 * Custom serialization hook defined by the Java serialization spec; invoked
 * when the factory is serialized directly.
 *
 * @param oos The stream into which the object is being serialized.
 * @throws IOException Can be thrown by the stream
 */
private void writeObject(ObjectOutputStream oos) throws IOException {
	// Log the uuid up front so a hang/failure during serialization is attributable.
	LOG.debugf( "Serializing: %s", getUuid() );
	oos.defaultWriteObject();
	LOG.trace( "Serialized" );
}
/**
 * Sets whether subclasses are included. When subclass inclusion is being
 * disabled while this element is dereferenced by a superclass/subclass
 * property, a trace entry (with a synthetic stack trace) records who asked.
 *
 * @param includeSubclasses whether subclasses should be included
 */
public void setIncludeSubclasses(boolean includeSubclasses) {
	if ( !includeSubclasses && isDereferencedBySuperclassOrSubclassProperty() ) {
		if ( LOG.isTraceEnabled() ) {
			// The throwable exists only to capture the caller's stack trace in the log.
			LOG.trace( "Attempt to disable subclass-inclusions : ", new Exception( "Stack-trace source" ) );
		}
	}
	this.includeSubclasses = includeSubclasses;
}
/**
 * Custom deserialization hook defined by the Java serialization spec; invoked
 * when the factory is deserialized directly.
 *
 * @param ois The stream from which the object is being deserialized.
 * @throws IOException Can be thrown by the stream
 * @throws ClassNotFoundException Again, can be thrown by the stream
 */
private void readObject(ObjectInputStream ois) throws IOException, ClassNotFoundException {
	LOG.trace( "Deserializing" );
	ois.defaultReadObject();
	// The uuid is only readable after default deserialization has populated state.
	LOG.debugf( "Deserialized: %s", getUuid() );
}
/**
 * Rule-exit trace hook. Skipped entirely while trace logging is off or the
 * parser is in a syntactic predicate (guessing) — note the depth counter is
 * only decremented when a line is actually emitted, mirroring traceIn.
 */
@Override
public void traceOut(String ruleName) {
	if ( !LOG.isTraceEnabled() || inputState.guessing > 0 ) {
		return;
	}
	// Step back out one nesting level before computing the indent.
	final String indent = StringHelper.repeat( '-', ( --traceDepth * 2 ) );
	LOG.trace( "<-" + indent + " " + ruleName );
}
/**
 * Rule-exit trace hook carrying the current AST node.
 * <p>
 * Consistency fix: previously the {@code tree} argument was ignored, so exit
 * lines lacked the node name that the matching {@code traceIn(String, AST)}
 * logs. The exit line now includes {@code buildTraceNodeName(tree)} as well.
 *
 * @param ruleName the grammar rule being exited
 * @param tree the AST node current at rule exit
 */
@Override
public void traceOut(String ruleName, AST tree) {
	if ( !LOG.isTraceEnabled() ) {
		return;
	}
	// Suppress tracing inside syntactic predicates (speculative parsing).
	if ( inputState.guessing > 0 ) {
		return;
	}
	String prefix = "<-" + StringHelper.repeat( '-', ( --traceDepth * 2 ) ) + " ";
	// Match the entry-side format: "<rule> (<node>)".
	String traceText = ruleName + " (" + buildTraceNodeName( tree ) + ")";
	LOG.trace( prefix + traceText );
}
/**
 * Serialization hook: replaces the deserialized instance with the registered
 * TypeConfiguration resolved by uuid from the registry.
 *
 * @return the registered TypeConfiguration for this instance's uuid
 * @throws InvalidObjectException declared for the serialization contract
 */
private Object readResolve() throws InvalidObjectException {
	log.trace( "Resolving serialized TypeConfiguration - readResolve" );
	final String uuid = getUuid();
	return TypeConfigurationRegistry.INSTANCE.findTypeConfiguration( uuid );
}
}
/**
 * Synchronization callback fired before transaction completion; delegates to
 * the target only while the target is still active.
 */
@Override
public void beforeCompletion() {
	log.trace( "Synchronization coordinator: beforeCompletion()" );
	if ( target.isActive() ) {
		target.beforeCompletion();
	}
}
/**
 * Rule-entry trace hook. Skipped entirely while trace logging is off or the
 * parser is in a syntactic predicate (guessing) — the depth counter is only
 * incremented when a line is actually emitted, mirroring traceOut.
 */
@Override
public void traceIn(String ruleName) {
	if ( !LOG.isTraceEnabled() || inputState.guessing > 0 ) {
		return;
	}
	// Indent proportional to the current nesting depth, then descend one level.
	final String indent = StringHelper.repeat( '-', ( traceDepth++ * 2 ) );
	LOG.trace( indent + "-> " + ruleName );
}
/**
 * Rule-exit trace hook carrying the current AST node.
 * <p>
 * Consistency fix: previously the {@code tree} argument was ignored, so exit
 * lines lacked the node name that the matching {@code traceIn(String, AST)}
 * logs. The exit line now includes {@code buildTraceNodeName(tree)} as well.
 *
 * @param ruleName the grammar rule being exited
 * @param tree the AST node current at rule exit
 */
@Override
public void traceOut(String ruleName, AST tree) {
	if ( !LOG.isTraceEnabled() ) {
		return;
	}
	// Suppress tracing inside syntactic predicates (speculative parsing).
	if ( inputState.guessing > 0 ) {
		return;
	}
	String prefix = "<-" + StringHelper.repeat( '-', ( --traceDepth * 2 ) ) + " ";
	// Match the entry-side format: "<rule> (<node>)".
	String traceText = ruleName + " (" + buildTraceNodeName( tree ) + ")";
	LOG.trace( prefix + traceText );
}
/**
 * Custom serialization hook defined by the Java spec; invoked when the factory
 * is deserialized directly. The uuid/name previously read from the stream is
 * resolved to a live SessionFactory registered with the
 * {@link SessionFactoryRegistry}.
 *
 * @return The resolved factory to use.
 * @throws InvalidObjectException Thrown if we could not resolve the factory by uuid/name.
 */
private Object readResolve() throws InvalidObjectException {
	LOG.trace( "Resolving serialized SessionFactory" );
	final String uuid = getUuid();
	return locateSessionFactoryOnDeserialization( uuid, name );
}
/**
 * Clears both plan caches when the SessionFactory is closed.
 * <p>
 * Note that depending on the cache strategy implementation chosen, clearing
 * the cache might not reclaim all the memory.
 * <p>
 * Typically, when using LIRS, clearing the cache only invalidates the entries
 * but the outdated entries are kept in memory until they are replaced by
 * others. It is not considered a memory leak as the cache is bounded.
 */
public void cleanup() {
	LOG.trace( "Cleaning QueryPlan Cache" );
	queryPlanCache.clear();
	parameterMetadataCache.clear();
}
/**
 * Rule-entry trace hook carrying the current AST node; logs the rule name
 * together with the node name. Skipped while trace logging is off or the
 * parser is speculating (guessing), so the depth counter only advances when a
 * line is actually emitted.
 */
@Override
public void traceIn(String ruleName, AST tree) {
	if ( !LOG.isTraceEnabled() || inputState.guessing > 0 ) {
		return;
	}
	// Indent by current depth, then descend one level.
	final String indent = StringHelper.repeat( '-', ( traceDepth++ * 2 ) );
	LOG.trace( indent + "-> " + ruleName + " (" + buildTraceNodeName( tree ) + ")" );
}
/**
 * Rule-entry trace hook carrying the current AST node; logs the rule name
 * together with the node name. Skipped while trace logging is off or the
 * parser is speculating (guessing), so the depth counter only advances when a
 * line is actually emitted.
 */
@Override
public void traceIn(String ruleName, AST tree) {
	if ( !LOG.isTraceEnabled() || inputState.guessing > 0 ) {
		return;
	}
	// Indent by current depth, then descend one level.
	final String indent = StringHelper.repeat( '-', ( traceDepth++ * 2 ) );
	LOG.trace( indent + "-> " + ruleName + " (" + buildTraceNodeName( tree ) + ")" );
}
@SuppressWarnings({"unchecked"}) protected void entityIsPersistent(PersistEvent event, Map createCache) { LOG.trace( "Ignoring persistent instance" ); final EventSource source = event.getSession(); //TODO: check that entry.getIdentifier().equals(requestedId) final Object entity = source.getPersistenceContext().unproxy( event.getObject() ); final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); if ( createCache.put( entity, entity ) == null ) { justCascade( createCache, source, entity, persister ); } }
/**
 * Emits one trace line per dirty property, naming each as
 * "EntityName.propertyName is dirty". No-op unless trace logging is enabled.
 *
 * @param props indexes of the dirty properties within the entity metamodel
 */
private void logDirtyProperties(int[] props) {
	if ( !LOG.isTraceEnabled() ) {
		return;
	}
	for ( int dirtyIndex : props ) {
		final String propertyName = entityMetamodel.getProperties()[dirtyIndex].getName();
		LOG.trace( StringHelper.qualify( getEntityName(), propertyName ) + " is dirty" );
	}
}
private void afterBeginCallback() { if(this.timeOut > 0) { transactionCoordinatorOwner.setTransactionTimeOut( this.timeOut ); } // report entering into a "transactional context" transactionCoordinatorOwner.startTransactionBoundary(); // trigger the Transaction-API-only after-begin callback transactionCoordinatorOwner.afterTransactionBegin(); // notify all registered observers for ( TransactionObserver observer : observers() ) { observer.afterBegin(); } log.trace( "ResourceLocalTransactionCoordinatorImpl#afterBeginCallback" ); }
protected void entityIsPersistent(MergeEvent event, Map copyCache) { LOG.trace( "Ignoring persistent instance" ); //TODO: check that entry.getIdentifier().equals(requestedId) final Object entity = event.getEntity(); final EventSource source = event.getSession(); final EntityPersister persister = source.getEntityPersister( event.getEntityName(), entity ); ( (MergeContext) copyCache ).put( entity, entity, true ); //before cascade! cascadeOnMerge( source, persister, entity, copyCache ); copyValues( persister, entity, entity, source, copyCache ); event.setResult( entity ); }