/**
 * Delegates tuple reading to the wrapped dialect, tracing the key and context first.
 */
@Override
public Tuple getTuple(EntityKey key, OperationContext operationContext) {
	log.tracef( "Reading tuple with key %1$s and context %2$s", key, operationContext );
	Tuple tuple = super.getTuple( key, operationContext );
	return tuple;
}
/**
 * Delegates association removal to the wrapped dialect, tracing the key first.
 */
@Override
public void removeAssociation(AssociationKey key, AssociationContext associationContext) {
	log.tracef( "Removing association with key %1$s from datastore", key );
	super.removeAssociation( key, associationContext );
}
/**
 * Delegates association reading to the wrapped dialect, tracing the key and context first.
 */
@Override
public Association getAssociation(AssociationKey key, AssociationContext associationContext) {
	log.tracef( "Reading association with key %1$s from datastore and context %2$s", key, associationContext );
	Association association = super.getAssociation( key, associationContext );
	return association;
}
/**
 * Delegates tuple removal to the wrapped dialect, tracing the key first.
 */
@Override
public void removeTuple(EntityKey key, TupleContext tupleContext) {
	log.tracef( "Removing tuple with key %1$s from datastore", key );
	super.removeTuple( key, tupleContext );
}
/**
 * Delegates tuple creation to the wrapped dialect, tracing the key first.
 */
@Override
public Tuple createTuple(EntityKey key, OperationContext operationContext) {
	log.tracef( "Creating tuple with key %1$s", key );
	Tuple tuple = super.createTuple( key, operationContext );
	return tuple;
}
/**
 * Delegates association creation to the wrapped dialect, tracing the key first.
 */
@Override
public Association createAssociation(AssociationKey key, AssociationContext associationContext) {
	log.tracef( "Creating association with key %1$s", key );
	Association association = super.createAssociation( key, associationContext );
	return association;
}
/**
 * Delegates the embedded-storage check to the wrapped dialect, tracing the metadata first.
 */
@Override
public boolean isStoredInEntityStructure(AssociationKeyMetadata associationKeyMetadata, AssociationTypeContext associationTypeContext) {
	log.tracef( "Determining whether association %1$s is stored in an entity structure", associationKeyMetadata );
	boolean storedInEntity = super.isStoredInEntityStructure( associationKeyMetadata, associationTypeContext );
	return storedInEntity;
}
/**
 * Binds the given enum value to the single named column of the tuple, storing either
 * its ordinal or its name depending on {@code isOrdinal}. A {@code null} value is
 * only logged and not written to the tuple.
 *
 * @param resultset the tuple to write into
 * @param value the enum value to bind, may be {@code null}
 * @param names the column names; only single-column properties are supported
 * @param session the current session
 * @throws NotYetImplementedException if more than one column name is passed
 */
@Override
public void nullSafeSet(Tuple resultset, Object value, String[] names, SharedSessionContractImplementor session) throws HibernateException {
	if ( names.length > 1 ) {
		throw new NotYetImplementedException( "Multi column property not implemented yet" );
	}
	if ( value == null ) {
		// fixed broken placeholder "$s" (missing '%') which printed literally instead of the column name
		log.tracef( "binding [null] to parameter [%1$s]", names[0] );
	}
	else {
		Object endValue = isOrdinal ? Integer.valueOf( ( (Enum<?>) value ).ordinal() ) : ( (Enum<?>) value ).name();
		// fixed broken placeholders "$s" (missing '%') so the value and column name actually appear in the trace
		log.tracef( "binding [%1$s] to parameter(s) %2$s", endValue, names[0] );
		resultset.put( names[0], endValue );
	}
}
/**
 * Delegates association persistence to the wrapped dialect, tracing whether this is
 * a creation (empty snapshot) or an update.
 */
@Override
public void insertOrUpdateAssociation(AssociationKey key, Association association, AssociationContext associationContext) {
	String message = association.getSnapshot().size() == 0
			? "Creating association with key %1$s in datastore"
			: "Updating association with key %1$s in datastore";
	log.tracef( message, key );
	super.insertOrUpdateAssociation( key, association, associationContext );
}
/**
 * Delegates multi-tuple reading to the wrapped dialect; the keys are only rendered
 * for the trace message when trace logging is actually enabled.
 */
@Override
public List<Tuple> getTuples(EntityKey[] keys, TupleContext tupleContext) {
	boolean traceEnabled = log.isTraceEnabled();
	if ( traceEnabled ) {
		log.tracef( "Reading tuples with keys %1$s and context %2$s", Arrays.toString( keys ), tupleContext );
	}
	return super.getTuples( keys, tupleContext );
}
/**
 * Delegates tuple persistence to the wrapped dialect, tracing whether this is
 * an insert (empty snapshot) or an update.
 */
@Override
public void insertOrUpdateTuple(EntityKey key, TuplePointer tuplePointer, TupleContext tupleContext) {
	String message = tuplePointer.getTuple().getSnapshot().isEmpty()
			? "Inserting tuple with key %1$s into datastore"
			: "Updating tuple with key %1$s in datastore";
	log.tracef( message, key );
	super.insertOrUpdateTuple( key, tuplePointer, tupleContext );
}
/**
 * Delegates batch execution to the wrapped dialect, tracing the number of queued operations.
 */
@Override
public void executeBatch(OperationsQueue queue) {
	final int queuedOperations = queue.size();
	log.tracef( "Executing batch with %1$s items", queuedOperations );
	super.executeBatch( queue );
}
/**
 * Delegates backend query execution to the wrapped dialect, tracing the query first.
 */
@Override
public ClosableIterator<Tuple> executeBackendQuery(BackendQuery<Serializable> query, QueryParameters queryParameters, TupleContext tupleContext) {
	log.tracef( "Executing backend query: %1$s", query.getQuery() );
	ClosableIterator<Tuple> results = super.executeBackendQuery( query, queryParameters, tupleContext );
	return results;
}
}
/**
 * Delegates id-sequence value generation to the wrapped dialect, tracing the source key first.
 */
@Override
public Number nextValue(NextValueRequest request) {
	log.tracef( "Extracting next value from key %1$s", request.getKey() );
	Number nextValue = super.nextValue( request );
	return nextValue;
}
/**
 * Binds the given value to all named columns of the tuple, writing explicit nulls
 * when the value is {@code null} and delegating to {@code doBind} otherwise.
 */
@Override
public void bind(Tuple resultset, X value, String[] names) {
	if ( value != null ) {
		if ( log.isTraceEnabled() ) {
			log.tracef( "binding [%1$s] to parameter(s) %2$s", javaDescriptor.extractLoggableRepresentation( value ), Arrays.toString( names ) );
		}
		doBind( resultset, value, names, DEFAULT_OPTIONS );
		return;
	}
	for ( String name : names ) {
		log.tracef( "binding [null] to parameter [%1$s]", name );
		resultset.put( name, null );
	}
}
/**
 * Extracts the value of the given column from the tuple, optionally wrapping it
 * via the Java type descriptor; returns {@code null} when the column holds no value.
 */
@Override
public J extract(final Tuple resultset, final String name) {
	@SuppressWarnings("unchecked")
	final J result = (J) resultset.get( name );
	if ( result != null ) {
		if ( log.isTraceEnabled() ) {
			log.tracef( "found [%1$s] as column [%2$s]", javaTypeDescriptor.extractLoggableRepresentation( result ), name );
		}
		return wrap ? javaTypeDescriptor.wrap( result, null ) : result;
	}
	log.tracef( "found [null] as column [%s]", name );
	return null;
}
}
/**
 * Extracts the value of the given column from the tuple, converting the stored
 * {@code String} representation back to the Java type via the type descriptor;
 * returns {@code null} when the column holds no value.
 *
 * @param resultset the tuple to read from
 * @param name the column name
 * @return the converted value, or {@code null} if the column is not set
 */
@Override
public J extract(Tuple resultset, String name) {
	@SuppressWarnings( "unchecked" )
	final String result = (String) resultset.get( name );
	if ( result == null ) {
		// fixed broken placeholder "$s" (missing '%') which printed literally instead of the column name
		log.tracef( "found [null] as column [%1$s]", name );
		return null;
	}
	else {
		final J resultJ = javaTypeDescriptor.fromString( result );
		if ( log.isTraceEnabled() ) {
			// fixed broken placeholders "$s" (missing '%') so value and column name actually appear in the trace
			log.tracef( "found [%1$s] as column [%2$s]", javaTypeDescriptor.extractLoggableRepresentation( resultJ ), name );
		}
		return resultJ;
	}
}
}
/**
 * Initialize the internal values from the given {@link Map}.
 *
 * @param configurationMap the values to use as configuration
 */
public void initConfiguration(Map<?, ?> configurationMap) {
	ConfigurationPropertyReader propertyReader = new ConfigurationPropertyReader( configurationMap );
	URL defaultConfiguration = InfinispanConfiguration.class.getClassLoader().getResource( INFINISPAN_DEFAULT_CONFIG );
	this.configUrl = propertyReader
			.property( InfinispanProperties.CONFIGURATION_RESOURCE_NAME, URL.class )
			.withDefault( defaultConfiguration )
			.getValue();
	this.jndi = propertyReader
			.property( InfinispanProperties.CACHE_MANAGER_JNDI_NAME, String.class )
			.getValue();
	log.tracef( "Initializing Infinispan from configuration file at %1$s", configUrl );
}
}
/**
 * Initialize the internal values from the given {@link Map}.
 *
 * @param configurationMap the values to use as configuration
 */
public void initConfiguration(Map<?, ?> configurationMap) {
	ConfigurationPropertyReader reader = new ConfigurationPropertyReader( configurationMap );
	this.configUrl = reader
			.property( InfinispanProperties.CONFIGURATION_RESOURCE_NAME, URL.class )
			.withDefault( InfinispanConfiguration.class.getClassLoader().getResource( INFINISPAN_DEFAULT_CONFIG ) )
			.getValue();
	this.jndi = reader
			.property( InfinispanProperties.CACHE_MANAGER_JNDI_NAME, String.class )
			.getValue();
	log.tracef( "Initializing Infinispan from configuration file at %1$s", configUrl );
}
}
@Override public void executeBatch(OperationsQueue operationsQueue) { try { if ( GridDialects.hasFacet( getGridDialect(), BatchableGridDialect.class ) || GridDialects.hasFacet( getGridDialect(), GroupingByEntityDialect.class ) ) { log.tracef( "Executing batch" ); super.executeBatch( operationsQueue ); } } catch ( TupleAlreadyExistsException taee ) { // TODO: Ideally, we should log the entity name + id here; For now we trust the datastore to provide this // information via the original exception; It'd require a fair bit of changes to obtain the entity name here // (we'd have to obtain the persister matching the given entity key metadata which in turn would require // access to the session factory which is not easily available here) throw log.mustNotInsertSameEntityTwice( taee.getMessage(), taee ); } }