/**
 * Convenience constructor: delegates to the main constructor, deriving the default
 * catalog and schema names from the metadata's default database namespace.
 * NOTE(review): assumes extractName(..) tolerates a null Identifier when no default
 * catalog/schema is configured — confirm against extractName's implementation.
 */
public Settings(SessionFactoryOptions sessionFactoryOptions, Metadata metadata) { this( sessionFactoryOptions, extractName( metadata.getDatabase().getDefaultNamespace().getName().getCatalog() ), extractName( metadata.getDatabase().getDefaultNamespace().getName().getSchema() ) ); }
/**
 * Builds the {@code alter table ... drop constraint ...} command for the given unique key,
 * honoring the dialect's placement of the optional {@code if exists} clause (before or
 * after the constraint name).
 */
@Override
public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata) {
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String qualifiedTableName = environment.getQualifiedObjectNameFormatter().format(
			uniqueKey.getTable().getQualifiedTableName(),
			dialect
	);
	final StringBuilder command = new StringBuilder( dialect.getAlterTableString( qualifiedTableName ) );
	command.append( getDropUnique() );
	if ( dialect.supportsIfExistsBeforeConstraintName() ) {
		command.append( "if exists " );
	}
	command.append( dialect.quote( uniqueKey.getName() ) );
	if ( dialect.supportsIfExistsAfterConstraintName() ) {
		command.append( " if exists" );
	}
	return command.toString();
}
@Override public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata) { // Do this here, rather than allowing UniqueKey/Constraint to do it. // We need full, simplified control over whether or not it happens. final String tableName = metadata.getDatabase().getJdbcEnvironment().getQualifiedObjectNameFormatter().format( uniqueKey.getTable().getQualifiedTableName(), metadata.getDatabase().getJdbcEnvironment().getDialect() ); final String constraintName = dialect.quote( uniqueKey.getName() ); return dialect.getAlterTableString( tableName ) + " add constraint " + uniqueConstraintSql( uniqueKey ) + " constraint " + constraintName; }
/**
 * Produces the dialect-specific {@code drop sequence} command(s) for the given sequence,
 * using the JDBC environment's formatter to qualify the sequence name.
 */
@Override
public String[] getSqlDropStrings(Sequence sequence, Metadata metadata) {
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String formattedName = environment.getQualifiedObjectNameFormatter().format(
			sequence.getName(),
			environment.getDialect()
	);
	return dialect.getDropSequenceStrings( formattedName );
}
}
/**
 * Produces the {@code drop table} command for the given table, honoring the dialect's
 * placement of {@code if exists} (before or after the table name) and appending the
 * dialect's cascade-constraints clause.
 */
@Override
public String[] getSqlDropStrings(Table table, Metadata metadata) {
	final StringBuilder command = new StringBuilder( "drop table " );
	if ( dialect.supportsIfExistsBeforeTableName() ) {
		command.append( "if exists " );
	}
	// Rebuild the fully qualified name from the table's (possibly quoted) catalog/schema parts.
	final QualifiedName qualifiedName = new QualifiedNameParser.NameParts(
			Identifier.toIdentifier( table.getCatalog(), table.isCatalogQuoted() ),
			Identifier.toIdentifier( table.getSchema(), table.isSchemaQuoted() ),
			table.getNameIdentifier()
	);
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	command.append( environment.getQualifiedObjectNameFormatter().format( qualifiedName, environment.getDialect() ) )
			.append( dialect.getCascadeConstraintsString() );
	if ( dialect.supportsIfExistsAfterTableName() ) {
		command.append( " if exists" );
	}
	return new String[] { command.toString() };
}
}
/**
 * Drops the unique key. When any participating column is nullable the key was created as
 * an index (not a constraint), so a {@code drop index} command is generated instead of
 * delegating to the standard alter-table form.
 */
@Override
public String getAlterTableToDropUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata) {
	if ( !hasNullable( uniqueKey ) ) {
		return super.getAlterTableToDropUniqueKeyCommand( uniqueKey, metadata );
	}
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String qualifiedTableName = environment.getQualifiedObjectNameFormatter().format(
			uniqueKey.getTable().getQualifiedTableName(),
			environment.getDialect()
	);
	return org.hibernate.mapping.Index.buildSqlDropIndexString( uniqueKey.getName(), qualifiedTableName );
}
/**
 * Builds the {@code create index} command for the given table and columns.
 * Resolves the qualified table name via the metadata's JDBC environment, then delegates
 * to the name-based overload.
 */
public static String buildSqlCreateIndexString(
		Dialect dialect,
		String name,
		Table table,
		Iterator<Column> columns,
		java.util.Map<Column, String> columnOrderMap,
		boolean unique,
		Metadata metadata) {
	final String qualifiedTableName = metadata.getDatabase()
			.getJdbcEnvironment()
			.getQualifiedObjectNameFormatter()
			.format( table.getQualifiedTableName(), dialect );
	return buildSqlCreateIndexString( dialect, name, qualifiedTableName, columns, columnOrderMap, unique );
}
/**
 * Validates the mapped schema against the live database: first every table of every
 * included namespace, then (in a separate pass, preserving the original ordering)
 * every included sequence.
 */
public void performValidation(
		Metadata metadata,
		DatabaseInformation databaseInformation,
		ExecutionOptions options,
		Dialect dialect) {
	// Pass 1: tables.
	for ( Namespace ns : metadata.getDatabase().getNamespaces() ) {
		if ( !schemaFilter.includeNamespace( ns ) ) {
			continue;
		}
		validateTables( metadata, databaseInformation, options, dialect, ns );
	}
	// Pass 2: sequences (deliberately after all tables have been checked).
	for ( Namespace ns : metadata.getDatabase().getNamespaces() ) {
		if ( !schemaFilter.includeNamespace( ns ) ) {
			continue;
		}
		for ( Sequence seq : ns.getSequences() ) {
			if ( !schemaFilter.includeSequence( seq ) ) {
				continue;
			}
			validateSequence( seq, databaseInformation.getSequenceInformation( seq.getName() ) );
		}
	}
}
/**
 * Produces the dialect-specific {@code create sequence} command(s) for the given sequence,
 * passing along its configured initial value and increment size.
 */
@Override
public String[] getSqlCreateStrings(Sequence sequence, Metadata metadata) {
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String formattedName = environment.getQualifiedObjectNameFormatter().format(
			sequence.getName(),
			environment.getDialect()
	);
	return dialect.getCreateSequenceStrings(
			formattedName,
			sequence.getInitialValue(),
			sequence.getIncrementSize()
	);
}
/**
 * After metadata is built, rewrite the 'bioLength' formula on the child component so it
 * uses the current dialect's own rendering of the {@code length} function, when the
 * dialect registers one.
 */
@Override
protected void afterMetadataBuilt(Metadata metadata) {
	final Collection children = metadata.getCollectionBinding( Parent.class.getName() + ".children" );
	final Component elementComponent = (Component) children.getElement();
	final Formula bioLengthFormula =
			(Formula) elementComponent.getProperty( "bioLength" ).getValue().getColumnIterator().next();
	final SQLFunction lengthFunction =
			metadata.getDatabase().getJdbcEnvironment().getDialect().getFunctions().get( "length" );
	if ( lengthFunction != null ) {
		final ArrayList args = new ArrayList();
		args.add( "bio" );
		bioLengthFormula.setFormula( lengthFunction.render( StandardBasicTypes.INTEGER, args, null ) );
	}
}
/**
 * Runs schema validation against the live database.
 * <p>
 * Fix: in the original, {@code databaseInformation} was built <em>before</em> the
 * {@code try} block, so a failure inside {@code Helper.buildDatabaseInformation(..)}
 * skipped the {@code finally} and leaked the DDL transaction isolator (it was never
 * released). Acquisition now happens inside the {@code try} so the isolator is always
 * released and any successfully-built DatabaseInformation is always cleaned up.
 */
@Override
public void doValidation(Metadata metadata, ExecutionOptions options) {
	final JdbcContext jdbcContext = tool.resolveJdbcContext( options.getConfigurationValues() );
	final DdlTransactionIsolator isolator = tool.getDdlTransactionIsolator( jdbcContext );
	DatabaseInformation databaseInformation = null;
	try {
		databaseInformation = Helper.buildDatabaseInformation(
				tool.getServiceRegistry(),
				isolator,
				metadata.getDatabase().getDefaultNamespace().getName()
		);
		performValidation( metadata, databaseInformation, options, jdbcContext.getDialect() );
	}
	finally {
		if ( databaseInformation != null ) {
			// Best-effort cleanup: validation outcome must not be masked by a cleanup failure.
			try {
				databaseInformation.cleanup();
			}
			catch (Exception e) {
				log.debug( "Problem releasing DatabaseInformation : " + e.getMessage() );
			}
		}
		isolator.release();
	}
}
/**
 * Builds the standard {@code alter table ... add constraint name unique (cols)} command
 * for the given unique key, with the constraint name quoted per the dialect.
 */
@Override
public String getAlterTableToAddUniqueKeyCommand(UniqueKey uniqueKey, Metadata metadata) {
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String qualifiedTableName = environment.getQualifiedObjectNameFormatter().format(
			uniqueKey.getTable().getQualifiedTableName(),
			dialect
	);
	final String quotedConstraintName = dialect.quote( uniqueKey.getName() );
	return dialect.getAlterTableString( qualifiedTableName )
			+ " add constraint " + quotedConstraintName
			+ " " + uniqueConstraintSql( uniqueKey );
}
protected void migrateTable( Table table, TableInformation tableInformation, Dialect dialect, Metadata metadata, Formatter formatter, ExecutionOptions options, GenerationTarget... targets) { final Database database = metadata.getDatabase(); //noinspection unchecked applySqlStrings( false, table.sqlAlterStrings( dialect, metadata, tableInformation, database.getDefaultNamespace().getPhysicalName().getCatalog(), database.getDefaultNamespace().getPhysicalName().getSchema() ), formatter, options, targets ); }
/**
 * Produces the {@code alter table ... drop} command for the given foreign key, or no
 * commands when the dialect can't alter tables or the constraint is disabled/logical-only.
 */
@Override
public String[] getSqlDropStrings(ForeignKey foreignKey, Metadata metadata) {
	// Single short-circuit guard; evaluation order matches the original three checks.
	if ( !dialect.hasAlterTable()
			|| !foreignKey.isCreationEnabled()
			|| !foreignKey.isPhysicalConstraint() ) {
		return NO_COMMANDS;
	}
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String sourceTableName = environment.getQualifiedObjectNameFormatter().format(
			foreignKey.getTable().getQualifiedTableName(),
			dialect
	);
	return new String[] { getSqlDropStrings( sourceTableName, foreignKey, dialect ) };
}
/**
 * HHH-9850: verifies that registering a TableGenerator's exportables against a DB2
 * metadata produces a create-table command whose {@code sequence_name} column is
 * {@code varchar(255) not null}.
 */
@Test
@TestForIssue( jiraKey = "HHH-9850" )
public void testNewGeneratorTableCreationOnDb2() {
	final StandardServiceRegistry registry = new StandardServiceRegistryBuilder()
			.applySetting( AvailableSettings.DIALECT, DB2Dialect.class.getName() )
			.build();
	try {
		final Metadata metadata = new MetadataSources( registry ).buildMetadata();
		assertEquals( 0, metadata.getDatabase().getDefaultNamespace().getTables().size() );

		final TableGenerator generator = new TableGenerator();
		generator.configure( IntegerType.INSTANCE, new Properties(), registry );
		generator.registerExportables( metadata.getDatabase() );
		assertEquals( 1, metadata.getDatabase().getDefaultNamespace().getTables().size() );

		final Table generatorTable = metadata.getDatabase().getDefaultNamespace().getTables().iterator().next();
		final String[] createCommands =
				new DB2Dialect().getTableExporter().getSqlCreateStrings( generatorTable, metadata );
		assertContains( "sequence_name varchar(255) not null", createCommands[0] );
	}
	finally {
		StandardServiceRegistryBuilder.destroy( registry );
	}
}
.buildMetadata(); assertEquals( 0, metadata.getDatabase().getDefaultNamespace().getTables().size() ); generator.configure( IntegerType.INSTANCE, properties, ssr ); generator.registerExportables( metadata.getDatabase() ); assertEquals( 1, metadata.getDatabase().getDefaultNamespace().getTables().size() ); final Table table = metadata.getDatabase().getDefaultNamespace().getTables().iterator().next(); final String[] createCommands = new DB2Dialect().getTableExporter().getSqlCreateStrings( table, metadata ); assertContains( "sequence_name varchar(255) not null", createCommands[0] );
@Override protected void afterMetadataBuilt(Metadata metadata) { // Oracle and Postgres do not have year() functions, so we need to // redefine the 'User.person.yob' formula // // consider temporary until we add the capability to define // mapping formulas which can use dialect-registered functions... PersistentClass user = metadata.getEntityBinding( User.class.getName() ); org.hibernate.mapping.Property personProperty = user.getProperty( "person" ); Component component = ( Component ) personProperty.getValue(); Formula f = ( Formula ) component.getProperty( "yob" ).getValue().getColumnIterator().next(); SQLFunction yearFunction = metadata.getDatabase().getJdbcEnvironment().getDialect().getFunctions().get( "year" ); if ( yearFunction == null ) { // the dialect not know to support a year() function, so rely on the // ANSI SQL extract function f.setFormula( "extract( year from dob )"); } else { List args = new ArrayList(); args.add( "dob" ); f.setFormula( yearFunction.render( StandardBasicTypes.INTEGER, args, null ) ); } }
/**
 * Produces the {@code drop index} command for the given index, qualifying the index name
 * with the table name when the dialect requires qualified index names. Returns no
 * commands when the dialect does not support dropping constraints.
 */
@Override
public String[] getSqlDropStrings(Index index, Metadata metadata) {
	if ( !dialect.dropConstraints() ) {
		return NO_COMMANDS;
	}
	final JdbcEnvironment environment = metadata.getDatabase().getJdbcEnvironment();
	final String qualifiedTableName = environment.getQualifiedObjectNameFormatter().format(
			index.getTable().getQualifiedTableName(),
			dialect
	);
	final String indexName = dialect.qualifyIndexName()
			? StringHelper.qualify( qualifiedTableName, index.getName() )
			: index.getName();
	return new String[] { "drop index " + indexName };
}
}
/**
 * Test fixture: builds a service registry with create-drop DDL handling, configures a
 * SequenceHiLoGenerator (max_lo = 3) against the test sequence, registers its exportables,
 * and opens a session factory plus a sequence-value extractor for the assertions.
 */
@Before
public void setUp() throws Exception {
	serviceRegistry = new StandardServiceRegistryBuilder()
			.enableAutoClose()
			.applySetting( AvailableSettings.HBM2DDL_AUTO, "create-drop" )
			.build();

	final MetadataBuildingContext buildingContext = new MetadataBuildingContextTestingImpl( serviceRegistry );
	final Properties generatorConfig = new Properties();
	generatorConfig.setProperty( SequenceGenerator.SEQUENCE, TEST_SEQUENCE );
	generatorConfig.setProperty( SequenceHiLoGenerator.MAX_LO, "3" );
	generatorConfig.put(
			PersistentIdentifierGenerator.IDENTIFIER_NORMALIZER,
			buildingContext.getObjectNameNormalizer()
	);

	generator = new SequenceHiLoGenerator();
	generator.configure( StandardBasicTypes.LONG, generatorConfig, serviceRegistry );

	final Metadata metadata = new MetadataSources( serviceRegistry ).buildMetadata();
	generator.registerExportables( metadata.getDatabase() );

	sessionFactory = (SessionFactoryImplementor) metadata.buildSessionFactory();
	sequenceValueExtractor = new SequenceValueExtractor( sessionFactory.getDialect(), TEST_SEQUENCE );
}
/**
 * Verifies that every mapped table's identifier value is recognized as an identity column
 * when the generator extends IdentityGenerator, then exercises a simple save to confirm
 * persistence works end-to-end.
 */
@Test
public void testIdentifierGeneratorExtendsIdentityGenerator() {
	final MetadataSources metadataSources = new MetadataSources( serviceRegistry() );
	metadataSources.addAnnotatedClass( EntityBean.class );
	final Metadata metadata = metadataSources.getMetadataBuilder().build();

	for ( final Namespace namespace : metadata.getDatabase().getNamespaces() ) {
		for ( final org.hibernate.mapping.Table mappedTable : namespace.getTables() ) {
			final KeyValue idValue = mappedTable.getIdentifierValue();
			assertNotNull( "IdentifierValue was null", idValue );
			assertTrue( idValue.isIdentityColumn( metadata.getIdentifierGeneratorFactory(), getDialect() ) );
		}
	}

	final Session session = openSession();
	session.beginTransaction();
	session.save( new EntityBean() );
	session.getTransaction().commit();
	session.close();
}
}