@Override
public void setId( String id ) {
  this.objectId = new LongObjectId( new StringObjectId( id ) );
}
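// Illustrative sketch (not from the original source): LongObjectId's
// ObjectId-accepting constructor is assumed to parse the wrapped string as a
// long, so the setter above bridges the string-based API to the numeric id
// used by this repository; a non-numeric string would be expected to fail
// with a NumberFormatException.
ObjectId parsed = new LongObjectId( new StringObjectId( "42" ) );
System.out.println( parsed.getId() ); // prints "42", now backed by a long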
/**
 * When connected to the repository and {@link JobEntryJob} references a child job by {@link ObjectId},
 * keep {@link ObjectLocationSpecificationMethod} as {@code REPOSITORY_BY_REFERENCE}.
 * Load the job from the repository using the specified {@link ObjectId}.
 */
@Test
public void testConnectedLoad_RepByRef() throws Exception {
  Repository myrepo = mock( Repository.class );
  doReturn( true ).when( myrepo ).isConnected();
  doReturn( null ).when( myrepo ).getJobEntryAttributeString( any( ObjectId.class ), anyString() );
  doReturn( "rep_ref" ).when( myrepo ).getJobEntryAttributeString( JOB_ENTRY_JOB_OBJECT_ID, "specification_method" );
  doReturn( JOB_ENTRY_JOB_OBJECT_ID.toString() )
    .when( myrepo ).getJobEntryAttributeString( JOB_ENTRY_JOB_OBJECT_ID, "job_object_id" );

  JobEntryJob jej = spy( new JobEntryJob( JOB_ENTRY_JOB_NAME ) );
  jej.loadRep( myrepo, store, JOB_ENTRY_JOB_OBJECT_ID, databases, servers );
  jej.getJobMeta( myrepo, store, space );

  assertEquals( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, jej.getSpecificationMethod() );
  verify( myrepo, times( 1 ) ).loadJob( JOB_ENTRY_JOB_OBJECT_ID, null );
}
/**
 * When connected to the repository and {@link JobEntryJob} references a child job by {@link ObjectId},
 * guess {@link ObjectLocationSpecificationMethod} as {@code REPOSITORY_BY_REFERENCE}.
 * Load the job from the repository using the specified {@link ObjectId}.
 */
@Test
public void testConnectedLoad_RepByRef_Guess() throws Exception {
  Repository myrepo = mock( Repository.class );
  doReturn( true ).when( myrepo ).isConnected();
  doReturn( null ).when( myrepo ).getJobEntryAttributeString( any( ObjectId.class ), anyString() );
  doReturn( JOB_ENTRY_JOB_OBJECT_ID.toString() )
    .when( myrepo ).getJobEntryAttributeString( JOB_ENTRY_JOB_OBJECT_ID, "job_object_id" );

  JobEntryJob jej = spy( new JobEntryJob( JOB_ENTRY_JOB_NAME ) );
  jej.loadRep( myrepo, store, JOB_ENTRY_JOB_OBJECT_ID, databases, servers );
  jej.getJobMeta( myrepo, store, space );

  assertEquals( ObjectLocationSpecificationMethod.REPOSITORY_BY_REFERENCE, jej.getSpecificationMethod() );
  verify( myrepo, times( 1 ) ).loadJob( JOB_ENTRY_JOB_OBJECT_ID, null );
}
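// Hypothetical refactoring sketch: both tests above stub the same baseline
// behaviour, so a shared helper like this one (the name mockConnectedRepo is
// an assumption, not part of the original test class) could build the
// connected Repository mock whose attribute lookups default to null; each
// test would then stub only the attributes it cares about.
private Repository mockConnectedRepo() throws KettleException {
  Repository myrepo = mock( Repository.class );
  doReturn( true ).when( myrepo ).isConnected();
  doReturn( null ).when( myrepo ).getJobEntryAttributeString( any( ObjectId.class ), anyString() );
  return myrepo;
}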
@Override
public ObjectId getPartitionSchemaID( String name ) throws KettleException {
  // The ID is the filename relative to the base directory, including the file extension
  return new StringObjectId( calcObjectId( (RepositoryDirectory) null ) + name + EXT_PARTITION_SCHEMA );
}
@Override
public ObjectId getSlaveID( String name ) throws KettleException {
  // Only return the ID if the slave server exists
  String slaveID = name + EXT_SLAVE_SERVER;
  Object[] ids = getRootObjectIDs( EXT_SLAVE_SERVER );
  for ( Object rootID : ids ) {
    if ( rootID.toString().equals( slaveID ) ) {
      return new StringObjectId( slaveID );
    }
  }
  return null;
}
@Override
public ObjectId getClusterID( String name ) throws KettleException {
  // The ID is the filename relative to the base directory, including the file extension
  return new StringObjectId( calcObjectId( (RepositoryDirectory) null ) + name + EXT_CLUSTER_SCHEMA );
}
@Override
public ObjectId getId() {
  return new StringObjectId( file.getId().toString() );
}
@Injection( name = "TRANS_OBJECT_ID" ) public void setTransStringObjectId( String transStringObjectId ) { this.transObjectId = new StringObjectId( transStringObjectId ); }
@Override
public ObjectId getObjectId() {
  return new StringObjectId( self.getId().toString() );
}
@Override
public ObjectId[] getDirectoryIDs() {
  List<RepositoryFile> children = this.getAllURChildrenFiles();
  ObjectId[] objectIds = new ObjectId[ children.size() ];
  for ( int i = 0; i < children.size(); i++ ) {
    objectIds[ i ] = new StringObjectId( children.get( i ).getId().toString() );
  }
  return objectIds;
}
@Override
public ObjectId insertLogEntry( String description ) throws KettleException {
  String logfile = calcDirectoryName( null ) + LOG_FILE;
  // try-with-resources closes the stream even when a write fails
  try ( OutputStream outputStream = KettleVFS.getOutputStream( logfile, true ) ) {
    outputStream.write( description.getBytes() );
    outputStream.write( Const.CR.getBytes() );
    return new StringObjectId( logfile );
  } catch ( IOException e ) {
    throw new KettleException( "Unable to write log entry to file [" + logfile + "]", e );
  }
}
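// Minimal usage sketch, assuming repository is an instance of this file-based
// implementation: each call appends one line to the shared log file and
// returns an id that is simply the log file's path.
ObjectId logId = repository.insertLogEntry( "Nightly batch started" );
ObjectId sameFile = repository.insertLogEntry( "Nightly batch finished" );
assert logId.getId().equals( sameFile.getId() ); // both ids point at the same file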
@Override
public ObjectId[] getDatabaseIDs( boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryFile> children = getAllFilesOfType( null, RepositoryObjectType.DATABASE, includeDeleted );
    List<ObjectId> ids = new ArrayList<ObjectId>( children.size() );
    for ( RepositoryFile file : children ) {
      ids.add( new StringObjectId( file.getId().toString() ) );
    }
    return ids.toArray( new ObjectId[ 0 ] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all database IDs", e );
  }
}
@Override
public ObjectId[] getClusterIDs( boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryFile> children = getAllFilesOfType( null, RepositoryObjectType.CLUSTER_SCHEMA, includeDeleted );
    List<ObjectId> ids = new ArrayList<ObjectId>( children.size() );
    for ( RepositoryFile file : children ) {
      ids.add( new StringObjectId( file.getId().toString() ) );
    }
    return ids.toArray( new ObjectId[ 0 ] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all cluster schema IDs", e );
  }
}
@Override
public ObjectId[] getPartitionSchemaIDs( boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryFile> children = getAllFilesOfType( null, RepositoryObjectType.PARTITION_SCHEMA, includeDeleted );
    List<ObjectId> ids = new ArrayList<ObjectId>( children.size() );
    for ( RepositoryFile file : children ) {
      ids.add( new StringObjectId( file.getId().toString() ) );
    }
    return ids.toArray( new ObjectId[ 0 ] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all partition schema IDs", e );
  }
}
@Override
public ObjectId[] getSlaveIDs( boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryFile> children = getAllFilesOfType( null, RepositoryObjectType.SLAVE_SERVER, includeDeleted );
    List<ObjectId> ids = new ArrayList<ObjectId>( children.size() );
    for ( RepositoryFile file : children ) {
      ids.add( new StringObjectId( file.getId().toString() ) );
    }
    return ids.toArray( new ObjectId[ 0 ] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all slave server IDs", e );
  }
}
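// Hypothetical refactoring sketch: the four ID-listing methods above differ
// only in the RepositoryObjectType they query, so they could delegate to a
// single helper like this one (getObjectIDsOfType is an assumed name, not
// part of the original class).
private ObjectId[] getObjectIDsOfType( RepositoryObjectType type, boolean includeDeleted ) throws KettleException {
  try {
    List<RepositoryFile> children = getAllFilesOfType( null, type, includeDeleted );
    List<ObjectId> ids = new ArrayList<ObjectId>( children.size() );
    for ( RepositoryFile file : children ) {
      ids.add( new StringObjectId( file.getId().toString() ) );
    }
    return ids.toArray( new ObjectId[ 0 ] );
  } catch ( Exception e ) {
    throw new KettleException( "Unable to get all " + type + " IDs", e );
  }
}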
@Override
public IMetaStoreElement getElement( String namespace, IMetaStoreElementType elementType, String elementId )
  throws MetaStoreException {
  try {
    RowMetaAndData elementRow = delegate.getElement( new LongObjectId( new StringObjectId( elementId ) ) );
    if ( elementRow == null ) {
      return null;
    }
    return delegate.parseElement( elementType, elementRow );
  } catch ( Exception e ) {
    throw new MetaStoreException( "Unable to get element with id '" + elementId + "'", e );
  }
}
@Override
public ObjectId getTestObject() {
  return new StringObjectId( UUID.randomUUID().toString() );
}
public EERepositoryObject( RepositoryFile file, RepositoryDirectoryInterface repositoryDirectory, String modifiedUser,
                           RepositoryObjectType objectType, String description, RepositoryLock lock, boolean deleted ) {
  this( new StringObjectId( file.getId().toString() ), file.getTitle(), repositoryDirectory, modifiedUser,
    file.getLastModifiedDate(), objectType, description, lock, deleted );
}
@Override
public IMetaStoreElementType getElementType( String namespace, String elementTypeId ) throws MetaStoreException {
  try {
    ObjectId namespaceId = delegate.getNamespaceId( namespace );
    if ( namespaceId == null ) {
      return null;
    }
    RowMetaAndData elementTypeRow = delegate.getElementType( new LongObjectId( new StringObjectId( elementTypeId ) ) );
    return delegate.parseElementType( namespace, namespaceId, elementTypeRow );
  } catch ( Exception e ) {
    throw new MetaStoreException( "Unable to get element type with id '" + elementTypeId + "' in namespace '"
      + namespace + "'", e );
  }
}
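// Usage sketch, assuming metaStore is an instance of this delegating metastore
// and the namespace and id literals are placeholder values: element-type ids
// arrive as strings through the IMetaStore API but are stored as longs in the
// repository, hence the LongObjectId( StringObjectId ) bridge above.
IMetaStoreElementType type = metaStore.getElementType( "pentaho", "1" );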
@Override
public RepositoryDirectoryInterface loadRepositoryDirectoryTree() throws KettleException {
  RepositoryDirectory root = new RepositoryDirectory();
  root.setObjectId( new StringObjectId( "/" ) );
  return loadRepositoryDirectoryTree( root );
}
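// Usage sketch, assuming repository is an instance of this implementation:
// the returned root carries the synthetic "/" id set above, and callers walk
// the directory tree starting from it.
RepositoryDirectoryInterface root = repository.loadRepositoryDirectoryTree();
System.out.println( root.getObjectId() ); // the StringObjectId "/"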