@Override
public PartitionSchema loadPartitionSchema( ObjectId partitionSchemaId, String versionId ) throws KettleException {
  // Loads a partition schema, optionally at a specific version (null versionId = latest).
  readWriteLock.readLock().lock();
  try {
    NodeRepositoryFileData nodeData =
        pur.getDataAtVersionForRead( partitionSchemaId.getId(), versionId, NodeRepositoryFileData.class );
    // Resolve the repository file at the matching version so the assembled object carries
    // consistent file metadata.
    RepositoryFile schemaFile = ( versionId != null )
        ? pur.getFileAtVersion( partitionSchemaId.getId(), versionId )
        : pur.getFileById( partitionSchemaId.getId() );
    return partitionSchemaTransformer.assemble( schemaFile, nodeData,
        pur.getVersionSummary( partitionSchemaId.getId(), versionId ) );
  } catch ( Exception e ) {
    // Wrap any failure with the id for diagnostics; original cause is preserved.
    throw new KettleException( "Unable to load partition schema with id [" + partitionSchemaId + "]", e );
  } finally {
    readWriteLock.readLock().unlock();
  }
}
@Override
public DatabaseMeta loadDatabaseMeta( final ObjectId databaseId, final String versionId ) throws KettleException {
  // Loads a database connection definition, optionally at a specific version
  // (null versionId = latest).
  readWriteLock.readLock().lock();
  try {
    NodeRepositoryFileData nodeData =
        pur.getDataAtVersionForRead( databaseId.getId(), versionId, NodeRepositoryFileData.class );
    // Fetch the file record that matches the requested version so metadata stays consistent
    // with the data node.
    RepositoryFile dbFile = ( versionId != null )
        ? pur.getFileAtVersion( databaseId.getId(), versionId )
        : pur.getFileById( databaseId.getId() );
    return databaseMetaTransformer.assemble( dbFile, nodeData,
        pur.getVersionSummary( databaseId.getId(), versionId ) );
  } catch ( Exception e ) {
    // Surface the failing id to the caller; the root cause is chained.
    throw new KettleException( "Unable to load database with id [" + databaseId + "]", e );
  } finally {
    readWriteLock.readLock().unlock();
  }
}
@Override
public SlaveServer loadSlaveServer( ObjectId idSlaveServer, String versionId ) throws KettleException {
  // Loads a slave server definition, optionally at a specific version (null versionId = latest).
  readWriteLock.readLock().lock();
  try {
    NodeRepositoryFileData nodeData =
        pur.getDataAtVersionForRead( idSlaveServer.getId(), versionId, NodeRepositoryFileData.class );
    // Pick the file record at the same version as the data node.
    RepositoryFile serverFile = ( versionId != null )
        ? pur.getFileAtVersion( idSlaveServer.getId(), versionId )
        : pur.getFileById( idSlaveServer.getId() );
    return slaveTransformer.assemble( serverFile, nodeData,
        pur.getVersionSummary( idSlaveServer.getId(), versionId ) );
  } catch ( Exception e ) {
    // Wrap with context; original exception is kept as the cause.
    throw new KettleException( "Unable to load slave server with id [" + idSlaveServer + "]", e );
  } finally {
    readWriteLock.readLock().unlock();
  }
}
@Override public ClusterSchema loadClusterSchema( ObjectId idClusterSchema, List<SlaveServer> slaveServers, String versionId ) throws KettleException { readWriteLock.readLock().lock(); try { // We dont need to use slaveServer variable as the dataNoteToElement method finds the server from the repository NodeRepositoryFileData data = pur.getDataAtVersionForRead( idClusterSchema.getId(), versionId, NodeRepositoryFileData.class ); RepositoryFile file = null; if ( versionId != null ) { file = pur.getFileAtVersion( idClusterSchema.getId(), versionId ); } else { file = pur.getFileById( idClusterSchema.getId() ); } return clusterTransformer.assemble( file, data, pur.getVersionSummary( idClusterSchema.getId(), versionId ) ); } catch ( Exception e ) { throw new KettleException( "Unable to load cluster schema with id [" + idClusterSchema + "]", e ); } finally { readWriteLock.readLock().unlock(); } }
data = pur.getDataAtVersionForRead( file.getId(), versionId, NodeRepositoryFileData.class ); } finally { readWriteLock.readLock().unlock();
data = pur.getDataAtVersionForRead( file.getId(), versionId, NodeRepositoryFileData.class ); } finally { readWriteLock.writeLock().unlock();
jobDelegate.dataNodeToElement( pur.getDataAtVersionForRead( idJob.getId(), versionLabel, NodeRepositoryFileData.class ).getNode(), jobMeta );
file = builder.build(); NodeRepositoryFileData data; data = pur.getDataAtVersionForRead( file.getId(), null, NodeRepositoryFileData.class );
pur.getDataAtVersionForRead( idTransformation.getId(), versionLabel, NodeRepositoryFileData.class ).getNode(), transMeta ); } finally {
public T call() throws Exception { return delegatee.getDataAtVersionForRead( fileId, versionId, dataClass ); } }, Messages.getInstance().getString( "ExceptionLoggingDecorator.getDataAtVersion", fileId, versionId ) ); //$NON-NLS-1$
public SimpleRepositoryFileDataDto getDataAsBinaryForReadAtVersion( final String fileId, final String versionId ) {
  // Reads the binary payload of the file at the given version and converts it into its
  // serializable DTO form.
  final SimpleRepositoryFileData data =
      repo.getDataAtVersionForRead( fileId, versionId, SimpleRepositoryFileData.class );
  return SimpleRepositoryFileDataDto.convert( data );
}
public NodeRepositoryFileDataDto getDataAsNodeForReadAtVersion( String fileId, String versionId ) {
  // Reads the node-structured payload of the file at the given version and marshals it
  // into its DTO representation.
  NodeRepositoryFileData nodeData =
      repo.getDataAtVersionForRead( fileId, versionId, NodeRepositoryFileData.class );
  return nodeRepositoryFileDataAdapter.marshal( nodeData );
}
/**
 * Retrieves the data of a file at a specific version for reading, delegating directly to the
 * wrapped repository.
 *
 * @param fileId
 *          id of the file whose data is requested
 * @param versionId
 *          id of the version to read
 * @param dataClass
 *          class that implements {@link org.pentaho.platform.api.repository2.unified.IRepositoryFileData}
 * @return the file data at the requested version
 */
@Override
public <T extends IRepositoryFileData> T getDataAtVersionForRead( final Serializable fileId,
    final Serializable versionId, final Class<T> dataClass ) {
  // Pure pass-through; no additional behavior is added by this decorator method.
  return repository.getDataAtVersionForRead( fileId, versionId, dataClass );
}
public List<NodeRepositoryFileDataDto> getDataAsNodeForReadInBatch( final List<RepositoryFileDto> files ) {
  // Reads node data for each file in the batch. Files without a version id are read at
  // their latest state; versioned entries are read at that exact version.
  final List<NodeRepositoryFileDataDto> result = new ArrayList<NodeRepositoryFileDataDto>( files.size() );
  for ( RepositoryFileDto fileDto : files ) {
    NodeRepositoryFileData nodeData;
    if ( fileDto.getVersionId() == null ) {
      nodeData = repo.getDataForRead( fileDto.getId(), NodeRepositoryFileData.class );
    } else {
      nodeData = repo.getDataAtVersionForRead( fileDto.getId(), fileDto.getVersionId(),
          NodeRepositoryFileData.class );
    }
    // Marshal once here rather than in each branch.
    result.add( nodeRepositoryFileDataAdapter.marshal( nodeData ) );
  }
  return result;
}