/**
 * Reads every database connection stored in the repository.
 * <p>
 * Holds the read lock for the whole scan so the file listing and the per-file data reads see a
 * consistent view.
 *
 * @return all database connections, named after each file's title
 * @throws KettleException if listing or reading any database file fails
 */
@Override
public List<DatabaseMeta> readDatabases() throws KettleException {
  readWriteLock.readLock().lock();
  try {
    List<RepositoryFile> dbFiles = getAllFilesOfType( null, RepositoryObjectType.DATABASE, false );
    List<DatabaseMeta> result = new ArrayList<DatabaseMeta>( dbFiles.size() );
    for ( RepositoryFile dbFile : dbFiles ) {
      DataNode dbNode = pur.getDataForRead( dbFile.getId(), NodeRepositoryFileData.class ).getNode();
      DatabaseMeta meta = (DatabaseMeta) databaseMetaTransformer.dataNodeToElement( dbNode );
      // The display name comes from the repository file, not the serialized node.
      meta.setName( dbFile.getTitle() );
      result.add( meta );
    }
    return result;
  } catch ( Exception e ) {
    throw new KettleException( "Unable to read all databases", e );
  } finally {
    readWriteLock.readLock().unlock();
  }
}
/**
 * Loads a single metastore element by its id.
 *
 * @param namespace   metastore namespace (not used for the lookup itself)
 * @param elementType type to stamp onto the returned element
 * @param elementId   repository id of the element's backing file
 * @return the populated element, or {@code null} when no data exists for that id
 * @throws MetaStoreException on conversion failure
 */
@Override
public IMetaStoreElement getElement( String namespace, IMetaStoreElementType elementType, String elementId )
  throws MetaStoreException {
  NodeRepositoryFileData fileData = pur.getDataForRead( elementId, NodeRepositoryFileData.class );
  // Missing data means the element does not exist; callers expect null, not an exception.
  if ( fileData == null ) {
    return null;
  }
  IMetaStoreElement result = newElement();
  result.setId( elementId );
  result.setElementType( elementType );
  dataNodeToElement( fileData.getNode(), result );
  return result;
}
/**
 * Assembles a {@link DatabaseMeta} from its repository file, serialized node data and version summary.
 * <p>
 * The logical name is the file name with any {@code .kdb} extension stripped; the display name is the
 * file's title.
 *
 * @param file    repository file backing the connection
 * @param data    serialized connection data
 * @param version version summary used to build the object revision
 * @return a fully populated, "clean" (unchanged) DatabaseMeta
 * @throws KettleException if the node data cannot be converted
 */
public DatabaseMeta assemble( RepositoryFile file, NodeRepositoryFileData data, VersionSummary version )
  throws KettleException {
  DatabaseMeta databaseMeta = (DatabaseMeta) dataNodeToElement( data.getNode() );
  // Derive the suffix length from the literal instead of the magic number 4.
  final String extension = ".kdb";
  String fileName = file.getName();
  if ( fileName.endsWith( extension ) ) {
    fileName = fileName.substring( 0, fileName.length() - extension.length() );
  }
  databaseMeta.setChangedDate( file.getLastModifiedDate() );
  databaseMeta.setName( fileName );
  databaseMeta.setDisplayName( file.getTitle() );
  databaseMeta.setObjectId( new StringObjectId( file.getId().toString() ) );
  databaseMeta.setObjectRevision( repo.createObjectRevision( version ) );
  // Freshly loaded state is by definition unmodified.
  databaseMeta.clearChanged();
  return databaseMeta;
}
}
/**
 * Assembles a {@link PartitionSchema} from its repository file, serialized node data and version summary.
 *
 * @param file    repository file backing the schema
 * @param data    serialized schema data
 * @param version version summary used to build the object revision
 * @return a fully populated, "clean" (unchanged) PartitionSchema
 * @throws KettleException if the node data cannot be converted
 */
public PartitionSchema assemble( RepositoryFile file, NodeRepositoryFileData data, VersionSummary version )
  throws KettleException {
  final PartitionSchema schema = (PartitionSchema) dataNodeToElement( data.getNode() );
  schema.setName( file.getTitle() );
  schema.setObjectId( new StringObjectId( file.getId().toString() ) );
  schema.setObjectRevision( repo.createObjectRevision( version ) );
  // Freshly loaded state is by definition unmodified.
  schema.clearChanged();
  return schema;
}
}
/**
 * Assembles a {@link ClusterSchema} from its repository file, serialized node data and version summary.
 *
 * @param file    repository file backing the schema
 * @param data    serialized schema data
 * @param version version summary used to build the object revision
 * @return a fully populated, "clean" (unchanged) ClusterSchema
 * @throws KettleException if the node data cannot be converted
 */
public ClusterSchema assemble( RepositoryFile file, NodeRepositoryFileData data, VersionSummary version )
  throws KettleException {
  final ClusterSchema schema = (ClusterSchema) dataNodeToElement( data.getNode() );
  schema.setName( file.getTitle() );
  schema.setObjectId( new StringObjectId( file.getId().toString() ) );
  schema.setObjectRevision( repo.createObjectRevision( version ) );
  // Freshly loaded state is by definition unmodified.
  schema.clearChanged();
  return schema;
}
}
/**
 * Assembles a {@link SlaveServer} from its repository file, serialized node data and version summary.
 *
 * @param file    repository file backing the server definition
 * @param data    serialized server data
 * @param version version summary used to build the object revision
 * @return a fully populated, "clean" (unchanged) SlaveServer
 * @throws KettleException if the node data cannot be converted
 */
public SlaveServer assemble( RepositoryFile file, NodeRepositoryFileData data, VersionSummary version )
  throws KettleException {
  final SlaveServer server = (SlaveServer) dataNodeToElement( data.getNode() );
  server.setName( file.getTitle() );
  server.setObjectId( new StringObjectId( file.getId().toString() ) );
  server.setObjectRevision( repo.createObjectRevision( version ) );
  // Freshly loaded state is by definition unmodified.
  server.clearChanged();
  return server;
}
}
/**
 * Loads a metastore element type by the id of its backing folder.
 * <p>
 * The type's optional description is stored in a child "details" file under the property
 * {@code element_type_description}.
 *
 * @param namespace     metastore namespace the type belongs to
 * @param elementTypeId repository id of the element type folder
 * @return the element type, or {@code null} when the folder does not exist
 * @throws MetaStoreException on lookup failure
 */
@Override
public IMetaStoreElementType getElementType( String namespace, String elementTypeId ) throws MetaStoreException {
  RepositoryFile typeFolder = pur.getFileById( elementTypeId );
  if ( typeFolder == null ) {
    return null;
  }
  IMetaStoreElementType type = newElementType( namespace );
  type.setId( typeFolder.getId().toString() );
  type.setName( typeFolder.getName() );
  // The description lives in a separate details file; it is optional.
  RepositoryFile detailsFile = findChildByName( typeFolder.getId(), ELEMENT_TYPE_DETAILS_FILENAME, true );
  if ( detailsFile != null ) {
    DataProperty descriptionProp = pur.getDataForRead( detailsFile.getId(), NodeRepositoryFileData.class )
        .getNode().getProperty( "element_type_description" );
    if ( descriptionProp != null ) {
      type.setDescription( descriptionProp.getString() );
    }
  }
  return type;
}
NodeRepositoryFileData.class ).getNode(), jobMeta );
pur.getDataAtVersionForRead( idTransformation.getId(), versionLabel, NodeRepositoryFileData.class ).getNode(), transMeta ); } finally {
private TransMeta buildTransMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir, final NodeRepositoryFileData data, final ObjectRevision revision ) throws KettleException { TransMeta transMeta = new TransMeta(); transMeta.setName( file.getTitle() ); transMeta.setFilename( file.getName() ); transMeta.setDescription( file.getDescription() ); transMeta.setObjectId( new StringObjectId( file.getId().toString() ) ); transMeta.setObjectRevision( revision ); transMeta.setRepository( this ); transMeta.setRepositoryDirectory( parentDir ); transMeta.setMetaStore( getMetaStore() ); readTransSharedObjects( transMeta ); // This should read from the local cache transDelegate.dataNodeToElement( data.getNode(), transMeta ); transMeta.clearChanged(); return transMeta; }
private JobMeta buildJobMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir, final NodeRepositoryFileData data, final ObjectRevision revision ) throws KettleException { JobMeta jobMeta = new JobMeta(); jobMeta.setName( file.getTitle() ); jobMeta.setFilename( file.getName() ); jobMeta.setDescription( file.getDescription() ); jobMeta.setObjectId( new StringObjectId( file.getId().toString() ) ); jobMeta.setObjectRevision( revision ); jobMeta.setRepository( this ); jobMeta.setRepositoryDirectory( parentDir ); jobMeta.setMetaStore( getMetaStore() ); readJobMetaSharedObjects( jobMeta ); // This should read from the local cache jobDelegate.dataNodeToElement( data.getNode(), jobMeta ); jobMeta.clearChanged(); return jobMeta; }
protected void createOrUpdateContentNode( final Session session, final PentahoJcrConstants pentahoJcrConstants, final NodeRepositoryFileData data, final Node fileNode ) throws RepositoryException { Node unstructuredNode = null; if ( fileNode.hasNode( pentahoJcrConstants.getJCR_CONTENT() ) ) { unstructuredNode = fileNode.getNode( pentahoJcrConstants.getJCR_CONTENT() ); } else { unstructuredNode = fileNode.addNode( pentahoJcrConstants.getJCR_CONTENT(), pentahoJcrConstants.getPHO_NT_INTERNALFOLDER() ); } // clear out all nodes since it's the quickest way to guarantee that existing nodes that should be deleted are // removed final String pattern = session.getNamespacePrefix( PentahoJcrConstants.PHO_NS ) + ":" + "*"; //$NON-NLS-1$ //$NON-NLS-2$ NodeIterator nodes = unstructuredNode.getNodes( pattern ); while ( nodes.hasNext() ) { nodes.nextNode().remove(); } internalCreateOrUpdate( session, pentahoJcrConstants, unstructuredNode, data.getNode() ); }
/**
 * Overwrites the on-disk file identified by the repository file's id with the given data.
 * <p>
 * Supports {@link SimpleRepositoryFileData} (raw stream copy) and {@link NodeRepositoryFileData}
 * (the node's {@code toString()} form, platform default charset — NOTE(review): consider an explicit
 * charset; changing it would alter on-disk bytes). Any other data type leaves the file truncated to
 * empty, matching the original behavior.
 *
 * @param file           repository file whose id names the target file
 * @param data           content to write
 * @param versionMessage ignored by this file-backed implementation
 * @return the refreshed repository file at the same path
 * @throws UnifiedRepositoryException if writing fails
 */
public RepositoryFile updateFile( RepositoryFile file, IRepositoryFileData data, String versionMessage ) {
  File f = new File( file.getId().toString() );
  FileOutputStream fos = null;
  try {
    fos = new FileOutputStream( f, false );
    if ( data instanceof SimpleRepositoryFileData ) {
      fos.write( inputStreamToBytes( ( (SimpleRepositoryFileData) data ).getInputStream() ) );
    } else if ( data instanceof NodeRepositoryFileData ) {
      // Write the serialized node directly; the old bytes->stream->bytes round trip was redundant.
      fos.write( ( (NodeRepositoryFileData) data ).getNode().toString().getBytes() );
    }
  } catch ( IOException e ) {
    // FileNotFoundException is an IOException; one catch replaces the two identical handlers.
    throw new UnifiedRepositoryException( e );
  } finally {
    IOUtils.closeQuietly( fos );
  }
  return getFile( file.getPath() );
}
/**
 * Creates a new on-disk file under the given parent folder path and writes the data into it.
 * <p>
 * Supports {@link SimpleRepositoryFileData} (raw stream copy) and {@link NodeRepositoryFileData}
 * (the node's {@code toString()} form, platform default charset). Any other data type yields an
 * empty file, matching the original behavior.
 *
 * @param parentFolderId parent folder path (its string form is used as a path prefix)
 * @param file           repository file whose name becomes the file name
 * @param data           content to write
 * @param acl            ignored by this file-backed implementation
 * @param versionMessage ignored by this file-backed implementation
 * @return the repository view of the newly created file
 * @throws UnifiedRepositoryException if creating or writing the file fails
 */
public RepositoryFile createFile( Serializable parentFolderId, RepositoryFile file, IRepositoryFileData data,
    RepositoryFileAcl acl, String versionMessage ) {
  String fileNameWithPath = RepositoryFilenameUtils.concat( parentFolderId.toString(), file.getName() );
  FileOutputStream fos = null;
  File f = new File( fileNameWithPath );
  try {
    // Return value intentionally ignored: FileOutputStream below creates the file anyway, and an
    // already-existing file is simply overwritten.
    f.createNewFile();
    fos = new FileOutputStream( f );
    if ( data instanceof SimpleRepositoryFileData ) {
      fos.write( inputStreamToBytes( ( (SimpleRepositoryFileData) data ).getInputStream() ) );
    } else if ( data instanceof NodeRepositoryFileData ) {
      // Write the serialized node directly; the old bytes->stream->bytes round trip was redundant.
      fos.write( ( (NodeRepositoryFileData) data ).getNode().toString().getBytes() );
    }
  } catch ( IOException e ) {
    // FileNotFoundException is an IOException; one catch replaces the two identical handlers.
    throw new UnifiedRepositoryException( "Error writing file [" + fileNameWithPath + "]", e );
  } finally {
    IOUtils.closeQuietly( fos );
  }
  return internalGetFile( f );
}
@Override public RepositoryFile call() throws Exception { List<RepositoryFile> referrers = unifiedRepository.getReferrers( file.getId() ); // Loop through nodes referring to the target file, return the first one designated as an ACL node int i = referrers.size(); while ( i-- > 0 ) { RepositoryFile referrer = referrers.get( i ); NodeRepositoryFileData dataForRead = unifiedRepository.getDataForRead( referrer.getId(), NodeRepositoryFileData.class ); if ( dataForRead != null && dataForRead.getNode().hasProperty( IS_ACL_NODE ) ) { return referrer; } } // No ACL node found return null; } } );
/**
 * Converts a {@link NodeRepositoryFileData} into its DTO form for serialization.
 *
 * @param v the data to convert
 * @return a DTO whose node tree mirrors {@code v.getNode()}
 */
@Override
public NodeRepositoryFileDataDto marshal( final NodeRepositoryFileData v ) {
  DataNodeDto nodeDto = new DataNodeDto();
  // Populate the node DTO first, then attach it to the container DTO.
  toDataNodeDto( nodeDto, v.getNode() );
  NodeRepositoryFileDataDto dto = new NodeRepositoryFileDataDto();
  dto.setNode( nodeDto );
  return dto;
}
/**
 * Post-processes a file record's data before it is stored.
 * <p>
 * Simple data is wrapped so its stream can be re-read; node data is run through the referral
 * manager (diffed against the old node, when present) and the id manager before being re-attached.
 * <p>
 * NOTE(review): when {@code oldData} is non-null it is cast to {@link NodeRepositoryFileData}
 * unconditionally — confirm callers never pass simple old data alongside node new data.
 *
 * @param r       record whose data is replaced in place
 * @param oldData previously stored data, or {@code null} on first write
 */
private void process( final FileRecord r, final IRepositoryFileData oldData ) {
  final IRepositoryFileData newData = r.getData();
  if ( newData instanceof SimpleRepositoryFileData ) {
    r.setData( new ReusableSimpleRepositoryFileData( (SimpleRepositoryFileData) newData ) );
  } else if ( newData instanceof NodeRepositoryFileData ) {
    final DataNode newNode = ( (NodeRepositoryFileData) newData ).getNode();
    final DataNode oldNode = oldData != null ? ( (NodeRepositoryFileData) oldData ).getNode() : null;
    referralManager.process( r.getFile().getId(), oldNode, newNode );
    r.setData( new NodeRepositoryFileData( idManager.process( newNode ) ) );
  }
}
private void testgetDatasourceById( boolean throwException ) throws Exception { final String dotKdb = ".kdb"; IUnifiedRepository repo = mock( IUnifiedRepository.class ); NodeRepositoryFileData nodeRep = mock ( NodeRepositoryFileData.class ); DataNode dataNode = mock( DataNode.class ); // stub out get parent folder doReturn( new RepositoryFile.Builder( "123", "databases" ).folder( true ).build() ).when( repo ).getFileById( EXP_FILE_ID ); doReturn( reservedChars ).when( repo ).getReservedChars(); // stub out get file to delete doReturn( new RepositoryFile.Builder( EXP_FILE_ID, EXP_DBMETA_NAME + dotKdb ).build() ).when( repo ).getFileById( EXP_FILE_ID); doReturn( nodeRep ).when(repo).getDataForRead( any(), any() ); doReturn( dataNode ).when( nodeRep ).getNode(); IDatasourceMgmtService datasourceMgmtService = new JcrBackedDatasourceMgmtService( repo, new DatabaseDialectService() ); if( throwException ) { getDatasourceWithIdThrowException( repo ); } assertNotNull( datasourceMgmtService.getDatasourceById( EXP_FILE_ID ) ); }
/**
 * Matches when every expected path/property pair is present in the data's node tree.
 * <p>
 * Each pair's path is of the form {@code /rootNodeName/child/.../propertyName}: the first segment
 * must equal the root node's name, intermediate segments are traversed as child nodes, and the last
 * segment names the property compared against the expected value.
 */
@Override
public boolean matchesSafely( final NodeRepositoryFileData data ) {
  for ( PathPropertyPair pair : pairs ) {
    if ( !pairMatches( data.getNode(), pair ) ) {
      return false;
    }
  }
  return true;
}

/** Returns whether a single path/property pair resolves and matches under the given root node. */
private boolean pairMatches( final DataNode root, final PathPropertyPair pair ) {
  final String[] segments = pair.getPath().substring( 1 ).split( "/" );
  if ( !root.getName().equals( segments[0] ) ) {
    return false;
  }
  DataNode node = root;
  // Walk the intermediate segments; the final segment is the property name, not a node.
  for ( int i = 1; i < segments.length - 1; i++ ) {
    node = node.getNode( segments[i] );
    if ( node == null ) {
      return false;
    }
  }
  DataProperty actual = node.getProperty( segments[segments.length - 1] );
  return pair.getProperty().equals( actual );
}
private IDatabaseConnection getDatasource( RepositoryFile file ) throws DatasourceMgmtServiceException { try { if ( file != null ) { NodeRepositoryFileData data = repository.getDataForRead( file.getId(), NodeRepositoryFileData.class ); IDatabaseConnection databaseConnection = databaseHelper.dataNodeToDatabaseConnection( file.getId(), file.getTitle(), data.getNode() ); // IPasswordService passwordService = PentahoSystem.get(IPasswordService.class, // PentahoSessionHolder.getSession()); // databaseMeta.setPassword(passwordService.decrypt(databaseMeta.getPassword())); return databaseConnection; } else { throw new DatasourceMgmtServiceException( Messages.getInstance().getErrorString( "DatasourceMgmtService.ERROR_0004_UNABLE_TO_RETRIEVE_DATASOURCE", "", "" ) ); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ } // } catch(PasswordServiceException pse) { // throw new DatasourceMgmtServiceException(Messages.getInstance() // .getErrorString("DatasourceMgmtService.ERROR_0008_UNABLE_TO_DECRYPT_PASSWORD"), pse ); //$NON-NLS-1$ } catch ( UnifiedRepositoryException ure ) { throw new DatasourceMgmtServiceException( Messages .getInstance() .getErrorString( "DatasourceMgmtService.ERROR_0004_UNABLE_TO_RETRIEVE_DATASOURCE", file.getName(), ure.getLocalizedMessage() ), ure ); //$NON-NLS-1$ } }