public static Map<String, Object> getXmlRpcProductType(ProductType type) { Map<String, Object> productTypeHash = new Hashtable<String, Object>(); // TODO(bfoster): ProductType ID is currently required by XmlRpcFileManager. productTypeHash.put("id", type.getProductTypeId()); if (type.getName() != null) { productTypeHash.put("name", type.getName()); } if (type.getDescription() != null) { productTypeHash.put("description", type.getDescription()); } if (type.getProductRepositoryPath() != null) { productTypeHash.put("repositoryPath", type.getProductRepositoryPath()); } if (type.getVersioner() != null) { productTypeHash.put("versionerClass", type.getVersioner()); } if (type.getTypeMetadata() != null) { productTypeHash.put("typeMetadata", type.getTypeMetadata().getHashTable()); } if (type.getExtractors() != null) { productTypeHash.put("typeExtractors", getXmlRpcTypeExtractors(type.getExtractors())); } if (type.getHandlers() != null) { productTypeHash.put("typeHandlers", getXmlRpcTypeHandlers(type.getHandlers())); } return productTypeHash; }
/**
 * Connection-safe wrapper around the file manager's product-type-by-name
 * lookup.
 *
 * @param name the product type name to resolve
 * @return the matching {@link ProductType}; a blank type when the client is
 *         not connected; {@code null} when the remote lookup fails
 */
public ProductType safeGetProductTypeByName(String name) {
    if (!isConnected()) {
        return ProductType.blankProductType();
    }
    try {
        return fm.getProductTypeByName(name);
    } catch (Exception e) {
        // Pass the exception to the logger so the stack trace and root cause
        // are preserved; logging only getMessage() discarded both.
        LOG.log(Level.WARNING, "Unable to get product type by name: [" + name
            + "]: Message: " + e.getMessage(), e);
        return null;
    }
}
/**
 * Registers a new product type with the underlying repository manager.
 *
 * @param productType the type to add
 * @return the id of the newly added product type
 * @throws RepositoryManagerException if the repository manager rejects the add
 */
public String addProductType(ProductType productType) throws RepositoryManagerException {
    LOG.fine("Adding product type : " + productType.toString());
    // Delegate persistence to the repository manager, then report the id back.
    repositoryManager.addProductType(productType);
    String typeId = productType.getProductTypeId();
    return typeId;
}
public static ProductType toScienceDataProductType(ResultSet rs) throws SQLException { ProductType type = new ProductType(); type.setProductTypeId(rs.getString("dataset_id")); type.setDescription(rs.getString("description")); type.setName(rs.getString("shortName")); type.setVersioner("gov.nasa.jpl.oodt.cas.filemgr.versioning.BasicVersioner"); // use // basic // versioner type.setProductRepositoryPath("file:///tmp"); // not moving files anyways Metadata typeMet = new Metadata(); typeMet.addMetadata("DatasetId", type.getProductTypeId()); typeMet.addMetadata("DatasetShortName", type.getName() != null ? type.getName() : ""); typeMet.addMetadata("DatasetLongName", rs.getString("longName") != null ? rs.getString("longName") : ""); typeMet.addMetadata("Description", type.getDescription() != null ? type.getDescription() : ""); typeMet.addMetadata("Source", rs.getString("source") != null ? rs.getString("source") : ""); typeMet.addMetadata("ReferenceURL", rs.getString("referenceURL") != null ? rs.getString("referenceURL") : ""); type.setTypeMetadata(typeMet); return type; }
+ productType.getName() + "', '" + productType.getDescription() + "', '" + productType.getProductRepositoryPath() + "', '" + productType.getVersioner() + "')"; productType.setProductTypeId(productTypeId);
.getProductType().getProductTypeId())); } catch (RepositoryManagerException e) { LOG.log(Level.SEVERE, "Failed to load ProductType " + product .getProductType().getProductTypeId(), e); return null; met.addMetadata(metadata.getHashTable()); if (product.getProductType().getExtractors() != null) { for (ExtractorSpec spec: product.getProductType().getExtractors()) { FilemgrMetExtractor extractor = GenericFileManagerObjectFactory .getExtractorFromClassName(spec.getClassName()); + extractor.getClass().getName() + "] for product type: [" + product.getProductType().getName() + "]"); try { met = extractor.extractMetadata(product, met);
/**
 * Runs the product type's post-get handlers over the given metadata so
 * handler-transformed values are restored to their original form.
 *
 * @param metadata the metadata to transform in place (also returned)
 * @param productType the type whose registered handlers should be applied
 * @return the same metadata instance, after handler processing
 * @throws RepositoryManagerException if the type lookup fails
 */
private Metadata getOrigValues(Metadata metadata, ProductType productType)
    throws RepositoryManagerException {
  // Re-fetch the authoritative type definition so we use its handler list.
  ProductType fullType =
      this.repositoryManager.getProductTypeById(productType.getProductTypeId());
  List<TypeHandler> handlers = fullType.getHandlers();
  if (handlers == null) {
    return metadata;
  }
  for (TypeHandler handler : handlers) {
    handler.postGetMetadataHandle(metadata);
  }
  return metadata;
}
/**
 * Updates the name and description of an existing product type in the
 * {@code dataset} table.
 *
 * <p>Uses a {@link PreparedStatement} with bind parameters: the previous
 * string-concatenated SQL was open to SQL injection and broke whenever the
 * name or description contained a single quote.
 *
 * @param productType the type whose shortName/description should be persisted
 * @throws RepositoryManagerException declared for interface compatibility;
 *         SQL failures are currently logged rather than rethrown, matching
 *         the original behavior
 */
public void modifyProductType(ProductType productType) throws RepositoryManagerException {
    String sql = "UPDATE dataset SET shortName = ?, description = ? WHERE dataset_id = ?";
    Connection conn = null;
    PreparedStatement statement = null;
    try {
        conn = this.dataSource.getConnection();
        statement = conn.prepareStatement(sql);
        statement.setString(1, productType.getName());
        statement.setString(2, productType.getDescription());
        // dataset_id was previously inlined as an unquoted (numeric) literal;
        // binding the id string lets the driver coerce it to the column type.
        statement.setString(3, productType.getProductTypeId());
        statement.executeUpdate();
    } catch (SQLException e) {
        // Log the full exception (not just the message) so the cause survives.
        LOG.log(Level.SEVERE, e.getMessage(), e);
    } finally {
        if (statement != null) {
            try {
                statement.close();
            } catch (Exception ignore) {
            }
        }
        if (conn != null) {
            try {
                conn.close();
            } catch (Exception ignore) {
            }
        }
    }
}
/**
 * Looks up the file-path specification for a product type, falling back to
 * the catch-all entry when no type-specific property is configured.
 *
 * @param productType the type whose path spec is wanted
 * @return the configured path specification
 * @throws VersioningException if neither a type-specific nor a catch-all
 *         spec is defined
 */
private String getFilePathSpec(ProductType productType) throws VersioningException {
    // Type-specific entry takes precedence over the catch-all.
    String spec = properties.getProperty(BASE_PROPERTY + productType.getName().toLowerCase());
    if (!Strings.isNullOrEmpty(spec)) {
        return spec;
    }
    spec = properties.getProperty(BASE_PROPERTY + ALL);
    if (!Strings.isNullOrEmpty(spec)) {
        return spec;
    }
    throw new VersioningException("Not defined for product type " + productType.getName());
}
}
public void addProductType(ProductType productType) throws RepositoryManagerException { String sql = "INSERT INTO dataset (longName, shortName, description) VALUES ('" + productType.getName() + "', '" + productType.getName() + "', '" + productType.getDescription() + "'"; Connection conn = null; Statement statement = null;
/**
 * Sets a parentId for an existing {@link ProductType}.
 *
 * <p>Records the mapping in the in-memory sub-type to super-type map and then
 * persists all elements and mappings via {@code saveElementsAndMappings()}.
 *
 * <p>NOTE(review): the previous Javadoc declared
 * {@code @throws ValidationLayerException}, but this method declares no
 * checked exceptions — removed to keep the docs accurate.
 *
 * @param type the {@link ProductType} to add a parent for
 * @param parentId the id of the parent {@link ProductType}
 */
public void addParentForProductType(ProductType type, String parentId) {
    subToSuperMap.put(type.getProductTypeId(), parentId);
    saveElementsAndMappings();
}
List<ProductType> types = fmClient.getProductTypes(); for (ProductType type : types) { if (!config.getIgnoreTypes().contains(type.getName().trim())) { LOG.info("Paging through products for product type: " + type.getName()); ProductPage page = safeFirstPage(fmClient, type); while (page != null) { try { this.indexProduct(product.getProductId(), fmClient .getMetadata(product), type.getTypeMetadata()); } catch (Exception e) { LOG.severe("Could not index " + product.getProductId() + ": "
/**
 * Resolves the on-disk {@link File} for a product.
 *
 * <p>Hierarchical products are resolved through the root reference path under
 * the type's repository path; flat products use the origin reference of the
 * first product reference.
 *
 * @param product the product whose file is wanted
 * @return the resolved file
 * @throws MetExtractionException if the reference URI cannot be parsed
 */
protected File getProductFile(Product product) throws MetExtractionException {
    File prodFile;
    // Constant-first equals avoids an NPE when the product structure is unset.
    if (Product.STRUCTURE_HIERARCHICAL.equals(product.getProductStructure())) {
        try {
            prodFile = new File(getRootRefPath(product.getProductReferences(),
                product.getProductType().getProductRepositoryPath()));
        } catch (Exception e) {
            // Log the full exception so the stack trace is not lost.
            LOG.log(Level.SEVERE, e.getMessage(), e);
            throw new MetExtractionException("URI exception parsing: ["
                + product.getRootRef().getOrigReference() + "]");
        }
    } else {
        try {
            prodFile = new File(new URI(((Reference) product
                .getProductReferences().get(0)).getOrigReference()));
        } catch (Exception e) {
            throw new MetExtractionException("URI exception parsing: ["
                + ((Reference) product.getProductReferences().get(0))
                    .getOrigReference() + "]");
        }
    }
    return prodFile;
}
Metadata productMetadata = fmClient.getMetadata(product); indexProduct(product.getProductId(), productMetadata, product .getProductType().getTypeMetadata()); } catch (MalformedURLException e) { LOG.severe("File Manager URL is malformed: " + e.getMessage());
@Override public Metadata doExtract(Product product, Metadata met) throws MetExtractionException { Metadata extractMet = new Metadata(); merge(met, extractMet); // get the Versioner Versioner versioner = GenericFileManagerObjectFactory .getVersionerFromClassName(product.getProductType().getVersioner()); try { versioner.createDataStoreReferences(product, met); } catch (VersioningException e) { throw new MetExtractionException( "Unable to generate final FileLocation: Reason: " + e.getMessage()); } Reference r = product.getProductReferences().get(0); String finalLocation = VersioningUtils.getAbsolutePathFromUri(r .getDataStoreReference()); if (this.replaceLocation) { extractMet.replaceMetadata(FILE_LOCATION, new File(finalLocation).getParent()); } else { extractMet .addMetadata(FILE_LOCATION, new File(finalLocation).getParent()); } this.scrubRefs(product); return extractMet; }
/**
 * Builds a {@link ProductType} from one row of a science-data {@code dataset}
 * result set, mirroring the row's columns into the type-level metadata.
 * Null columns are mapped to empty strings in the metadata.
 *
 * @param rs a ResultSet positioned on a dataset row
 * @return the populated product type
 * @throws SQLException if any column read fails
 */
public static ProductType toScienceDataProductType(ResultSet rs) throws SQLException {
    ProductType type = new ProductType();
    type.setProductTypeId(rs.getString("dataset_id"));
    type.setDescription(rs.getString("description"));
    type.setName(rs.getString("shortName"));
    // Use the basic versioner; files are not moved, so a throwaway
    // repository path suffices.
    type.setVersioner("gov.nasa.jpl.oodt.cas.filemgr.versioning.BasicVersioner");
    type.setProductRepositoryPath("file:///tmp");
    Metadata typeMet = new Metadata();
    typeMet.addMetadata("DatasetId", type.getProductTypeId());
    typeMet.addMetadata("DatasetShortName", type.getName() != null ? type.getName() : "");
    typeMet.addMetadata("DatasetLongName",
        rs.getString("longName") != null ? rs.getString("longName") : "");
    typeMet.addMetadata("Description",
        type.getDescription() != null ? type.getDescription() : "");
    typeMet.addMetadata("Source",
        rs.getString("source") != null ? rs.getString("source") : "");
    typeMet.addMetadata("ReferenceURL",
        rs.getString("referenceURL") != null ? rs.getString("referenceURL") : "");
    type.setTypeMetadata(typeMet);
    return type;
}
+ productType.getName() + "', '" + productType.getDescription() + "', '" + productType.getProductRepositoryPath() + "', '" + productType.getVersioner() + "')"; productType.setProductTypeId(productTypeId);
.getProductType().getProductTypeId())); } catch (RepositoryManagerException e) { LOG.log(Level.SEVERE, "Failed to load ProductType " + product .getProductType().getProductTypeId(), e); return null; met.addMetadata(metadata.getHashTable()); if (product.getProductType().getExtractors() != null) { for (ExtractorSpec spec : product.getProductType().getExtractors()) { FilemgrMetExtractor extractor = GenericFileManagerObjectFactory .getExtractorFromClassName(spec.getClassName()); + (extractor != null ? extractor.getClass().getName() : null) + "] for product type: [" + product.getProductType().getName() + "]"); try { met = extractor != null ? extractor.extractMetadata(product, met) : null;