public CloseableIterable<? extends Element> doOperation(final GetAllElements operation, final User user, final AccumuloStore store)
        throws OperationException {
    try {
        return new AccumuloAllElementsRetriever(store, operation, user);
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
@Override
public Schema doOperation(final GetSchema operation, final Context context, final Store store) throws OperationException {
    if (null == operation) {
        throw new OperationException("Operation cannot be null");
    }
    return ((FederatedStore) store).getSchema(operation, context);
}
protected Configuration getConfiguration(final OP operation) throws OperationException {
    final Configuration conf = new Configuration();
    final String serialisedConf = operation.getOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY);
    if (null != serialisedConf) {
        try {
            final ByteArrayInputStream bais = new ByteArrayInputStream(serialisedConf.getBytes(CommonConstants.UTF_8));
            conf.readFields(new DataInputStream(bais));
        } catch (final IOException e) {
            throw new OperationException("Exception decoding Configuration from options", e);
        }
    }
    return conf;
}
private void moveData(final FileSystem fs, final String tempFileDir, final String group, final String column, final String splitNumber)
        throws StoreException, IOException, OperationException {
    // Move data from temp to data
    final String sourceFile = ParquetStore.getGroupDirectory(group, column, tempFileDir)
            + SORTED + SPLIT + splitNumber + "/part-00000-*.parquet";
    final FileStatus[] files = fs.globStatus(new Path(sourceFile));
    if (files.length == 1) {
        final Path destPath = new Path(ParquetStore.getGroupDirectory(group, column, tempFileDir + SORTED)
                + "/part-" + zeroPad(splitNumber, 5) + ".gz.parquet");
        fs.mkdirs(destPath.getParent());
        fs.rename(files[0].getPath(), destPath);
    } else if (files.length > 1) {
        throw new OperationException("Expected to get only one file which matched the file pattern " + sourceFile);
    }
}
protected Configuration getConfiguration(final OP operation) throws OperationException {
    final String serialisedConf = operation.getOption(AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY);
    if (null == serialisedConf) {
        return new Configuration();
    }
    try {
        return AbstractGetRDDHandler.convertStringToConfiguration(serialisedConf);
    } catch (final IOException e) {
        throw new OperationException("Exception decoding Configuration from options", e);
    }
}
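// A minimal sketch of the inverse of the getConfiguration handlers above: producing the string
// stored under AbstractGetRDDHandler.HADOOP_CONFIGURATION_KEY. It mirrors the UTF-8 round trip
// used by the decoding handler; this is illustrative only and is not necessarily how
// AbstractGetRDDHandler.convertStringToConfiguration expects the value to have been built.
private static String serialiseConfiguration(final Configuration conf) throws IOException {
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    conf.write(new DataOutputStream(baos));   // Configuration implements Writable
    return new String(baos.toByteArray(), CommonConstants.UTF_8);
}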
public CloseableIterable<? extends Element> doOperation(final GetElements operation, final User user, final AccumuloStore store)
        throws OperationException {
    if (null != operation.getOption("accumulostore.operation.return_matched_id_as_edge_source")) {
        throw new IllegalArgumentException("The accumulostore.operation.return_matched_id_as_edge_source option has been removed. Instead of flipping the Edges around the result Edges will have a matchedVertex field set specifying if the SOURCE or DESTINATION was matched.");
    }
    if (null == operation.getInput()) {
        throw new OperationException("Operation input is undefined - please specify an input.");
    }
    try {
        return new AccumuloElementsRetriever(store, operation, user);
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
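// A hedged usage sketch for the GetElements handler above: the handler rejects a null input, so a
// caller would seed the operation before executing it against a Graph. The graph instance, seed
// value and user id here are hypothetical.
final GetElements getElements = new GetElements.Builder()
        .input(new EntitySeed("vertex1"))   // hypothetical seed
        .build();
final CloseableIterable<? extends Element> results = graph.execute(getElements, new User("user01"));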
@Override
public int run(final String[] strings) throws Exception {
    jobFactory.prepareStore(store);
    LOGGER.info("Adding elements from HDFS");
    final List<Job> jobs = jobFactory.createJobs(operation, store);
    for (final Job job : jobs) {
        job.waitForCompletion(true);
        if (!job.isSuccessful()) {
            LOGGER.error("Error running job");
            throw new OperationException("Error running job");
        }
    }
    LOGGER.info("Finished adding elements from HDFS");
    return SUCCESS_RESPONSE;
}
private void importFiles(final ImportAccumuloKeyValueFiles operation, final AccumuloStore store) throws OperationException {
    final ImportElementsToAccumuloTool importTool = new ImportElementsToAccumuloTool(
            operation.getInputPath(), operation.getFailurePath(), store, operation.getOptions());
    try {
        ToolRunner.run(importTool, new String[0]);
    } catch (final Exception e) {
        throw new OperationException(e.getMessage(), e);
    }
}
public <O> O executeOpChainViaUrl(final OperationChain<O> opChain, final Context context) throws OperationException {
    final String opChainJson;
    try {
        opChainJson = new String(JSONSerialiser.serialise(opChain), CommonConstants.UTF_8);
    } catch (final UnsupportedEncodingException | SerialisationException e) {
        throw new OperationException("Unable to serialise operation chain into JSON.", e);
    }
    final URL url = getProperties().getGafferUrl("graph/operations/execute");
    try {
        return doPost(url, opChainJson, opChain.getOutputTypeReference(), context);
    } catch (final StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
}
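// A minimal sketch of the kind of OperationChain that executeOpChainViaUrl serialises and posts to
// the remote Gaffer REST endpoint. The chain contents and user are hypothetical; any output
// operation chain works, since its output type reference drives deserialisation of the response.
final OperationChain<CloseableIterable<? extends Element>> opChain = new OperationChain.Builder()
        .first(new GetAllElements())
        .build();
final CloseableIterable<? extends Element> result = executeOpChainViaUrl(opChain, new Context(new User("user01")));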
public CloseableIterable<? extends EntityId> doOperation(final GetAdjacentIds op, final User user, final AccumuloStore store)
        throws OperationException {
    try {
        return new AccumuloAdjacentIdRetriever(store, op, user);
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
}
@Override
public Void doOperation(final RemoveGraph operation, final Context context, final Store store) throws OperationException {
    try {
        ((FederatedStore) store).remove(operation.getGraphId(), context.getUser());
    } catch (final Exception e) {
        throw new OperationException("Error removing graph: " + operation.getGraphId(), e);
    }
    return null;
}
@Override
public JobDetail executeJob(final OperationChain<?> operationChain, final Context context) throws OperationException {
    final URL url = getProperties().getGafferUrl("graph/jobs");
    try {
        return doPost(url, operationChain, new TypeReferenceImpl.JobDetail(), context);
    } catch (final StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
}
private CloseableIterable<Element> doOperation(final GetAllElements operation, final ParquetStore store, final User user)
        throws OperationException {
    try {
        return new ParquetElementRetriever(operation.getView(), store, operation.getDirectedType(), null, null, null, user);
    } catch (final StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
private RDD<Element> doOperation(final GetRDDOfAllElements operation, final Context context, final AccumuloStore accumuloStore)
        throws OperationException {
    SparkSession sparkSession = SparkContextUtil.getSparkSession(context, accumuloStore.getProperties());
    if (sparkSession == null) {
        throw new OperationException("This operation requires an active SparkSession.");
    }
    sparkSession.sparkContext().hadoopConfiguration().addResource(getConfiguration(operation));
    final String useRFileReaderRDD = operation.getOption(USE_RFILE_READER_RDD);
    if (Boolean.parseBoolean(useRFileReaderRDD)) {
        return doOperationUsingRFileReaderRDD(operation, context, accumuloStore);
    } else {
        return doOperationUsingElementInputFormat(operation, context, accumuloStore);
    }
}
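// A hedged sketch of how the RFileReaderRDD code path above could be selected: the caller sets the
// USE_RFILE_READER_RDD option on the operation before execution. The option key is the constant
// referenced in the handler; the graph instance and user are hypothetical.
final GetRDDOfAllElements getAllRdd = new GetRDDOfAllElements.Builder()
        .option(USE_RFILE_READER_RDD, "true")   // any value other than "true" falls back to ElementInputFormat
        .build();
final RDD<Element> rdd = graph.execute(getAllRdd, new User("user01"));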
private CloseableIterable<Element> doOperation(final GetElements operation, final MapStore mapStore) throws OperationException {
    final MapImpl mapImpl = mapStore.getMapImpl();
    if (!mapImpl.isMaintainIndex()) {
        throw new OperationException("Cannot execute getElements if the properties request that an index is not created");
    }
    final Iterable<? extends ElementId> seeds = operation.getInput();
    if (null == seeds) {
        return new EmptyClosableIterable<>();
    }
    return new ElementsIterable(mapImpl, operation, mapStore.getSchema());
}
private CloseableIterable<Element> doOperation(final GetElements operation, final ParquetStore store, final User user)
        throws OperationException {
    try {
        return new ParquetElementRetriever(operation.getView(), store, operation.getDirectedType(),
                operation.getIncludeIncomingOutGoing(), operation.getSeedMatching(), operation.getInput(), user);
    } catch (final StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
private void addElements(final AddElements operation, final AccumuloStore store) throws OperationException {
    try {
        final Iterable<? extends Element> validatedElements;
        if (operation.isValidate()) {
            validatedElements = new ValidatedElements(operation.getInput(), store.getSchema(), operation.isSkipInvalidElements());
        } else {
            validatedElements = operation.getInput();
        }
        store.addElements(validatedElements);
    } catch (final StoreException e) {
        throw new OperationException("Failed to add elements", e);
    }
}
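// A hedged usage sketch for the addElements handler above: the validate and skipInvalidElements
// flags checked by the handler are set on the operation by the caller. The elements iterable,
// graph instance and user are hypothetical.
final AddElements addElements = new AddElements.Builder()
        .input(elements)                 // hypothetical Iterable of Element
        .validate(true)                  // handler wraps the input in ValidatedElements
        .skipInvalidElements(false)      // fail on invalid elements rather than dropping them
        .build();
graph.execute(addElements, new User("user01"));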
public CloseableIterable<? extends Element> doOperation(final GetElementsBetweenSets operation, final User user, final AccumuloStore store)
        throws OperationException {
    try {
        final IteratorSettingFactory iteratorFactory = store.getKeyPackage().getIteratorFactory();
        return new AccumuloIDBetweenSetsRetriever(store, operation, user,
                iteratorFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                iteratorFactory.getQueryTimeAggregatorIteratorSetting(operation.getView(), store));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
public CloseableIterable<? extends Element> doOperation(final GetElementsWithinSet operation, final User user, final AccumuloStore store)
        throws OperationException {
    try {
        final IteratorSettingFactory iteratorFactory = store.getKeyPackage().getIteratorFactory();
        return new AccumuloIDWithinSetRetriever(store, operation, user,
                iteratorFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                iteratorFactory.getQueryTimeAggregatorIteratorSetting(operation.getView(), store));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
public CloseableIterable<? extends Element> doOperation(final GetElementsInRanges operation, final User user, final AccumuloStore store)
        throws OperationException {
    final IteratorSettingFactory itrFactory = store.getKeyPackage().getIteratorFactory();
    try {
        return new AccumuloRangeIDRetriever<>(store, operation, user,
                itrFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                itrFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                itrFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                itrFactory.getElementPropertyRangeQueryFilter(operation),
                itrFactory.getQueryTimeAggregatorIteratorSetting(operation.getView(), store));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}