/** Renders this split as {@code nodeName:path} for logs and debugging. */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(fileSplit.getNodeName());
    sb.append(':');
    sb.append(fileSplit.getPath());
    return sb.toString();
}
/**
 * Opens the raw byte output stream over this partition's split file.
 *
 * @throws HyracksDataException if the target file cannot be opened for writing
 */
@Override
public void open() throws HyracksDataException {
    try {
        FileOutputStream fos = new FileOutputStream(splits[partition].getFile(ioManager));
        out = fos;
    } catch (FileNotFoundException e) {
        throw HyracksDataException.create(e);
    }
}
/**
 * Resolves the absolute file reference backing the index for the given partition.
 *
 * @param fileSplitProvider provider of the index's file splits
 * @param partition partition whose split is resolved
 * @param ioManager IO manager used to map the split onto a local device
 * @return the resolved {@code FileReference} for the partition's split
 * @throws HyracksDataException if the reference cannot be resolved
 */
public static FileReference getIndexAbsoluteFileRef(IFileSplitProvider fileSplitProvider, int partition,
        IIOManager ioManager) throws HyracksDataException {
    final FileSplit[] splits = fileSplitProvider.getFileSplits();
    return splits[partition].getFileReference(ioManager);
}
}
/** Returns {@code true} when both splits are mapped to the same node (compared by node name). */
private boolean ncEq(FileSplit fs1, FileSplit fs2) {
    final String firstNode = fs1.getNodeName();
    return firstNode.equals(fs2.getNodeName());
}
/**
 * Builds a {@code FeedLogManager} over the feed log file assigned to the given partition.
 *
 * @param ctx task context supplying the IO manager
 * @param partition partition whose feed log split is used
 * @param feedLogFileSplits all feed log file splits, indexed by partition
 * @return a log manager bound to the partition's resolved log file
 * @throws HyracksDataException if the log file reference cannot be resolved
 */
public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, int partition,
        FileSplit[] feedLogFileSplits) throws HyracksDataException {
    final String logPath = feedLogFileSplits[partition].getPath();
    return new FeedLogManager(FeedUtils.getAbsoluteFileRef(logPath, 0, ctx.getIoManager()).getFile());
}
/**
 * Pins the operator to the nodes hosting the given splits, one location per split.
 *
 * @param spec job specification the constraint is added to
 * @param op operator being constrained
 * @param splits file splits whose node names define the absolute locations
 */
static void createPartitionConstraint(JobSpecification spec, IOperatorDescriptor op, FileSplit[] splits) {
    final String[] locations = new String[splits.length];
    int idx = 0;
    for (FileSplit split : splits) {
        locations[idx++] = split.getNodeName();
    }
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, locations);
}
}
/**
 * Computes the storage partition numbers covered by this dataset, one per file split.
 *
 * @param metadataProvider provider used to look up the dataset's splits
 * @return partition numbers extracted from each split's relative path
 * @throws AlgebricksException if the splits cannot be retrieved
 */
protected int[] getDatasetPartitions(MetadataProvider metadataProvider) throws AlgebricksException {
    final FileSplit[] splits =
            metadataProvider.splitsForIndex(metadataProvider.getMetadataTxnContext(), this, getDatasetName());
    final int[] partitions = new int[splits.length];
    for (int i = 0; i < splits.length; i++) {
        partitions[i] = StoragePathUtil.getPartitionNumFromRelativePath(splits[i].getPath());
    }
    return partitions;
}
/**
 * Renders the domain as {@code FileSplitDomain[node:path, node:path, ...]}.
 *
 * Fix: the original concatenated {@code nodeName + ":" + path} inside
 * {@code sb.append(...)}, allocating a hidden StringBuilder per split; chained
 * appends build the same string without the extra allocations.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("FileSplitDomain[");
    boolean first = true;
    for (FileSplit fs : splits) {
        if (!first) {
            sb.append(", ");
        }
        first = false;
        sb.append(fs.getNodeName()).append(':').append(fs.getPath());
    }
    sb.append(']');
    return sb.toString();
}
/**
 * Pins the operator to the nodes hosting the given splits, one absolute location per split.
 *
 * @param spec job specification the constraint is added to
 * @param op operator being constrained
 * @param splits file splits whose node names define the locations
 */
private static void createPartitionConstraint(JobSpecification spec, IOperatorDescriptor op, FileSplit[] splits) {
    final int count = splits.length;
    final String[] locations = new String[count];
    for (int i = 0; i < count; i++) {
        locations[i] = splits[i].getNodeName();
    }
    PartitionConstraintHelper.addAbsoluteLocationConstraint(spec, op, locations);
}
}
/**
 * Builds a {@code FeedLogManager} over the given feed log file split.
 *
 * @param ctx task context supplying the IO manager
 * @param feedLogFileSplit the split whose path identifies the log file
 * @return a log manager bound to the resolved log file
 * @throws HyracksDataException if the log file reference cannot be resolved
 */
public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, FileSplit feedLogFileSplit)
        throws HyracksDataException {
    final String logPath = feedLogFileSplit.getPath();
    return new FeedLogManager(FeedUtils.getAbsoluteFileRef(logPath, 0, ctx.getIoManager()).getFile());
}
/**
 * Creates a line-oriented record writer over the split's resolved file, using the
 * configured columns and separator. The {@code index} argument is not consulted here.
 *
 * @throws HyracksDataException if the split's file cannot be resolved
 */
@Override
protected IRecordWriter createRecordWriter(IIOManager ioManager, FileSplit fileSplit, int index)
        throws HyracksDataException {
    return new LineWriterImpl(fileSplit.getFile(ioManager), columns, separator);
}
}
/**
 * Creates an {@code IndexDataflowHelper} for the given partition, bound to the
 * resource reference resolved from that partition's file split.
 *
 * @param ctx NC service context supplying the IO manager
 * @param partition partition whose split backs the index resource
 * @throws HyracksDataException if the resource reference cannot be resolved
 */
@Override
public IIndexDataflowHelper create(INCServiceContext ctx, int partition) throws HyracksDataException {
    final FileSplit[] splits = fileSplitProvider.getFileSplits();
    final FileReference resourceRef = splits[partition].getFileReference(ctx.getIoManager());
    return new IndexDataflowHelper(ctx, storageMgr, resourceRef);
}
}
private void formResponseObject(ObjectNode jsonResponse, FileSplit[] fileSplits, ARecordType recordType, String primaryKeys, Map<String, NodeControllerInfo> nodeMap) { ArrayNode partititons = OBJECT_MAPPER.createArrayNode(); // Adds a primary key. jsonResponse.put("keys", primaryKeys); // Adds record type. jsonResponse.set("type", recordType.toJSON()); // Generates file partitions. for (FileSplit split : fileSplits) { String ipAddress = nodeMap.get(split.getNodeName()).getNetworkAddress().getAddress(); String path = split.getPath(); FilePartition partition = new FilePartition(ipAddress, path); partititons.add(partition.toObjectNode()); } // Generates the response object which contains the splits. jsonResponse.set("splits", partititons); } }
/**
 * Wraps the given splits into a constant split provider and an absolute partition
 * constraint pinning each partition to its split's node.
 *
 * @param splits file splits, one per partition
 * @return the provider/constraint pair for the splits
 */
public static Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitProviderAndPartitionConstraints(
        FileSplit[] splits) {
    final String[] locations = new String[splits.length];
    int idx = 0;
    for (FileSplit split : splits) {
        locations[idx++] = split.getNodeName();
    }
    final IFileSplitProvider splitProvider = new ConstantFileSplitProvider(splits);
    return new Pair<>(splitProvider, new AlgebricksAbsolutePartitionConstraint(locations));
}
@Override public void initialize() throws HyracksDataException { // will only work for files inside the io devices File f = split.getFile(ioManager); if (quietly) { FileUtils.deleteQuietly(f); } else { try { FileUtils.deleteDirectory(f); } catch (IOException e) { throw HyracksDataException.create(e); } } }
/**
 * Creates an {@code IndexBuilder} for the given partition, bound to the resource
 * reference resolved from that partition's file split.
 *
 * @param ctx task context supplying the joblet's service context and IO manager
 * @param partition partition whose split backs the index resource
 * @throws HyracksDataException if the resource reference cannot be resolved
 */
@Override
public IIndexBuilder create(IHyracksTaskContext ctx, int partition) throws HyracksDataException {
    final FileReference resourceRef =
            fileSplitProvider.getFileSplits()[partition].getFileReference(ctx.getIoManager());
    return new IndexBuilder(ctx.getJobletContext().getServiceContext(), storageManager,
            storageManager.getResourceIdFactory(ctx.getJobletContext().getServiceContext()), resourceRef,
            localResourceFactory, durable);
}
}
/**
 * Builds the sink-writer runtime for a file data sink, pinned to the single node
 * named by the sink's file split.
 *
 * @param sink the data sink; must be a {@code FileSplitDataSink}
 * @param printColumns column indexes to print
 * @param printerFactories printer factory per printed column
 * @param inputDesc record descriptor of the incoming tuples
 * @return the runtime factory paired with its absolute partition constraint
 */
@Override
public Pair<IPushRuntimeFactory, AlgebricksPartitionConstraint> getWriteFileRuntime(IDataSink sink,
        int[] printColumns, IPrinterFactory[] printerFactories, RecordDescriptor inputDesc) {
    final FileSplit fs = ((FileSplitDataSink) sink).getId().getFileSplit();
    final SinkWriterRuntimeFactory runtime = new SinkWriterRuntimeFactory(printColumns, printerFactories,
            new File(fs.getPath()), getWriterFactory(), inputDesc);
    final AlgebricksPartitionConstraint apc =
            new AlgebricksAbsolutePartitionConstraint(new String[] { fs.getNodeName() });
    return new Pair<>(runtime, apc);
}
/**
 * Opens a buffered character writer over this partition's split file and prepares
 * the byte-buffer-backed input stream used for deserialization.
 *
 * NOTE(review): FileWriter uses the platform default charset, and the broad
 * {@code catch (Exception e)} also wraps unchecked failures — both kept as-is
 * to preserve existing behavior.
 *
 * @throws HyracksDataException if opening the output file fails
 */
@Override
public void open() throws HyracksDataException {
    try {
        FileWriter fileWriter = new FileWriter(splits[partition].getFile(ioManager));
        out = new BufferedWriter(fileWriter);
        bbis = new ByteBufferInputStream();
        di = new DataInputStream(bbis);
    } catch (Exception e) {
        throw HyracksDataException.create(e);
    }
}
/**
 * Finds the on-disk path of the dataset's split on the given node, asserting that
 * such a split exists.
 *
 * @param integrationUtil running integration cluster handle
 * @param dataset dataset whose splits are inspected
 * @param nodeId node whose split path is returned
 * @return the path of the dataset split hosted on {@code nodeId}
 * @throws Exception if the dataset splits cannot be retrieved
 */
public static String getIndexPath(AsterixHyracksIntegrationUtil integrationUtil, Dataset dataset, String nodeId)
        throws Exception {
    final FileSplit[] splits = TestDataUtil.getDatasetSplits(integrationUtil, dataset);
    final Optional<FileSplit> match =
            Arrays.stream(splits).filter(s -> s.getNodeName().equals(nodeId)).findFirst();
    Assert.assertTrue(match.isPresent());
    return match.get().getPath();
}
}