/**
 * This method ensures all operations that are supposed to be executed at this moment are executed and finished.
 */
@Override
public void commit() {
    backendExecutioner.commit();
}
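// Usage sketch (not from the source): a minimal illustration of why commit() matters when
// results produced asynchronously on the backend are consumed by another thread. The array
// values and thread structure here are assumptions for illustration only.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

public class CommitUsageSketch {
    public static void main(String[] args) throws InterruptedException {
        INDArray arr = Nd4j.ones(2, 2);
        arr.addi(1.0); // may still be queued for asynchronous execution on the backend

        // Block until every op queued by this thread has actually executed and finished,
        // so the reader thread observes the final values.
        Nd4j.getExecutioner().commit();

        Thread reader = new Thread(() -> System.out.println(arr));
        reader.start();
        reader.join();
    }
}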
/**
 * Create a dataset with the specified input INDArray and labels (output) INDArray, plus (optionally) mask arrays
 * for the features and labels
 *
 * @param features     Features (input)
 * @param labels       Labels (output)
 * @param featuresMask Mask array for features, may be null
 * @param labelsMask   Mask array for labels, may be null
 */
public DataSet(INDArray features, INDArray labels, INDArray featuresMask, INDArray labelsMask) {
    this.features = features;
    this.labels = labels;
    this.featuresMask = featuresMask;
    this.labelsMask = labelsMask;

    // we want this dataset to be fully committed to device
    Nd4j.getExecutioner().commit();
}
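// Usage sketch (assumed shapes, not from the source): building a DataSet for a
// variable-length time series, where the mask marks which steps carry real data.
// Shapes follow the usual [minibatch, channels, timeSteps] layout for features
// and [minibatch, timeSteps] for masks.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.DataSet;
import org.nd4j.linalg.factory.Nd4j;

INDArray features = Nd4j.rand(new int[] {2, 3, 5});
INDArray labels = Nd4j.rand(new int[] {2, 1, 5});

INDArray featuresMask = Nd4j.ones(2, 5); // 1.0 = real step, 0.0 = padding
featuresMask.putScalar(new int[] {1, 3}, 0.0); // second example: only 3 real steps
featuresMask.putScalar(new int[] {1, 4}, 0.0);

DataSet ds = new DataSet(features, labels, featuresMask, featuresMask.dup());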
/**
 * @param features           The features (inputs) to the algorithm/neural network
 * @param labels             The labels (outputs) to the algorithm/neural network
 * @param featuresMaskArrays The mask arrays for the features. May be null. Typically used with variable-length time series models, etc.
 * @param labelsMaskArrays   The mask arrays for the labels. May be null. Typically used with variable-length time series models, etc.
 */
public MultiDataSet(INDArray[] features, INDArray[] labels, INDArray[] featuresMaskArrays, INDArray[] labelsMaskArrays) {
    if (features != null && featuresMaskArrays != null && features.length != featuresMaskArrays.length) {
        throw new IllegalArgumentException("Invalid features / features mask arrays combination: "
                        + "features and features mask arrays must not be different lengths");
    }
    if (labels != null && labelsMaskArrays != null && labels.length != labelsMaskArrays.length) {
        throw new IllegalArgumentException("Invalid labels / labels mask arrays combination: "
                        + "labels and labels mask arrays must not be different lengths");
    }

    this.features = features;
    this.labels = labels;
    this.featuresMaskArrays = featuresMaskArrays;
    this.labelsMaskArrays = labelsMaskArrays;

    Nd4j.getExecutioner().commit();
}
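// Usage sketch (assumed shapes, not from the source): a MultiDataSet for a model with
// two inputs and one output; the mask arrays are passed as null since these inputs
// are fixed-length, so the length checks above are trivially satisfied.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.MultiDataSet;
import org.nd4j.linalg.factory.Nd4j;

INDArray input0 = Nd4j.rand(4, 10);
INDArray input1 = Nd4j.rand(4, 20);
INDArray output = Nd4j.rand(4, 3);

MultiDataSet mds = new MultiDataSet(
                new INDArray[] {input0, input1},
                new INDArray[] {output},
                null, null);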
File f = new File(saveRootDir, "netParams_subset" + currentSubset + "_" + (saveCount++) + "_train"
                + totalTrainMs + "ms_" + now + ".bin");
try {
    Nd4j.getExecutioner().commit(); // Required for CUDA thread safety
    Nd4j.saveBinary(params, f);
    log.info("Saved parameters to file: {}", f.getAbsolutePath());
public INDArray compress(INDArray array) {
    Nd4j.getExecutioner().commit();
    return compress(array, getDefaultCompression());
}
/**
 * Set up the given byte buffer for serialization (note that this is for uncompressed INDArrays):
 * 4 bytes (int) for the rank
 * 4 bytes (int) for the data type
 * shape buffer
 * data buffer
 *
 * @param arr       the array to set up
 * @param allocated the byte buffer to set up
 * @param rewind    whether to rewind the byte buffer or not
 */
public static void doByteBufferPutUnCompressed(INDArray arr, ByteBuffer allocated, boolean rewind) {
    // ensure we send data to host memory
    Nd4j.getExecutioner().commit();
    Nd4j.getAffinityManager().ensureLocation(arr, AffinityManager.Location.HOST);

    ByteBuffer buffer = arr.data().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
    ByteBuffer shapeBuffer = arr.shapeInfoDataBuffer().pointer().asByteBuffer().order(ByteOrder.nativeOrder());
    // 2 four-byte ints at the beginning
    allocated.putInt(arr.rank());
    // put the data type next so it's self-describing
    allocated.putInt(arr.data().dataType().ordinal());
    allocated.put(shapeBuffer);
    allocated.put(buffer);
    if (rewind)
        allocated.rewind();
}
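// Sizing sketch (not from the source): the layout above implies the destination buffer
// needs 8 bytes for the two leading ints plus the raw shape-info and data buffers.
// Computing byte counts via length() and getElementSize() is an assumption here.
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;

INDArray arr = Nd4j.linspace(1, 6, 6).reshape(2, 3);
long shapeBytes = arr.shapeInfoDataBuffer().length() * arr.shapeInfoDataBuffer().getElementSize();
long dataBytes = arr.data().length() * arr.data().getElementSize();

ByteBuffer allocated = ByteBuffer.allocateDirect((int) (8 + shapeBytes + dataBytes))
                .order(ByteOrder.nativeOrder());
doByteBufferPutUnCompressed(arr, allocated, true); // rewound and ready to be read or sent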
Nd4j.getExecutioner().commit();
@Override
public INDArray compress(INDArray array) {
    INDArray dup = array.dup(array.ordering());
    Nd4j.getExecutioner().commit();
    dup.setData(compress(dup.data()));
    dup.markAsCompressed(true);
    return dup;
}
Nd4j.getExecutioner().commit();

FlatBufferBuilder bufferBuilder = new FlatBufferBuilder(1024);
val idCounter = new AtomicInteger(0);
/**
 * This method duplicates the array, and stores it to all devices
 *
 * @param array
 */
public void broadcast(INDArray array) {
    if (array == null)
        return;

    Nd4j.getExecutioner().commit();

    int numDevices = Nd4j.getAffinityManager().getNumberOfDevices();
    for (int i = 0; i < numDevices; i++) {
        // if the current thread is bound to this device - we just save the array, without duplication
        if (Nd4j.getAffinityManager().getDeviceForCurrentThread() == i) {
            set(i, array);
        } else {
            set(i, Nd4j.getAffinityManager().replicateToDevice(i, array));
        }
    }
}
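// Usage sketch (assumption: this broadcast() lives on a DeviceLocalNDArray-style holder,
// as in org.nd4j.linalg.util.DeviceLocalNDArray): replicate an array once, then let each
// worker thread read its own device-local copy without cross-device transfers.
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.util.DeviceLocalNDArray;

INDArray weights = Nd4j.rand(10, 10);
DeviceLocalNDArray deviceLocal = new DeviceLocalNDArray(weights); // broadcasts on construction

INDArray localCopy = deviceLocal.get(); // copy bound to the current thread's device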
Nd4j.getExecutioner().commit();

DataBuffer buffer = Nd4j.createBuffer(this.lengthLong(), false);
INDArray copy = Nd4j.createUninitialized(this.shape(), this.ordering());
copy.assign(this);

Nd4j.getExecutioner().commit();
@Override
public boolean broadcastUpdates(INDArray updates) {
    // we just loop the data back immediately
    accumulator.receiveUpdate(updates);
    updates.assign(0.0);

    Nd4j.getExecutioner().commit();
    return true;
}
/**
 * Additionally, this method checks that there are no ops pending execution for this array
 *
 * @param point
 */
@Override
public void waitTillFinished(AllocationPoint point) {
    if (!point.isConstant() && point.isEnqueued())
        Nd4j.getExecutioner().commit();

    super.waitTillFinished(point);
}
public BenchmarkDataSetIterator(DataSet example, int totalIterations) {
    this.baseFeatures = example.getFeatures().dup();
    this.baseLabels = example.getLabels().dup();

    Nd4j.getExecutioner().commit();
    this.limit = totalIterations;
}
@Override
public void call(DataSet dataSet) {
    if (!isInitialized)
        initializeWorkspaces(dataSet.getMemoryFootprint());

    Nd4j.getExecutioner().commit();

    int currIdx = (int) (counterInput.getAndIncrement() % numWorkspaces);
    MemoryWorkspace currWs = Nd4j.getMemoryManager().getCurrentWorkspace();
    Nd4j.getMemoryManager().setCurrentWorkspace(workspaces.get(currIdx));

    dataSet.migrate();

    Nd4j.getMemoryManager().setCurrentWorkspace(currWs);
}