Refine search
/**
 * Creates one by one a new {@link BytePointer} for each {@code byte[]},
 * and writes them into the native {@code void*} array.
 *
 * @param array the array of {@code byte[]} to read from
 * @return this
 */
public PointerPointer<P> put(byte[] ... array) {
    // Wrap each byte[] in its own BytePointer; null elements stay null.
    BytePointer[] pointers = new BytePointer[array.length];
    for (int i = 0; i < array.length; i++) {
        pointers[i] = array[i] == null ? null : new BytePointer(array[i]);
    }
    pointerArray = (P[])pointers;
    return put(pointerArray);
}
/**
private String getWebcamNameForOs() { switch (OsUtils.getOS()) { case WIN: return videoInput.getDeviceName(address).getString(); case OSX: return "Webcam " + address; // XXX: any clues how to get webcam name on mac? case NIX: return vfile.getAbsolutePath(); default: throw new UnsupportedOperationException("Unsupported operating system"); } }
/**
 * Creates a byte indexer to access efficiently the data of a pointer.
 *
 * @param pointer data to access via a buffer or to copy to an array
 * @param direct {@code true} to use a direct buffer, see {@link Indexer} for details
 * @return the new byte indexer backed by the raw memory interface, a buffer, or an array
 */
public static UByteIndexer create(final BytePointer pointer, long[] sizes, long[] strides, boolean direct) {
    if (!direct) {
        // Indirect mode: snapshot the native data into a Java array, and write the
        // (possibly modified) array back to the pointer when the indexer is released.
        final long position = pointer.position();
        long remaining = pointer.limit() - position;
        final byte[] array = new byte[(int)Math.min(remaining, Integer.MAX_VALUE)];
        pointer.get(array);
        return new UByteArrayIndexer(array, sizes, strides) {
            @Override public void release() {
                pointer.position(position).put(array);
                super.release();
            }
        };
    }
    // Direct mode: raw memory access when available, otherwise a direct NIO buffer.
    return Raw.getInstance() != null
            ? new UByteRawIndexer(pointer, sizes, strides)
            : new UByteBufferIndexer(pointer.asBuffer(), sizes, strides);
}
/** Constructor to set the {@link #pointer}, {@link #sizes} and {@link #strides}. */ public ByteRawIndexer(BytePointer pointer, long[] sizes, long[] strides) { super(sizes, strides); this.pointer = pointer; base = pointer.address() + pointer.position(); size = pointer.limit() - pointer.position(); }
@Override public Map<String, INDArray> executeGraph(long id, @NonNull Map<String, INDArray> map, @NonNull Map<String, Integer> reverseMap) { Nd4j.getExecutioner().commit(); val ptrBuffers = new PointerPointer(map.size() * 2); val ptrShapes = new PointerPointer(map.size() * 2); val ptrIndices = new IntPointer(map.size()); val array = map.get(key); if (Nd4j.dataType() == DataBuffer.Type.FLOAT) { val result = (Nd4jCuda.FloatVariablesSet) nativeOps.executeStoredGraphFloat(null, id, ptrBuffers, ptrShapes, ptrIndices, map.size()); AtomicAllocator.getInstance().getAllocationPoint(array).tickHostWrite(); val nodeName = var.getName().getString(); newMap.put(nodeName, array); AtomicAllocator.getInstance().getAllocationPoint(array).tickHostWrite(); val nodeName = var.getName().getString(); newMap.put(nodeName, array); AtomicAllocator.getInstance().getAllocationPoint(array).tickHostWrite(); val nodeName = var.getName().getString(); newMap.put(nodeName, array);
} else if (ret < 0) { throw new Exception("av_buffersink_get_frame(): Error occurred: " + av_make_error_string(new BytePointer(256), 256, ret).getString()); if (ptr != null && !ptr.equals(image_ptr[0])) { image_ptr[0] = ptr.capacity(frame.imageHeight * Math.abs(frame.imageStride)); image_buf[0] = ptr.asBuffer(); frame.image[0].position(0).limit(size); frame.imageChannels = (size + frame.imageWidth * frame.imageHeight - 1) / (frame.imageWidth * frame.imageHeight); ret = av_image_copy_to_buffer(new BytePointer((ByteBuffer) frame.image[0].position(0)), frame.image[0].capacity(), new PointerPointer(filt_frame), filt_frame.linesize(), filt_frame.format(), frame.imageWidth, frame.imageHeight, 1);
/**
 * Wraps {@link PIX#data()} in a {@link ByteBuffer} starting at the given byte index.
 *
 * @param index byte offset into the pixel data at which the buffer starts
 * @return {@link PIX#data()} wrapped in a {@link ByteBuffer} starting at given byte index
 */
public ByteBuffer createBuffer(int index) {
    int h = pixGetHeight((PIX)this);
    int wpl = pixGetWpl((PIX)this);
    // Fix: height * wordsPerLine * 4 was computed in int arithmetic and can
    // overflow for large images; do the multiplication in long instead.
    long capacity = 4L * h * wpl;
    BytePointer data = new BytePointer(pixGetData((PIX)this)).position(index).capacity(capacity);
    return data.asByteBuffer();
}
/**
 * Compresses {@code length} elements from {@code srcPointer} into a half-precision
 * (FLOAT16) buffer: 2 bytes per element regardless of the source element size.
 *
 * @param srcType     type of the source elements
 * @param srcPointer  native pointer to the source data
 * @param length      number of elements to convert
 * @param elementSize size in bytes of one source element
 * @return a {@link CompressedDataBuffer} holding the FLOAT16 representation
 */
@Override
protected CompressedDataBuffer compressPointer(DataBuffer.TypeEx srcType, Pointer srcPointer, int length, int elementSize) {
    // Fix: length * 2 was int arithmetic and can overflow for very large buffers;
    // compute the compressed byte count in long.
    long compressedLength = 2L * length;
    BytePointer ptr = new BytePointer(compressedLength);
    CompressionDescriptor descriptor = new CompressionDescriptor();
    descriptor.setCompressedLength(compressedLength);
    descriptor.setOriginalLength((long) length * elementSize);
    descriptor.setOriginalElementSize(elementSize);
    descriptor.setNumberOfElements(length);
    descriptor.setCompressionAlgorithm(getDescriptor());
    descriptor.setCompressionType(getCompressionType());
    CompressedDataBuffer buffer = new CompressedDataBuffer(ptr, descriptor);
    // Delegate the actual element-wise conversion to the backend.
    Nd4j.getNDArrayFactory().convertDataEx(srcType, srcPointer, DataBuffer.TypeEx.FLOAT16, ptr, length);
    return buffer;
}
}
if (samples == null && samples_out[0].position() > 0) { double sampleDivisor = Math.floor((int)Math.min(samples_out[0].limit(), Integer.MAX_VALUE) / audio_input_frame_size); writeSamples((int)Math.floor((int)samples_out[0].position() / sampleDivisor)); return record((AVFrame)null); ByteBuffer b = (ByteBuffer)samples[i]; if (samples_in[i] instanceof BytePointer && samples_in[i].capacity() >= inputSize && b.hasArray()) { ((BytePointer)samples_in[i]).position(0).put(b.array(), b.position(), inputSize); } else { samples_in[i] = new BytePointer(b); int outputCount = (int)Math.min((samples_out[0].limit() - samples_out[0].position()) / (outputChannels * outputDepth), Integer.MAX_VALUE); inputCount = Math.min(inputCount, (outputCount * sampleRate + audio_c.sample_rate() - 1) / audio_c.sample_rate()); for (int i = 0; samples != null && i < samples.length; i++) { samples_in_ptr.put(i, samples_in[i]); samples_out_ptr.put(i, samples_out[i]); samples_out[i].position(samples_out[i].position() + ret * outputChannels * outputDepth); if (samples == null || samples_out[0].position() >= samples_out[0].limit()) { writeSamples(audio_input_frame_size);
int step = stride * Math.abs(depth) / 8; BytePointer data = image[0] instanceof ByteBuffer ? new BytePointer((ByteBuffer)image[0].position(0)) : new BytePointer(new Pointer(image[0].position(0))); av_image_fill_arrays(new PointerPointer(image_frame), image_frame.linesize(), data, pixelFormat, width, height, 1); image_frame.linesize(0, step); image_frame.format(pixelFormat);
for (int i = 0; i < planes; i++) { BytePointer p = samples_frame.data(i); if (!p.equals(samples_ptr[i]) || samples_ptr[i].capacity() < data_size) { samples_ptr[i] = p.capacity(data_size); ByteBuffer b = p.asBuffer(); switch (sample_format) { case AV_SAMPLE_FMT_U8: int sample_bytes_out = av_get_bytes_per_sample(samples_format); int buffer_size_out = sample_size_out * sample_bytes_out * (planes_out > 1 ? 1 : samples_channels); if (samples_buf_out == null || samples_buf.length != planes_out || samples_ptr_out[0].capacity() < buffer_size_out) { for (int i = 0; samples_ptr_out != null && i < samples_ptr_out.length; i++) { av_free(samples_ptr_out[i].position(0)); samples_ptr_out[i] = new BytePointer(av_malloc(buffer_size_out)).capacity(buffer_size_out); ByteBuffer b = samples_ptr_out[i].asBuffer(); switch (samples_format) { case AV_SAMPLE_FMT_U8: frame.samples = samples_buf_out; if ((ret = swr_convert(samples_convert_ctx, new PointerPointer(samples_ptr_out), sample_size_out, new PointerPointer(samples_ptr), sample_size_in)) < 0) { throw new Exception("swr_convert() error " + ret + ": Cannot convert audio samples."); samples_ptr_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels)); samples_buf_out[i].position(0).limit(ret * (planes_out > 1 ? 1 : samples_channels));
/**
 * Creates a new {@code AudioFrame} with descriptive format and allocates memory
 * for the specified amount of samples.
 *
 * @param format the format of the samples.
 * @param samples the amount of samples.
 */
public AudioFrame(AudioFormat format, int samples) {
    this.format = format;
    this.samples = samples;
    final int sampleFormat = format.getSampleFormat().value();
    final int channels = format.getChannels();
    // Planar formats use one plane per channel; packed formats interleave everything in one plane.
    final int planes = av_sample_fmt_is_planar(sampleFormat) != 0 ? channels : 1;
    final int planeLength = (samples * channels * av_get_bytes_per_sample(sampleFormat)) / planes;
    planePointers = new BytePointer[planes];
    samplePointer = new PointerPointer(planes);
    for (int plane = 0; plane < planes; plane++) {
        BytePointer p = new BytePointer(av_malloc(planeLength)).capacity(planeLength);
        p.limit(planeLength);
        planePointers[plane] = p;
        samplePointer.put(plane, p);
    }
}
/**
 * Executes this graph on the given inputs and returns the last output array.
 *
 * @param inputs map of user-facing input names to arrays
 * @return the final array produced by the graph execution
 * @throws ND4JIllegalStateException if execution produced no results
 */
public INDArray yetAnotherExecMethod(@NonNull Map<String, INDArray> inputs) {
    // Register the flatbuffers form of this graph with the executioner exactly once.
    if (!wasRegistered.get()) {
        synchronized (this) {
            if (!wasRegistered.get()) {
                val buffer = asFlatBuffers();
                Nd4j.getExecutioner().registerGraph(this.hashCode(), new BytePointer(buffer));
                wasRegistered.set(true);
            }
        }
    }
    // Remap the caller's keys to the graph's internal variable names.
    val remapped = new LinkedHashMap<String, INDArray>();
    for (val entry : inputs.entrySet()) {
        val variable = variableMap.get(entry.getKey());
        remapped.put(variable.getVarName(), entry.getValue());
    }
    val result = Nd4j.getExecutioner().executeGraph(this.hashCode(), remapped);
    if (result.isEmpty())
        throw new ND4JIllegalStateException("Execution failed");
    // The graph's final output is the last value in iteration order.
    val outputs = new ArrayList<INDArray>(result.values());
    return outputs.get(outputs.size() - 1);
}
/**
 * Reads the given attribute into {@code buffer} and decodes it as a string,
 * truncated at the first NUL terminator if one is present.
 *
 * @param attribute attribute to read
 * @param dataType  type to read the attribute as
 * @param buffer    scratch byte array receiving the raw attribute bytes
 * @return the decoded string, cut at the first {@code '\0'} when present
 */
@Nonnull
private CharSequence getString(@Nonnull Attribute attribute, DataType dataType, @Nonnull byte[] buffer) {
    // Round-trip the bytes through a temporary native pointer for the read call.
    @Nonnull BytePointer pointer = new BytePointer(buffer);
    attribute.read(dataType, pointer);
    pointer.get(buffer);
    @Nonnull String str = new String(buffer);
    int nul = str.indexOf('\0');
    return nul < 0 ? str : str.substring(0, nul);
}
DataBuffer shapeBuff = Nd4j.createBufferDetached(new int[shapeBufferLength]); ByteBuffer slice = byteBuffer.slice(); DataBuffer buff = Nd4j.createBuffer(slice, type, Shape.length(shapeBuff)); INDArray arr = Nd4j.createArrayFromShapeBuffer(buff.dup(), shapeBuff.dup()); return Pair.of(arr, byteBuffer); } else { ByteBuffer slice = byteBuffer.slice(); BytePointer byteBufferPointer = new BytePointer(slice);
/**
 * Compresses the given buffer by copying it into a freshly allocated native buffer
 * described by this compressor's descriptor.
 *
 * @param buffer source data to compress
 * @return a {@link CompressedDataBuffer} containing the copied data
 */
@Override
public DataBuffer compress(DataBuffer buffer) {
    CompressionDescriptor descriptor = new CompressionDescriptor(buffer, this);
    // Allocate a native target sized for the full uncompressed payload.
    long bytes = buffer.length() * buffer.getElementSize();
    BytePointer pointer = new BytePointer(bytes);
    CompressedDataBuffer compressed = new CompressedDataBuffer(pointer, descriptor);
    Nd4j.getMemoryManager().memcpy(compressed, buffer);
    return compressed;
}
import org.bytedeco.javacpp.*;

/** Demo driver: builds a native message struct, passes it to t_func, and prints the reply. */
public class Main {
    public static void main(String[] args) {
        Message.S_MESSAGE_STRUCT message = new Message.S_MESSAGE_STRUCT();
        // One int input value, and a 128-byte output buffer for the reply text.
        IntPointer i1 = new IntPointer(1).put(1);
        BytePointer c1 = new BytePointer(128);
        message.i1(i1);
        message.c1(c1);
        message.len(c1.capacity());
        Message.t_func(message);
        System.out.println(c1.getString());
    }
}
image_ptr = new BytePointer[] { new BytePointer(av_malloc(size)).capacity(size) }; image_buf = new Buffer[] { image_ptr[0].asBuffer() }; av_image_fill_arrays(new PointerPointer(picture_rgb), picture_rgb.linesize(), image_ptr[0], fmt, width, height, 1); picture_rgb.format(fmt); picture_rgb.width(width);