/** Hands out a frame from the recycle pool, or allocates a fresh RGBA one when the pool is empty. */
private Frame newFrame() {
    Frame recycled = this.cached.poll();
    if (recycled != null) {
        return recycled;
    }
    // Pool exhausted: allocate a new 4-channel (RGBA) unsigned-byte frame.
    return new Frame(width, height, Frame.DEPTH_UBYTE, 4 /* RGBA */);
}
/**
 * Converts a {@link BufferedImage} into an independent {@link Frame}.
 *
 * <p>{@code clone()} detaches the result from the converter's internal reusable
 * buffer, so the returned Frame survives subsequent conversions.
 *
 * @param src the source image; may be {@code null}
 * @return a deep copy of the converted frame, or {@code null} if {@code src} is {@code null}
 */
public static synchronized Frame toFrame(BufferedImage src) {
    // Null guard mirrors the converters' own null handling instead of throwing
    // an NPE from clone(). Modifier order fixed to the canonical "static synchronized".
    if (src == null) {
        return null;
    }
    return biConv.convert(src).clone();
}
}
/**
 * Returns a deep copy of this frame: all video and audio buffers are duplicated,
 * so the clone is fully independent of the original.
 *
 * <p>Care must be taken if this method is used in conjunction with movie recordings:
 * cloning a frame containing a full-HD picture (alpha channel included) takes
 * 1920 x 1080 x 4 = 8,294,400 bytes. Expect an {@code OutOfMemoryError} when using
 * this method repeatedly without cleaning up.
 *
 * @return a deep copy of this frame
 * @see #cloneBufferArray
 *
 * @author Extension proposed by Dragos Dutu
 */
@Override public Frame clone() {
    Frame newFrame = new Frame();

    // Video part: scalar metadata is copied field by field, pixel data deep-copied.
    newFrame.imageWidth = imageWidth;
    newFrame.imageHeight = imageHeight;
    newFrame.imageDepth = imageDepth;
    newFrame.imageChannels = imageChannels;
    newFrame.imageStride = imageStride;
    newFrame.keyFrame = keyFrame;
    newFrame.opaque = opaque;
    newFrame.image = cloneBufferArray(image);

    // Audio part: sample buffers are likewise deep-copied.
    newFrame.audioChannels = audioChannels;
    newFrame.sampleRate = sampleRate;
    newFrame.samples = cloneBufferArray(samples);

    // Add timestamp
    newFrame.timestamp = timestamp;

    return newFrame;
}
/** Returns {@code createIndexer(true, 0)}. */
public <I extends Indexer> I createIndexer() {
    // Unchecked cast: the caller chooses I; the delegate returns a compatible Indexer subtype.
    return (I)createIndexer(true, 0);
}
// NOTE(review): the body of this overload continues beyond this chunk.
@Override public <I extends Indexer> I createIndexer(boolean direct) {
// NOTE(review): the five statements below appear twice back-to-back — this looks like a
// duplicated paste; the second copy re-declares frameTypes in the same scope and would
// not compile as-is. TODO confirm against the full method.
seekFrame = grabFrame(true, true, false, false);
if (seekFrame == null) return; //is it better to throw NullPointerException?
// Keep only the frame types the seek was asked to look for.
EnumSet<Frame.Type> frameTypes = seekFrame.getTypes();
frameTypes.retainAll(frameTypesToSeek);
if (!frameTypes.isEmpty()) {
seekFrame = grabFrame(true, true, false, false);
if (seekFrame == null) return; //is it better to throw NullPointerException?
EnumSet<Frame.Type> frameTypes = seekFrame.getTypes();
frameTypes.retainAll(frameTypesToSeek);
if (!frameTypes.isEmpty()) {
/** Returns {@code createIndexer(direct, 0)}, i.e. an indexer over image plane 0. */
@Override
public <I extends Indexer> I createIndexer(boolean direct) {
    // Delegate to the two-argument overload with the default plane index.
    Indexer planeZero = createIndexer(direct, 0);
    return (I) planeZero;
}

/** Returns an {@link Indexer} for the <i>i</i>th image plane. */
@Override public Frame convert(Bitmap bitmap) { if (bitmap == null) { return null; } int channels = 0; switch (bitmap.getConfig()) { case ALPHA_8: channels = 1; break; case RGB_565: case ARGB_4444: channels = 2; break; case ARGB_8888: channels = 4; break; default: assert false; } if (frame == null || frame.imageWidth != bitmap.getWidth() || frame.imageHeight != bitmap.getHeight() || frame.imageChannels != channels) { frame = new Frame(bitmap.getWidth(), bitmap.getHeight(), Frame.DEPTH_UBYTE, channels); } // assume matching strides bitmap.copyPixelsToBuffer(frame.image[0].position(0)); return frame; }
/**
 * Converts an OpenCV {@link Mat} into an independent {@link Frame}.
 *
 * @param src the source matrix; may be {@code null}
 * @return a deep copy of the converted frame, or {@code null} if {@code src} is {@code null}
 */
public static synchronized Frame toFrame(Mat src) {
    // Null guard avoids an NPE from clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // clone() detaches the result from matConv's internal reusable buffer.
    return matConv.convert(src).clone();
}
// Lazily (re)allocate the cached 3-channel (BGR/RGB — TODO confirm which) frame
// whenever the incoming dimensions or channel count no longer match.
if (frame == null || frame.imageWidth != width || frame.imageHeight != height || frame.imageChannels != 3) {
    frame = new Frame(width, height, Frame.DEPTH_UBYTE, 3);
/**
 * Probes the stream by grabbing up to 100 frames (stopping early after 10 video
 * frames), deep-copying each into {@code probeFrames}, and flags whether any
 * video was seen.
 *
 * @throws Exception if grabbing fails
 */
public void start() throws Exception {
    for (int pfn = 0, vfn = 0; pfn < 100 && vfn < 10; pfn++) {
        Frame grabbed = grab();
        if (grabbed == null) {
            // End of stream before the probe budget was exhausted; the original
            // code NPE'd here on grab().clone().
            break;
        }
        // Clone so the probe copy survives the grabber's buffer reuse.
        Frame frame = grabbed.clone();
        if (frame.image != null) {
            hasVideo = true;
            vfn++;
        }
        probeFrames.add(frame);
    }
    started = true;
}
/**
 * Sets up the FLV streaming recorder and the (not yet started) audio capture thread.
 * Must run before recording begins; relies on imageWidth/imageHeight/frameRate
 * and ffmpeg_link having been set.
 */
private void initRecorder() {
    Log.w(LOG_TAG,"initRecorder");

    // region
    // Two "channels" at DEPTH_UBYTE — presumably a packed YUV preview buffer
    // (e.g. NV21); TODO confirm against the camera preview format.
    yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
    Log.d(LOG_TAG, "IplImage.create");
    // endregion

    // Single audio channel (last constructor argument).
    recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
    Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " + imageWidth + " imageHeight " + imageHeight);
    recorder.setFormat("flv");
    Log.v(LOG_TAG, "recorder.setFormat(\"flv\")");
    recorder.setSampleRate(sampleAudioRateInHz);
    Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)");
    // re-set in the surface changed method as well
    recorder.setFrameRate(frameRate);
    Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)");

    // Create audio recording thread (started elsewhere).
    audioRecordRunnable = new AudioRecordRunnable();
    audioThread = new Thread(audioRecordRunnable);
}
/**
 * Converts an OpenCV {@link IplImage} into an independent {@link Frame}.
 *
 * @param src the source image; may be {@code null}
 * @return a deep copy of the converted frame, or {@code null} if {@code src} is {@code null}
 */
public static synchronized Frame toFrame(IplImage src) {
    // Null guard avoids an NPE from clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // clone() detaches the result from iplConv's internal reusable buffer.
    return iplConv.convert(src).clone();
}
private void initRecorder() { Log.w(LOG_TAG, "init recorder"); if(RECORD_LENGTH > 0) { imagesIndex = 0; images = new Frame[RECORD_LENGTH * frameRate]; timestamps = new long[images.length]; for(int i = 0; i < images.length; i++) { images[i] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2); timestamps[i] = -1; } } else if(yuvImage == null) { yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2); Log.i(LOG_TAG, "create yuvImage"); } Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link.getAbsolutePath()); recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1); recorder.setFormat("mp4"); recorder.setSampleRate(sampleAudioRateInHz); // Set in the surface changed method recorder.setFrameRate(frameRate); Log.i(LOG_TAG, "recorder initialize success"); audioRecordRunnable = new AudioRecordRunnable(); audioThread = new Thread(audioRecordRunnable); runAudioThread = true; }
/**
 * Converts an OpenCV {@link IplImage} into an independent {@link Mat}
 * (IplImage &rarr; Frame &rarr; Mat).
 *
 * @param src the source image; may be {@code null}
 * @return the converted matrix, or {@code null} if {@code src} is {@code null}
 */
public static synchronized Mat toMat(IplImage src) {
    // Null guard avoids an NPE from clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // clone() the intermediate Frame so the Mat is not backed by iplConv's reusable buffer.
    return matConv.convertToMat(iplConv.convert(src).clone());
}
// Allocate the cached frame to match the source image; depth and numChannels
// are computed earlier in the enclosing method — not visible in this chunk.
frame = new Frame(image.getWidth(), image.getHeight(), depth, numChannels);
/**
 * Converts a {@link Frame} into an independent {@link BufferedImage}.
 *
 * @param src the source frame; may be {@code null}
 * @return a deep copy of the rendered image, or {@code null} if {@code src} is {@code null}
 */
public static synchronized BufferedImage toBufferedImage(Frame src) {
    // Null guard avoids an NPE from src.clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // clone() first so the converter never touches the caller's Frame buffers;
    // deepCopy() then detaches the BufferedImage from the converter's cache.
    return deepCopy(biConv.getBufferedImage(src.clone()));
}
// Two "channels" at DEPTH_UBYTE — presumably a packed YUV buffer (e.g. NV21); TODO confirm.
yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
/**
 * Converts an OpenCV {@link Mat} into an independent {@link BufferedImage}.
 *
 * @param src the source matrix; may be {@code null}
 * @return a deep copy of the rendered image, or {@code null} if {@code src} is {@code null}
 */
public static synchronized BufferedImage toBufferedImage(Mat src) {
    // Null guard avoids an NPE from clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // Intermediate Frame is cloned off matConv's reusable buffer; deepCopy()
    // detaches the BufferedImage from biConv's cache.
    return deepCopy(biConv.getBufferedImage(matConv.convert(src).clone()));
}
// Start an empty Frame sized from pix (presumably a Leptonica Pix — TODO confirm);
// the remaining fields are filled in past this chunk.
frame = new Frame();
frame.imageWidth = pix.w();
frame.imageHeight = pix.h();
/**
 * Converts an OpenCV {@link IplImage} into an independent {@link BufferedImage}.
 *
 * @param src the source image; may be {@code null}
 * @return a deep copy of the rendered image, or {@code null} if {@code src} is {@code null}
 */
public static synchronized BufferedImage toBufferedImage(IplImage src) {
    // Null guard avoids an NPE from clone(); modifier order fixed to the
    // canonical "static synchronized".
    if (src == null) {
        return null;
    }
    // Intermediate Frame is cloned off iplConv's reusable buffer; deepCopy()
    // detaches the BufferedImage from biConv's cache.
    return deepCopy(biConv.getBufferedImage(iplConv.convert(src).clone()));
}