/**
 * Initializes a new <tt>Packetizer</tt> instance which accepts raw VP8
 * frames and produces VP8 RTP payload.
 */
public Packetizer()
{
    super(
            "VP8 Packetizer",
            VideoFormat.class,
            new VideoFormat[] { new VideoFormat(Constants.VP8_RTP) });

    inputFormats = new VideoFormat[] { new VideoFormat(Constants.VP8) };
}
/**
 * Initializes a new <tt>VPXDecoder</tt> instance which accepts VP8 input
 * and produces the statically supported output formats.
 */
public VPXDecoder()
{
    super("VP8 VPX Decoder", VideoFormat.class, SUPPORTED_OUTPUT_FORMATS);

    inputFormats = new VideoFormat[] { new VideoFormat(Constants.VP8) };

    // No format has been negotiated yet.
    inputFormat = null;
    outputFormat = null;
}
/**
 * Initializes a new <tt>DePacketizer</tt> instance which accepts VP8 RTP
 * payload and reassembles it into VP8 frames.
 */
public DePacketizer()
{
    // NOTE: the Javadoc previously named "JNIEncoder" — a copy/paste
    // leftover; this constructor builds the VP8 RTP DePacketizer.
    super(
            "VP8 RTP DePacketizer",
            VideoFormat.class,
            new VideoFormat[] { new VideoFormat(Constants.VP8) });

    inputFormats = new VideoFormat[] { new VideoFormat(Constants.VP8_RTP) };
}
/**
 * Initializes a new <tt>JNIDecoder</tt> instance which is to decode H.263+
 * encoded data into frames in YUV format.
 */
public JNIDecoder()
{
    outputFormats = DEFAULT_OUTPUT_FORMATS;
    inputFormats = new VideoFormat[] { new VideoFormat(Constants.H263P) };
}
/**
 * Initializes a new <tt>Packetizer</tt> instance which packetizes H.263+
 * encoded data into RTP packets in accord with RFC 4529 &quot;RTP Payload
 * Format for ITU-T Rec. H.263 Video&quot;.
 */
public Packetizer()
{
    // No format has been negotiated yet.
    inputFormat = null;
    outputFormat = null;

    inputFormats = new Format[] { new VideoFormat(Constants.H263P) };
}
/**
 * Initializes a new <tt>DePacketizer</tt> instance which is to depacketize
 * H.263+ RTP packets.
 */
public DePacketizer()
{
    super(
            "H263+ DePacketizer",
            VideoFormat.class,
            new VideoFormat[] { new VideoFormat(Constants.H263P) });

    inputFormats = new VideoFormat[] { new VideoFormat(Constants.H263P_RTP) };
}
/**
 * Initializes a new <tt>VideoMediaFormatImpl</tt> instance with a specific
 * encoding and a specific clock rate.
 *
 * @param encoding the encoding of the new <tt>VideoMediaFormatImpl</tt>
 * instance
 * @param clockRate the clock rate of the new <tt>VideoMediaFormatImpl</tt>
 * instance
 */
VideoMediaFormatImpl(String encoding, double clockRate)
{
    // Delegate to the (VideoFormat, double) constructor by wrapping the
    // bare encoding name in a VideoFormat carrying no other parameters.
    this(new VideoFormat(encoding), clockRate);
}
CaptureDeviceManager.getDeviceList(new VideoFormat("yuv"))
CaptureDeviceManager.getDeviceList(new VideoFormat("rgb"))
/**
 * Initializes a new <tt>DePacketizer</tt> instance which is to depacketize
 * H.264 RTP packets into NAL units.
 */
public DePacketizer()
{
    super(
            "H264 DePacketizer",
            VideoFormat.class,
            new VideoFormat[] { new VideoFormat(Constants.H264) });

    List<Format> formats = new ArrayList<Format>();

    formats.add(new VideoFormat(Constants.H264_RTP));
    /*
     * Besides the generic Constants.H264_RTP VideoFormat, list the possible
     * respective ParameterizedVideoFormats because
     * ParameterizedVideoFormat will not match every VideoFormat: a missing
     * packetization-mode format parameter is interpreted as having the
     * value 0.
     */
    Collections.addAll(formats, Packetizer.SUPPORTED_OUTPUT_FORMATS);
    inputFormats = formats.toArray(EMPTY_FORMATS);
}
/**
 * Initializes a new <tt>JNIDecoder</tt> instance which is to decode H.264
 * NAL units into frames in YUV format.
 */
public JNIDecoder()
{
    /*
     * State both ParameterizedVideoFormat (so that any format parameters
     * of concern to this JNIDecoder are received) and plain VideoFormat
     * (so that equality and/or matching tests involving
     * ParameterizedVideoFormat do not break anything).
     */
    inputFormats
        = new VideoFormat[]
        {
            new ParameterizedVideoFormat(Constants.H264),
            new VideoFormat(Constants.H264)
        };
    outputFormats = DEFAULT_OUTPUT_FORMATS;
}
/**
 * Gets the matching output formats for a specific format.
 *
 * @param in input format
 * @return array for formats matching input format
 */
private Format[] getMatchingOutputFormats(Format in)
{
    VideoFormat video = (VideoFormat) in;
    VideoFormat out
        = new VideoFormat(
                Constants.H263P,
                video.getSize(),
                /* maxDataLength */ Format.NOT_SPECIFIED,
                Format.byteArray,
                video.getFrameRate());

    return new VideoFormat[] { out };
}
/**
 * Gets the matching output formats for a specific format.
 *
 * @param inputFormat input format
 * @return array for formats matching input format
 */
private Format[] getMatchingOutputFormats(Format inputFormat)
{
    VideoFormat in = (VideoFormat) inputFormat;

    return
        new Format[]
        {
            new VideoFormat(
                    Constants.H263P_RTP,
                    in.getSize(),
                    /* maxDataLength */ Format.NOT_SPECIFIED,
                    Format.byteArray,
                    in.getFrameRate())
        };
}
/**
 * Gets the matching output formats for a specific format.
 *
 * @param inputFormat input format
 * @return array of formats matching input format
 */
@Override
protected Format[] getMatchingOutputFormats(Format inputFormat)
{
    VideoFormat in = (VideoFormat) inputFormat;
    VideoFormat out
        = new VideoFormat(
                Constants.VP8,
                in.getSize(),
                /* maxDataLength */ Format.NOT_SPECIFIED,
                Format.byteArray,
                in.getFrameRate());

    return new VideoFormat[] { out };
}
/**
 * Returns the format depending on the media type: AudioFormat for AUDIO,
 * VideoFormat for VIDEO. Otherwise, returns null.
 *
 * @return The format depending on the media type: AudioFormat for AUDIO,
 * VideoFormat for VIDEO. Otherwise, returns null.
 */
public Format getFormat()
{
    switch (getMediaType())
    {
    case AUDIO:
        return new AudioFormat(null);
    case VIDEO:
        return new VideoFormat(null);
    default:
        // Neither audio nor video: no Format representation exists.
        return null;
    }
}
/**
 * Creates an unknown <tt>MediaFormat</tt>.
 *
 * @param type <tt>MediaType</tt>
 * @return unknown <tt>MediaFormat</tt>
 */
public MediaFormat createUnknownMediaFormat(MediaType type)
{
    Format unknown = null;

    /*
     * FIXME Why is a VideoFormat instance created for MediaType.AUDIO and
     * an AudioFormat instance for MediaType.VIDEO?
     */
    // NOTE(review): the AUDIO/VIDEO mapping below looks inverted, but it is
    // deliberately preserved as-is — callers may depend on the current
    // behavior; confirm against all call sites before swapping the types.
    if(type.equals(MediaType.AUDIO))
        unknown = new VideoFormat("unknown");
    else if(type.equals(MediaType.VIDEO))
        unknown = new AudioFormat("unknown");

    // For any other MediaType, unknown stays null and is passed through to
    // MediaFormatImpl.createInstance (behavior of which is not visible here).
    return MediaFormatImpl.createInstance(unknown);
}
/** * {@inheritDoc} */ @Override public void open() throws IOException, SecurityException { synchronized (openCloseSyncRoot) { if (dataSource instanceof PushBufferDataSource) { PushBufferDataSource pbds = (PushBufferDataSource) dataSource; PushBufferStream[] streams = pbds.getStreams(); //XXX: should we allow for multiple streams in the data source? for (PushBufferStream stream : streams) { //XXX whats the proper way to check for this? and handle? if (!stream.getFormat().matches(new VideoFormat("VP8"))) throw new IOException("Unsupported stream format"); stream.setTransferHandler(this); } } dataSource.connect(); open = true; } }
/**
 * doConnect allows us to initialize the DataSource with information that
 * we couldn't have in the constructor, like the MediaLocator that gives us
 * the path of the IVF file, which in turn gives us information on the
 * format.
 */
public void doConnect()
    throws IOException
{
    super.doConnect();

    String location = getLocator().getRemainder();

    this.fileLocation = location;
    ivfHeader = new IVFHeader(location);

    // Publish the concrete VP8 format, now that the IVF header has told us
    // the video dimensions.
    this.SUPPORTED_FORMATS[0]
        = new VideoFormat(
                Constants.VP8,
                ivfHeader.getDimension(),
                Format.NOT_SPECIFIED,
                Format.byteArray,
                Format.NOT_SPECIFIED);
}
= new VideoFormat( videoFormat.getEncoding(), size,