/**
 * Adds one packet's timecode entry to the group-of-pictures currently being collected.
 * On a key frame the accumulated GOP is flushed first via {@code processGop()}, then a
 * data-less copy of the packet (metadata only) is queued for the new GOP.
 *
 * NOTE(review): unlike the validating variant of this method seen elsewhere in this file,
 * this version does not check for timescale switching — confirm whether that is intended.
 *
 * @param packet source packet; only its metadata is retained (data buffer is dropped)
 * @throws IOException if flushing the completed GOP fails
 */
public void addTimecode(Packet packet) throws IOException {
    if (packet.isKeyFrame())
        processGop();
    // Store a copy with a null data buffer — only timing/timecode metadata is needed.
    gop.add(new Packet(packet, (ByteBuffer) null));
}
_timescale = pkt.getTimescale(); if (_timescale != pkt.getTimescale()) { pkt.setPts((pkt.getPts() * _timescale) / pkt.getTimescale()); pkt.setDuration((pkt.getPts() * _timescale) / pkt.getDuration()); long compositionOffset = pkt.getPts() - ptsEstimate; if (compositionOffset != lastCompositionOffset) { if (lastCompositionSamples > 0) ptsEstimate += pkt.getDuration(); curChunk.add(pkt.getData()); if (pkt.isKeyFrame()) iframes.add(curFrame + 1); else chunkDuration += pkt.getDuration(); if (curDuration != -1 && pkt.getDuration() != curDuration) { sampleDurations.add(new TimeToSampleEntry((int) sameDurCount, (int) curDuration)); sameDurCount = 0; curDuration = pkt.getDuration(); sameDurCount++; trackTotalDuration += pkt.getDuration();
// Comparator body: orders packets by display order, with nulls sorting first.
public int compare(Packet o1, Packet o2) {
    if (o1 == null && o2 == null)
        return 0;
    else if (o1 == null)
        return -1;
    else if (o2 == null)
        return 1;
    else
        // Manual three-way compare on display order (avoids overflow-prone subtraction).
        return o1.getDisplayOrder() > o2.getDisplayOrder() ? 1 : (o1.getDisplayOrder() == o2
                .getDisplayOrder() ? 0 : -1);
}
}); // closes the enclosing anonymous Comparator and the call it is passed to (opening not visible in this chunk)
/**
 * Queues a timecode packet for the GOP currently being built, flushing the previous
 * GOP whenever a key frame starts a new one. The first packet fixes the track's
 * timescale; later packets must use the same one.
 *
 * @param packet source packet; only metadata is retained (data buffer is dropped)
 * @throws IOException if flushing the completed GOP fails
 * @throws RuntimeException if a packet arrives with a different timescale
 */
public void addTimecode(Packet packet) throws IOException {
    if (_timescale == NO_TIMESCALE_SET) {
        // First packet seen: adopt its timescale for the whole track.
        _timescale = packet.getTimescale();
    } else if (_timescale != packet.getTimescale()) {
        throw new RuntimeException("MP4 timecode track doesn't support timescale switching.");
    }
    if (packet.isKeyFrame()) {
        processGop();
    }
    gop.add(Packet.createPacketWithData(packet, (ByteBuffer) null));
}
/**
 * Classifies an H.264 packet as KEY or INTER by inspecting its slice data;
 * packets of any other codec are left untouched.
 *
 * @param inVideoPacket packet whose frame type will be set in place
 */
private void detectFrameType(Packet inVideoPacket) {
    // Frame type detection is only implemented for H.264 streams.
    if (inputVideoCodec.v2 != Codec.H264) {
        return;
    }
    if (H264Utils.isByteBufferIDRSlice(inVideoPacket.getData())) {
        inVideoPacket.setFrameType(FrameType.KEY);
    } else {
        inVideoPacket.setFrameType(FrameType.INTER);
    }
}
VideoCodecMeta meta = new MPEGDecoder().getCodecMeta(pkt.getData()); MuxerTrack videoTrack = mp4Muxer.addVideoTrack(Codec.MPEG2, meta); long firstPts = pkt.getPts(); for (int i = 0; pkt != null && i < 150; i++) { videoTrack.addFrame(MP4Packet.createMP4Packet(pkt.getData(), pkt.getPts() - firstPts, pkt.getTimescale(), pkt .getDuration(), pkt.getFrameNo(), pkt.getFrameType(), pkt.getTapeTimecode(), 0, pkt.getPts() - firstPts, 0)); pkt = video.nextFrame();
/**
 * Returns the next single-AAC-frame packet. When the stash is empty, the next raw
 * (possibly multi-frame) packet is pulled and split along ADTS frame boundaries,
 * with per-frame pts/duration computed from the ADTS sample rate.
 *
 * @return the next AAC packet, or null when the input is exhausted
 * @throws IOException on demuxer read failure
 */
@Override
public Packet nextFrame() throws IOException {
    if (audioStash.size() == 0) {
        Packet nextFrame = nextFrameWithBuffer(null);
        if (nextFrame != null) {
            ByteBuffer data = nextFrame.getData();
            // Peek at the ADTS header via duplicate() so 'data' position is not consumed.
            Header adts = ADTSParser.read(data.duplicate());
            long nextPts = nextFrame.getPts();
            while (data.hasRemaining()) {
                // Each ADTS frame is adts.getSize() bytes long; carve it off.
                ByteBuffer data2 = NIOUtils.read(data, adts.getSize());
                Packet pkt = Packet.createPacketWithData(nextFrame, data2);
                // One AAC frame decodes to 1024 PCM samples at the header's sample rate.
                pkt.setDuration((pkt.getTimescale() * 1024) / AACConts.AAC_SAMPLE_RATES[adts.getSamplingIndex()]);
                pkt.setPts(nextPts);
                nextPts += pkt.getDuration();
                audioStash.add(pkt);
                // Re-read the header of the next frame in the same buffer, if any.
                if (data.hasRemaining())
                    adts = ADTSParser.read(data.duplicate());
            }
        }
    }
    if (audioStash.size() == 0)
        return null;
    return audioStash.remove(0);
}
} // NOTE(review): closes the enclosing type; its opening brace is outside this chunk
/**
 * Decodes a single compressed frame into a YUV picture.
 *
 * @param frame packet carrying the compressed frame data
 * @return the decoded picture
 * @throws IOException on decode failure
 */
@Override
public YUVPicture getFrame(Packet frame) throws IOException {
    return decoder.decode(frame.getData());
}
/**
 * Folds one packet's timecode into the running timecode sample. When the tape
 * timecode is discontinuous with the previous packet's, the sample built so far
 * is emitted and a fresh one is started from this packet.
 *
 * @param packet packet whose tape timecode and duration are accumulated
 * @throws IOException if emitting the completed timecode sample fails
 */
private void addTimecodeInt(Packet packet) throws IOException {
    TapeTimecode timecode = packet.getTapeTimecode();
    boolean discontinuity = isGap(prevTimecode, timecode);
    prevTimecode = timecode;
    if (discontinuity) {
        // The timecode run was broken: flush the current sample, then reset
        // all per-sample accumulators for the run starting at this packet.
        outTimecodeSample();
        firstTimecode = timecode;
        fpsEstimate = timecode.isDropFrame() ? 30 : -1;
        samplePts += sampleDuration;
        sampleDuration = 0;
        tcFrames = 0;
    }
    sampleDuration += packet.getDuration();
    tcFrames++;
}
while ((inFrame = inTrack.nextFrame()) != null && !inFrame.isKeyFrame()) inTrack.gotoFrame(inFrame.getFrameNo()); ByteBuffer data = inFrame.getData(); List<ByteBuffer> nalUnits = H264Utils.splitFrame(data); _rawData.clear();
/**
 * Rescales the packet's pts/duration to the track timescale and forwards it to the
 * base muxer track. For ADTS-framed AAC the timescale is taken from the ADTS header's
 * sample rate and each frame's duration is fixed at 1024 samples.
 *
 * @param pkt     packet to mux; its pts/duration may be rewritten in place
 * @param entryNo sample entry index this packet belongs to
 * @throws IOException on write failure in the underlying track
 * @throws IllegalStateException if the track has already been finished
 */
@Override
public void addFrameInternal(Packet pkt, int entryNo) throws IOException {
    checkState(!finished, "The muxer track has finished muxing");

    if (_timescale == NO_TIMESCALE_SET) {
        if (adtsHeader != null) {
            // AAC with ADTS framing: the sample rate defines the track timescale.
            _timescale = adtsHeader.getSampleRate();
        } else {
            _timescale = pkt.getTimescale();
        }
    }

    if (_timescale != pkt.getTimescale()) {
        // BUG FIX: rescale BOTH values against the packet's original timescale.
        // The previous code computed the duration from the already-rescaled pts and
        // divided by the duration ((pts * _timescale) / duration), producing garbage
        // durations whenever the timescales differed.
        long srcTimescale = pkt.getTimescale();
        pkt.setPts((pkt.getPts() * _timescale) / srcTimescale);
        pkt.setDuration((pkt.getDuration() * _timescale) / srcTimescale);
    }

    if (adtsHeader != null) {
        // Every AAC frame decodes to exactly 1024 PCM samples.
        pkt.setDuration(1024);
    }

    super.addFrameInternal(pkt, entryNo);
}
/**
 * Static factory: builds a packet that carries {@code data} as its payload while
 * copying every piece of metadata (timing, frame number/type, timecode, display
 * order) from {@code other}.
 *
 * @param other packet whose metadata is cloned
 * @param data  payload for the new packet (may be null)
 * @return the new packet
 */
public static Packet createPacketWithData(Packet other, ByteBuffer data) {
    Packet clone = new Packet(data, other.pts, other.timescale, other.duration,
            other.frameNo, other.frameType, other.tapeTimecode, other.displayOrder);
    return clone;
}
@Override public void addFrame(Packet pkt) throws IOException { if (codec == Codec.H264) { ByteBuffer result = pkt.getData(); if (pkt.frameType == FrameType.UNKNOWN) { pkt.setFrameType(H264Utils.isByteBufferIDRSlice(result) ? FrameType.KEY : FrameType.INTER); } H264Utils.wipePSinplace(result, spsList, ppsList); result = H264Utils.encodeMOVPacket(result); pkt = Packet.createPacketWithData(pkt, result); } else if (codec == Codec.AAC) { ByteBuffer result = pkt.getData(); adtsHeader = ADTSParser.read(result); // System.out.println(String.format("crc_absent: %d, num_aac_frames: %d, size: %d, remaining: %d, %d, %d, %d", // adtsHeader.getCrcAbsent(), adtsHeader.getNumAACFrames(), adtsHeader.getSize(), result.remaining(), // adtsHeader.getObjectType(), adtsHeader.getSamplingIndex(), adtsHeader.getChanConfig())); pkt = Packet.createPacketWithData(pkt, result); } super.addFrame(pkt); }
/**
 * Pre-rolls the decoder up to (but not including) the current frame: seeks back to
 * the nearest preceding key frame and skip-decodes every frame in between so the
 * decoder holds the correct reference state.
 *
 * @throws IOException if the seek target is invalid or a frame cannot be read
 */
private void decodeLeadingFrames() throws IOException {
    long targetFrame = videoTrack.getCurFrame();
    long keyFrameNo = getPreviousKeyFrame(targetFrame);
    if (keyFrameNo == targetFrame) {
        // Already positioned on a key frame — no pre-roll needed.
        return;
    }
    if (!videoTrack.gotoFrame(keyFrameNo)) {
        throw new IOException("Invalid frame no: " + keyFrameNo);
    }
    while (true) {
        buffer.clear();
        Packet frame = videoTrack.nextFrame(buffer);
        if (frame == null) {
            throw new IOException("Cannot decode frame");
        }
        // Decode for reference state only; the output picture is discarded.
        demuxerHelper.skipFrame(frame);
        if (frame.getFrameNo() >= targetFrame - 1) {
            break;
        }
    }
}
/**
 * Writes one frame to the output channel as a 12-byte little-endian record header
 * (int32 payload size, int64 running frame counter) followed by the raw payload.
 * The file header is written lazily on the first frame, using that packet's
 * timescale as the frame rate.
 *
 * @param pkt packet whose data is appended to the output
 * @throws IOException on channel write failure
 */
@Override
public void addFrame(Packet pkt) throws IOException {
    if (!headerWritten) {
        frameRate = pkt.getTimescale();
        writeHeader();
        headerWritten = true;
    }
    ByteBuffer payload = pkt.getData();
    ByteBuffer recordHeader = ByteBuffer.allocate(12);
    recordHeader.order(ByteOrder.LITTLE_ENDIAN);
    recordHeader.putInt(payload.remaining());
    recordHeader.putLong(nFrames);
    // Rewind for writing; the 12 bytes written exactly fill the buffer.
    recordHeader.clear();
    ch.write(recordHeader);
    ch.write(payload);
    nFrames++;
}
/**
 * Decodes compressed video packets until the reorder buffer is full enough to release
 * a frame in display order, or the input is exhausted.
 *
 * @return the next decoded frame (with its packet), or null when input and buffers
 *         are both drained
 * @throws IOException on demuxer/decoder failure
 */
@Override
public VideoFrameWithPacket getNextVideoFrame() throws IOException {
    Packet inVideoPacket;
    while ((inVideoPacket = getNextVideoPacket()) != null) {
        if (inVideoPacket.getFrameType() == FrameType.UNKNOWN) {
            detectFrameType(inVideoPacket);
        }
        Picture decodedFrame = null;
        // Borrow a pixel buffer sized for this packet's data from the pixel store.
        LoanerPicture pixelBuffer = getPixelBuffer(inVideoPacket.getData());
        decodedFrame = decodeVideo(inVideoPacket.getData(), pixelBuffer.getPicture());
        if (decodedFrame == null) {
            // Decoder produced no picture; return the loaner for reuse and keep going.
            pixelStore.putBack(pixelBuffer);
            continue;
        }
        frameReorderBuffer.add(new VideoFrameWithPacket(inVideoPacket, new LoanerPicture(decodedFrame, 1)));
        // Only release frames once the reorder window is full, so display order is correct.
        if (frameReorderBuffer.size() > Transcoder.REORDER_BUFFER_SIZE) {
            return removeFirstFixDuration(frameReorderBuffer);
        }
    }
    // We don't have any more compressed video packets
    if (frameReorderBuffer.size() > 0) {
        return removeFirstFixDuration(frameReorderBuffer);
    }
    // We don't have any more compressed video packets and nothing's in
    // the buffers
    return null;
}
if (videoFrame.getPacket().getFrameNo() < firstVideoFrame.getPacket().getFrameNo()) firstVideoFrame = videoFrame; for (int af = 0; af < aqSize; af++) { AudioFrameWithPacket audioFrame = audioQueue.get(0); if (audioFrame.getPacket().getPtsD() >= firstVideoFrame.getPacket().getPtsD() + .2) break; audioQueue.remove(0);
/**
 * Returns the next PES packet belonging to this elementary stream, wrapped as a
 * generic Packet. Packets read from the demuxer that belong to other streams are
 * handed back to the demuxer for routing to their own tracks.
 *
 * @param buf caller-supplied buffer (currently unused by this implementation)
 * @return the next packet for this stream, or null when the input is exhausted
 * @throws IOException on demuxer read failure
 */
@Override
public Packet nextFrameWithBuffer(ByteBuffer buf) throws IOException {
    PESPacket pes;
    if (_pending.size() > 0) {
        // A packet for this stream was stashed earlier while serving another track.
        pes = _pending.remove(0);
    } else {
        // Pull demuxer packets, re-routing those of other streams, until we hit
        // one of ours or run out of input.
        while ((pes = demuxer.nextPacket(demuxer.getBuffer())) != null && pes.streamId != streamId) {
            demuxer.addToStream(pes);
        }
    }
    if (pes == null) {
        return null;
    }
    // MPEG PS/TS timestamps are expressed in a 90 kHz clock.
    return Packet.createPacket(pes.data, pes.pts, 90000, 0, frameNo++, FrameType.UNKNOWN, null);
}
/**
 * Encodes one decoded video frame and forwards the resulting compressed packet
 * downstream, reusing a cached scratch buffer for the encoder output.
 *
 * @param videoFrame decoded frame plus the source packet whose metadata is cloned
 * @throws IOException on encode or output failure
 */
@Override
public void outputVideoFrame(VideoFrameWithPacket videoFrame) throws IOException {
    if (!outputFormat.isVideo() || outputVideoCodec == null)
        return;

    Picture frame = videoFrame.getFrame().getPicture();
    int bufferSize = videoEncoder.estimateBufferSize(frame);

    // BUG FIX: grow the cached buffer when it is SMALLER than the encoder's estimate.
    // The previous condition (bufferSize < buffer.capacity()) was inverted: it
    // reallocated when the cache was already big enough, and reused an undersized
    // buffer otherwise, risking a BufferOverflowException during encoding.
    ByteBuffer buffer = bufferStore.get();
    if (buffer == null || buffer.capacity() < bufferSize) {
        buffer = ByteBuffer.allocate(bufferSize);
        bufferStore.set(buffer);
    }
    buffer.clear();

    EncodedFrame enc = encodeVideo(frame, buffer);
    // Clone the encoded bytes so the scratch buffer can be reused for the next frame.
    Packet outputVideoPacket = Packet.createPacketWithData(videoFrame.getPacket(), NIOUtils.clone(enc.getData()));
    outputVideoPacket.setFrameType(enc.isKeyFrame() ? FrameType.KEY : FrameType.INTER);
    outputVideoPacket(outputVideoPacket, org.jcodec.common.VideoCodecMeta
            .createSimpleVideoCodecMeta(new Size(frame.getWidth(), frame.getHeight()), frame.getColor()));
}