/**
 * Queues a timecode entry (packet metadata only, payload dropped) for the current GOP.
 *
 * @param packet source packet; only its timing metadata is retained
 * @throws IOException propagated from {@code processGop()}
 * @throws IllegalStateException if the packet's timescale differs from the one already
 *         latched for this track (MP4 timecode tracks cannot switch timescale)
 */
public void addTimecode(Packet packet) throws IOException {
    // Latch the timescale from the first packet seen.
    if (_timescale == NO_TIMESCALE_SET)
        _timescale = packet.getTimescale();
    // Every subsequent packet must agree with the latched timescale.
    // IllegalStateException (a RuntimeException subclass, so callers catching
    // RuntimeException are unaffected) names the failure mode precisely.
    if (_timescale != NO_TIMESCALE_SET && _timescale != packet.getTimescale())
        throw new IllegalStateException("MP4 timecode track doesn't support timescale switching.");
    // A key frame opens a new GOP: flush the one accumulated so far.
    if (packet.isKeyFrame())
        processGop();
    // Keep metadata only; the timecode track has no use for the payload bytes.
    gop.add(Packet.createPacketWithData(packet, (ByteBuffer) null));
}
/**
 * Returns the next single-frame AAC packet, splitting multi-frame ADTS payloads.
 * Frames are buffered in {@code audioStash} and handed out one at a time in
 * decode order; returns {@code null} when the underlying source is exhausted.
 *
 * @return the next ADTS frame as its own packet, or {@code null} at end of stream
 * @throws IOException propagated from the underlying demuxer
 */
@Override
public Packet nextFrame() throws IOException {
    // Refill the stash only when empty: pull one source packet and slice it
    // into individual ADTS frames.
    if (audioStash.size() == 0) {
        Packet nextFrame = nextFrameWithBuffer(null);
        if (nextFrame != null) {
            ByteBuffer data = nextFrame.getData();
            // Peek at the first ADTS header without consuming it — duplicate()
            // shares the bytes but has an independent position.
            Header adts = ADTSParser.read(data.duplicate());
            long nextPts = nextFrame.getPts();
            while (data.hasRemaining()) {
                // adts.getSize() covers header + payload, so this slices off one
                // whole ADTS frame and advances 'data' past it.
                ByteBuffer data2 = NIOUtils.read(data, adts.getSize());
                Packet pkt = Packet.createPacketWithData(nextFrame, data2);
                // One AAC frame decodes to 1024 samples; convert that to the
                // packet's timescale via the sample rate from the ADTS header.
                pkt.setDuration((pkt.getTimescale() * 1024) / AACConts.AAC_SAMPLE_RATES[adts.getSamplingIndex()]);
                // Successive frames get consecutive PTS values starting at the
                // source packet's PTS.
                pkt.setPts(nextPts);
                nextPts += pkt.getDuration();
                audioStash.add(pkt);
                // Peek at the next frame's header, if any bytes remain.
                if (data.hasRemaining())
                    adts = ADTSParser.read(data.duplicate());
            }
        }
    }
    // Still empty after a refill attempt means end of stream.
    if (audioStash.size() == 0)
        return null;
    // FIFO: hand frames out in the order they were sliced.
    return audioStash.remove(0);
}
}
@Override public void addFrame(Packet pkt) throws IOException { if (codec == Codec.H264) { ByteBuffer result = pkt.getData(); if (pkt.frameType == FrameType.UNKNOWN) { pkt.setFrameType(H264Utils.isByteBufferIDRSlice(result) ? FrameType.KEY : FrameType.INTER); } H264Utils.wipePSinplace(result, spsList, ppsList); result = H264Utils.encodeMOVPacket(result); pkt = Packet.createPacketWithData(pkt, result); } else if (codec == Codec.AAC) { ByteBuffer result = pkt.getData(); adtsHeader = ADTSParser.read(result); // System.out.println(String.format("crc_absent: %d, num_aac_frames: %d, size: %d, remaining: %d, %d, %d, %d", // adtsHeader.getCrcAbsent(), adtsHeader.getNumAACFrames(), adtsHeader.getSize(), result.remaining(), // adtsHeader.getObjectType(), adtsHeader.getSamplingIndex(), adtsHeader.getChanConfig())); pkt = Packet.createPacketWithData(pkt, result); } super.addFrame(pkt); }
/**
 * Encodes one decoded video frame and forwards the compressed packet downstream,
 * tagged with simple codec metadata (size and color space). No-op when the output
 * container carries no video or no output video codec is configured.
 *
 * @param videoFrame decoded picture plus the originating packet metadata
 * @throws IOException propagated from the downstream packet sink
 */
@Override
public void outputVideoFrame(VideoFrameWithPacket videoFrame) throws IOException {
    if (!outputFormat.isVideo() || outputVideoCodec == null)
        return;
    Packet outputVideoPacket;
    ByteBuffer buffer = bufferStore.get();
    int bufferSize = videoEncoder.estimateBufferSize(videoFrame.getFrame().getPicture());
    // BUGFIX: reallocate when the cached buffer is TOO SMALL for this frame.
    // The old condition ('bufferSize < buffer.capacity()') was inverted: it
    // reallocated whenever the frame needed LESS than the cached capacity, yet
    // kept an undersized buffer when the frame needed more — risking a buffer
    // overflow in the encoder.
    if (buffer == null || buffer.capacity() < bufferSize) {
        buffer = ByteBuffer.allocate(bufferSize);
        bufferStore.set(buffer);
    }
    buffer.clear();
    Picture frame = videoFrame.getFrame().getPicture();
    EncodedFrame enc = encodeVideo(frame, buffer);
    // Clone the encoded bytes: the encoder's output buffer is reused across frames.
    outputVideoPacket = Packet.createPacketWithData(videoFrame.getPacket(), NIOUtils.clone(enc.getData()));
    outputVideoPacket.setFrameType(enc.isKeyFrame() ? FrameType.KEY : FrameType.INTER);
    outputVideoPacket(outputVideoPacket,
            org.jcodec.common.VideoCodecMeta.createSimpleVideoCodecMeta(
                    new Size(frame.getWidth(), frame.getHeight()), frame.getColor()));
}
/**
 * Encodes one decoded audio frame and hands the resulting compressed packet to
 * the audio output, along with codec metadata derived from the frame's audio
 * format. No-op when the output container carries no audio or no output audio
 * codec is configured.
 *
 * @param audioFrame decoded samples plus the originating packet metadata
 * @throws IOException propagated from the downstream packet sink
 */
@Override
public void outputAudioFrame(AudioFrameWithPacket audioFrame) throws IOException {
    // Skip silently when audio output does not apply to this transcode.
    if (!outputFormat.isAudio() || outputAudioCodec == null)
        return;
    ByteBuffer encoded = encodeAudio(audioFrame.getAudio());
    Packet compressed = Packet.createPacketWithData(audioFrame.getPacket(), encoded);
    org.jcodec.common.AudioCodecMeta meta =
            org.jcodec.common.AudioCodecMeta.fromAudioFormat(audioFrame.getAudio().getFormat());
    outputAudioPacket(compressed, meta);
}