/**
 * Construct an audio input stream from which <code>duration</code> seconds of silence can be read.
 *
 * @param duration
 *            the desired duration of the silence, in seconds
 * @param format
 *            the desired audio format of the audio input stream. getFrameSize() and getFrameRate() must return meaningful
 *            values.
 */
public SilenceAudioInputStream(double duration, AudioFormat format) {
    // Derive the buffer size from the frame count (frames * frameSize) rather than truncating
    // frameSize * frameRate * duration independently: the original could produce a buffer holding
    // a fraction of a trailing frame that disagreed with the frame length reported to super().
    super(new ByteArrayInputStream(new byte[(int) ((long) (format.getFrameRate() * duration) * format.getFrameSize())]),
            format, (long) (format.getFrameRate() * duration));
}
/**
 * Construct an audio input stream from which <code>duration</code> seconds of silence can be read.
 *
 * @param duration
 *            the desired duration of the silence, in seconds
 * @param format
 *            the desired audio format of the audio input stream. getFrameSize() and getFrameRate() must return meaningful
 *            values.
 */
public SilenceAudioInputStream(double duration, AudioFormat format) {
    // Derive the buffer size from the frame count (frames * frameSize) rather than truncating
    // frameSize * frameRate * duration independently: the original could produce a buffer holding
    // a fraction of a trailing frame that disagreed with the frame length reported to super().
    super(new ByteArrayInputStream(new byte[(int) ((long) (format.getFrameRate() * duration) * format.getFrameSize())]),
            format, (long) (format.getFrameRate() * duration));
}
AudioFormat format = m_line.getFormat(); int nFrameSize = format.getFrameSize(); long totalBytesToRead = (long) (millis * format.getFrameRate() * nFrameSize / 1000); if (totalBytesToRead % nFrameSize != 0) { totalBytesToRead += nFrameSize - totalBytesToRead % nFrameSize;
AudioFormat format = m_line.getFormat(); int nFrameSize = format.getFrameSize(); long totalBytesToRead = (long) (millis * format.getFrameRate() * nFrameSize / 1000); if (totalBytesToRead % nFrameSize != 0) { totalBytesToRead += nFrameSize - totalBytesToRead % nFrameSize;
AudioFormat format = m_line.getFormat(); int nFrameSize = format.getFrameSize(); long totalBytesToRead = (long) (millis * format.getFrameRate() * nFrameSize / 1000); if (totalBytesToRead % nFrameSize != 0) { totalBytesToRead += nFrameSize - totalBytesToRead % nFrameSize;
AudioFormat format = m_line.getFormat(); int nFrameSize = format.getFrameSize(); long totalBytesToRead = (long) (millis * format.getFrameRate() * nFrameSize / 1000); if (totalBytesToRead % nFrameSize != 0) { totalBytesToRead += nFrameSize - totalBytesToRead % nFrameSize;
duration = ais.getFrameLength() / ais.getFormat().getFrameRate();
duration = ais.getFrameLength() / ais.getFormat().getFrameRate();
/**
 * Write a chunk of an audio file to a new file, keeping the same audio file type.
 *
 * @param file
 *            the source audio file
 * @param dstPath
 *            path of the destination file to create
 * @param offset
 *            start of the chunk, in milliseconds from the beginning of the stream
 * @param length
 *            duration of the chunk, in milliseconds
 * @throws UnsupportedAudioFileException
 *             if the file's format is not recognized
 * @throws IOException
 *             on any I/O failure
 */
private static void dumpStreamChunk(File file, String dstPath, long offset, long length)
        throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file);
    AudioFormat audioFormat = fileFormat.getFormat();
    // Bytes of audio data per millisecond (the original named this "bitrate", which it is not).
    int bytesPerMilli = Math.round(audioFormat.getFrameSize() * audioFormat.getFrameRate() / 1000);
    // AudioInputStream's length argument is in sample FRAMES, not bytes; the original passed
    // length * bytesPerMilli (bytes), overstating the chunk by a factor of the frame size.
    long chunkFrames = Math.round(length * (double) audioFormat.getFrameRate() / 1000);
    // try-with-resources: the original leaked both streams when write() threw.
    try (AudioInputStream inputStream = AudioSystem.getAudioInputStream(file);
            AudioInputStream chunkStream = new AudioInputStream(inputStream, audioFormat, chunkFrames)) {
        // InputStream.skip may skip fewer bytes than requested; loop until the offset is reached.
        long toSkip = offset * bytesPerMilli;
        while (toSkip > 0) {
            long skipped = inputStream.skip(toSkip);
            if (skipped <= 0) {
                break; // end of stream reached before the requested offset
            }
            toSkip -= skipped;
        }
        AudioSystem.write(chunkStream, fileFormat.getType(), new File(dstPath));
    }
} }
// Frame rate cell of the table data. Float.valueOf replaces the deprecated (since Java 9)
// Float(float) boxing constructor; the boxed value is equal, only object identity may differ.
data[2][1] = Float.valueOf(audioFormat.getFrameRate());
/** * * @param input * input * @param inputMode * if AudioPlayer.STEREO, average both input streams; if AudioPlayer.LEFT_ONLY, use only the left channel; if * AudioPlayer.RIGHT_ONLY, use only the right channel. */ public MonoAudioInputStream(AudioInputStream input, int inputMode) { super(input, input.getFormat(), input.getFrameLength()); this.newFormat = new AudioFormat(input.getFormat().getEncoding(), input.getFormat().getSampleRate(), input.getFormat() .getSampleSizeInBits(), 1, input.getFormat().getFrameSize() / input.getFormat().getChannels(), input.getFormat() .getFrameRate(), input.getFormat().isBigEndian()); this.inputChannels = input.getFormat().getChannels(); if (inputChannels < 2) throw new IllegalArgumentException("expected more than one input channel!"); this.inputMode = inputMode; if (inputMode == AudioPlayer.MONO) throw new IllegalArgumentException("expected non-mono input mode"); }
/** * * @param input * input * @param inputMode * if AudioPlayer.STEREO, average both input streams; if AudioPlayer.LEFT_ONLY, use only the left channel; if * AudioPlayer.RIGHT_ONLY, use only the right channel. */ public MonoAudioInputStream(AudioInputStream input, int inputMode) { super(input, input.getFormat(), input.getFrameLength()); this.newFormat = new AudioFormat(input.getFormat().getEncoding(), input.getFormat().getSampleRate(), input.getFormat() .getSampleSizeInBits(), 1, input.getFormat().getFrameSize() / input.getFormat().getChannels(), input.getFormat() .getFrameRate(), input.getFormat().isBigEndian()); this.inputChannels = input.getFormat().getChannels(); if (inputChannels < 2) throw new IllegalArgumentException("expected more than one input channel!"); this.inputMode = inputMode; if (inputMode == AudioPlayer.MONO) throw new IllegalArgumentException("expected non-mono input mode"); }
AudioFormat sampleRateConvFormat = new AudioFormat(ais.getFormat().getEncoding(), targetFormat.getSampleRate(), ais.getFormat().getSampleSizeInBits(), ais.getFormat().getChannels(), ais.getFormat().getFrameSize(), ais.getFormat().getFrameRate(), ais.getFormat().isBigEndian()); try { AudioInputStream intermedStream = AudioSystem.getAudioInputStream(sampleRateConvFormat, ais);
/** * * @param input * input * @param outputMode * as defined in AudioPlayer: STEREO, LEFT_ONLY or RIGHT_ONLY. */ public StereoAudioInputStream(AudioInputStream input, int outputMode) { super(input, input.getFormat(), input.getFrameLength()); this.newFormat = new AudioFormat(input.getFormat().getEncoding(), input.getFormat().getSampleRate(), input.getFormat() .getSampleSizeInBits(), 2, 2 * input.getFormat().getFrameSize() / input.getFormat().getChannels(), input .getFormat().getFrameRate(), input.getFormat().isBigEndian()); this.inputChannels = input.getFormat().getChannels(); this.outputMode = outputMode; }
AudioFormat sampleRateConvFormat = new AudioFormat(ais.getFormat().getEncoding(), targetFormat.getSampleRate(), ais.getFormat().getSampleSizeInBits(), ais.getFormat().getChannels(), ais.getFormat().getFrameSize(), ais.getFormat().getFrameRate(), ais.getFormat().isBigEndian()); try { AudioInputStream intermedStream = AudioSystem.getAudioInputStream(sampleRateConvFormat, ais);
/** * * @param input * input * @param outputMode * as defined in AudioPlayer: STEREO, LEFT_ONLY or RIGHT_ONLY. */ public StereoAudioInputStream(AudioInputStream input, int outputMode) { super(input, input.getFormat(), input.getFrameLength()); this.newFormat = new AudioFormat(input.getFormat().getEncoding(), input.getFormat().getSampleRate(), input.getFormat() .getSampleSizeInBits(), 2, 2 * input.getFormat().getFrameSize() / input.getFormat().getChannels(), input .getFormat().getFrameRate(), input.getFormat().isBigEndian()); this.inputChannels = input.getFormat().getChannels(); this.outputMode = outputMode; }
File file = ...; AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(file); AudioFormat format = audioInputStream.getFormat(); long frames = audioInputStream.getFrameLength(); double durationInSeconds = (frames+0.0) / format.getFrameRate();
int size = dataLine.getBufferSize()*(format.getChannels()/2)/bufferUpdateFactor; int framesSinceLast = (int)((timeSinceLastPositionSet/1000f)* dataLine.getFormat().getFrameRate()); int framesRemainingTillTime = size - framesSinceLast; return framePosition 2, format.getSampleSizeInBits()*2/8, // calculate frame size format.getFrameRate(), format.isBigEndian() );
/** Conversion milliseconds to frames (samples). */
public static long millis2Frames(double ms, AudioFormat format) {
    // getFrameRate() returns float; assigning to a double local widens it implicitly,
    // making the original explicit (double) cast unnecessary.
    double frameRate = format.getFrameRate();
    return (long) millis2FramesD(ms, frameRate);
}

/** Conversion milliseconds to frames (samples) */
/**
 * Build the intermediate conversion format: the destination's encoding, sample size and endianness
 * combined with the source's sample rate, frame rate and channel count.
 *
 * <p>The frame size is recomputed as sampleSizeInBits * channels / 8, which assumes whole-byte,
 * PCM-style frames — NOTE(review): confirm this holds for compressed encodings.
 *
 * @param src format of the incoming audio
 * @param dst format requested by the caller
 * @return the target format to hand to the format converter
 */
// "final" removed: it is redundant on a static method (static methods cannot be overridden).
private static AudioFormat createTargetFormat(AudioFormat src, AudioFormat dst) {
    return new AudioFormat(dst.getEncoding(), src.getSampleRate(), dst.getSampleSizeInBits(), src.getChannels(),
            dst.getSampleSizeInBits() * src.getChannels() / 8, src.getFrameRate(), dst.isBigEndian());
}