/** * * @param input * input * @param outputMode * as defined in AudioPlayer: STEREO, LEFT_ONLY or RIGHT_ONLY. */ public StereoAudioInputStream(AudioInputStream input, int outputMode) { super(input, input.getFormat(), input.getFrameLength()); this.newFormat = new AudioFormat(input.getFormat().getEncoding(), input.getFormat().getSampleRate(), input.getFormat() .getSampleSizeInBits(), 2, 2 * input.getFormat().getFrameSize() / input.getFormat().getChannels(), input .getFormat().getFrameRate(), input.getFormat().isBigEndian()); this.inputChannels = input.getFormat().getChannels(); this.outputMode = outputMode; }
/**
 * Turns the AudioInputStream into a 16bit, SIGNED_PCM, little endian audio stream that preserves the original sample
 * rate of the AudioInputStream. NOTE: this assumes the frame size can be only 1 or 2 bytes. The AudioInputStream
 * is left in a state of having all of its data being read.
 *
 * @param ais stream to convert
 * @return result array, one short per frame
 * @throws IOException if error occurred
 */
static public short[] toSignedPCM(AudioInputStream ais) throws IOException {
    AudioFormat aisFormat = ais.getFormat();
    int frameSize = aisFormat.getFrameSize();
    // Drain the whole stream first. The previous version sized the result from
    // available() — which only reports what can be read without blocking — and
    // assumed read(frame) always fills a whole frame; both can silently truncate
    // or mis-align data per the InputStream contract.
    java.io.ByteArrayOutputStream buffer = new java.io.ByteArrayOutputStream();
    byte[] chunk = new byte[8192];
    int n;
    while ((n = ais.read(chunk)) != -1) {
        buffer.write(chunk, 0, n);
    }
    byte[] allBytes = buffer.toByteArray();
    short[] shorts = new short[allBytes.length / frameSize];
    byte[] frame = new byte[frameSize];
    for (int i = 0; i < shorts.length; i++) {
        System.arraycopy(allBytes, i * frameSize, frame, 0, frameSize);
        shorts[i] = bytesToShort(aisFormat, frame);
    }
    return shorts;
}
/**
 * Writes the [offset, offset+length] portion (both in milliseconds) of an audio
 * file to dstPath, in the source file's own container format.
 *
 * @param file source audio file
 * @param dstPath destination path for the chunk
 * @param offset start position in milliseconds
 * @param length chunk duration in milliseconds
 * @throws UnsupportedAudioFileException if the file format is not recognized
 * @throws IOException on read/write failure
 */
private static void dumpStreamChunk(File file, String dstPath, long offset, long length)
        throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file);
    AudioFormat audioFormat = fileFormat.getFormat();
    // Bytes per millisecond of audio: frame size * frames per second / 1000.
    int bytesPerMs = Math.round(audioFormat.getFrameSize() * audioFormat.getFrameRate() / 1000);
    // try-with-resources: the original leaked both streams if write() threw.
    try (AudioInputStream inputStream = AudioSystem.getAudioInputStream(file)) {
        // skip() may skip fewer bytes than requested; loop until done.
        long toSkip = offset * bytesPerMs;
        while (toSkip > 0) {
            long skipped = inputStream.skip(toSkip);
            if (skipped <= 0) {
                break; // end of stream or no progress possible
            }
            toSkip -= skipped;
        }
        // The AudioInputStream length argument is in sample frames, not bytes;
        // the original passed a byte count here.
        long lengthInFrames = length * bytesPerMs / audioFormat.getFrameSize();
        try (AudioInputStream chunkStream = new AudioInputStream(inputStream, audioFormat, lengthInFrames)) {
            AudioSystem.write(chunkStream, fileFormat.getType(), new File(dstPath));
        }
    }
}
}
// NOTE(review): fragment — two near-identical "read the whole stream" sequences
// pasted together; bytesToRead/bytesRead are declared twice, which cannot
// compile in a single scope. Code kept verbatim.
// Read all bytes of a file-based audio stream into `data`.
ais = AudioSystem.getAudioInputStream(file);
// NOTE(review): available() only reports bytes readable without blocking, and a
// single read() may return fewer bytes than requested — confirm inputs are
// small in-memory streams where this happens to work.
int bytesToRead = ais.available();
data = new byte[bytesToRead];
int bytesRead = ais.read(data);
if (bytesToRead != bytesRead)
    throw new IllegalStateException("read only " + bytesRead + " of " + bytesToRead + " bytes");
// Same pattern again, but reading from a URL-based stream.
ais = AudioSystem.getAudioInputStream(url);
int bytesToRead = ais.available();
data = new byte[bytesToRead];
int bytesRead = ais.read(data);
if (bytesToRead != bytesRead)
    throw new IllegalStateException("read only " + bytesRead + " of " + bytesToRead + " bytes");
// NOTE(review): truncated fragment of a player thread's run() — several opened
// braces are never closed in the visible text; code kept verbatim.
public void run() {
    status = Status.PLAYING;
    AudioFormat audioFormat = ais.getFormat();
    if (audioFormat.getChannels() == 1) {
        if (outputMode != MONO) {
            // mono -> convert to stereo
            ais = new StereoAudioInputStream(ais, outputMode);
            audioFormat = ais.getFormat();
            assert audioFormat.getChannels() == 2 : "Unexpected number of channels: " + audioFormat.getChannels();
            if (outputMode == MONO) {
                // Stereo (or more) input requested as mono output.
                ais = new MonoAudioInputStream(ais);
                DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
                // Convert to the target playback format before opening the line.
                ais = AudioSystem.getAudioInputStream(targetFormat, ais);
                audioFormat = ais.getFormat();
                info = new DataLine.Info(SourceDataLine.class, audioFormat);
                line = (SourceDataLine) AudioSystem.getLine(info);
                line.start();
                int nRead = 0;
                byte[] abData = new byte[65532]; // needs to be a multiple of 4 and 6, to support both 16 and 24 bit stereo
                // Pump the stream into the line until EOF or an external exit request.
                while (nRead != -1 && !exitRequested) {
                    try {
                        nRead = ais.read(abData, 0, abData.length);
                    } catch (IOException e) {
                        e.printStackTrace();
// NOTE(review): truncated fragment of an MP3 streaming player — the enclosing
// method header and several closing braces are missing; code kept verbatim.
AudioInputStream din = null;
try {
    // Fetch the mp3 over HTTP and decode it to 16-bit signed little-endian PCM.
    AudioInputStream in = AudioSystem.getAudioInputStream(new URL("http://www.howjsay.com/mp3/"+ args[0] +".mp3"));
    AudioFormat baseFormat = in.getFormat();
    AudioFormat decodedFormat = new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            baseFormat.getSampleRate(), 16, baseFormat.getChannels(),
            baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
    din = AudioSystem.getAudioInputStream(decodedFormat, in);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, decodedFormat);
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    if(line != null) {
        line.open(decodedFormat);
        byte[] data = new byte[4096];
        line.start();
        // Pump decoded bytes into the output line until EOF.
        while ((nBytesRead = din.read(data, 0, data.length)) != -1) {
            line.write(data, 0, nBytesRead);
            // NOTE(review): in the collapsed original, drain/stop/close and the
            // duplicate din.close() fall inside the loop span — confirm their
            // intended position against the original file.
            line.drain();
            line.stop();
            line.close();
            din.close();
            try {
                din.close();
            } catch(IOException e) {
            }
// NOTE(review): truncated fragment — the enclosing method and the loop's closing
// braces are outside the visible text; code kept verbatim.
AudioInputStream ais = AudioSystem.getAudioInputStream(is);
AudioFormat audioFormat = ais.getFormat();
// Open an output line matching the stream's own format.
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(audioFormat);
line.start();
byte[] samples = new byte[BUFFER_SIZE];
int count = 0;
// Copy the stream to the line until EOF (read returns -1).
while ((count = ais.read(samples, 0, BUFFER_SIZE)) != -1) {
    line.write(samples, 0, count);
/**
 * Builds an energy graph of the given mono audio stream at the given pixel size.
 *
 * @param ais audio stream; non-PCM input is converted to signed PCM first
 * @param width graph width in pixels
 * @param height graph height in pixels
 * @throws IllegalArgumentException if the (possibly converted) stream has more than one channel
 */
public EnergyGraph(AudioInputStream ais, int width, int height) {
    super();
    boolean isSignedPcm = ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED);
    if (!isSignedPcm) {
        // e.g. ulaw/alaw input: wrap it in a converting stream.
        ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
    }
    int channels = ais.getFormat().getChannels();
    if (channels > 1) {
        throw new IllegalArgumentException("Can only deal with mono audio signals");
    }
    int samplingRate = (int) ais.getFormat().getSampleRate();
    DoubleDataSource signal = new AudioDoubleDataSource(ais);
    initialise(signal, samplingRate, width, height);
}
// NOTE(review): truncated fragment — several opened blocks are never closed in
// the visible text, and the statements after the throw appear unreachable in
// this collapsed layout; code kept verbatim.
public static void main(String[] args) throws Exception {
    if (args.length > 0) {
        for (int file = 0; file < args.length; file++) {
            AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[file]));
            // Convert any non-PCM input to signed PCM.
            if (!ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) {
                ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
                if (ais.getFormat().getChannels() > 1) {
                    throw new IllegalArgumentException("Can only deal with mono audio signals");
                    int samplingRate = (int) ais.getFormat().getSampleRate();
                    DoubleDataSource signal = new AudioDoubleDataSource(ais);
                    // 10 ms of audio per analysis frame.
                    int framelength = (int) (0.01 /* seconds */* samplingRate);
                    // Microphone capture format: 44.1 kHz, 16 bit, mono, little endian.
                    AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, 2, 44100.0F, false);
                    DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
                    AudioInputStream input = null;
                    try {
                        TargetDataLine mic = (TargetDataLine) AudioSystem.getLine(info);
                        mic.open(audioFormat);
                        mic.start();
                        input = new AudioInputStream(mic);
                    } catch (LineUnavailableException e) {
                        e.printStackTrace();
// NOTE(review): heavily mangled paste — unrelated statements from several
// methods (duration math, line setup, playback) collapsed together; the final
// array allocation is cut off mid-expression. Kept verbatim for reference only.
dataLine = AudioSystem.getSourceDataLine( clip.getFormat() );
// Frames divided by sample rate gives seconds; *1000 converts to milliseconds.
return (frames/(long)dataLine.getFormat().getSampleRate())*1000;
// NOTE(review): dividing milliseconds by the sample rate — presumably a frame
// count was intended here; verify against the original method.
return (int)(milliseconds/dataLine.getFormat().getSampleRate());
format = stream.getFormat();
format = is1.getFormat();
InputStream is2;
if (parent!=null) {
dataLine = AudioSystem.getSourceDataLine(afTemp);
dataLine.open();
inputStream = new ByteArrayInputStream( audioData );
return dataLine.isOpen();
dataLine.open();
dataLine.start();
newData = tempData;
dataLine.write(newData, 0, newData.length);
if (startOrMove) {
data = new byte[bufSize/
/**
 * Rewrites each wav file given on the command line with its sample rate scaled
 * by the factor in args[0], writing the result next to the input as
 * "&lt;name&gt;_child.wav". The samples themselves are copied unchanged, so the
 * audio plays faster or slower by the given factor.
 *
 * @param args args[0] = sampling-rate factor; args[1..] = input file paths
 * @throws Exception on unreadable input or write failure
 */
public static void main(String[] args) throws Exception {
    // parseDouble avoids the needless boxing of Double.valueOf(...).doubleValue().
    double samplingRateFactor = Double.parseDouble(args[0]);
    for (int i = 1; i < args.length; i++) {
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[i]));
        AudioFormat sourceFormat = ais.getFormat();
        // Same format, but with the sample rate scaled; signed, endianness preserved.
        AudioFormat af = new AudioFormat((int) (sourceFormat.getSampleRate() * samplingRateFactor),
                sourceFormat.getSampleSizeInBits(), sourceFormat.getChannels(), true,
                sourceFormat.isBigEndian());
        DDSAudioInputStream ais2 = new DDSAudioInputStream(new AudioDoubleDataSource(ais), af);
        // Strips a 4-character extension — assumes ".wav"-style names; TODO confirm.
        String outFileName = args[i].substring(0, args[i].length() - 4) + "_child.wav";
        AudioSystem.write(ais2, AudioFileFormat.Type.WAVE, new File(outFileName));
    }
}
}
// NOTE(review): truncated fragment — the method's closing braces are outside the
// visible text; code kept verbatim.
public static double getLength(String path) throws Exception {
    AudioInputStream stream;
    stream = AudioSystem.getAudioInputStream(new URL(path));
    AudioFormat format = stream.getFormat();
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        // NOTE(review): doubling getSampleSizeInBits() and getFrameSize()
        // presumably targets 8-bit -> 16-bit decoding, but would be wrong for
        // 16-bit compressed sources — confirm the expected inputs.
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format
                .getSampleRate(), format.getSampleSizeInBits() * 2, format
                .getChannels(), format.getFrameSize() * 2, format
                .getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
        DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(),
                ((int) stream.getFrameLength() * format.getFrameSize()));
        Clip clip = (Clip) AudioSystem.getLine(info);
        // NOTE(review): the clip is closed without ever being opened, and
        // getBufferSize() is then queried on the unopened clip — verify intent.
        clip.close();
        return clip.getBufferSize() / (clip.getFormat().getFrameSize() * clip.getFormat()
                .getFrameRate());
// NOTE(review): truncated fragment of a blocking playback routine — the opening
// try and several closing braces fall outside the visible text; kept verbatim.
audioStream = AudioSystem.getAudioInputStream(soundFile);
} catch (Exception e){
    e.printStackTrace();
// Query the stream's format and ask for a matching output line.
audioFormat = audioStream.getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
try {
    sourceLine = (SourceDataLine) AudioSystem.getLine(info);
    sourceLine.open(audioFormat);
} catch (LineUnavailableException e) {
    e.printStackTrace();
// Pump the stream into the line until EOF (read returns -1).
sourceLine.start();
while (nBytesRead != -1) {
    try {
        nBytesRead = audioStream.read(abData, 0, abData.length);
    } catch (IOException e) {
        e.printStackTrace();
// NOTE(review): in this collapsed layout write() would be reached with
// nBytesRead == -1 at EOF — confirm the missing surrounding code guards this.
int nBytesWritten = sourceLine.write(abData, 0, nBytesRead);
sourceLine.drain();
sourceLine.close();
private void load(File file) { try { audioInputStream = AudioSystem.getAudioInputStream(file); audioFormat = audioInputStream.getFormat(); // mp3 decode if (audioFormat.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) { audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, audioFormat.getSampleRate(), 16, audioFormat.getChannels(), audioFormat.getChannels() * 2, audioFormat.getSampleRate(), false); audioInputStream = AudioSystem.getAudioInputStream(audioFormat, audioInputStream); } //output DataLine.Info dataLineInfo = new DataLine.Info( SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED); sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo); sourceDataLine.open(audioFormat); volume = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN); sourceDataLine.start(); } catch (Exception e) { log.error("Couldn't load file: " + file + ' ' + e); } }
// NOTE(review): heavily truncated fragment of an energy-based endpointing tool —
// the try/catch and if/else structure is cut mid-body; code kept verbatim.
AudioInputStream ais = AudioSystem.getAudioInputStream(new File(inputFile));
// Convert any non-PCM input to signed PCM.
if (!ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) {
    ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
    if (ais.getFormat().getChannels() > 1) {
        throw new IllegalArgumentException("Can only deal with mono audio signals");
        int samplingRate = (int) ais.getFormat().getSampleRate();
        DoubleDataSource signal = new AudioDoubleDataSource(ais);
        // NOTE(review): orphaned argument-list tail from a call whose start was cut.
        speechEndLikelihood, shiftFromMinimumEnergyCenter, numClusters);
        // Re-open the input so it can be read again from the start.
        ais.close();
        ais = AudioSystem.getAudioInputStream(new File(inputFile));
        } catch (UnsupportedAudioFileException e) {
        double[] x = signal.getAllData();
        ais.close();
        // Write the processed samples back out as a WAVE file.
        DDSAudioInputStream outputAudio = new DDSAudioInputStream(new BufferedDoubleDataSource(x), ais.getFormat());
        AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outputFile));
        } else {
        DDSAudioInputStream outputAudio = new DDSAudioInputStream(new BufferedDoubleDataSource(y), ais.getFormat());
        AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outputFile));
// NOTE(review): truncated fragment of a bit-depth scan over wav files — loops
// and ifs are cut mid-body; code kept verbatim.
progressBar.setValue(progress);
AudioInputStream ais = AudioSystem.getAudioInputStream(wavFiles[i]);
// Convert any non-PCM input to signed PCM.
if (!ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) {
    ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
    // Optionally high-pass filter mono input — arguments presumably cutoff
    // frequency and filter order/attenuation; TODO confirm against HighPassFilter.
    if (highPassFilter && ais.getFormat().getChannels() == 1) {
        ais = new AudioConverterUtils.HighPassFilter(50, 40).apply(ais);
        int maxBitPos = 0;
        int valueAfterShift;
        int bitsPerSample = ais.getFormat().getSampleSizeInBits();
        // Scan samples for the highest occupied bit position.
        for (int k = 0; k < samples.length; k++) {
            for (int j = bitsPerSample; j >= 1; j--) {
                ais.close();
                int bestShift = maxBitPos - targetBitsPerSample + 2;
                if (bestShift > globalBestShift) {
// NOTE(review): mangled paste — import statements immediately followed by loose
// statements that are not inside any method; cannot compile as-is. Kept verbatim.
import javax.sound.sampled.*;
import java.io.*;
import javax.swing.*;
// Open the wav and loop it continuously on a Clip.
AudioInputStream as1 = AudioSystem.getAudioInputStream(new java.io.FileInputStream("chickenDance.wav"));
AudioFormat af = as1.getFormat();
Clip clip1 = AudioSystem.getClip();
DataLine.Info info = new DataLine.Info(Clip.class, af);
Line line1 = AudioSystem.getLine(info);
// Only start playback if the freshly obtained line is not already open.
if ( ! line1.isOpen() ) {
    clip1.open(as1);
    clip1.loop(Clip.LOOP_CONTINUOUSLY);
    clip1.start();
}
/**
 * Opens the given audio stream, loads it into a Clip and starts looped playback.
 *
 * @param arquivo audio input stream to play
 * @param count number of additional loops (see Clip.loop)
 * @return true when playback was started without error, false otherwise
 */
private static boolean play(final InputStream arquivo, int count) {
    try (final AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(arquivo)) {
        // Load the audio format and create a line
        final AudioFormat audioFormat = audioInputStream.getFormat();
        // NOTE(review): the (int) cast binds to getFrameLength() before the
        // multiply, so very long streams could overflow this buffer size — confirm
        // inputs are short.
        final DataLine.Info dataLineInfo = new DataLine.Info(Clip.class, audioInputStream.getFormat(),
                (int) audioInputStream.getFrameLength() * audioFormat.getFrameSize());
        // Load the sound into the device
        try (final Clip clip = (Clip) AudioSystem.getLine(dataLineInfo)) {
            // LineListener event
            clip.addLineListener(e -> {
                if (LineEvent.Type.STOP.equals(e.getType())) {
                    e.getLine().close();
                }
            });
            clip.open(audioInputStream);
            clip.loop(count);
            // NOTE(review): leaving this try-with-resources closes the clip right
            // after loop() returns, which would stop playback almost immediately —
            // confirm whether blocking until completion was intended.
        }
        return true;
    } catch (LineUnavailableException | UnsupportedAudioFileException | IOException e) {
        LogUtils.generate(e);
        return false;
    }
}
}
/**
 * Runs pitch analysis on the given wav file, recording its sample rate in
 * {@code params.fs} before analysing.
 *
 * @param wavFile path of the wav file to analyse
 * @throws UnsupportedAudioFileException if the file is not a recognized audio format
 * @throws IOException on read failure
 */
public void pitchAnalyzeWav(String wavFile) throws UnsupportedAudioFileException, IOException {
    // try-with-resources: the original never closed the stream (file-handle leak).
    try (AudioInputStream inputAudio = AudioSystem.getAudioInputStream(new File(wavFile))) {
        params.fs = (int) inputAudio.getFormat().getSampleRate();
        AudioDoubleDataSource signal = new AudioDoubleDataSource(inputAudio);
        // NOTE(review): assumes pitchAnalyze consumes the signal before returning —
        // confirm it does not retain the data source past this call.
        pitchAnalyze(signal);
    }
}
// NOTE(review): mangled fragment of a UDP audio receiver — the second line is an
// orphaned tail of a cut statement, and open()/start() are invoked twice on the
// same line object; code kept verbatim.
format = new AudioFormat(sampleRate, 16, 1, true, false);
receivePacket.getData());
// Wrap the received datagram payload as an audio stream.
ais = new AudioInputStream(baiss, format, receivePacket.getLength());
try {
    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
    sourceDataLine.open(format);
    // NOTE(review): MASTER_GAIN is in decibels; 100.0f exceeds the usual control
    // maximum and will likely be rejected or clamped — confirm intent.
    FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
    volumeControl.setValue(100.0f);
    sourceDataLine.start();
    // NOTE(review): duplicate open/start on an already-open line will throw — verify.
    sourceDataLine.open(format);
    sourceDataLine.start();
    System.out.println("format? :" + sourceDataLine.getFormat());