// Plays a stereo stream on a SourceDataLine, optionally down-mixing to mono and
// converting to a directly supported format first.
// NOTE(review): incomplete excerpt — the opening try, the head of the
// targetFormat constructor call, and several closing braces are missing;
// this block does not compile as-is.
assert audioFormat.getChannels() == 2 : "Unexpected number of channels: " + audioFormat.getChannels();
// Optionally collapse the stereo stream to mono before opening the line.
if (outputMode == MONO) {
    ais = new MonoAudioInputStream(ais);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
    boolean bIsSupportedDirectly = AudioSystem.isLineSupported(info);
    if (!bIsSupportedDirectly) {
        AudioFormat sourceFormat = audioFormat;
        // NOTE(review): truncated — the next line is the tail of a
        // "AudioFormat targetFormat = new AudioFormat(...)" call whose first
        // arguments (encoding, sample rate, size, channels, frame size) are
        // missing from this excerpt.
        * (sourceFormat.getSampleSizeInBits() / 8), sourceFormat.getSampleRate(), sourceFormat.isBigEndian());
        // Re-encode the stream into the line-supported target format.
        ais = AudioSystem.getAudioInputStream(targetFormat, ais);
        audioFormat = ais.getFormat();
        info = new DataLine.Info(SourceDataLine.class, audioFormat);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.addLineListener(lineListener);
        line.open(audioFormat);
// NOTE(review): the try matching this catch is not visible in this excerpt.
} catch (Exception e) {
    e.printStackTrace();
    return;
line.start();
int nRead = 0;
byte[] abData = new byte[65532]; // needs to be a multiple of 4 and 6, to support both 16 and 24 bit stereo
/**
 * Gets the Mixer to use. Depends upon selectedMixerIndex being defined.
 *
 * <p>Returns {@code null} for "default", the last installed mixer for "last",
 * and otherwise treats the property as a numeric index into
 * {@link AudioSystem#getMixerInfo()}.
 *
 * @see #newProperties
 */
private Mixer getSelectedMixer() {
    if (selectedMixerIndex.equals("default")) {
        return null;
    }
    Mixer.Info[] installed = AudioSystem.getMixerInfo();
    int chosen = selectedMixerIndex.equals("last")
            ? installed.length - 1
            : Integer.parseInt(selectedMixerIndex);
    return AudioSystem.getMixer(installed[chosen]);
}
/**
 * Resamples each wav file given on the command line.
 *
 * <p>Usage: {@code <samplingRateFactor> <file1.wav> [file2.wav ...]} — each
 * output keeps the original sample data but declares a sample rate scaled by
 * the factor, and is written next to the input as {@code <name>_child.wav}.
 *
 * @param args factor followed by one or more wav file paths
 * @throws Exception if a file cannot be read or written
 */
public static void main(String[] args) throws Exception {
    // Double.parseDouble replaces the boxing round-trip Double.valueOf(...).doubleValue()
    double samplingRateFactor = Double.parseDouble(args[0]);
    for (int i = 1; i < args.length; i++) {
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[i]));
        // Same sample size/channels/endianness, only the declared rate changes.
        AudioFormat af = new AudioFormat((int) (ais.getFormat().getSampleRate() * samplingRateFactor),
                ais.getFormat().getSampleSizeInBits(), ais.getFormat().getChannels(), true,
                ais.getFormat().isBigEndian());
        DDSAudioInputStream ais2 = new DDSAudioInputStream(new AudioDoubleDataSource(ais), af);
        // Assumes a 4-character ".wav" suffix on the input name.
        String outFileName = args[i].substring(0, args[i].length() - 4) + "_child.wav";
        AudioSystem.write(ais2, AudioFileFormat.Type.WAVE, new File(outFileName));
    }
}
}
import javax.sound.sampled.*;

// Snippet: play a wav file through a Clip, attenuated via the MASTER_GAIN control.
// NOTE(review): these statements sit outside any method in this excerpt and
// will not compile as-is.
AudioInputStream audioInputStream = AudioSystem.getAudioInputStream(new File("some_file.wav"));
Clip clip = AudioSystem.getClip();
clip.open(audioInputStream);
FloatControl gainControl = (FloatControl) clip.getControl(FloatControl.Type.MASTER_GAIN);
gainControl.setValue(-10.0f); // Reduce volume by 10 decibels.
clip.start();
/** * Loops an audio file (in .wav, .mid, or .au format) in a background thread. * * @param filename the name of the audio file * @throws IllegalArgumentException if {@code filename} is {@code null} */ public static synchronized void loop(String filename) { if (filename == null) throw new IllegalArgumentException(); // code adapted from: http://stackoverflow.com/questions/26305/how-can-i-play-sound-in-java try { Clip clip = AudioSystem.getClip(); InputStream is = StdAudio.class.getResourceAsStream(filename); AudioInputStream ais = AudioSystem.getAudioInputStream(is); clip.open(ais); clip.loop(Clip.LOOP_CONTINUOUSLY); } catch (UnsupportedAudioFileException e) { throw new IllegalArgumentException("unsupported audio format: '" + filename + "'", e); } catch (LineUnavailableException e) { throw new IllegalArgumentException("could not play '" + filename + "'", e); } catch (IOException e) { throw new IllegalArgumentException("could not play '" + filename + "'", e); } }
// Streams an mp3 from howjsay.com, decoding to 16-bit signed little-endian PCM
// and pumping it into a SourceDataLine.
// NOTE(review): incomplete excerpt — nBytesRead is never declared, the while
// body's closing brace is missing, and the try has no matching catch here.
AudioInputStream din = null;
try {
    AudioInputStream in = AudioSystem.getAudioInputStream(new URL("http://www.howjsay.com/mp3/"+ args[0] +".mp3"));
    AudioFormat baseFormat = in.getFormat();
    // Target format: signed PCM, 16 bits/sample, same rate and channel count,
    // frame size = channels * 2 bytes, little-endian.
    AudioFormat decodedFormat = new AudioFormat(
            AudioFormat.Encoding.PCM_SIGNED,
            baseFormat.getSampleRate(), 16, baseFormat.getChannels(),
            baseFormat.getChannels() * 2, baseFormat.getSampleRate(), false);
    din = AudioSystem.getAudioInputStream(decodedFormat, in);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, decodedFormat);
    SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
    if(line != null) {
        line.open(decodedFormat);
        byte[] data = new byte[4096];
        line.start();
        // Copy decoded bytes to the output line until EOF (read returns -1).
        while ((nBytesRead = din.read(data, 0, data.length)) != -1) {
            line.write(data, 0, nBytesRead);
        // NOTE(review): drain/stop/close presumably belong after the loop —
        // the loop's closing brace is missing in this excerpt.
        line.drain();
        line.stop();
        line.close();
        din.close();
/**
 * Builds an energy histogram for a mono audio stream.
 *
 * <p>Non-PCM-signed input is first converted; stereo (or wider) input is
 * rejected because the histogram is defined for a single channel only.
 *
 * @param ais    the audio stream to analyse (converted to PCM_SIGNED if needed)
 * @param width  display width in pixels
 * @param height display height in pixels
 * @throws IllegalArgumentException if the stream has more than one channel
 */
public EnergyHistogram(AudioInputStream ais, int width, int height) {
    super();
    AudioFormat.Encoding pcmSigned = AudioFormat.Encoding.PCM_SIGNED;
    if (!pcmSigned.equals(ais.getFormat().getEncoding())) {
        ais = AudioSystem.getAudioInputStream(pcmSigned, ais);
    }
    AudioFormat fmt = ais.getFormat();
    if (fmt.getChannels() > 1) {
        throw new IllegalArgumentException("Can only deal with mono audio signals");
    }
    DoubleDataSource samples = new AudioDoubleDataSource(ais);
    initialise(samples, (int) fmt.getSampleRate(), width, height);
}
// NOTE(review): disjointed excerpt stitched from several methods of an audio
// clip player — enclosing signatures, braces and most declarations are missing.
dataLine = AudioSystem.getSourceDataLine( clip.getFormat() );
// frames -> milliseconds via the line's sample rate
return (frames/(long)dataLine.getFormat().getSampleRate())*1000;
// NOTE(review): this divides milliseconds by the sample rate; a ms->frames
// conversion would normally be (milliseconds * rate / 1000) — verify intent.
return (int)(milliseconds/dataLine.getFormat().getSampleRate());
format = stream.getFormat();
// Convert to signed PCM only when necessary.
is1 = AudioSystem.getAudioInputStream( AudioFormat.Encoding.PCM_SIGNED, stream );
} else {
is1 = stream;
dataLine = AudioSystem.getSourceDataLine(afTemp);
dataLine.open();
// Rebuild an AudioInputStream over the cached byte[] and reopen it.
inputStream = new ByteArrayInputStream( audioData );
AudioInputStream ais1 = AudioSystem.getAudioInputStream(inputStream);
AudioInputStream ais2 = AudioSystem.getAudioInputStream(format, ais1);
open(ais2);
} catch( UnsupportedAudioFileException uafe ) {
return dataLine.isOpen();
dataLine.open();
newData = tempData;
// Attempts to compute the playable length of the audio at the given URL.
// NOTE(review): incomplete excerpt — neither the if block nor the method is
// closed here, so the code does not compile as-is.
public static double getLength(String path) throws Exception {
    AudioInputStream stream;
    stream = AudioSystem.getAudioInputStream(new URL(path));
    AudioFormat format = stream.getFormat();
    // Convert anything non-PCM to 16-bit signed big-endian PCM, doubling the
    // declared sample size and frame size.
    if (format.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) {
        format = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, format
                .getSampleRate(), format.getSampleSizeInBits() * 2, format
                .getChannels(), format.getFrameSize() * 2, format
                .getFrameRate(), true); // big endian
        stream = AudioSystem.getAudioInputStream(format, stream);
    // Request a Clip whose buffer matches the stream's byte length.
    DataLine.Info info = new DataLine.Info(Clip.class, stream.getFormat(), ((int) stream.getFrameLength() * format.getFrameSize()));
    Clip clip = (Clip) AudioSystem.getLine(info);
    // NOTE(review): the clip is never opened and is closed before
    // getBufferSize() is read — presumably this relies on the buffer size
    // requested via DataLine.Info rather than actual loaded data; verify.
    clip.close();
    return clip.getBufferSize() / (clip.getFormat().getFrameSize() * clip.getFormat()
            .getFrameRate());
private static void init() { try { // 44,100 samples per second, 16-bit audio, mono, signed PCM, little Endian AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false); DataLine.Info info = new DataLine.Info(SourceDataLine.class, format); line = (SourceDataLine) AudioSystem.getLine(info); line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE); // the internal buffer is a fraction of the actual buffer size, this choice is arbitrary // it gets divided because we can't expect the buffered data to line up exactly with when // the sound card decides to push out its samples. buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE/3]; } catch (LineUnavailableException e) { System.out.println(e.getMessage()); } // no sound gets made before this call line.start(); }
private void load(File file) { try { audioInputStream = AudioSystem.getAudioInputStream(file); audioFormat = audioInputStream.getFormat(); // mp3 decode if (audioFormat.getEncoding() != AudioFormat.Encoding.PCM_SIGNED) { audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, audioFormat.getSampleRate(), 16, audioFormat.getChannels(), audioFormat.getChannels() * 2, audioFormat.getSampleRate(), false); audioInputStream = AudioSystem.getAudioInputStream(audioFormat, audioInputStream); } //output DataLine.Info dataLineInfo = new DataLine.Info( SourceDataLine.class, audioFormat, AudioSystem.NOT_SPECIFIED); sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo); sourceDataLine.open(audioFormat); volume = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN); sourceDataLine.start(); } catch (Exception e) { log.error("Couldn't load file: " + file + ' ' + e); } }
// Plays a PCM stream: opens a SourceDataLine matching the stream's format and
// pumps BUFFER_SIZE-byte chunks into it until EOF.
// NOTE(review): incomplete excerpt — the while body is never closed and no
// drain()/close() is visible; is, line and BUFFER_SIZE are declared elsewhere.
AudioInputStream ais = AudioSystem.getAudioInputStream(is);
AudioFormat audioFormat = ais.getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(audioFormat);
line.start();
byte[] samples = new byte[BUFFER_SIZE];
int count = 0;
while ((count = ais.read(samples, 0, BUFFER_SIZE)) != -1) {
    line.write(samples, 0, count);
// Reads each wav on the command line, converts it to signed PCM, then opens a
// 44.1 kHz 16-bit mono microphone TargetDataLine.
// NOTE(review): incomplete excerpt — many closing braces are missing, and the
// code switches abruptly from file analysis to microphone capture.
public static void main(String[] args) throws Exception {
    if (args.length > 0) {
        for (int file = 0; file < args.length; file++) {
            AudioInputStream ais = AudioSystem.getAudioInputStream(new File(args[file]));
            if (!ais.getFormat().getEncoding().equals(AudioFormat.Encoding.PCM_SIGNED)) {
                ais = AudioSystem.getAudioInputStream(AudioFormat.Encoding.PCM_SIGNED, ais);
            if (ais.getFormat().getChannels() > 1) {
                throw new IllegalArgumentException("Can only deal with mono audio signals");
            int samplingRate = (int) ais.getFormat().getSampleRate();
            DoubleDataSource signal = new AudioDoubleDataSource(ais);
            // 10 ms analysis frame, expressed in samples.
            int framelength = (int) (0.01 /* seconds */* samplingRate);
            // Capture format: signed PCM, 44.1 kHz, 16-bit, mono, little-endian.
            AudioFormat audioFormat = new AudioFormat(AudioFormat.Encoding.PCM_SIGNED, 44100.0F, 16, 1, 2, 44100.0F, false);
            DataLine.Info info = new DataLine.Info(TargetDataLine.class, audioFormat);
            AudioInputStream input = null;
            try {
                TargetDataLine mic = (TargetDataLine) AudioSystem.getLine(info);
                mic.open(audioFormat);
                mic.start();
/**
 * Robotises each wav file given on the command line, writing the result next
 * to the input as {@code <name>_robotised.wav}.
 *
 * @param args one or more wav file paths (assumed to end in a 4-char suffix)
 * @throws Exception if a file cannot be read or written
 */
public static void main(String[] args) throws Exception {
    for (int fileIndex = 0; fileIndex < args.length; fileIndex++) {
        String inputName = args[fileIndex];
        AudioInputStream source = AudioSystem.getAudioInputStream(new File(inputName));
        int rate = (int) source.getFormat().getSampleRate();
        // Feed the samples through the Robotiser effect.
        AudioDoubleDataSource samples = new AudioDoubleDataSource(source);
        Robotiser effect = new Robotiser(samples, rate);
        DDSAudioInputStream robotised =
                new DDSAudioInputStream(new BufferedDoubleDataSource(effect), source.getFormat());
        String outputName = inputName.substring(0, inputName.length() - 4) + "_robotised.wav";
        AudioSystem.write(robotised, AudioFileFormat.Type.WAVE, new File(outputName));
    }
}
}
// Builds a tree/table model describing the sampled-audio system: supported
// file suffixes, each installed mixer, its source/target lines, formats and
// controls.
// NOTE(review): heavily elided excerpt — loops and declarations (soundNode,
// md, data, source, target, all, audioFormat, dataLine, controls, index, kk)
// are missing, so this does not compile as-is.
soundNode.add(soundSampledNode);
md = new MediaData("Suffixes", "Sound File Suffixes", AudioSystem.getAudioFileTypes());
soundSampledNode.add(new DefaultMutableTreeNode(md));
Mixer.Info[] mixerInfo = AudioSystem.getMixerInfo();
String[][] mixerData = new String[mixerInfo.length][4];
for (int ii=0; ii<mixerData.length; ii++) {
    Mixer mixer = AudioSystem.getMixer(mixerInfo[ii]);
    data = mergeArrays( "Source",
    try {
        // Fill "all" with source lines first, then target lines.
        for (int jj=0; jj<source.length; jj++) {
            all[jj] = AudioSystem.getLine(source[jj]);
            all[jj] = AudioSystem.getLine(target[jj-source.length]);
    // NOTE(review): new Integer/new Float are deprecated boxing constructors;
    // valueOf (or autoboxing) would be preferred in maintained code.
    data[0][1] = new Integer(audioFormat.getChannels());
    data[1][1] = audioFormat.getEncoding();
    data[2][1] = new Float(audioFormat.getFrameRate());
    data[6][1] = new Float(dataLine.getLevel());
    data[kk][1] = controls[index];
// Plays soundFile through a SourceDataLine.
// NOTE(review): incomplete excerpt — the enclosing method, declarations of
// audioStream/audioFormat/sourceLine/nBytesRead/abData, and several closing
// braces are missing.
audioStream = AudioSystem.getAudioInputStream(soundFile);
} catch (Exception e){
    e.printStackTrace();
audioFormat = audioStream.getFormat();
DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
try {
    sourceLine = (SourceDataLine) AudioSystem.getLine(info);
    sourceLine.open(audioFormat);
} catch (LineUnavailableException e) {
    e.printStackTrace();
sourceLine.start();
// Copy until read() signals EOF with -1.
while (nBytesRead != -1) {
    try {
        nBytesRead = audioStream.read(abData, 0, abData.length);
    } catch (IOException e) {
        e.printStackTrace();
    // NOTE(review): when read() returns -1 this still calls write(abData, 0, -1)
    // before the loop condition is re-checked — looks like a latent bug; verify.
    int nBytesWritten = sourceLine.write(abData, 0, nBytesRead);
sourceLine.drain();
sourceLine.close();
// Plays a received UDP packet's payload as 16-bit mono little-endian PCM.
// NOTE(review): incomplete excerpt — the head of the baiss constructor call
// and the try's matching catch are missing.
format = new AudioFormat(sampleRate, 16, 1, true, false);
    receivePacket.getData());
// NOTE(review): the line above is the truncated tail of a constructor call,
// presumably new ByteArrayInputStream(receivePacket.getData()) — verify.
ais = new AudioInputStream(baiss, format, receivePacket.getLength());
try {
    DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
    SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
    sourceDataLine.open(format);
    FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
    // NOTE(review): MASTER_GAIN is measured in decibels; 100.0f almost
    // certainly exceeds the control's maximum and setValue would throw — verify
    // the intended gain value.
    volumeControl.setValue(100.0f);
    sourceDataLine.start();
    // NOTE(review): duplicate open()/start() on an already-open line — open()
    // on an open SourceDataLine throws IllegalStateException; looks leftover.
    sourceDataLine.open(format);
    sourceDataLine.start();
    System.out.println("format? :" + sourceDataLine.getFormat());
// Plays clipFile through a Clip, optionally looping and/or blocking until done.
// NOTE(review): incomplete excerpt — the enclosing method, declarations of
// audioInputStream/m_clip/loop/waitUntilCompleted, and several braces are
// missing; one e.printStackTrace() appears duplicated.
audioInputStream = AudioSystem.getAudioInputStream(clipFile);
} catch (Exception e) {
    e.printStackTrace();
AudioFormat format = audioInputStream.getFormat();
DataLine.Info info = new DataLine.Info(Clip.class, format);
try {
    m_clip = (Clip) AudioSystem.getLine(info);
    m_clip.open(audioInputStream);
} catch (LineUnavailableException e) {
    e.printStackTrace();
    e.printStackTrace();
// loop is presumably a repeat count (or Clip.LOOP_CONTINUOUSLY) — verify.
m_clip.loop(loop);
if (waitUntilCompleted)
    m_clip.drain();
} else {
    System.out.println("playWavFile<init>(): can't get data from file " + clipFile.getName());
/**
 * Initialize the audio
 *
 * <p>Builds a signed little-endian PCM format from the header, verifies a
 * matching SourceDataLine exists, then opens and starts it.
 *
 * @param audioHeader the audio header
 * @throws RuntimeException if no matching line exists or it is unavailable
 */
private void initAudio(EAAudioHeader audioHeader) {
    AudioFormat fmt = new AudioFormat(audioHeader.getSampleRate(),
            audioHeader.getBitsPerSample(), audioHeader.getNumberOfChannels(), true, false);
    DataLine.Info info = new DataLine.Info(SourceDataLine.class, fmt);
    if (!AudioSystem.isLineSupported(info)) {
        throw new RuntimeException("Line matching " + info + " not supported.");
    }
    try {
        line = (SourceDataLine) AudioSystem.getLine(info);
        // Buffer four 1024-frame chunks worth of audio.
        int bufferBytes = 4 * 1024 * fmt.getFrameSize();
        line.open(fmt, bufferBytes);
        line.start();
    } catch (LineUnavailableException ex) {
        throw new RuntimeException(ex);
    }
}
}
/**
 * Plays a sound from a file.
 *
 * <p>The clip closes itself when playback stops. Failures (missing resource,
 * unsupported format, unavailable line) are logged, never thrown.
 *
 * @param filename Path to the sound file
 */
public static void playSound(String filename) {
    URL resource = ClassLoader.getSystemClassLoader().getResource(filename);
    // getResource() returns null for a missing resource; report it clearly
    // instead of letting getAudioInputStream(null) raise an opaque NPE below.
    if (resource == null) {
        LOGGER.error("Failed to play sound " + filename + ": resource not found");
        return;
    }
    try {
        final Clip clip = (Clip) AudioSystem.getLine(new Line.Info(Clip.class));
        // Release the line as soon as playback finishes.
        clip.addLineListener(event -> {
            if (event.getType() == LineEvent.Type.STOP) {
                clip.close();
            }
        });
        clip.open(AudioSystem.getAudioInputStream(resource));
        clip.start();
    } catch (Exception e) {
        LOGGER.error("Failed to play sound " + filename, e);
    }
}