// Fragment (cut mid-try/catch): probe the file's audio format, log it, then open it for reading.
// getAudioFileFormat throws UnsupportedAudioFileException for unrecognized containers,
// which the catch clause visible at the end of this fragment handles.
File file = new File(filename); logger.info (AudioSystem.getAudioFileFormat(file).toString()); is = AudioSystem.getAudioInputStream(file); } catch (UnsupportedAudioFileException uafe) {
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("LPCC-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("LPCC-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("Pitch-synchronous LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
// Write the interpolated audio to the output WAV file, then report elapsed time
// versus the duration of the processed audio.
AudioSystem.write(outputAudio, AudioFileFormat.Type.WAVE, new File(outFileName));
long endTime = System.currentTimeMillis();
// Duration in ms = frame count / (frames per second) * 1000.
int audioDuration = (int) (AudioSystem.getAudioFileFormat(new File(file1)).getFrameLength() / (double) samplingRate * 1000);
// Fixed typo in the log message: "interpolatin" -> "interpolation".
System.out.println("Pitch-synchronous LSF-based interpolation took " + (endTime - startTime) + " ms for " + audioDuration + " ms of audio");
/**
 * Extracts a time slice of {@code file} and writes it to {@code dstPath}
 * in the same container format as the source.
 *
 * @param file    source audio file
 * @param dstPath destination path for the extracted chunk
 * @param offset  start of the chunk, in milliseconds
 * @param length  length of the chunk, in milliseconds
 * @throws UnsupportedAudioFileException if the source format is not recognized
 * @throws IOException on read or write failure
 */
private static void dumpStreamChunk(File file, String dstPath, long offset, long length) throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file);
    AudioFormat audioFormat = fileFormat.getFormat();
    // Bytes of audio data per millisecond: frame size * frames-per-second / 1000.
    int bytesPerMs = Math.round(audioFormat.getFrameSize() * audioFormat.getFrameRate() / 1000);
    // try-with-resources guarantees both streams are closed even if write() throws
    // (the original leaked them on any exception).
    try (AudioInputStream inputStream = AudioSystem.getAudioInputStream(file)) {
        // InputStream.skip may skip fewer bytes than requested; loop until done.
        long toSkip = offset * bytesPerMs;
        while (toSkip > 0) {
            long skipped = inputStream.skip(toSkip);
            if (skipped <= 0) {
                break; // end of stream reached before the requested offset
            }
            toSkip -= skipped;
        }
        // NOTE(review): the AudioInputStream length argument is specified in sample
        // frames, not bytes; the original passed a byte count (length * bytesPerMs),
        // which is preserved here to avoid a behavior change — confirm intended unit.
        try (AudioInputStream chunkStream = new AudioInputStream(inputStream, audioFormat, length * bytesPerMs)) {
            AudioSystem.write(chunkStream, fileFormat.getType(), new File(dstPath));
        }
    }
} }
javax.sound.sampled.AudioSystem.getAudioFileFormat(AudioSystem.java:1004)
// Fragment: inspect the stream's container type; AIFF/AIFC inputs take the branch below.
AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(stream); Type type = fileFormat.getType(); if (type == Type.AIFC || type == Type.AIFF) {
// Fragment: read the WAV container's file format, then its per-frame audio format.
AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(srcWaveFile); AudioFormat format = fileFormat.getFormat();
/*
 * Loads the audio file at {@code path} into a Clip (m_clip) ready for playback,
 * keeping the underlying stream in m_stream.
 * NOTE(review): audioFileFormat is fetched but never used — possibly only to fail
 * fast on unsupported files; confirm before removing.
 * NOTE(review): the blanket catch(Exception) only prints the stack trace, so a
 * failed load leaves m_stream/m_clip unset — callers must tolerate that state.
 */
/** constructor */ public WaveFile(String path) { try { File file=new File(path); javax.sound.sampled.AudioFileFormat audioFileFormat=javax.sound.sampled.AudioSystem.getAudioFileFormat(file); m_stream=javax.sound.sampled.AudioSystem.getAudioInputStream(file); javax.sound.sampled.AudioFormat format=m_stream.getFormat(); javax.sound.sampled.DataLine.Info info=new javax.sound.sampled.DataLine.Info(javax.sound.sampled.Clip.class,format,((int)m_stream.getFrameLength()*format.getFrameSize())); m_clip=(javax.sound.sampled.Clip)javax.sound.sampled.AudioSystem.getLine(info); m_clip.open(m_stream); } catch(Exception e) { e.printStackTrace(); } }
/**
 * Extracts a time slice of {@code file} and writes it to {@code dstPath}
 * in the same container format as the source.
 *
 * @param file    source audio file
 * @param dstPath destination path for the extracted chunk
 * @param offset  start of the chunk, in milliseconds
 * @param length  length of the chunk, in milliseconds
 * @throws UnsupportedAudioFileException if the source format is not recognized
 * @throws IOException on read or write failure
 */
private static void dumpStreamChunk(File file, String dstPath, long offset, long length) throws UnsupportedAudioFileException, IOException {
    AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file);
    AudioFormat audioFormat = fileFormat.getFormat();
    // Bytes of audio data per millisecond: frame size * frames-per-second / 1000.
    int bytesPerMs = Math.round(audioFormat.getFrameSize() * audioFormat.getFrameRate() / 1000);
    // try-with-resources guarantees both streams are closed even if write() throws
    // (the original leaked them on any exception).
    try (AudioInputStream inputStream = AudioSystem.getAudioInputStream(file)) {
        // InputStream.skip may skip fewer bytes than requested; loop until done.
        long toSkip = offset * bytesPerMs;
        while (toSkip > 0) {
            long skipped = inputStream.skip(toSkip);
            if (skipped <= 0) {
                break; // end of stream reached before the requested offset
            }
            toSkip -= skipped;
        }
        // NOTE(review): the AudioInputStream length argument is specified in sample
        // frames, not bytes; the original passed a byte count (length * bytesPerMs),
        // which is preserved here to avoid a behavior change — confirm intended unit.
        try (AudioInputStream chunkStream = new AudioInputStream(inputStream, audioFormat, length * bytesPerMs)) {
            AudioSystem.write(chunkStream, fileFormat.getType(), new File(dstPath));
        }
    }
} }
/**
 * Test the fix for a wave header not having the size field because the audio
 * was streamed: rewrite the header, then verify the JDK can parse the result.
 *
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws UnsupportedAudioFileException the unsupported audio file exception
 */
@Test
public void testSynthesizeAndFixHeader() throws IOException, UnsupportedAudioFileException {
    File audio = new File("src/test/resources/text_to_speech/numbers.wav");
    InputStream stream = new FileInputStream(audio);
    assertNotNull(stream);
    stream = WaveUtils.reWriteWaveHeader(stream);
    File tempFile = File.createTempFile("output", ".wav");
    // Clean up the temp file on JVM exit (the original left it behind).
    tempFile.deleteOnExit();
    writeInputStreamToFile(stream, tempFile);
    // Release the stream once it has been fully written out.
    stream.close();
    // getAudioFileFormat throws UnsupportedAudioFileException if the header is still broken.
    assertNotNull(AudioSystem.getAudioFileFormat(tempFile));
}
/*
 * Reads the audio-file format metadata for {@code file} and exposes its
 * properties map as a read-only view.
 * NOTE(review): the UnsupportedAudioFileException branch drops the caught
 * exception as the cause of the new SoundException — consider chaining it if a
 * (String, Throwable) constructor exists, so the original stack trace survives.
 */
public SoundInfo(@Nonnull File file) throws SoundException { this.file = file; try { this.infoFormat = AudioSystem.getAudioFileFormat(getFile().handle()); this.properties = new XBasicView<>(new XLinkedMap<>(infoFormat.properties())); } catch (IOException exception) { throw new SoundException(exception); } catch (UnsupportedAudioFileException exception) { throw new SoundException("Unsupported audio file: " + file.getFullName() + " [" + file.getFormat() + ']'); } }
// Fragment: probe the source file's container and frame format, then open it for reading.
try { File file = new File(sourceFileName); AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file); AudioFormat format = fileFormat.getFormat(); inputStream = AudioSystem.getAudioInputStream(file);
// Fragment: probe the source file's container and frame format, then open it for reading.
try { File file = new File(sourceFileName); AudioFileFormat fileFormat = AudioSystem.getAudioFileFormat(file); AudioFormat format = fileFormat.getFormat(); inputStream = AudioSystem.getAudioInputStream(file);
/**
 * Derives a StreamInfo from an input stream: directly from an AudioInputStream's
 * own format, otherwise by probing the raw stream with the JDK sound API.
 *
 * @param is the input stream to inspect
 * @return the stream's format description
 * @throws SoundTransformException if the stream's format cannot be determined
 */
@Override public StreamInfo getStreamInfo (final InputStream is) throws SoundTransformException {
    // An AudioInputStream already knows its format; no probing needed.
    if (is instanceof AudioInputStream) {
        final AudioInputStream ais = (AudioInputStream) is;
        return this.fromAudioFormat (ais.getFormat (), ais.getFrameLength ());
    }
    try {
        final AudioFileFormat aff = AudioSystem.getAudioFileFormat (is);
        return this.fromAudioFormat (aff.getFormat (), aff.getFrameLength ());
    } catch (final UnsupportedAudioFileException | IOException e) {
        // Multi-catch replaces the two duplicated catch blocks; behavior is identical.
        throw new SoundTransformException (AudioFormatParserErrorCode.WRONG_TYPE, e, is);
    }
} }
/*
 * Populates the track's metadata (title from the file name, sample rate, total
 * samples, channels, codec, bitrate) using the JDK sound API.
 * NOTE(review): the bitrate formula divides by 100 — deriving kbit/s from
 * bytes/s would normally divide by 125 (i.e. *8/1000); confirm the intended unit.
 * NOTE(review): the integer division byteLength/frameLength truncates before the
 * multiply; verify this ordering is intentional.
 * NOTE(review): catch(Exception) swallows all failures with a System.out message
 * and returns the track partially populated.
 */
public Track readSingle(Track track) { TrackData trackData = track.getTrackData(); File file = trackData.getFile(); String title = Util.removeExt(file.getName()); trackData.setTagFieldValues(FieldKey.TITLE, title); try { AudioFileFormat format = AudioSystem.getAudioFileFormat(file); trackData.setStartPosition(0); AudioFormat audioFormat = format.getFormat(); trackData.setSampleRate((int) audioFormat.getSampleRate()); trackData.setTotalSamples(format.getFrameLength()); trackData.setChannels(audioFormat.getChannels()); trackData.setCodec(Util.getFileExt(file).toUpperCase()); if (format.getFrameLength() > 0) trackData.setBitrate((int) (format.getByteLength() / format.getFrameLength() * audioFormat.getSampleRate() / 100)); } catch (Exception e) { System.out.println("Couldn't read file: " + trackData.getFile()); } return track; }
/**
 * Test the fix for a wave header not having the size field because the audio
 * was streamed from the synthesis service: rewrite the header, then verify the
 * JDK can parse the result.
 *
 * @throws IOException Signals that an I/O exception has occurred.
 * @throws UnsupportedAudioFileException the unsupported audio file exception
 */
@Test
public void testSynthesizeAndFixHeader() throws IOException, UnsupportedAudioFileException {
    String text = "one two three four five";
    SynthesizeOptions synthesizeOptions = new SynthesizeOptions.Builder()
        .text(text)
        .voice(SynthesizeOptions.Voice.EN_US_LISAVOICE)
        .accept(SynthesizeOptions.Accept.AUDIO_WAV)
        .build();
    InputStream result = service.synthesize(synthesizeOptions).execute();
    assertNotNull(result);
    result = WaveUtils.reWriteWaveHeader(result);
    File tempFile = File.createTempFile("output", ".wav");
    // Clean up the temp file on JVM exit (the original left it behind).
    tempFile.deleteOnExit();
    writeInputStreamToFile(result, tempFile);
    // Release the service response stream once it has been fully written out.
    result.close();
    // getAudioFileFormat throws UnsupportedAudioFileException if the header is still broken.
    assertNotNull(AudioSystem.getAudioFileFormat(tempFile));
}