Reading and writing .wav files in Java

Question

I'm interested in learning audio file manipulation in Java, so after researching some background on the .wav file format, I wrote a simple program that reads a .wav file into a byte array and then writes that array out to a new .wav file. However, the output file is not a copy of the input file. For example, when I open both .wav files in a text editor (Notepad++), the original file starts with "RIFF", but the copy starts with a series of null characters. Can someone tell me what I'm doing wrong?

Here is my code:

package funwithwavs2;

import javax.sound.sampled.*;
import java.io.*;

public class FunWithWavs2 {

    public static void main(String[] args) {
        int x;
        byte[] wavBytes = new byte[100000000];

        try {
            AudioInputStream ais = AudioSystem.getAudioInputStream(new File("Centerfold.wav"));

            while ((x = ais.read(wavBytes)) > 0) {
                System.out.println("X: " + x);
            }

            writeToFile(wavBytes);
        }
        catch (Exception ex) {
            System.err.println("Error reading WAV file.");
            ex.printStackTrace();
        }
    }

    public static void writeToFile(byte[] b) {
        File file = new File("centerfold3.wav");
        try {
            if (!file.exists()) {
                file.createNewFile();
            }

            FileWriter fw = new FileWriter(file.getAbsoluteFile());
            BufferedWriter bw = new BufferedWriter(fw);
            for (int i = 0; i < b.length; i++) {
                bw.write(b[i]);
            }
            bw.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

}
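
(For reference, a minimal sketch of the same copy done through the audio API itself; AudioSystem.write recreates the WAV header when it writes the stream back out. The class name CopyWav is hypothetical, and the file names are the ones used above:)

import javax.sound.sampled.*;
import java.io.File;

public class CopyWav {
    public static void main(String[] args) throws Exception {
        // getAudioInputStream returns the decoded audio data, not the raw file bytes,
        // so the RIFF header has to be recreated on output; AudioSystem.write does that.
        AudioInputStream ais = AudioSystem.getAudioInputStream(new File("Centerfold.wav"));
        AudioSystem.write(ais, AudioFileFormat.Type.WAVE, new File("centerfold3.wav"));
        ais.close();
    }
}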

Thanks!

Edit to reflect the comments below: I switched the input declaration to the snippet below, but it gives me a mark/reset not supported exception.

InputStream input = new FileInputStream("Centerfold.wav");
AudioInputStream ais = AudioSystem.getAudioInputStream(input);
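
(Side note: AudioSystem.getAudioInputStream(InputStream) needs a stream that supports mark/reset, which a plain FileInputStream does not; wrapping it in a BufferedInputStream is the usual fix, e.g.:)

InputStream input = new BufferedInputStream(new FileInputStream("Centerfold.wav"));
AudioInputStream ais = AudioSystem.getAudioInputStream(input);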

The crux of my problem is not writing the output .wav file. It is that the bytes I read from the input file are not the bytes I expect.

while ((x2 = ais.read(wavBytes)) > 0) {
    System.out.println("X2: " + x2);
}

for (int i = 0; i < 36; i++) {
    System.out.println(i + ": " + wavBytes[i]);
}

Looking at this code, I would expect the first four values printed to be the ASCII codes for R I F F, per the .wav file format. Instead I get a series of 0s, the ASCII value of NUL. Why the discrepancy? Am I fundamentally misunderstanding the .wav file format, or what the javax.sound.sampled classes do?
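
(For comparison, a minimal sketch that reads the first bytes of the file directly, bypassing the audio API, does print RIFF; the file name is the one used above:)

try (FileInputStream raw = new FileInputStream("Centerfold.wav")) {
    byte[] header = new byte[4];
    int n = raw.read(header);                     // the first four bytes of the file on disk
    System.out.println(new String(header, 0, n)); // prints "RIFF" for a valid wav
}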

1 Answer

I found this necessary when I tried to stream long audiobooks from disk. Their length exceeded the int range, so Java audio could not handle them at the time. So I made my own wav-reader version for 44100 Hz 16-bit wavs. It takes possible multiple chunks into account. Here is the initialization part.

import javax.sound.sampled.AudioFormat;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.file.DirectoryStream;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.LinkedList;
import java.util.Locale;

/**
 * The purpose of this class is to encapsulate the information of a wav file and to initialize
 * a pointer at the first audio byte, to be read by another class.
 * This is preparation for, for example, streaming an 8-hour audio book via nio.
 * With very typical single-chunk wavs the first audio byte is at offset 44.
 */
public class WaveInitializer {
    //keep all fields public while developing from scratch
    public FileInputStream fis;
    public File thisFile;
    public String longFileName = "";
    public String shortFileName = "";
    public long fileLen;
    //*****wav fields
    public String chunkID;
    public long mainChunkSize;
    public String format;
    public LinkedList<String> chunkNames = new LinkedList<>();
    public short waveSubFormat;
    public String waveSubFormatString=null;
    public short numChannels;
    public int frameRate;
    public int sampleRate;
    public int byteRate;
    public short blockAlign=0;//div by zero chance
    public short bitsPerChannel;
    public short bitDepth;
    public short bitNess;
    public short bitsPerSample;
    public int bytesPerSample;
    public int bitrate;
    public long dataChunkSize;
    public long numFrames;
    public long firstDataByteIndex;
    //********************
    //for convenience
    public int minutes;
    public int seconds;
    public int milliseconds;
    public double totalTime;
//*************************************
    public WaveInitializer(String fileName) throws Exception {
        getWaveProperties( fileName);
    }
    //*************************************
    public void  getWaveProperties(String fileName)throws Exception {
        longFileName = fileName;
        thisFile = new File(fileName);
        shortFileName = thisFile.getName();
        fileLen = thisFile.length();

        int bytesRead=0;//running pointer

        /**
         * Operation is as follows:
         * 1. FileInputStream fis reads the header bytes into a small byte[] array: properties_bytes.
         * 2. The byte array is handled through its ByteBuffer wrapper: bb_properties.
         * 3. A java.nio channel containing the samples is created and positioned right at the first data byte.
         * 4. Samples are converted to normalized values, again using a ByteBuffer and a large byte array.
         */

        fis = new FileInputStream(fileName);
        //the byte array only needs to be comfortably large; a simple wav header is typically 44 bytes
        byte[] properties_bytes = new byte[1000];
        //ByteBuffer adds convenience methods for handling the byte array;
        //most importantly it can handle the byte order of the target array.
        ByteBuffer bb_properties = ByteBuffer.wrap(properties_bytes);

        //a Java ByteBuffer defaults to big endian,
        //but wav stores its numbers in little endian
        bb_properties.order(ByteOrder.LITTLE_ENDIAN);

        //first read only 12 bytes  because later chunks' properties can differ greatly
        bytesRead += fis.read(properties_bytes,bytesRead,12);
        //bytesRead tracks the head in fis

        byte[]chunkID_bytes = new byte[4];
        //ByteBuffer fills given byte array and advances internal position
        //when read as byte array, endianness has no meaning
        bb_properties.get(chunkID_bytes);
        chunkID = new String(chunkID_bytes);
        if(!chunkID.equals("RIFF")){
            throw new Exception("not RIFF file!");
        }
        //next 4 bytes mean mainChunkSize
        byte[]bytes = new byte[4];
        bb_properties.get(bytes);
        /* CANNOT use getInt because extra-long files have a length larger
        than a signed int can hold
         */
        mainChunkSize  = getUInt32(bytes);

        /*
        When read as a 4-byte int, endianness matters and must be little endian:
            chunkSize = bb_properties.getInt();
        but getInt does not work on 8-hour audio books,
        so we need to read an unsigned int into a long. */
        long total_len = mainChunkSize + 8;
        if (fileLen != total_len) {
            // Check that the file size matches the number of bytes implied by mainChunkSize
            throw new Exception(
                    "field mainChunkSize + 8 (" + total_len + ") does not match the file length: " + fileLen);
        }

        byte[]format_bytes= new byte[4];
        bb_properties.get(format_bytes);
        format = new String(format_bytes);
        if(!format.equals("WAVE"))
            throw new Exception("not WAVE file!");

        int chunkSize =0;
        while(true) {

            bytesRead += fis.read(properties_bytes,bytesRead,8);

            byte[]subChunkID_bytes = new byte[4];
            bb_properties.get(subChunkID_bytes);
            String subChunkID = (new String(subChunkID_bytes));

            chunkNames.add(subChunkID);
            if (subChunkID.equals("fmt ")) {

                chunkSize =  bb_properties.getInt();
                if (chunkSize != 16) {
                    throw new Exception("unsupported fmt chunk size: " + chunkSize);
                }

                bytesRead += fis.read(properties_bytes, bytesRead, 16);

                waveSubFormat = bb_properties.getShort();

                switch (waveSubFormat) {
                    case 1:
                        waveSubFormatString = "WAVE_FORMAT_PCM";
                        // AudioFormat.Encoding java_af_enc = AudioFormat.Encoding.PCM_FLOAT;
                        AudioFormat.Encoding java_af_enc = AudioFormat.Encoding.PCM_SIGNED;
                        break;
                    case 3:
                        waveSubFormatString = "IEEE float";
                        break;
                    case 6:
                        waveSubFormatString = "8-bit ITU-T G.711 A-law";
                        break;
                    case 7:
                        waveSubFormatString = "8-bit ITU-T G.711 µ-law";
                        break;
                    case (short) 0xFFFE:
                        waveSubFormatString = "Determined by SubFormat";
                        break;
                }
                if (waveSubFormat != 1)
                    throw new Exception("not PCM format!:" + waveSubFormatString);

                numChannels = bb_properties.getShort();

                /**Because multichannel audio has two or more samples at the same instant,
                the field named "SampleRate" is really the frame rate,
                so let's store it as such:
                 */
                frameRate = bb_properties.getInt();
                sampleRate = frameRate;

                /**byteRate counts all channels combined:
                in 16-bit stereo each frame takes four bytes,
                i.e. two groups of 16 bits;
                16-bit mono at 44100 Hz gives a byte rate of 88200.
                 */
                byteRate = bb_properties.getInt();

                /**blockAlign is the size of one frame in bytes:
                how many bytes all the channels take combined
                at a single sampling instant.
                16-bit mono:   2 bytes per sampling instant
                16-bit stereo: 4 bytes
                24-bit mono:   3 bytes
                24-bit stereo: 6 bytes
                 */
                blockAlign = bb_properties.getShort();

                /**"Bits per channel" is the same thing as
                bitsPerSample, i.e. bitDepth, i.e. bitNess,
                and it is identical for mono and stereo:
                left and right have the same number of bits.
                 */
                bitsPerChannel = bb_properties.getShort();
                bitDepth = bitNess = bitsPerSample = bitsPerChannel;

                /**Calculate the number of bytes required to hold one individual sample;
                it is identical for stereo and mono.*/
                bytesPerSample = blockAlign / numChannels;
                bitrate = numChannels * (8 * bytesPerSample) * frameRate;

                if (bytesPerSample * numChannels != blockAlign)
                    throw new Exception("blockAlign does not agree with bytesPerSample * numChannels");

            }
            else if (subChunkID.equals("data"))
            {
                byte[]datasizebytes = new byte[4];
                bb_properties.get(datasizebytes);

                dataChunkSize  = getUInt32(datasizebytes);
                //dataChunkSize =  bb_properties.getInt();
                if(dataChunkSize%2 != 0)
                    throw new Exception("data align problem with data chunkSize");
                numFrames = dataChunkSize / blockAlign;
                break;
            }
            else //other chunks (e.g. LIST) are skipped here; this assumes each fits in the 1000-byte properties buffer
            {

                chunkSize =  bb_properties.getInt();
                bytesRead += fis.read(properties_bytes, bytesRead, chunkSize);
                bb_properties.position(bb_properties.position()+chunkSize);
                //throw new Exception("not supported chunk");
                continue;
            }

        }
        calculateFileDuration();//fill convenience fields
        firstDataByteIndex=bytesRead;


    }
    //*******************************
    /**Assemble an unsigned 32-bit value from a 4-byte array that is
    already in little-endian order (bytes[0] is the least significant byte).*/

    public long getUInt32(byte[]bytes) throws EOFException, IOException {
        //
        long value =
                ((bytes[0] & 0xFF) <<  0) |
                        ((bytes[1] & 0xFF) <<  8) |
                        ((bytes[2] & 0xFF) << 16) |
                        ((long) (bytes[3] & 0xFF) << 24);
        return value;
    }
//**********************************************************
    public static void main(String[] args) {
        String defaultWavDir = "F:/wavs";
        traverseWavNames(defaultWavDir);
    }
//*****************************************
    public static void traverseWavNames(String directory) {
        try {
            Path path = FileSystems.getDefault().getPath(directory);
            // try-with-resources closes the directory stream when done
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(path, "*.wav")) {
                for (Path entry : stream) {
                    String filePath = entry.toString();
                    WaveInitializer wi = new WaveInitializer(filePath);
                    System.out.println(wi);
                }
            }
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
//*********************
    public String toString(){

        return this.shortFileName +":"+this.sampleRate+"Hz:"+ bitDepth+"_bits"+getFileDurationString();

    }
    //***************************************
    public String getFileDurationString() {
        String ret = "";
        if (frameRate > 0) {
            int decimals = 2;
            String buildFormatString = " %.0" + decimals + "f";
            String disp = String.format(Locale.ROOT, buildFormatString, totalTime);
            ret += ":" + disp + " secs: " + "\n";
            ret += "dur:" + minutes + "." + seconds + "." + milliseconds + "\n";
        }
        return ret;
    }
//****************************************
    public void calculateFileDuration(){
        if(frameRate >0) {
            totalTime = (double) this.numFrames / (double) frameRate;
            minutes = (int) (totalTime / 60);
            seconds = (int) (totalTime % 60);
            milliseconds = (int) ((totalTime - Math.floor(totalTime)) * 1000);
        }
    }
}
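
A minimal usage sketch (the wav path is hypothetical); after construction, firstDataByteIndex can be used to position an nio channel at the start of the audio data, which is what the class is built for:

import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

public class WaveInitializerDemo {
    public static void main(String[] args) throws Exception {
        // hypothetical path; any 16-bit PCM wav should work
        WaveInitializer wi = new WaveInitializer("F:/wavs/book.wav");
        System.out.println(wi); // file name, sample rate, bit depth, duration

        // position an nio channel at the first audio byte for streaming
        try (RandomAccessFile raf = new RandomAccessFile(wi.longFileName, "r")) {
            FileChannel channel = raf.getChannel();
            channel.position(wi.firstDataByteIndex);
            // ...read frames of wi.blockAlign bytes from here
        }
    }
}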