This article walks through how to implement a recording waveform (sound-wave) view on Android. The idea is straightforward: a custom SurfaceView renders the most recent audio buffers as a fading waveform, a small WAVHeader class writes a standard RIFF/WAVE header so the raw PCM data can be saved as a playable .wav file, and a test Activity wires up AudioRecord, AudioTrack and MediaPlayer for recording, monitoring and playback.
First, the waveform view class (WaveformView), a SurfaceView that draws incoming audio buffers:
package com.akm.test;

/**
 * Created by toge on 15/12/9.
 */

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.SurfaceView;

import java.util.LinkedList;

/**
 * A view that displays audio data on the screen as a waveform.
 */
public class WaveformView extends SurfaceView {

    // The number of buffer frames to keep around (for a nice fade-out visualization).
    private static final int HISTORY_SIZE = 6;

    // To make quieter sounds still show up well on the display, we use +/- 8192 as the amplitude
    // that reaches the top/bottom of the view instead of +/- 32767. Any samples that have
    // magnitude higher than this limit will simply be clipped during drawing.
    private static final float MAX_AMPLITUDE_TO_DRAW = 8192.0f;

    // The queue that will hold historical audio data.
    private final LinkedList<short[]> mAudioData;

    private final Paint mPaint;

    public WaveformView(Context context) {
        this(context, null, 0);
    }

    public WaveformView(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public WaveformView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);

        mAudioData = new LinkedList<short[]>();

        mPaint = new Paint();
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setColor(Color.WHITE);
        mPaint.setStrokeWidth(0);
        mPaint.setAntiAlias(true);
    }

    /**
     * Updates the waveform view with a new "frame" of samples and renders it. The new frame gets
     * added to the front of the rendering queue, pushing the previous frames back, causing them to
     * be faded out visually.
     *
     * @param buffer the most recent buffer of audio samples
     */
    public synchronized void updateAudioData(short[] buffer) {
        short[] newBuffer;

        // We want to keep a small amount of history in the view to provide a nice fading effect.
        // We use a linked list that we treat as a queue for this.
        if (mAudioData.size() == HISTORY_SIZE) {
            newBuffer = mAudioData.removeFirst();
            System.arraycopy(buffer, 0, newBuffer, 0, buffer.length);
        } else {
            newBuffer = buffer.clone();
        }

        mAudioData.addLast(newBuffer);

        // Update the display.
        Canvas canvas = getHolder().lockCanvas();
        if (canvas != null) {
            drawWaveform(canvas);
            getHolder().unlockCanvasAndPost(canvas);
        }
    }

    /**
     * Repaints the view's surface.
     *
     * @param canvas the {@link Canvas} object on which to draw
     */
    private void drawWaveform(Canvas canvas) {
        // Clear the screen each time because SurfaceView won't do this for us.
        canvas.drawColor(Color.BLACK);

        float width = getWidth();
        float height = getHeight();
        float centerY = height / 2;

        // We draw the history from oldest to newest so that the older audio data is further back
        // and darker than the most recent data.
        int colorDelta = 255 / (HISTORY_SIZE + 1);
        int brightness = colorDelta;

        for (short[] buffer : mAudioData) {
            mPaint.setColor(Color.argb(brightness, 128, 255, 192));

            float lastX = -1;
            float lastY = -1;

            // For efficiency, we don't draw all of the samples in the buffer, but only the ones
            // that align with pixel boundaries.
            for (int x = 0; x < width; x++) {
                int index = (int) ((x / width) * buffer.length);
                short sample = buffer[index];
                float y = (sample / MAX_AMPLITUDE_TO_DRAW) * centerY + centerY;

                if (lastX != -1) {
                    canvas.drawLine(lastX, lastY, x, y, mPaint);
                }

                lastX = x;
                lastY = y;
            }

            brightness += colorDelta;
        }
    }
}
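Before diving into the full test Activity below, here is a minimal sketch of how a view like this is typically fed: a background thread reads 16-bit PCM buffers from AudioRecord and hands each one to updateAudioData(). The field names (waveformView, keepRecording), the 44.1 kHz sample rate and the mono channel configuration are assumptions for illustration only, using the same android.media imports as the Test class further down; the article's own wiring, with file output and playback on top, follows later.

// Minimal feeding loop (illustrative sketch). Assumes an Activity with a WaveformView field
// named waveformView and a volatile boolean field keepRecording that stops the loop.
private void startWaveformCapture() {
    final int sampleRate = 44100;
    final int minBuf = AudioRecord.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    final AudioRecord recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
            sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBuf);

    recorder.startRecording();
    new Thread(new Runnable() {
        @Override
        public void run() {
            short[] buffer = new short[minBuf / 2];   // 2 bytes per 16-bit sample
            while (keepRecording) {
                int read = recorder.read(buffer, 0, buffer.length);
                if (read > 0) {
                    // Each call renders one "frame" and pushes older frames toward the fade-out.
                    waveformView.updateAudioData(buffer);
                }
            }
            recorder.stop();
            recorder.release();
        }
    }).start();
}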
Next, the WAV header class (WAVHeader), which builds the RIFF/WAVE header that turns raw PCM data into a playable .wav file:
/*
 * Copyright (C) 2015 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.akm.test;

import java.io.FileOutputStream;
import java.io.IOException;

/**
 * Created by Akm at 15/12/1, 10:07 AM
 * Builds the WAV (RIFF/WAVE) header for raw PCM data.
 */
public class WAVHeader {

    private byte[] mHeader;           // the complete header.
    private int mSampleRate;          // sampling frequency in Hz (e.g. 44100).
    private int mChannels;            // number of channels.
    private int mNumSamples;          // total number of samples per channel.
    private int mNumBytesPerSample;   // number of bytes per sample, all channels included.

    public WAVHeader(int sampleRate, int numChannels, int numSamples) {
        mSampleRate = sampleRate;
        mChannels = numChannels;
        mNumSamples = numSamples;
        mNumBytesPerSample = 2 * mChannels;  // assuming 2 bytes per sample (for 1 channel)
        mHeader = null;
        setHeader();
    }

    public byte[] getWAVHeader() {
        return mHeader;
    }

    public static byte[] getWAVHeader(int sampleRate, int numChannels, int numSamples) {
        return new WAVHeader(sampleRate, numChannels, numSamples).mHeader;
    }

    public String toString() {
        String str = "";
        if (mHeader == null) {
            return str;
        }
        int num_32bits_per_lines = 8;
        int count = 0;
        for (byte b : mHeader) {
            boolean break_line = count > 0 && count % (num_32bits_per_lines * 4) == 0;
            boolean insert_space = count > 0 && count % 4 == 0 && !break_line;
            if (break_line) {
                str += '\n';
            }
            if (insert_space) {
                str += ' ';
            }
            str += String.format("%02X", b);
            count++;
        }
        return str;
    }

    private void setHeader() {
        // A canonical PCM WAV header is exactly 44 bytes:
        // RIFF chunk (12) + 'fmt ' chunk (24) + 'data' chunk header (8).
        byte[] header = new byte[44];
        int offset = 0;
        int size;

        // set the RIFF chunk
        System.arraycopy(new byte[]{'R', 'I', 'F', 'F'}, 0, header, offset, 4);
        offset += 4;
        size = 36 + mNumSamples * mNumBytesPerSample;
        header[offset++] = (byte) (size & 0xFF);
        header[offset++] = (byte) ((size >> 8) & 0xFF);
        header[offset++] = (byte) ((size >> 16) & 0xFF);
        header[offset++] = (byte) ((size >> 24) & 0xFF);
        System.arraycopy(new byte[]{'W', 'A', 'V', 'E'}, 0, header, offset, 4);
        offset += 4;

        // set the fmt chunk
        System.arraycopy(new byte[]{'f', 'm', 't', ' '}, 0, header, offset, 4);
        offset += 4;
        System.arraycopy(new byte[]{0x10, 0, 0, 0}, 0, header, offset, 4);  // chunk size = 16
        offset += 4;
        System.arraycopy(new byte[]{1, 0}, 0, header, offset, 2);  // format = 1 for PCM
        offset += 2;
        header[offset++] = (byte) (mChannels & 0xFF);
        header[offset++] = (byte) ((mChannels >> 8) & 0xFF);
        header[offset++] = (byte) (mSampleRate & 0xFF);
        header[offset++] = (byte) ((mSampleRate >> 8) & 0xFF);
        header[offset++] = (byte) ((mSampleRate >> 16) & 0xFF);
        header[offset++] = (byte) ((mSampleRate >> 24) & 0xFF);
        int byteRate = mSampleRate * mNumBytesPerSample;
        header[offset++] = (byte) (byteRate & 0xFF);
        header[offset++] = (byte) ((byteRate >> 8) & 0xFF);
        header[offset++] = (byte) ((byteRate >> 16) & 0xFF);
        header[offset++] = (byte) ((byteRate >> 24) & 0xFF);
        header[offset++] = (byte) (mNumBytesPerSample & 0xFF);
        header[offset++] = (byte) ((mNumBytesPerSample >> 8) & 0xFF);
        System.arraycopy(new byte[]{0x10, 0}, 0, header, offset, 2);  // bits per sample = 16
        offset += 2;

        // set the beginning of the data chunk
        System.arraycopy(new byte[]{'d', 'a', 't', 'a'}, 0, header, offset, 4);
        offset += 4;
        size = mNumSamples * mNumBytesPerSample;
        header[offset++] = (byte) (size & 0xFF);
        header[offset++] = (byte) ((size >> 8) & 0xFF);
        header[offset++] = (byte) ((size >> 16) & 0xFF);
        header[offset++] = (byte) ((size >> 24) & 0xFF);

        mHeader = header;
    }

    public static byte[] getHeader(
            long totalAudioLen, long totalDataLen, long longSampleRate,
            int channels, long byteRate) throws IOException {
        byte[] header = new byte[44];
        header[0] = 'R';  // RIFF/WAVE header
        header[1] = 'I';
        header[2] = 'F';
        header[3] = 'F';
        header[4] = (byte) (totalDataLen & 0xff);
        header[5] = (byte) ((totalDataLen >> 8) & 0xff);
        header[6] = (byte) ((totalDataLen >> 16) & 0xff);
        header[7] = (byte) ((totalDataLen >> 24) & 0xff);
        header[8] = 'W';
        header[9] = 'A';
        header[10] = 'V';
        header[11] = 'E';
        header[12] = 'f';  // 'fmt ' chunk
        header[13] = 'm';
        header[14] = 't';
        header[15] = ' ';
        header[16] = 16;   // 4 bytes: size of 'fmt ' chunk
        header[17] = 0;
        header[18] = 0;
        header[19] = 0;
        header[20] = 1;    // format = 1 (PCM)
        header[21] = 0;
        header[22] = (byte) channels;
        header[23] = 0;
        header[24] = (byte) (longSampleRate & 0xff);
        header[25] = (byte) ((longSampleRate >> 8) & 0xff);
        header[26] = (byte) ((longSampleRate >> 16) & 0xff);
        header[27] = (byte) ((longSampleRate >> 24) & 0xff);
        header[28] = (byte) (byteRate & 0xff);
        header[29] = (byte) ((byteRate >> 8) & 0xff);
        header[30] = (byte) ((byteRate >> 16) & 0xff);
        header[31] = (byte) ((byteRate >> 24) & 0xff);
        header[32] = (byte) (2 * 16 / 8);  // block align
        header[33] = 0;
        header[34] = 16;   // bits per sample
        header[35] = 0;
        header[36] = 'd';
        header[37] = 'a';
        header[38] = 't';
        header[39] = 'a';
        header[40] = (byte) (totalAudioLen & 0xff);
        header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
        header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
        header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
        return header;
    }
}
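As a quick, self-contained illustration of how the static getHeader(...) above can be used, the sketch below writes a mono 16-bit PCM byte array out as a playable .wav file. The helper name writePcmAsWav and its parameters are assumptions for this example (the article's Test Activity instead maps the header into a pre-sized RandomAccessFile in createWaveFile); it needs only java.io.File, java.io.FileOutputStream and java.io.IOException.

// Illustrative helper (hypothetical name): wrap raw mono 16-bit PCM bytes in a WAV container.
public static void writePcmAsWav(byte[] pcm, File outFile, int sampleRate) throws IOException {
    int channels = 1;                                   // mono
    long byteRate = sampleRate * channels * 16 / 8;     // bytes per second
    long totalAudioLen = pcm.length;                    // size of the 'data' chunk
    long totalDataLen = totalAudioLen + 36;             // RIFF chunk size = file size - 8

    FileOutputStream out = new FileOutputStream(outFile);
    try {
        out.write(WAVHeader.getHeader(totalAudioLen, totalDataLen, sampleRate, channels, byteRate));
        out.write(pcm);
    } finally {
        out.close();
    }
}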
Finally, the test Activity, which ties together recording, the waveform display, and playback:
package com.akm.test;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaPlayer;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.os.Message;
import android.os.SystemClock;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.Chronometer;
import android.widget.SeekBar;
import android.widget.TextView;

import com.ringdroid.R;

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
import java.nio.channels.FileChannel;

/**
 * Created by toge on 15/11/30.
 */
public class Test extends Activity implements View.OnClickListener {

    WaveformView waveformView;
    private Button btnStart;
    private Button btnStop;
    private String filePath;
    private boolean mRecordingKeepGoing;
    // private SoundFile soundFile;   // SoundFile comes from the Ringdroid project and is not used here
    private RandomAccessFile randomAccessFile;
    private int totalLength;          // total length in bytes
    private int duration;             // duration in ms
    private int rate;                 // sample rate
    private int channelConfig;        // channel configuration
    private int samples;
    private int startPos;
    private int bufferSize;           // buffer size
    private int minBufferSize;        // minimum buffer size
    private AudioRecord audioRecord;
    private AudioTrack audioTrack;
    private boolean mThreadFlag;
    private int i;
    private int j;
    private int STATUS = 1;
    private int STATUS_PAUSE = 2;
    private int STATUS_PREPARED = 1;
    private int STATUS_RECORDING = 1;
    private Thread audioTrackThread;
    private Thread thread;
    private int endPos;
    private int curFileLength;
    OnFileChangedListener onFileChangedListener;
    private boolean isRewrite;
    private boolean audioTrackFlag;
    private int frequency = 22050;
    private int recBufSize;
    private String outPath;
    private byte[] bytes;
    private int time;
    private Button btnPasue;
    private Button btnPlay;
    private Button btnPlay2;
    private long startTime;
    private long restOfTime;
    private int audioFormat;          // PCM encoding (capture format)
    private int bufferSizeInBytes;    // buffer size
    private Button btnSave;
    // private ByteBuffer mDecodedBytes;
    // private ByteBuffer mDecodedSamples;
    private byte[] sampleBytes;
    private MediaPlayer mediaPlayer;
    private SeekBar seekBar;

    private android.os.Handler handler = new android.os.Handler();
    Runnable updateThread = new Runnable() {
        public void run() {
            // Get the current playback position and push it into the SeekBar.
            if (mediaPlayer != null) {
                seekBar.setProgress(mediaPlayer.getCurrentPosition());
                Log.e("Test", "run------ updateThread:getCurrentPosition " + mediaPlayer.getCurrentPosition());
                // Re-post this runnable after a 100 ms delay.
                handler.postDelayed(updateThread, 100);
            }
        }
    };

    private Chronometer et_time;
    private long falgTime;
    private long pauseTime;
    private long subtime;
    private long beginTime;
    private TextView currentTime;
    private int currentProgress;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.test);
        waveformView = (WaveformView) findViewById(R.id.waveform);
        btnStart = (Button) findViewById(R.id.button_start);
        btnPasue = (Button) findViewById(R.id.button_pasue);
        btnStop = (Button) findViewById(R.id.button2_stop);
        btnPlay = (Button) findViewById(R.id.button_play);
        btnPlay2 = (Button) findViewById(R.id.button_play2);
        btnSave = (Button) findViewById(R.id.button_save);
        seekBar = (SeekBar) findViewById(R.id.sb);
        et_time = (Chronometer) this.findViewById(R.id.et_time);
        currentTime = (TextView) this.findViewById(R.id.currentTime);
        btnStart.setOnClickListener(this);
        btnPasue.setOnClickListener(this);
        btnStop.setOnClickListener(this);
        btnPlay.setOnClickListener(this);
        btnPlay2.setOnClickListener(this);
        btnSave.setOnClickListener(this);

        initPar();
        initRecorder(true);
        initAudioTack();
    }

    @Override
    protected void onResume() {
        super.onResume();
        initRecorder(false);
    }

    private void initAudioTack() {
        // Note: the raw channel values (3 here, 1 below) are magic constants kept from the original listing.
        minBufferSize = AudioTrack.getMinBufferSize(rate, 3, audioFormat);
        Log.e("Test", "initAudioTack------ minBufferSize:" + minBufferSize);
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, rate, 1, audioFormat,
                minBufferSize, AudioTrack.MODE_STREAM);
    }

    private void initPar() {
        duration = 60 * 1000;  // milliseconds
        rate = 44100;          // sound cards commonly offer 11.025 kHz, 22.05 kHz, 44.1 kHz, etc.
        channelConfig = AudioFormat.CHANNEL_IN_DEFAULT;
        audioFormat = AudioFormat.ENCODING_PCM_16BIT;
        restOfTime = duration;
    }

    private void initRecorder(boolean isNew) {
        initAudioFile(isNew);
        bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
        Log.d("Test", "initRecorder: bufferSize:" + bufferSize);
        // int k = audioFormat * rate / 25;
        // if (bufferSize % k != 0) {
        //     bufferSize = (k * (channelConfig + bufferSize / k));
        // }
        audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, rate, channelConfig,
                audioFormat, bufferSize);
    }

    public boolean isRecording() {
        return audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING;
    }

    private void initAudioFile(boolean isNew) {
        filePath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/akm/t.wav";
        new File(Environment.getExternalStorageDirectory().getAbsolutePath() + "/akm").mkdir();
        File f = new File(filePath);
        try {
            if (f.exists() && isNew) {
                f.delete();
            }
            randomAccessFile = new RandomAccessFile(filePath, "rw");
            // file length = byte rate * time
            //             = (sample rate * bits per sample * channels / 8) * seconds
            totalLength = (rate * 1 * 16 / 8) * (duration / 1000);
            // totalLength = (int) (4l * (duration * rate / 1000l));
            createWaveFile(randomAccessFile, true, totalLength);
            totalLength = 44 + totalLength;
            randomAccessFile = new RandomAccessFile(filePath, "rw");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private void createWaveFile(RandomAccessFile randomAccessFile, boolean b, int totalLength) {
        if (b) {
            try {
                // randomAccessFile.write(WAVHeader.getHeader(rate, channels, samples));
                // getHeader(long totalAudioLen, long totalDataLen, long longSampleRate, int channels, long byteRate)
                // i.e. data length, file length, sample rate, channels, byte rate.
                // byte rate (bytes/second) = sample rate (Hz) * bits per sample * channels / 8
                // (channelConfig and audioFormat are AudioFormat constants, not counts, so the
                //  original formula below is kept as a comment and the value is computed for mono 16-bit PCM)
                // long byteRate = (channelConfig * audioFormat * rate) / 8;
                long byteRate = rate * 16 / 8;
                byte[] bytes = WAVHeader.getHeader(totalLength - 36l, totalLength, rate,
                        channelConfig, byteRate);
                FileChannel localFileChannel = randomAccessFile.getChannel();
                localFileChannel.map(FileChannel.MapMode.READ_WRITE, 0l, 44l).put(bytes);
                localFileChannel.close();
                Log.e("Test", "createWaveFile------ OK ");
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public void onClick(View view) {
        switch (view.getId()) {
            case R.id.button_start:
                try {
                    mRecordingKeepGoing = true;
                    new Thread() {
                        @Override
                        public void run() {
                            startRecording();
                        }
                    }.start();
                } catch (Exception e) {
                    e.printStackTrace();
                }
                break;
            case R.id.button_pasue:
                mRecordingKeepGoing = false;
                pauseRecord();
                try {
                    // After pausing, remember the file pointer (the end of the data written so far).
                    startPos = (int) randomAccessFile.getFilePointer();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                pausePlay();
                break;
            case R.id.button_play:
                startReview(true);
                break;
            case R.id.button2_stop:
                mRecordingKeepGoing = false;
                stopRecord();
                stopPlay();
                break;
            case R.id.button_save:
                startPos = currentProgress;
                if (randomAccessFile == null) {
                    try {
                        randomAccessFile = new RandomAccessFile(filePath, "rw");
                    } catch (IOException e) {
                        e.printStackTrace();
                    }
                }
                try {
                    Log.e("Test", "onClick: pos" + randomAccessFile.getFilePointer());
                    // Byte offset corresponding to the selected playback position (ms -> bytes).
                    int size = ((rate * 1 * 16 / 8) * (currentProgress / 1000));
                    Log.e("Test", "onClick------ size " + size);
                    if (size < 44) {
                        size = 44;  // never overwrite the WAV header
                    }
                    randomAccessFile.seek(size);
                    randomAccessFile.write(sampleBytes);
                    randomAccessFile.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                Log.e("Test", "onClick------pos " + currentProgress);
                break;
            case R.id.button_play2:
                play();
                break;
        }
    }

    private void pausePlay() {
        if (mediaPlayer != null && mediaPlayer.isPlaying()) {
            // Pause the media player.
            mediaPlayer.pause();
            btnPasue.setText("续播");  // "resume"
            seekBar.setEnabled(false);
            et_time.stop();
            pauseTime = SystemClock.elapsedRealtime();
            // System.out.println("1 pauseTime" + pauseTime);
        } else if (mediaPlayer != null && "续播".equals(btnPasue.getText().toString())) {
            subtime += SystemClock.elapsedRealtime() - pauseTime;
            // System.out.println("2 subtime:" + subtime);
            mediaPlayer.start();
            btnPasue.setText("暂停");  // "pause"
            seekBar.setEnabled(true);
            beginTime = falgTime + subtime;
            // System.out.println("3 beginTime" + beginTime);
            et_time.setBase(beginTime);
            et_time.start();
        }
    }

    private void stopPlay() {
        if (mediaPlayer != null && mediaPlayer.isPlaying()) {
            mediaPlayer.stop();
            mediaPlayer = null;
            et_time.setBase(SystemClock.elapsedRealtime());
            et_time.start();
            et_time.stop();
            btnPlay2.setEnabled(true);
            btnPlay2.setClickable(true);
        }
        falgTime = 0;
        subtime = 0;
        seekBar.setProgress(0);
        seekBar.setEnabled(false);
    }

    private void play() {
        mediaPlayer = new MediaPlayer();
        try {
            mediaPlayer.setDataSource(filePath);
            mediaPlayer.prepareAsync();
            // Register the prepared listener.
            mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
                public void onPrepared(MediaPlayer mp) {
                    mediaPlayer.start();
                    btnPlay2.setEnabled(false);
                    btnPlay2.setClickable(false);
                    seekBar.setMax(mediaPlayer.getDuration());
                    handler.post(updateThread);
                    seekBar.setEnabled(true);
                }
            });
            // Register the completion listener.
            mediaPlayer.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
                public void onCompletion(MediaPlayer mp) {
                    // mediaPlayer.release();
                    // mediaPlayer = null;
                    btnPlay2.setEnabled(true);
                    btnPlay2.setClickable(true);
                    et_time.setBase(SystemClock.elapsedRealtime());
                    et_time.start();
                    et_time.stop();
                    seekBar.setProgress(0);
                }
            });
            falgTime = SystemClock.elapsedRealtime();
            et_time.setBase(falgTime);
            et_time.start();
            et_time.setOnChronometerTickListener(new Chronometer.OnChronometerTickListener() {
                @Override
                public void onChronometerTick(Chronometer chronometer) {
                }
            });
            seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
                @Override
                public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                    if (fromUser && mediaPlayer != null) {
                        Log.e("Test", "onProgressChanged------ progress " + progress);
                        mediaPlayer.seekTo(progress);
                        falgTime = SystemClock.elapsedRealtime();
                        beginTime = falgTime - seekBar.getProgress();
                        et_time.setBase(beginTime);
                        // et_time.start();
                        // final int ctime = mediaPlayer.getDuration() / progress;
                        // time * byte rate = size; time = size / byte rate -- but the SeekBar is
                        // driven by MediaPlayer positions in milliseconds, so seconds = progress / 1000.
                        // int ctime = progress / ((rate * 1 * 16 / 8) * (duration / 1000));
                        int ctime = progress / 1000;
                        currentTime.setText(ctime + "s");
                    }
                }

                @Override
                public void onStartTrackingTouch(SeekBar seekBar) {
                }

                @Override
                public void onStopTrackingTouch(SeekBar seekBar) {
                    // startPos = seekBar.getProgress();
                    currentProgress = seekBar.getProgress();
                }
            });
        } catch (IOException e) {
            e.printStackTrace();
        }
        // mediaPlayer.prepare();  // synchronous initialization of the native (C/C++) player engine;
        // the asynchronous prepareAsync() above is used instead.
    }

    private OnEventListener onEventListener;

    public void setOnEventListener(OnEventListener onEventListener) {
        this.onEventListener = onEventListener;
    }

    android.os.Handler errorHandler = new android.os.Handler() {
        @Override
        public void handleMessage(Message msg) {
            if (onEventListener != null) {
                onEventListener.onError("error");
            }
        }
    };

    private void startThread() {
        if (startPos == 0) {  // starting position: skip the 44-byte WAV header
            try {
                randomAccessFile.seek(44);
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
        mThreadFlag = true;
        i = 0;
        j = -1;
        STATUS = 1;
        thread = new Thread() {
            @Override
            public void run() {
                if (startTime == 0) {
                    startTime = System.currentTimeMillis();
                }
                byte[] arrayOfByte = new byte[bufferSize];
                short[] arrayOfShort = new short[bufferSize];
                while (mThreadFlag) {
                    try {
                        if (audioRecord == null || STATUS != 1) {
                            continue;
                        }
                        // Note: these two read() calls consume two different chunks of audio --
                        // one as bytes for the file, one as shorts for the waveform view.
                        int bufferReadResult = audioRecord.read(arrayOfByte, 0, bufferSize);
                        int bufferReadResult2 = audioRecord.read(arrayOfShort, 0, bufferSize);
                        Log.e("Test", "run------ bufferReadResult " + bufferReadResult);
                        Log.e("Test", "run------ bufferReadResult2 " + bufferReadResult2);
                        if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING
                                || bufferReadResult <= 0) {
                            break;
                        }
                        i = 1;
                        randomAccessFile.write(arrayOfByte);
                        waveformView.updateAudioData(arrayOfShort);
                        byte[] tmpBuf = new byte[bufferReadResult];
                        System.arraycopy(arrayOfByte, 0, tmpBuf, 0, bufferReadResult);
                        audioTrack.write(tmpBuf, 0, tmpBuf.length);
                        pjByteBuffer(tmpBuf);
                        curFileLength = (int) randomAccessFile.length() / 1024;
                        Log.e("Test", "run------ curFilelength:" + curFileLength + ",startPos:" + startPos
                                + ",endPos:" + endPos + ",isRewrite:" + isRewrite);
                        int time = (int) (System.currentTimeMillis() - startTime);
                        Log.e("Test", "run------ time: " + time);
                        Log.e("Test", "run------ bytes:" + arrayOfByte.length);
                        Log.e("Test", "run------ getFilePointer:" + randomAccessFile.getFilePointer());
                        startPos = (int) randomAccessFile.getFilePointer();
                        if (time >= restOfTime) {
                            stopRecord();
                        }
                        // Guard against the listener not being set.
                        if (onFileChangedListener != null) {
                            onFileChangedListener.onBufferRecevied(startPos, endPos, arrayOfByte,
                                    curFileLength, isRewrite);
                        }
                        // Test t1 = Test.this;
                        // t1.i = 1 + t1.i;
                        //
                        // long length = randomAccessFile.getFilePointer() + arrayOfByte.length;
                        // xxx(this, length);
                        // if (endPos > curFileLength) {
                        //     xxx(this, endPos);
                        // }
                        // randomAccessFile.write(arrayOfByte);
                        // onFileChangedListener.onBufferRecevied(startPos, endPos, arrayOfByte, curFileLength, isRewrite);
                        //
                        // Test t2 = Test.this;
                        // t2.j = 1 + t2.j;
                        // if (i != 0 || j != 0) {
                        //     continue;
                        // }
                        // xxx(this, false);
                        // errorHandler.sendEmptyMessage(0);
                    } catch (Exception e) {
                        e.printStackTrace();
                        errorHandler.sendEmptyMessage(0);
                    }
                }
            }
        };
        thread.start();
    }

    private byte[] pjArray(byte[] src, byte[] dest) {
        byte[] newBytes = new byte[src.length + dest.length];
        System.arraycopy(src, 0, newBytes, 0, src.length);
        System.arraycopy(dest, 0, newBytes, src.length, dest.length);
        return newBytes;
    }

    private void pjByteBuffer(byte[] tmpBuf) {
        if (sampleBytes == null) {
            sampleBytes = tmpBuf;
        } else {
            sampleBytes = pjArray(sampleBytes, tmpBuf);
        }
    }

    private void pauseReview() {
        audioTrackFlag = false;
        audioTrack.pause();
        audioTrack.flush();
        Log.e("Test", "pauseReview------ ");
    }

    private void startReview(boolean b) {
        if (audioTrack == null) {
            initAudioTack();
        }
        audioTrack.play();
        audioTrackFlag = true;
        audioTrackThread = new Thread() {
            @Override
            public void run() {
                try {
                    bytes = new byte[minBufferSize];
                    while (randomAccessFile.read(bytes) != -1 && audioTrackFlag) {
                        Log.e("Test", "run------ ");
Log.e("Test", "run------audiotrackflag is " + audioTrackFlag); Log.e("Test", "run------wrtie data in audiotrack "); } Log.e("Test", "run------ audiotrack end."); } catch (Exception e) { } } }; audioTrackThread.start(); } public void pauseRecord() { if (audioRecord != null) { audioRecord.stop(); mThreadFlag = false; } } private void reset() { startPos = 0; endPos = 0; curFileLength = 44; isRewrite = false; } private void resumeRecord() { while (isRewrite) {//写文件 try { if (randomAccessFile.getFilePointer() != endPos || !isRewrite) {//不可写 ,或者选中位置不是文件指针所在位置 startPos = (int) randomAccessFile.getFilePointer(); // 从文件指针位置开始 } if (!isRewrite) {//不写文件 if (44 + startPos >= endPos) {// continue; } isRewrite = true; } } catch (IOException e) { e.printStackTrace(); } audioRecord.startRecording(); if (thread == null || !mThreadFlag) { startThread(); } } } private void startRecording() { try { audioRecord.startRecording(); // if (thread==null||!mThreadFlag){ // startThread(); // } startThread(); Log.e("Test", "startRecording------ "); } catch (Exception e) { initRecorder(true); } } public int getMinBufferSize() { return minBufferSize; } public void getMinBufSize() { recBufSize = AudioRecord.getMinBufferSize(frequency, 12, 2); } public void setOnFileChangedListener(OnFileChangedListener onFileChangedListener) { this.onFileChangedListener = onFileChangedListener; } public void setDuration(int duration) { this.duration = duration; } interface OnFileChangedListener { void onBufferRecevied(long startPos, long endPos, byte[] b1, long currenFileLength, boolean isRew); } interface OnEventListener { void onError(String s); void onRecordComplete(); void onVolumnChanged(double vl); } public String creatFile() { // copyWaveFile(filePath,filePath,true); return outPath; } private void moveToPrevious(long pl) { try { long l = 44 + 4 * (pl * rate / 1000l); randomAccessFile.seek(l); Log.e("Test", "moveToPrevious------ offset:" + l + " length:" + randomAccessFile.length()); } catch (Exception e) { } } @Override protected void onPause() { super.onPause(); stopRecord(); } @Override protected void onStop() { super.onStop(); closeStream(); } private void stopRecord() { try { mThreadFlag = false; time = 0; thread = null; if (audioRecord != null) { if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) { audioRecord.stop(); Log.e("Test", "stopRecord------ "); } audioRecord.release(); audioRecord = null; } closeStream(); } catch (Exception e) { } } private void closeStream() { try { if (randomAccessFile != null) { randomAccessFile.close(); } } catch (Exception e) { } } float getDensity() { DisplayMetrics metrics = new DisplayMetrics(); getWindowManager().getDefaultDisplay().getMetrics(metrics); return metrics.density; } }
That concludes this look at implementing a recording waveform view on Android. Hopefully the walkthrough gives you a clearer picture of how the pieces fit together; the actual behaviour still needs to be verified in your own project and on real devices.