I am new to Android programming and to audio visualization. I want to create a simple audio visualizer using the MediaPlayer and Visualizer classes. My problem is that I don't really understand what the wave form data actually is, or how I am supposed to use it to visualize the audio. I am using the code below. The problem with it is that it only visualizes roughly the first 10-12 seconds of the audio file; after that, I can't get any more data! What am I doing wrong?
public void attachVisualizer()
{
    Visualizer vis = new Visualizer(mPlayer.getAudioSessionId());
    // use the smallest available capture size
    vis.setCaptureSize(Visualizer.getCaptureSizeRange()[0]);
    vis.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
        public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
            int sum = 0;
            for (int i = 0; i < bytes.length; i++) {
                sum += bytes[i];
            }
            if (sum > 8000) {
                // Do something which uses mPlayer.getCurrentPosition() in mathematics
            }
        }

        public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {}
    }, Visualizer.getMaxCaptureRate(), true, false);
    vis.setEnabled(true);
}
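
For context, this is roughly how I create and start the player before calling the method above (a simplified sketch of my setup; R.raw.song is only a placeholder for the actual track, and mPlayer is a field of my Activity). I do have android.permission.RECORD_AUDIO in my manifest, which I understand the Visualizer requires:

// simplified sketch of my setup code; R.raw.song is a placeholder
mPlayer = MediaPlayer.create(this, R.raw.song);
attachVisualizer();   // attach the Visualizer to mPlayer's audio session
mPlayer.start();      // playback keeps going, but captures stop after ~10-12 s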
EDIT
Another question on my mind is: how can I determine the length of time contained in a given audio segment? (I sketch my current guess for that after the code below.)
This is what I am doing now:
visualizer = new Visualizer(0);   // session 0 attaches to the output mix
visualizer.setEnabled(false);
visualizer.setCaptureSize(Visualizer.getCaptureSizeRange()[0]);
visualizer.setDataCaptureListener(
        new Visualizer.OnDataCaptureListener() {
            public void onWaveFormDataCapture(Visualizer visualizer,
                    byte[] bytes, int samplingRate) {
                eqview.setVSWaveForm(bytes);
            }

            public void onFftDataCapture(Visualizer visualizer,
                    byte[] bytes, int samplingRate) {
                fftview.setVSFftData(bytes);
            }
        }, Visualizer.getMaxCaptureRate(), true, true);
visualizer.setEnabled(true);
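
On the segment-length question, my current guess (just an assumption on my part; the names below are mine, not from the code above) is that the duration covered by one waveform capture can be derived from the buffer length and the sampling rate, which I believe the Visualizer reports in milliHertz (Visualizer.getSamplingRate() is documented that way). For longer segments I would simply diff mPlayer.getCurrentPosition(), which is in milliseconds:

// Rough sketch, assuming the samplingRate passed to onWaveFormDataCapture is
// in milliHertz like Visualizer.getSamplingRate(): one capture buffer holds
// waveform.length 8-bit samples, so it spans length / (rate in Hz) seconds.
public static double captureDurationSeconds(byte[] waveform, int samplingRateMilliHz) {
    double sampleRateHz = samplingRateMilliHz / 1000.0; // e.g. 44100000 mHz -> 44100 Hz
    return waveform.length / sampleRateHz;
}

// For a longer segment, I would take the difference of two
// mPlayer.getCurrentPosition() readings (MediaPlayer reports milliseconds):
// long segmentMillis = endPositionMs - startPositionMs;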
As for the visualizer view itself, I found this code online; I did not write it:
package app.util;

import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.view.View;

/**
 * @author yokmama
 */
public class VisualizerView extends View {

    private byte[] mBytes;
    private float[] mPoints;
    private Rect mRect = new Rect();
    //SharedPreferences prefs;
    private Paint mForePaint = new Paint();

    public VisualizerView(Context context, AttributeSet attrs) {
        super(context, attrs);
        //prefs = PreferenceManager.getDefaultSharedPreferences(context);
        init();
    }

    private void init() {
        mBytes = null;
        //int colorchosen = prefs.getInt("COLOR_PREFERENCE_KEY",
        //        Color.WHITE);
        mForePaint.setStrokeWidth(1);
        //mForePaint.setAntiAlias(true);
        mForePaint.setColor(Color.WHITE);
        //mForePaint.setMaskFilter(new BlurMaskFilter(1, Blur.INNER));
    }

    // Called with each new waveform capture; stores it and triggers a redraw.
    public void updateVisualizer(byte[] bytes) {
        mBytes = bytes;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);

        if (mBytes == null) {
            return;
        }

        if (mPoints == null || mPoints.length < mBytes.length * 4) {
            mPoints = new float[mBytes.length * 4];
        }

        mRect.set(0, 0, getWidth(), getHeight());

        // Build one line segment per pair of adjacent samples, scaled so the
        // waveform is centred vertically and spans the full width of the view.
        for (int i = 0; i < mBytes.length - 1; i++) {
            mPoints[i * 4] = mRect.width() * i / (mBytes.length - 1);
            mPoints[i * 4 + 1] = mRect.height() / 2
                    + ((byte) (mBytes[i] + 128)) * (mRect.height() / 2) / 128;
            mPoints[i * 4 + 2] = mRect.width() * (i + 1) / (mBytes.length - 1);
            mPoints[i * 4 + 3] = mRect.height() / 2
                    + ((byte) (mBytes[i + 1] + 128)) * (mRect.height() / 2)
                    / 128;
        }

        canvas.drawLines(mPoints, mForePaint);
        //canvas.drawPoints(mPoints, mForePaint);
    }
}
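
If it matters, this is how I assume the view is meant to be fed from the capture listener (a sketch with my own names; waveformView would be a VisualizerView from my layout, replacing the eqview/fftview calls in my code above):

visualizer.setDataCaptureListener(new Visualizer.OnDataCaptureListener() {
    public void onWaveFormDataCapture(Visualizer visualizer, byte[] bytes, int samplingRate) {
        // push the latest waveform capture into the custom view
        waveformView.updateVisualizer(bytes);
    }
    public void onFftDataCapture(Visualizer visualizer, byte[] fft, int samplingRate) {
        // not needed for the waveform-only view
    }
}, Visualizer.getMaxCaptureRate(), true, false);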