How do I implement an IIR band-pass filter in my current Android code? I have an Android app that records audio (frequencies, really) and saves it to a .wav file.
I managed to find an IIR filter library online, but I am not sure how to integrate it into my code.
https://github.com/ddf/Minim/blob/master/src/ddf/minim/effects/BandPass.java https://github.com/DASAR/Minim-Android/blob/master/src/ddf/minim/effects/IIRFilter.java
I need to add an 18k-20k band-pass filter to the code before the received sound signal is written to the .wav file.
My current code:
package com.example.audio;
import ddf.minim.effects.*;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import com.varma.samples.audiorecorder.R;
import android.app.Activity;
import android.content.Context;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioRecord;
import android.media.AudioTrack;
import android.media.MediaRecorder;
import android.media.MediaScannerConnection;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.text.SpannableStringBuilder;
import android.text.style.RelativeSizeSpan;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.Toast;
public class RecorderActivity extends Activity {
private static final int RECORDER_BPP = 16;
private static final String AUDIO_RECORDER_FILE_EXT_WAV = ".wav";
private static final String AUDIO_RECORDER_FOLDER = "AudioRecorder";
private static final String AUDIO_RECORDER_TEMP_FILE = "record_temp.raw";
private static final int RECORDER_SAMPLERATE = 44100;// 44100; //18000
private static final int RECORDER_CHANNELS = AudioFormat.CHANNEL_IN_STEREO; //AudioFormat.CHANNEL_IN_STEREO;
private static final int RECORDER_AUDIO_ENCODING = AudioFormat.ENCODING_PCM_16BIT;
private static final int PLAY_CHANNELS = AudioFormat.CHANNEL_OUT_STEREO; //AudioFormat.CHANNEL_OUT_STEREO;
private static final int FREQUENCY_LEFT = 2000; //Original:18000 (16 Dec)
private static final int FREQUENCY_RIGHT = 2000; //Original:18000 (16 Dec)
private static final int AMPLITUDE_LEFT = 1;
private static final int AMPLITUDE_RIGHT = 1;
private static final int DURATION_SECOND = 10;
private static final int SAMPLE_RATE = 44100;
private static final float SWEEP_RANGE = 1000.0f;
String store;
private AudioRecord recorder = null;
private int bufferSize = 0;
private Thread recordingThread = null;
private boolean isRecording = false;
double time;
float[] buffer1;
float[] buffer2;
byte[] byteBuffer1;
byte[] byteBuffer2;
byte[] byteBufferFinal;
int bufferIndex;
short x;
short y;
AudioTrack audioTrack;
Button btnPlay, btnStart, btnStop;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
setButtonHandlers();
enableButtons(false);
btnPlay = (Button) findViewById(R.id.btnPlay);
btnStop = (Button) findViewById(R.id.btnStop);
btnStart = (Button) findViewById(R.id.btnStart);
bufferSize = AudioRecord.getMinBufferSize(RECORDER_SAMPLERATE, RECORDER_CHANNELS, RECORDER_AUDIO_ENCODING);
buffer1 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];
buffer2 = new float[(int) (DURATION_SECOND * SAMPLE_RATE)];
float f1 = 0.0f, f2 = 0.0f;
for (int sample = 0, step = 0; sample < buffer1.length; sample++) {
time = sample / (SAMPLE_RATE * 1.0);
//f1 = (float)(FREQUENCY_LEFT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
//f2 = (float)(FREQUENCY_RIGHT + ((sample / (buffer1.length * 1.0)) * SWEEP_RANGE)); // frequency sweep
f1 = FREQUENCY_LEFT; // static frequency
f2 = FREQUENCY_RIGHT; // static frequency
buffer1[sample] = (float) (AMPLITUDE_LEFT * Math.sin(2 * Math.PI * f1 * time));
buffer2[sample] = (float) (AMPLITUDE_RIGHT * Math.sin(2 * Math.PI * f2 * time));
}
byteBuffer1 = new byte[buffer1.length * 2]; // two bytes per audio
// frame, 16 bits
for (int i = 0, bufferIndex = 0; i < byteBuffer1.length; i++) {
x = (short) (buffer1[bufferIndex++] * 32767.0); // [2^16 - 1]/2 =
// 32767.0
byteBuffer1[i] = (byte) x; // low byte
byteBuffer1[++i] = (byte) (x >>> 8); // high byte
}
byteBuffer2 = new byte[buffer2.length * 2];
for (int j = 0, bufferIndex = 0; j < byteBuffer2.length; j++) {
y = (short) (buffer2[bufferIndex++] * 32767.0);
byteBuffer2[j] = (byte) y; // low byte
byteBuffer2[++j] = (byte) (y >>> 8); // high byte
}
byteBufferFinal = new byte[byteBuffer1.length * 2];
// LL RR LL RR LL RR
for (int k = 0, index = 0; index < byteBufferFinal.length - 4; k = k + 2) {
byteBufferFinal[index] = byteBuffer1[k]; // LEFT
// {0,1/4,5/8,9/12,13;...}
byteBufferFinal[index + 1] = byteBuffer1[k + 1];
index = index + 2;
byteBufferFinal[index] = byteBuffer2[k]; // RIGHT
// {2,3/6,7/10,11;...}
byteBufferFinal[index + 1] = byteBuffer2[k + 1];
index = index + 2;
}
try {
FileOutputStream ss = new FileOutputStream(Environment.getExternalStorageDirectory().getPath() + "/" + AUDIO_RECORDER_FOLDER + "/source.txt");
ss.write(byteBufferFinal);
ss.flush();
ss.close();
}
catch (IOException ioe){
Log.e("IO Error","Write source error.");
}
}
private void setButtonHandlers() {
((Button) findViewById(R.id.btnStart)).setOnClickListener(startClick);
((Button) findViewById(R.id.btnStop)).setOnClickListener(stopClick);
((Button) findViewById(R.id.btnPlay)).setOnClickListener(playClick);
}
private void enableButton(int id, boolean isEnable) {
((Button) findViewById(id)).setEnabled(isEnable);
}
private void enableButtons(boolean isRecording) {
enableButton(R.id.btnStart, !isRecording);
enableButton(R.id.btnStop, isRecording);
enableButton(R.id.btnPlay, isRecording);
}
private String getFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
MediaScannerConnection.scanFile(this, new String[]{filepath}, null, null);
store = file.getAbsolutePath() + "/" + "Audio"
+ AUDIO_RECORDER_FILE_EXT_WAV;
return store;
}
private String getTempFilename() {
String filepath = Environment.getExternalStorageDirectory().getPath();
File file = new File(filepath, AUDIO_RECORDER_FOLDER);
if (!file.exists()) {
file.mkdirs();
}
File tempFile = new File(filepath, AUDIO_RECORDER_TEMP_FILE);
if (tempFile.exists())
tempFile.delete();
return (file.getAbsolutePath() + "/" + AUDIO_RECORDER_TEMP_FILE);
}
private void startRecording() {
//BandPass bandpass = new BandPass(19000,2000,44100);
/* BandPass bandpass = new BandPass(50,2,SAMPLE_RATE);
int [] freqR = {FREQUENCY_RIGHT};
int [] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float [] testR = shortToFloat(freqL);
bandpass.process(testL,testR);
bandpass.printCoeff();
*/
recorder = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
/*
* AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
* (int) RECORDER_SAMPLERATE,AudioFormat.CHANNEL_OUT_STEREO,
* AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);
*/
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) SAMPLE_RATE, PLAY_CHANNELS,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length);
audioTrack.play();
BandPass bandpass = new BandPass(50,2,SAMPLE_RATE);
int [] freqR = {FREQUENCY_RIGHT};
int [] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float [] testR = shortToFloat(freqL);
bandpass.process(testL,testR);
bandpass.printCoeff();
audioTrack.setPlaybackRate(RECORDER_SAMPLERATE);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
try {
writeAudioDataToFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, "AudioRecorder Thread");
recordingThread.start();
}
double[][] deinterleaveData(double[] samples, int numChannels) {
// assert(samples.length() % numChannels == 0);
int numFrames = samples.length / numChannels;
double[][] result = new double[numChannels][];
for (int ch = 0; ch < numChannels; ch++) {
result[ch] = new double[numFrames];
for (int i = 0; i < numFrames; i++) {
result[ch][i] = samples[numChannels * i + ch];
}
}
return result;
}
private void writeAudioDataToFile() throws IOException {
int read = 0;
byte data[] = new byte[bufferSize];
String filename = getTempFilename();
FileOutputStream os = null;
FileOutputStream rs = null;
try {
os = new FileOutputStream(filename);
rs = new FileOutputStream(getFilename().split(".wav")[0] + ".txt");
} catch (FileNotFoundException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
if (null != os) {
while (isRecording) {
read = recorder.read(data, 0, bufferSize);
if (AudioRecord.ERROR_INVALID_OPERATION != read) {
try {
os.write(data);
rs.write(data);
} catch (IOException e) {
e.printStackTrace();
}
}
}
try {
os.close();
rs.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void stopRecording() {
if (null != recorder) {
isRecording = false;
audioTrack.flush();
audioTrack.stop();
audioTrack.release();
recorder.stop();
recorder.release();
recorder = null;
recordingThread = null;
}
copyWaveFile(getTempFilename(), getFilename());
deleteTempFile();
MediaScannerConnection.scanFile(this, new String[]{getFilename()}, null, null);
AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC, 0, 0);
}
private void deleteTempFile() {
File file = new File(getTempFilename());
file.delete();
}
private void copyWaveFile(String inFilename, String outFilename) {
FileInputStream in = null;
FileOutputStream out = null;
long totalAudioLen = 0;
long totalDataLen = totalAudioLen + 36;
long longSampleRate = RECORDER_SAMPLERATE;
int channels = 2;
long byteRate = RECORDER_BPP * RECORDER_SAMPLERATE * channels / 8;
byte[] data = new byte[bufferSize];
try {
in = new FileInputStream(inFilename);
out = new FileOutputStream(outFilename);
totalAudioLen = in.getChannel().size();
totalDataLen = totalAudioLen + 36;
WriteWaveFileHeader(out, totalAudioLen, totalDataLen,
longSampleRate, channels, byteRate);
while (in.read(data) != -1) {
out.write(data);
}
in.close();
out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void playWaveFile() {
String filepath = store;
Log.d("PLAYWAVEFILE", "I AM INSIDE");
// define the buffer size for audio track
int minBufferSize = AudioTrack.getMinBufferSize(8000,
AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
int bufferSize = 512;
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) RECORDER_SAMPLERATE, AudioFormat.CHANNEL_OUT_STEREO,
AudioFormat.ENCODING_PCM_16BIT, minBufferSize,
AudioTrack.MODE_STREAM);
int count = 0;
byte[] data = new byte[bufferSize];
try {
FileInputStream fileInputStream = new FileInputStream(filepath);
DataInputStream dataInputStream = new DataInputStream(
fileInputStream);
audioTrack.play();
Toast.makeText(RecorderActivity.this, "this is my Toast message!!! =)",
Toast.LENGTH_LONG).show();
while ((count = dataInputStream.read(data, 0, bufferSize)) > -1) {
Log.d("PLAYWAVEFILE", "WHILE INSIDE");
audioTrack.write(data, 0, count);
//BandPass bandpass = new BandPass(19000,2000,44100); //Actual
//BandPass bandpass = new BandPass(5000,2000,44100); //Test
//int [] freqR = {FREQUENCY_RIGHT};
//int [] freqL = {FREQUENCY_LEFT};
//float[] testR = shortToFloat(freqR);
//float [] testL = shortToFloat(freqL);
//bandpass.process(testR,testL);
// BandPass bandpass = new BandPass(19000,2000,44100);
//float bw = bandpass.getBandWidth();
//float hello = bandpass.getBandWidth();
//float freq = bandpass.frequency();
//float[] test = {FREQUENCY_RIGHT,FREQUENCY_LEFT};
//shortToFloat(test);
//test [0] = FREQUENCY_RIGHT;
//test [1] = FREQUENCY_LEFT;
//bandpass.process(FREQUENCY_LEFT,FREQUENCY_RIGHT);
//Log.d("MyApp","I am here");
//Log.d("ADebugTag", "Valueeees: " + Float.toString(hello));
//Log.d("Bandwidth: " , "Bandwidth: " + Float.toString(bw));
//Log.d("Frequency: " , "Frequency is " + Float.toString(freq));
//bandpass.setBandWidth(20);
//bandpass.printCoeff();
}
audioTrack.stop();
audioTrack.release();
dataInputStream.close();
fileInputStream.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
private void WriteWaveFileHeader(FileOutputStream out, long totalAudioLen,
long totalDataLen, long longSampleRate, int channels, long byteRate)
throws IOException {
byte[] header = new byte[44];
header[0] = 'R'; // RIFF/WAVE header
header[1] = 'I';
header[2] = 'F';
header[3] = 'F';
header[4] = (byte) (totalDataLen & 0xff);
header[5] = (byte) ((totalDataLen >> 8) & 0xff);
header[6] = (byte) ((totalDataLen >> 16) & 0xff);
header[7] = (byte) ((totalDataLen >> 24) & 0xff);
header[8] = 'W';
header[9] = 'A';
header[10] = 'V';
header[11] = 'E';
header[12] = 'f'; // 'fmt ' chunk
header[13] = 'm';
header[14] = 't';
header[15] = ' ';
header[16] = 16; // 4 bytes: size of 'fmt ' chunk
header[17] = 0;
header[18] = 0;
header[19] = 0;
header[20] = 1; // format = 1
header[21] = 0;
header[22] = (byte) channels;
header[23] = 0;
header[24] = (byte) (longSampleRate & 0xff);
header[25] = (byte) ((longSampleRate >> 8) & 0xff);
header[26] = (byte) ((longSampleRate >> 16) & 0xff);
header[27] = (byte) ((longSampleRate >> 24) & 0xff);
header[28] = (byte) (byteRate & 0xff);
header[29] = (byte) ((byteRate >> 8) & 0xff);
header[30] = (byte) ((byteRate >> 16) & 0xff);
header[31] = (byte) ((byteRate >> 24) & 0xff);
header[32] = (byte) (2 * 16 / 8); // block align
header[33] = 0;
header[34] = RECORDER_BPP; // bits per sample
header[35] = 0;
header[36] = 'd';
header[37] = 'a';
header[38] = 't';
header[39] = 'a';
header[40] = (byte) (totalAudioLen & 0xff);
header[41] = (byte) ((totalAudioLen >> 8) & 0xff);
header[42] = (byte) ((totalAudioLen >> 16) & 0xff);
header[43] = (byte) ((totalAudioLen >> 24) & 0xff);
out.write(header, 0, 44);
}
private View.OnClickListener startClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
Thread recordThread = new Thread(new Runnable() {
@Override
public void run() {
isRecording = true;
startRecording();
}
});
recordThread.start();
btnStart.setEnabled(false);
btnStop.setEnabled(true);
btnPlay.setEnabled(false);
}
};
private View.OnClickListener stopClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
// TODO Auto-generated method stub
stopRecording();
enableButtons(false);
btnPlay.setEnabled(true);
// stop();
}
}, 100);
}
};
private View.OnClickListener playClick = new View.OnClickListener() {
@Override
public void onClick(View v) {
// TODO Auto-generated method stub
playWaveFile();
btnPlay.setEnabled(true);
String filepath = store;
final String promptPlayRecord = "PlayWaveFile()\n" + filepath;
SpannableStringBuilder biggerText = new SpannableStringBuilder(promptPlayRecord);
biggerText.setSpan(new RelativeSizeSpan(2.05f), 0, promptPlayRecord.length(), 0);
Toast.makeText(RecorderActivity.this, biggerText, Toast.LENGTH_LONG).show();
}
};
}
The method below converts my 16-bit integers to float, since the library works with float:
/**
* Convert int[] audio to 32 bit float format.
* From [-32768,32768] to [-1,1]
* @param audio
*/
private float[] shortToFloat(int[] audio) {
    Log.d("SHORTTOFLOAT", "INSIDE SHORTTOFLOAT");
    float[] converted = new float[audio.length];
    for (int i = 0; i < converted.length; i++) {
        // [-32768,32768] -> [-1,1], the default range for Android PCM audio buffers
        converted[i] = audio[i] / 32768f;
    }
    return converted;
}
My attempt at implementing the band-pass filter under the "SaveRecording" method:

//BandPass bandpass = new BandPass(19000,2000,44100);

Since I am trying to cover a range of 18k to 20k, I passed the values above to the band-pass filter. To test whether the filter changes the sound at all (18k-20k is not within audible range), I used the following instead:

BandPass bandpass = new BandPass(50, 2, 44100);
int[] freqR = {FREQUENCY_RIGHT};
int[] freqL = {FREQUENCY_LEFT};
float[] testL = shortToFloat(freqR);
float[] testR = shortToFloat(freqL);
bandpass.process(testL, testR);
bandpass.printCoeff();
Since I am recording in stereo, I am using

public final synchronized void process(float[] sigLeft, float[] sigRight) {}

found in the IIRFilter.java class.
However, even after implementing the above, I do not hear any difference. What am I doing wrong? Can anyone advise or help me?
Thank you very much! I am very new to this kind of signal processing, and any tips on how to make progress are much appreciated!
UPDATE
Since I have to output a .wav file containing the filtered sound signal, I figured the way to do this was to put the BandPass filter inside the "StartRecording" method; however, it does not work. What am I doing wrong?
private void startRecording() {
int count = 0;
recorder = new AudioRecord(MediaRecorder.AudioSource.CAMCORDER,
RECORDER_SAMPLERATE, RECORDER_CHANNELS,
RECORDER_AUDIO_ENCODING, bufferSize);
AudioManager am = (AudioManager)getSystemService(Context.AUDIO_SERVICE);
am.setStreamVolume(AudioManager.STREAM_MUSIC, am.getStreamMaxVolume(AudioManager.STREAM_MUSIC), 0);
audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
(int) SAMPLE_RATE, PLAY_CHANNELS,
AudioFormat.ENCODING_PCM_16BIT, byteBufferFinal.length,
AudioTrack.MODE_STATIC);
BandPass bandpass = new BandPass(19000,2000,44100);
float[][] signals = deinterleaveData(byteToFloat(byteBufferFinal), 2);
bandpass.process(signals[0], signals[1]);
audioTrack.write(interleaveData(signals), 0, count, WRITE_NON_BLOCKING);
audioTrack.play();
//audioTrack.write(byteBufferFinal, 0, byteBufferFinal.length); //Original
audioTrack.setPlaybackRate(RECORDER_SAMPLERATE);
recorder.startRecording();
isRecording = true;
recordingThread = new Thread(new Runnable() {
@Override
public void run() {
try {
writeAudioDataToFile();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}, "AudioRecorder Thread");
recordingThread.start();
}
UPDATE (14 Mar 2016): Here is an image of the output .wav file as displayed by Audacity:
[image: Audacity waveform view of the recorded .wav file, produced by pressing the black triangle]
Does this count as filtered? What characteristics should I look for to make sure it has been filtered correctly?
[image: Audacity spectrum view, generated via Analyse -> Plot Spectrum]
What about this picture? Does it show that the band-pass filter was implemented successfully? Thanks.
Best answer
There is a problem with the way you are interacting with the Bandpass.java source, probably due to what appears to be a slight misunderstanding: IIR filters do not process frequencies; they process time-domain data samples (which may exhibit oscillatory behaviour).
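To make that distinction concrete, here is what a second-order IIR section actually computes, sample by sample; a minimal plain-Java sketch (this is not Minim code, and all names here are ours):

// Direct-form I biquad: y[n] = b0*x[n] + b1*x[n-1] + b2*x[n-2]
//                              - a1*y[n-1] - a2*y[n-2]
// The coefficients b0..b2, a1, a2 are what BandPass derives from the
// centre frequency, bandwidth and sample rate; the filter itself only
// ever sees time-domain samples x[n], never "frequencies".
static float[] biquad(float[] x, float b0, float b1, float b2, float a1, float a2) {
    float[] y = new float[x.length];
    float x1 = 0, x2 = 0, y1 = 0, y2 = 0; // delayed input/output samples
    for (int n = 0; n < x.length; n++) {
        y[n] = b0 * x[n] + b1 * x1 + b2 * x2 - a1 * y1 - a2 * y2;
        x2 = x1; x1 = x[n]; // shift the delay line
        y2 = y1; y1 = y[n];
    }
    return y;
}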
You therefore have to feed those time-domain samples to Bandpass.process() as its input. Since you are reading raw bytes from the file, you first need to convert those bytes to float. You can do that like this:
/**
 * Convert byte[] raw audio to 16-bit int format.
 * @param rawdata
 */
private int[] byteToShort(byte[] rawdata) {
    int[] converted = new int[rawdata.length / 2];
    for (int i = 0; i < converted.length; i++) {
        // Wave file data are stored in little-endian order
        int lo = rawdata[2 * i];
        int hi = rawdata[2 * i + 1];
        converted[i] = ((hi & 0xFF) << 8) | (lo & 0xFF);
    }
    return converted;
}

private float[] byteToFloat(byte[] audio) {
    return shortToFloat(byteToShort(audio));
}
For a stereo wave file, the samples you read from the file are interleaved, so you also need to deinterleave them. This can be achieved in much the same way as your deinterleaveData, except that you need a variant that converts to float[][] rather than double[][], because Bandpass.process expects float arrays.
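A minimal sketch of such a variant, which simply overloads the existing name for float input (this overload is ours, not part of the original code):

float[][] deinterleaveData(float[] samples, int numChannels) {
    int numFrames = samples.length / numChannels;
    float[][] result = new float[numChannels][];
    for (int ch = 0; ch < numChannels; ch++) {
        result[ch] = new float[numFrames];
        for (int i = 0; i < numFrames; i++) {
            // channel ch occupies every numChannels-th sample, offset by ch
            result[ch][i] = samples[numChannels * i + ch];
        }
    }
    return result;
}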
Of course, you also need to recombine the two channels after filtering, but before feeding the filtered signal back to audioTrack:
float[] interleaveData(float[][] data) {
    int numChannels = data.length;
    int numFrames = data[0].length;
    float[] result = new float[numFrames * numChannels];
    for (int i = 0; i < numFrames; i++) {
        for (int ch = 0; ch < numChannels; ch++) {
            result[numChannels * i + ch] = data[ch][i];
        }
    }
    return result;
}
You should now have the building blocks you need to filter the audio:
BandPass bandpass = new BandPass(19000, 2000, 44100);
while ((count = dataInputStream.read(data, 0, bufferSize)) > -1) {
    // decode and deinterleave the stereo 16-bit-per-sample data
    float[][] signals = deinterleaveData(byteToFloat(data), 2);
    // filter the samples, updating the buffers in place with the filtered samples
    bandpass.process(signals[0], signals[1]);
    // recombine the channels for playback; count bytes correspond to count/2 floats
    // (note: this float write overload requires an AudioTrack created with
    // AudioFormat.ENCODING_PCM_FLOAT, available from API level 21)
    audioTrack.write(interleaveData(signals), 0, count / 2, AudioTrack.WRITE_NON_BLOCKING);
}
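If the goal is to write the filtered signal into the .wav file rather than only play it back, the filtered floats also have to be packed back into little-endian 16-bit PCM before being written out; a minimal sketch, with a helper name (floatToByte) of our own choosing:

private byte[] floatToByte(float[] audio) {
    byte[] bytes = new byte[audio.length * 2];
    for (int i = 0; i < audio.length; i++) {
        // clamp to [-1,1], then scale back up to the 16-bit range
        float v = Math.max(-1.0f, Math.min(1.0f, audio[i]));
        short s = (short) (v * 32767.0f);
        bytes[2 * i] = (byte) (s & 0xff);            // low byte first (little-endian)
        bytes[2 * i + 1] = (byte) ((s >> 8) & 0xff); // high byte
    }
    return bytes;
}

The resulting bytes can then go through the existing copyWaveFile/WriteWaveFileHeader path unchanged.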
P.S.: As a final note, you are currently reading the whole wave file as data samples, including the header. This will produce a short noisy burst at the start. To avoid it, you should skip over the header.
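A minimal way to do that, assuming the standard 44-byte header that your WriteWaveFileHeader writes:

// Skip the 44-byte RIFF/WAVE header so it is not interpreted as audio data.
long skipped = 0;
while (skipped < 44) {
    long n = dataInputStream.skip(44 - skipped);
    if (n <= 0) break; // end of stream, or skip not supported
    skipped += n;
}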
Regarding "java - How to implement an IIR band-pass filter for input audio on Android", we found a similar question on Stack Overflow: https://stackoverflow.com/questions/34319751/