gpt4 book ai didi

android - 成功录音和处理几秒钟后,Android应用程序卡住

转载 作者:行者123 更新时间:2023-12-03 01:46:03 27 4
gpt4 key购买 nike

经过约10秒钟的预期行为,我的应用程序冻结了,我很难查明原因。

  • 是否存在内存问题?
  • 音频缓冲区有问题吗?

  • 该应用程序应该从麦克风采样音频,对其进行处理以检测峰值频率并在屏幕上可视化结果。我有MainActivity,其中定义了一个线程来更新 View 。我有另外两个单独实现的线程,一个用于通过AudioRecord类记录音频,一个用于处理音频信号并检测峰值频率。记录和处理似乎可以正常工作并产生预期的结果。但是,经过一段时间(确切的时间从一次运行到下一次运行,时间会有所不同),该应用程序将冻结。日志然后显示重复的消息

    W/art: Suspending all threads took: x.xxxms

    I/art: Background sticky concurrent mark sweep GC freed ...


    GC删除的对象数量可能非常高(一种情况:62119(3MB)),我想知道在哪里创建这么多对象以及如何避免它?我怀疑音频缓冲区有问题,但不知道如何最好地诊断出来。
    这里相关代码:
    MainActivity.java
    public class MainActivity extends AppCompatActivity {

    private static final String TAG = "StringTuner";

    VerticalLineDrawingView drawView; // view to draw frequency indicator
    TextView textView; // view for text output

    private AudioRecorderThread recorder; // thread for recording audio
    private ProcessingThread processor; // thread for processing audio signal
    private Thread viewUpdater; // thread for updating views

    @Override
    public void onDestroy() {
    super.onDestroy();
    recorder.releaseAudioRecord();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {

    Log.d(TAG, "Creating the main activity");
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    drawView = (VerticalLineDrawingView) this.findViewById(R.id.drawView_NoteIndicator);
    textView = (TextView) this.findViewById(R.id.textView_TextOutput);


    Log.d(TAG, "Setting up AudioRecord and processing thread");
    recorder = new AudioRecorderThread();
    recorder.start();

    processor = new ProcessingThread(recorder);
    processor.start();

    // thread for updating view
    if (viewUpdater == null){
    viewUpdater = new Thread() {
    public void run() {
    try {
    while (recorder != null && processor != null) {
    runOnUiThread(new Runnable() {
    public void run() {
    if (processor != null){
    textView.setText(String.valueOf(processor.getPeakFrequency()) + " Hz\n" + processor.getProcessingTime() + " ms");
    drawView.reposition((int) (processor.getPosition() * drawView.getViewWidth()));
    }
    }
    });
    sleep(1);
    }
    } catch (Exception e) {
    e.printStackTrace();
    } finally {
    viewUpdater = null;
    }
    }
    };
    viewUpdater.start();
    }

    }

    }
    AudioRecorderThread.java
    public class AudioRecorderThread extends Thread {

    private AudioRecord audioRecord;
    private boolean isRecording;
    private int nSample;
    private int frameByteSize;
    private byte[] frame;
    private byte[] buffer;

    public AudioRecorderThread(){

    int channel = AudioFormat.CHANNEL_IN_MONO; // channel configuration
    int encoding = AudioFormat.ENCODING_PCM_16BIT; // audio encoding
    int fS = getMinimumSampleRate(); // sample rate [Hz]

    // get minimum buffer size for AudioRecord session
    int minBuffer = AudioRecord.getMinBufferSize(fS, channel, encoding);

    int src = MediaRecorder.AudioSource.MIC; // audio source

    // instantiate AudioRecord session
    audioRecord = new AudioRecord(src, fS, channel, encoding, minBuffer);
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
    throw new RuntimeException("AudioRecord session could not be initialized.");
    }

    nSample = getNextPowerOf2(fS); // frequency resolution below 1 Hz
    frameByteSize = 2 * nSample; // 16bit -> 1 frame = 2* sample size
    frame = new byte[frameByteSize];

    buffer = new byte[frameByteSize]; // buffer for reading data

    }

    public AudioRecord getAudioRecord(){
    return audioRecord;
    }

    public boolean isRecording(){
    return this.isAlive() && isRecording;
    }

    public void startRecording(){
    try{
    audioRecord.startRecording();
    isRecording = true;
    } catch (Exception e) {
    e.printStackTrace();
    }
    }

    public void stopRecording(){
    try{
    audioRecord.stop();
    isRecording = false;
    } catch (Exception e) {
    e.printStackTrace();
    }
    }

    public void releaseAudioRecord(){
    try{
    audioRecord.release();
    } catch (Exception e) {
    e.printStackTrace();
    }
    }

    public byte[] getFrame(){
    audioRecord.read(buffer, 0, 800); // one sample = 2 bytes (16 bits)
    System.arraycopy(frame,800,frame,0,800);
    System.arraycopy(buffer,0,frame,frameByteSize-800,800);
    return frame;
    }

    public int getFrameByteSize(){
    return frameByteSize;
    }

    public double getFrequencySteps(){
    return ((double) audioRecord.getSampleRate()) / ((double) nSample);
    }

    public void run() {
    startRecording();

    // fill buffer
    audioRecord.read(buffer, 0, frameByteSize); // read 1 frame of data into buffer
    System.arraycopy(buffer, 0, frame, 0, frameByteSize);

    }
    }
    ProcessingThread.java
    public class ProcessingThread extends Thread{

    private AudioRecorderThread recorder;
    private volatile Thread _thread;
    private int bytesPerSample;

    private long t_process;
    private double f_peak;
    private double position;

    private double[] stringFrequenciesLog = new double[] {Math.log(82.4), Math.log(110), Math.log(146.8), Math.log(196), Math.log(246.9), Math.log(329.6)};
    private double[] stringPositions = new double[] {2.0/9.0, 3.0/9.0, 4.0/9.0, 5.0/9.0, 6.0/9.0, 7.0/9.0};

    public ProcessingThread(AudioRecorderThread recorder){

    this.recorder = recorder;
    AudioRecord audioRecord = recorder.getAudioRecord();

    if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_16BIT){
    bytesPerSample = 2;
    }
    else if (audioRecord.getAudioFormat() == AudioFormat.ENCODING_PCM_8BIT){
    bytesPerSample = 1;
    }

    }

    public void start() {
    _thread = new Thread(this);
    _thread.start();
    }

    public void stopDetection(){
    _thread = null;
    }

    public void run() {
    try {
    double maxValue;
    long t0;
    byte[] frame;
    short[] sample = new short[recorder.getFrameByteSize()/2];
    double[] sampleAsDouble = new double[sample.length];
    double[] magnitudes;
    FastFourierTransform fft = new com.dsp.FastFourierTransform();

    Thread thisThread = Thread.currentThread();
    while (_thread == thisThread) {

    t0 = System.nanoTime();

    // read recorded audio data
    frame = recorder.getFrame();

    // processing
    if (frame != null) {
    // sound detected
    sample = decodeSample(frame);

    for (int j=0;j<sample.length;j++) {
    sampleAsDouble[j] = (double)sample[j];
    }

    // todo: bandpass filter the audio signal

    // todo: fft the audio signal
    magnitudes = fft.getMagnitudes(sampleAsDouble);

    // todo: identify peak frequency
    maxValue = magnitudes[0];
    f_peak = 0.0;
    for (int i=1; i<magnitudes.length; i++) {
    if (magnitudes[i]>maxValue) {
    maxValue = magnitudes[i];
    f_peak = (double) i * recorder.getFrequencySteps();
    }
    }

    frequency2position(f_peak);

    t_process = System.nanoTime() - t0;

    }
    else{
    // no sound detected
    f_peak = -1;
    position = -5;
    }
    }
    } catch (Exception e) {
    e.printStackTrace();
    }
    }

    private short[] decodeSample(byte[] buffer) {

    short[] sample;

    if (bytesPerSample==2) {

    sample = new short[buffer.length/2];

    for (int i = 0; i < buffer.length; i += 2) {
    sample[i/2] = (short) ((buffer[i]) | buffer[i + 1] << 8);
    }

    } else {

    sample = new short[buffer.length];

    for (int i = 0; i < buffer.length; i++) {
    sample[i] = (short) buffer[i];
    }

    }

    return sample;

    }

    private void frequency2position(double f){

    // check f is in valid frequency range
    double f_min = 30;
    double f_max = 3000;

    if (f<f_min) {
    position = -1;
    return;
    }

    if (f>f_max) {
    position = -2;
    return;
    }

    double f_log = Math.log(f);

    // find closest string frequency
    int closestIndex = 0;
    double distance = Math.abs(f_log - stringFrequenciesLog[closestIndex]);
    while ((closestIndex < stringFrequenciesLog.length-1) && (Math.abs(f_log - stringFrequenciesLog[closestIndex+1])) < distance) {
    distance = Math.abs(f_log - stringFrequenciesLog[closestIndex+1]);
    closestIndex++;
    }

    double alpha = f_log/stringFrequenciesLog[closestIndex];
    position = alpha * stringPositions[closestIndex];

    if (position<0) {
    position = -3;
    return;
    }

    if (position>1) {
    position = -4;
    }

    }

    public double getPosition(){
    return position;
    }

    public double getPeakFrequency(){
    return f_peak;
    }

    public long getProcessingTime(){
    return t_process/1000000; // in ms
    }

    }
    VerticalLineDrawingView.java
    public class VerticalLineDrawingView extends android.support.v7.widget.AppCompatTextView {

    private int position = -1;
    private int screenHeight = 0;
    private int screenWidth = 0;

    private Paint mLinePaint = new Paint();

    public VerticalLineDrawingView(final Context ct) {
    super(ct);
    init();
    }

    public VerticalLineDrawingView(final Context ct, final AttributeSet attrs) {
    super(ct, attrs);
    init();
    }

    public VerticalLineDrawingView(final Context ct, final AttributeSet attrs, final int defStyle) {
    super(ct, attrs, defStyle);
    init();
    }

    private void init() {
    mLinePaint.setColor(ResourcesCompat.getColor(getResources(), R.color.greenPrimary, null));
    mLinePaint.setStrokeWidth(5);
    }

    @Override
    protected void dispatchDraw(Canvas canv) {
    if (position>0) {
    canv.drawLine(position, 0, position, screenHeight, mLinePaint);
    getLayout().draw(canv);
    }
    }

    @Override
    protected void onMeasure(final int widthMeasureSpec, final int heightMeasureSpec) {
    super.onMeasure(widthMeasureSpec, heightMeasureSpec);
    screenHeight = getMeasuredHeight();
    screenWidth = getMeasuredWidth();
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
    super.onSizeChanged(w, h, oldw, oldh);
    screenWidth = w;
    screenHeight = h;
    }

    public void reposition(int newpos) {
    position = newpos;
    invalidate();
    }

    public int getViewWidth() {
    return screenWidth;
    }

    public int getPosition() {
    return position;
    }

    }

    最佳答案

    您正在对主线程执行许多操作,这就是它挂起的原因,以避免使用Asynctask。

    关于android - 成功录音和处理几秒钟后,Android应用程序卡住,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/43628860/

    27 4 0
    Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
    广告合作:1813099741@qq.com 6ren.com