gpt4 book ai didi

java - 将实时 Android 音频流式传输到服务器

转载 作者:IT老高 更新时间:2023-10-28 20:25:24 25 4
gpt4 key购买 nike

我目前正在尝试将实时麦克风音频从 Android 设备流式传输到 Java 程序。我开始在两个安卓设备之间发送实时音频,以确认我的方法是正确的。在接收设备上几乎没有任何延迟,可以完美地听到音频。接下来,我将相同的音频流发送到一个小型 Java 程序,并且我验证了数据也正确地发送到这里。现在我想做的是对这些数据进行编码,并以某种方式在运行 Java 程序的服务器上播放它。我宁愿在使用 HTML5 或 JavaScript 的网络浏览器中播放它,但我对 VLC 等替代方法持开放态度。

这是发送实时麦克风音频的 Android 应用的代码

// Question code: streams microphone audio from an Android device over UDP.
// NOTE(review): as written this never transmits real audio — see the BUG
// comments in startStreaming() below; the answer further down fixes this.
public class MainActivity extends Activity {


private Button startButton,stopButton;

// Unused here; the send loop allocates its own local buffers.
public byte[] buffer;
public static DatagramSocket socket;
AudioRecord recorder;

// 44.1 kHz, mono, 16-bit PCM capture configuration.
private int sampleRate = 44100;
private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
// Minimum AudioRecord buffer for this format, queried from the platform.
int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
// Loop flag: true while streaming. NOTE(review): read from the stream thread
// and written from the UI thread without volatile — visibility is not guaranteed.
private boolean status = true;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);

startButton = (Button) findViewById (R.id.start_button);
stopButton = (Button) findViewById (R.id.stop_button);

startButton.setOnClickListener(startListener);
stopButton.setOnClickListener(stopListener);

// Pad the minimum buffer size; the extra 2048 bytes is headroom, not a
// platform requirement.
minBufSize += 2048;
}

@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.main, menu);
return true;
}

// Stops streaming and releases the recorder.
// NOTE(review): releasing here races with the stream thread, which may still
// be using the recorder.
private final OnClickListener stopListener = new OnClickListener() {

@Override
public void onClick(View arg0) {
status = false;
recorder.release();
Log.d("VS","Recorder released");
}
};

// Starts the background streaming thread.
private final OnClickListener startListener = new OnClickListener() {

@Override
public void onClick(View arg0) {
status = true;
startStreaming();
}
};



// Spawns a thread that records from the mic and sends UDP packets to a
// hard-coded LAN address (192.168.1.5:50005).
public void startStreaming()
{
Thread streamThread = new Thread(new Runnable(){
@Override
public void run()
{
try{

DatagramSocket socket = new DatagramSocket();
Log.d("VS", "Socket Created");

byte[] buffer = new byte[minBufSize];

Log.d("VS","Buffer created of size " + minBufSize);


Log.d("VS", "Address retrieved");
recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRate,channelConfig,audioFormat,minBufSize);
Log.d("VS", "Recorder initialized");


recorder.startRecording();


InetAddress IPAddress = InetAddress.getByName("192.168.1.5");
byte[] sendData = new byte[1024];
byte[] receiveData = new byte[1024];


// BUG(review): recorder.read(...) is never called, so sendData stays
// all zeros — this loop floods the network with silent packets as
// fast as possible. The captured `buffer` above is never used either.
while (status == true)
{
DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, IPAddress, 50005);
socket.send(sendPacket);
}

} catch(UnknownHostException e) {
Log.e("VS", "UnknownHostException");
} catch (IOException e) {
Log.e("VS", "IOException");
e.printStackTrace();
}


}

});
streamThread.start();
}
}

这是Java程序读取数据的代码..

class Server
{
    /**
     * Debug receiver: listens on UDP port 50005 and prints each packet's
     * payload as text. Intended only to verify that data arrives; the bytes
     * are raw PCM audio, so the printed text will be unreadable.
     *
     * @throws Exception if the socket cannot be bound or a receive fails
     */
    public static void main(String args[]) throws Exception
    {
        DatagramSocket serverSocket = new DatagramSocket(50005);
        byte[] receiveData = new byte[1024];
        while (true)
        {
            DatagramPacket receivePacket = new DatagramPacket(receiveData, receiveData.length);
            serverSocket.receive(receivePacket);
            // Decode only the bytes actually received. The original did
            // new String(getData().toString()), which prints the byte array's
            // identity ("[B@1a2b3c") instead of the packet contents.
            String sentence = new String(receivePacket.getData(), 0, receivePacket.getLength());
            System.out.println("RECEIVED: " + sentence);
        }
    }
}

我知道在将音频发送到 Java 程序之前,我应该在应用程序端对音频进行编码,但我不确定在使用 AudioRecorder 时如何进行编码。我宁愿不使用 NDK,因为我没有使用它的经验,也没有时间学习如何使用它....但是 :)

最佳答案

所以我解决了我的问题。问题主要出在接收方。接收器接收音频流并将其推送到 PC 的扬声器。由此产生的声音仍然有些延迟和断断续续,但它确实能工作。调整缓冲区大小可能会改善这一点。

编辑:应使用单独的线程来读取音频以避免延迟。此外,最好使用 16000 Hz 的采样率,因为这对语音来说已经足够。

安卓代码:

package com.example.mictest2;

import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.UnknownHostException;

import android.app.Activity;
import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;

/**
 * Answer code: captures microphone audio (16 kHz, mono, 16-bit PCM) and
 * streams it as raw UDP packets to a hard-coded LAN receiver.
 */
public class Send extends Activity {
    private Button startButton, stopButton;

    public byte[] buffer;
    public static DatagramSocket socket;
    // UDP port the desktop Server listens on.
    private int port = 50005;

    AudioRecord recorder;

    private int sampleRate = 16000; // 44100 for music; 16000 is enough for speech
    private int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // volatile: written by the UI thread (stop button), read by the stream thread.
    private volatile boolean status = true;


    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        startButton = (Button) findViewById (R.id.start_button);
        stopButton = (Button) findViewById (R.id.stop_button);

        startButton.setOnClickListener (startListener);
        stopButton.setOnClickListener (stopListener);

    }

    // Signals the stream thread to stop; the thread itself releases the
    // recorder, so we never release it while read() may still be running.
    private final OnClickListener stopListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = false;
        }

    };

    private final OnClickListener startListener = new OnClickListener() {

        @Override
        public void onClick(View arg0) {
            status = true;
            startStreaming();
        }

    };

    /**
     * Starts a background thread that reads PCM from the mic and sends each
     * chunk as one UDP datagram. Networking must stay off the UI thread.
     */
    public void startStreaming() {

        Thread streamThread = new Thread(new Runnable() {

            @Override
            public void run() {
                DatagramSocket socket = null;
                try {

                    socket = new DatagramSocket();
                    Log.d("VS", "Socket Created");

                    byte[] buffer = new byte[minBufSize];

                    Log.d("VS","Buffer created of size " + minBufSize);
                    DatagramPacket packet;

                    // NOTE(review): receiver address is hard-coded; make this
                    // configurable for real use.
                    final InetAddress destination = InetAddress.getByName("192.168.1.5");
                    Log.d("VS", "Address retrieved");

                    // 10x the minimum buffer gives the recorder headroom so
                    // slow sends do not drop capture data.
                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,sampleRate,channelConfig,audioFormat,minBufSize*10);
                    Log.d("VS", "Recorder initialized");

                    recorder.startRecording();

                    while (status) {

                        // Blocking read from the mic; returns bytes read or a
                        // negative AudioRecord error code.
                        int bytesRead = recorder.read(buffer, 0, buffer.length);
                        if (bytesRead <= 0) {
                            Log.e("VS", "AudioRecord read failed: " + bytesRead);
                            break;
                        }

                        // Send only the bytes actually read, not the whole
                        // buffer (the original always sent buffer.length).
                        packet = new DatagramPacket(buffer, bytesRead, destination, port);
                        socket.send(packet);
                    }

                    recorder.stop();
                    recorder.release();
                    Log.d("VS","Recorder released");

                } catch(UnknownHostException e) {
                    Log.e("VS", "UnknownHostException");
                } catch (IOException e) {
                    e.printStackTrace();
                    Log.e("VS", "IOException");
                } finally {
                    if (socket != null) {
                        socket.close();
                    }
                }
            }

        });
        streamThread.start();
    }
}

Android XML:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".MainActivity" >

<TextView
android:id="@+id/textView1"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/hello_world" />

<Button
android:id="@+id/start_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@+id/textView1"
android:layout_centerHorizontal="true"
android:layout_marginTop="130dp"
android:text="Start" />

<Button
android:id="@+id/stop_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignLeft="@+id/button1"
android:layout_below="@+id/button1"
android:layout_marginTop="64dp"
android:text="Stop" />

</RelativeLayout>

服务器代码:

package com.datagram;

import java.io.ByteArrayInputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.util.Arrays;

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.DataLine;
import javax.sound.sampled.FloatControl;
import javax.sound.sampled.SourceDataLine;

/**
 * Answer code: receives raw PCM audio over UDP and plays each packet on the
 * PC speakers. Format must match the Android sender (16-bit, mono,
 * little-endian).
 */
class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 44100;

    public static void main(String args[]) throws Exception {

        DatagramSocket serverSocket = new DatagramSocket(port);

        // 1280 bytes fits one packet at 16000 Hz; use 3584 for 44100 Hz
        // (AudioRecord.getMinBufferSize(...) on the sender gives the right size).
        byte[] receiveData = new byte[1280];

        // 16-bit, mono, signed, little-endian PCM.
        format = new AudioFormat(sampleRate, 16, 1, true, false);

        while (status) {
            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);

            serverSocket.receive(receivePacket);

            // Copy the payload before handing it to the playback thread:
            // receiveData is reused by the next receive(), so the original's
            // direct use of getData() raced with the loop and corrupted audio.
            final byte[] audio = Arrays.copyOf(receivePacket.getData(),
                    receivePacket.getLength());

            // Play on a separate thread so slow audio writes don't stall the
            // receive loop (this was the fix for the "chunky" audio).
            new Thread(new Runnable() {
                @Override
                public void run() {
                    toSpeaker(audio);
                }
            }).start();
        }
    }

    /**
     * Opens a speaker line, plays the given PCM bytes, and closes the line.
     *
     * @param soundbytes raw PCM samples in {@link #format}
     */
    public static void toSpeaker(byte soundbytes[]) {
        try {

            DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
            SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);

            // Open and start exactly once; the original opened and started the
            // line twice, and a second open() throws IllegalStateException.
            sourceDataLine.open(format);

            FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
            // Clamp to the control's range: 100 dB exceeds MASTER_GAIN's
            // maximum and makes setValue throw IllegalArgumentException.
            volumeControl.setValue(Math.min(100.0f, volumeControl.getMaximum()));

            sourceDataLine.start();

            sourceDataLine.write(soundbytes, 0, soundbytes.length);
            sourceDataLine.drain();
            sourceDataLine.close();
        } catch (Exception e) {
            System.out.println("Not working in speakers...");
            e.printStackTrace();
        }
    }
}

我希望这可以帮助人们减少几个小时的痛苦:)

关于java - 将实时 Android 音频流式传输到服务器,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/15349987/

25 4 0
Copyright 2021 - 2024 cfsdn All Rights Reserved 蜀ICP备2022000587号
广告合作:1813099741@qq.com 6ren.com