
Android AudioRecord to server over UDP playback problems


I'm trying to make a simple Android app that streams live microphone audio to a server for playback. The resulting playback sounds strange, with large gaps in the audio. Does anyone know what I'm doing wrong?

EDIT: Solved. It turns out I was wrongly assuming that every incoming buffer would be completely full.

Here is my Activity:

public class MainActivity extends Activity {

    private static String TAG = "AudioClient";

    // the server information
    private static final String SERVER = "xx.xx.xx.xx";
    private static final int PORT = 50005;

    // the audio recording options
    private static final int RECORDING_RATE = 44100;
    private static final int CHANNEL = AudioFormat.CHANNEL_IN_MONO;
    private static final int FORMAT = AudioFormat.ENCODING_PCM_16BIT;

    // the button the user presses to send the audio stream to the server
    private Button sendAudioButton;

    // the audio recorder
    private AudioRecord recorder;

    // the minimum buffer size needed for audio recording
    private static int BUFFER_SIZE = AudioRecord.getMinBufferSize(
            RECORDING_RATE, CHANNEL, FORMAT);

    // are we currently sending audio data
    private boolean currentlySendingAudio = false;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        Log.i(TAG, "Creating the Audio Client with minimum buffer of "
                + BUFFER_SIZE + " bytes");

        // set up the button
        sendAudioButton = (Button) findViewById(R.id.start_button);
        sendAudioButton.setOnTouchListener(new OnTouchListener() {

            @Override
            public boolean onTouch(View v, MotionEvent event) {

                switch (event.getAction()) {

                case MotionEvent.ACTION_DOWN:
                    startStreamingAudio();
                    break;

                case MotionEvent.ACTION_UP:
                    stopStreamingAudio();
                    break;
                }

                return false;
            }
        });
    }

    private void startStreamingAudio() {

        Log.i(TAG, "Starting the audio stream");
        currentlySendingAudio = true;
        startStreaming();
    }

    private void stopStreamingAudio() {

        Log.i(TAG, "Stopping the audio stream");
        currentlySendingAudio = false;
        recorder.release();
    }

    private void startStreaming() {

        Log.i(TAG, "Starting the background thread to stream the audio data");

        Thread streamThread = new Thread(new Runnable() {

            @Override
            public void run() {
                try {

                    Log.d(TAG, "Creating the datagram socket");
                    DatagramSocket socket = new DatagramSocket();

                    Log.d(TAG, "Creating the buffer of size " + BUFFER_SIZE);
                    byte[] buffer = new byte[BUFFER_SIZE];

                    Log.d(TAG, "Connecting to " + SERVER + ":" + PORT);
                    final InetAddress serverAddress = InetAddress
                            .getByName(SERVER);
                    Log.d(TAG, "Connected to " + SERVER + ":" + PORT);

                    Log.d(TAG, "Creating the reusable DatagramPacket");
                    DatagramPacket packet;

                    Log.d(TAG, "Creating the AudioRecord");
                    recorder = new AudioRecord(MediaRecorder.AudioSource.MIC,
                            RECORDING_RATE, CHANNEL, FORMAT, BUFFER_SIZE * 10);

                    Log.d(TAG, "AudioRecord recording...");
                    recorder.startRecording();

                    while (currentlySendingAudio == true) {

                        // read the data into the buffer
                        int read = recorder.read(buffer, 0, buffer.length);

                        // place contents of buffer into the packet
                        packet = new DatagramPacket(buffer, read,
                                serverAddress, PORT);

                        // send the packet
                        socket.send(packet);
                    }

                    Log.d(TAG, "AudioRecord finished recording");

                } catch (Exception e) {
                    Log.e(TAG, "Exception: " + e);
                }
            }
        });

        // start the thread
        streamThread.start();
    }
}

Here is my server-side code:

class Server {

    AudioInputStream audioInputStream;
    static AudioInputStream ais;
    static AudioFormat format;
    static boolean status = true;
    static int port = 50005;
    static int sampleRate = 11025;
    static int bufferSize = 9728;

    static Long lastTime;
    static long totalBytesReceived = 0L;

    private static final int audioStreamBufferSize = bufferSize * 20;
    static byte[] audioStreamBuffer = new byte[audioStreamBufferSize];
    private static int audioStreamBufferIndex = 0;

    public static void main(String args[]) throws Exception {

        Log("Starting the AudioServer...");

        Log("Creating the datagram socket on port " + port + "...");
        DatagramSocket serverSocket = new DatagramSocket(null);
        serverSocket.setReuseAddress(true);
        serverSocket.bind(new InetSocketAddress(port));

        Log("Creating the buffer to hold the received data of size "
                + bufferSize + "...");
        byte[] receiveData = new byte[bufferSize];

        Log("Setting the audio rate to " + sampleRate + "hz...");
        format = new AudioFormat(sampleRate, 16, 1, true, false);

        Log("Ready to receive audio data");
        while (status == true) {

            DatagramPacket receivePacket = new DatagramPacket(receiveData,
                    receiveData.length);
            serverSocket.receive(receivePacket);
            bufferAudioForPlayback(receivePacket.getData(),
                    receivePacket.getOffset(), receivePacket.getLength());
        }

        serverSocket.close();
    }

    private static void bufferAudioForPlayback(byte[] buffer, int offset,
            int length) {

        byte[] actualBytes = new byte[length];

        for (int i = 0; i < length; i++) {
            actualBytes[i] = buffer[i];
        }

        for (byte sample : actualBytes) {

            int percentage = (int) (((double) audioStreamBufferIndex
                    / (double) audioStreamBuffer.length) * 100.0);
            Log("buffer is " + percentage + "% full");

            audioStreamBuffer[audioStreamBufferIndex] = sample;
            audioStreamBufferIndex++;
            Log("Buffer " + audioStreamBufferIndex + " / "
                    + audioStreamBuffer.length + " " + percentage);

            if (audioStreamBufferIndex == audioStreamBuffer.length - 1) {
                toSpeaker(audioStreamBuffer);
                audioStreamBufferIndex = 0;
                System.exit(0);
            }
        }
    }

    private static void Log(String log) {
        System.out.println(log);
    }

    public static void toSpeaker(byte soundbytes[]) {
        try {

            DataLine.Info dataLineInfo = new DataLine.Info(
                    SourceDataLine.class, format);
            SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem
                    .getLine(dataLineInfo);

            sourceDataLine.open(format);

            FloatControl volumeControl = (FloatControl) sourceDataLine
                    .getControl(FloatControl.Type.MASTER_GAIN);
            volumeControl.setValue(100.0f);

            sourceDataLine.start();
            sourceDataLine.open(format);
            sourceDataLine.start();
            sourceDataLine.write(soundbytes, 0, soundbytes.length);
            sourceDataLine.drain();
            sourceDataLine.close();
        } catch (Exception e) {
            System.out.println("Error with audio playback: " + e);
            e.printStackTrace();
        }
    }
}
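
As a side note (not part of the original post): the same "use the actual length" idea applies on the playback side. Below is a minimal sketch, assuming the same format, port, and bufferSize fields declared in the Server class above plus the usual java.net.* and javax.sound.sampled.* imports, that opens the SourceDataLine once and then writes only the bytes each packet actually carries:

    // Hypothetical sketch, not the poster's code: play each UDP packet as it
    // arrives, writing exactly the received byte count to the line.
    public static void playFromSocket() throws Exception {
        DatagramSocket socket = new DatagramSocket(port);

        SourceDataLine line = (SourceDataLine) AudioSystem.getLine(
                new DataLine.Info(SourceDataLine.class, format));
        line.open(format);   // open and start the line once, not per chunk
        line.start();

        byte[] receiveData = new byte[bufferSize];
        while (true) {
            DatagramPacket packet = new DatagramPacket(receiveData, receiveData.length);
            socket.receive(packet);

            // write only the received bytes, never the whole backing array
            // (for 16-bit mono PCM each frame is 2 bytes, so lengths should be even)
            line.write(packet.getData(), packet.getOffset(), packet.getLength());
        }
    }

Opening the line once and feeding it per packet avoids repeatedly opening and closing the device for every chunk, though real code would still need to handle packet loss and jitter.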

Finally, here is the layout XML file for the main Activity:

<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical"
    android:layout_width="fill_parent"
    android:layout_height="fill_parent"
    android:padding="20dip">

    <ImageView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:src="@drawable/ic_launcher"
        android:scaleType="fitCenter"/>

    <TextView
        android:layout_width="fill_parent"
        android:layout_height="wrap_content"
        android:text="@string/app_info"
        android:layout_weight="1.0"
        android:textSize="20dip"/>

    <LinearLayout
        android:orientation="horizontal"
        android:layout_width="fill_parent"
        android:layout_height="wrap_content">

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStart"
            android:text="@string/start_recording"
            android:layout_weight="1.0"/>

        <Button
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:id="@+id/btnStop"
            android:text="@string/stop_recording"
            android:layout_weight="1.0"/>
    </LinearLayout>
</LinearLayout>

EDIT: the audio plays back something like suh-suh-suh-suh-o-ou-ou-ou-nds-nds-ds.

Best Answer

You could try the following. Instead of this:

// read the data into the buffer
recorder.read(buffer, 0, buffer.length);

// place contents of buffer into the packet
packet = new DatagramPacket(buffer, buffer.length, serverAddress, PORT);

don't expect to get a completely filled buffer back from the recorder; use the number of bytes it actually read:

// read the data into the buffer
int read = recorder.read(buffer, 0, buffer.length);

// place contents of buffer into the packet
packet = new DatagramPacket(buffer, read, serverAddress, PORT);

...or something along those lines.
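
To spell out why the full-buffer assumption causes the stutter: when read() returns fewer bytes than buffer.length, the tail of the buffer still holds stale samples from an earlier read, so sending buffer.length bytes re-transmits those old fragments. Below is a sketch of the send loop with the fix applied, plus a guard against the negative error codes read() can return; that guard is an addition, not part of the accepted answer. It is meant as a drop-in replacement for the while loop in the Activity above:

    while (currentlySendingAudio) {

        // read the data into the buffer; 'read' is the number of bytes
        // actually filled, which may be less than buffer.length
        int read = recorder.read(buffer, 0, buffer.length);

        // AudioRecord.read() returns a negative error code (for example
        // ERROR_INVALID_OPERATION or ERROR_BAD_VALUE) on failure, so skip
        // those iterations instead of sending garbage
        if (read <= 0) {
            continue;
        }

        // only the freshly read bytes go into the packet, so no stale
        // data from a previous iteration is re-sent
        packet = new DatagramPacket(buffer, read, serverAddress, PORT);
        socket.send(packet);
    }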

Regarding Android AudioRecord to server over UDP playback problems, we found a similar question on Stack Overflow: https://stackoverflow.com/questions/15955958/
