我有一个 android Activity ,它连接到一个 java 类并以套接字的形式向它发送数据包。该类接收声音数据包并将它们丢给 PC 扬声器。代码运行良好,但在 PC 扬声器中播放声音时会出现持续的抖动/中断。
android Activity :
public class SendActivity extends Activity {
private Button startButton, stopButton;
public byte[] buffer;
public static DatagramSocket socket;
private int port = 50005;
AudioRecord recorder;
private int sampleRate = 8000;
@SuppressWarnings("deprecation")
private int channelConfig = AudioFormat.CHANNEL_IN_MONO;
private int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
int minBufSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig,
audioFormat);
private boolean status = true;
int bufferSizeInBytes;
int bufferSizeInShorts;
int shortsRead;
short audioBuffer[];
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_send);
startButton = (Button) findViewById(R.id.start_button);
stopButton = (Button) findViewById(R.id.stop_button);
startButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
status = true;
startStreaming();
}
});
stopButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
status = false;
recorder.release();
Log.d("VS", "Recorder released");
}
});
minBufSize += 5120;
System.out.println("minBufSize: " + minBufSize);
}
public void startStreaming() {
Thread streamThread = new Thread(new Runnable() {
@Override
public void run() {
try {
DatagramSocket socket = new DatagramSocket();
Log.d("VS", "Socket Created");
byte[] buffer = new byte[minBufSize];
Log.d("VS", "Buffer created of size " + minBufSize);
DatagramPacket packet;
//machine's IP
final InetAddress destination = InetAddress
.getByName("192.168.1.20");
Log.d("VS", "Address retrieved");
recorder = new AudioRecord(MediaRecorder.AudioSource.VOICE_RECOGNITION,
sampleRate, channelConfig, audioFormat,
minBufSize * 10);
Log.d("VS", "Recorder initialized");
recorder.startRecording();
while (status == true) {
// reading data from MIC into buffer
minBufSize = recorder.read(buffer, 0, buffer.length);
// putting buffer in the packet
packet = new DatagramPacket(buffer, buffer.length,
destination, port);
socket.send(packet);
System.out.println("MinBufferSize: " + minBufSize);
}
} catch (UnknownHostException e) {
Log.e("VS", "UnknownHostException");
} catch (IOException e) {
e.printStackTrace();
Log.e("VS", "IOException");
}
}
});
streamThread.start();
}
}
android 布局:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:paddingBottom="@dimen/activity_vertical_margin"
android:paddingLeft="@dimen/activity_horizontal_margin"
android:paddingRight="@dimen/activity_horizontal_margin"
android:paddingTop="@dimen/activity_vertical_margin"
tools:context=".SendActivity" >
<Button
android:id="@+id/stop_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignBaseline="@+id/start_button"
android:layout_alignBottom="@+id/start_button"
android:layout_toRightOf="@+id/start_button"
android:text="Stop" />
<Button
android:id="@+id/start_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentLeft="true"
android:layout_alignParentTop="true"
android:layout_marginLeft="79dp"
android:layout_marginTop="163dp"
android:text="Start" />
</RelativeLayout>
Android list :
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.audiostreamsample"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="8"
android:targetSdkVersion="17" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" >
</uses-permission>
<uses-permission android:name="android.permission.INTERNET" >
</uses-permission>
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" >
</uses-permission>
<uses-permission android:name="android.permission.READ_PHONE_STATE" >
</uses-permission>
<uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
<uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
<uses-permission android:name="android.permission.GET_ACCOUNTS" />
<uses-permission android:name="android.permission.CALL_PHONE" />
<uses-permission android:name="android.permission.RECORD_AUDIO" />
<application
android:allowBackup="true"
android:icon="@drawable/ic_launcher"
android:label="@string/app_name"
android:theme="@style/AppTheme" >
<activity
android:name="com.example.audiostreamsample.SendActivity"
android:label="@string/app_name" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
接收数据包并丢给PC音箱的类:
class Server {
AudioInputStream audioInputStream;
static AudioInputStream ais;
static AudioFormat format;
static boolean status = true;
static int port = 50005;
static int sampleRate = 8000;
public static void main(String args[]) throws Exception {
DatagramSocket serverSocket = new DatagramSocket(50005);
/**
* Formula for lag = (byte_size/sample_rate)*2
* Byte size 9728 will produce ~ 0.45 seconds of lag. Voice slightly broken.
* Byte size 1400 will produce ~ 0.06 seconds of lag. Voice extremely broken.
* Byte size 4000 will produce ~ 0.18 seconds of lag. Voice slightly more broken then 9728.
*/
byte[] receiveData = new byte[5000];
format = new AudioFormat(sampleRate, 16, 1, true, false);
while (status == true) {
DatagramPacket receivePacket = new DatagramPacket(receiveData,
receiveData.length);
serverSocket.receive(receivePacket);
ByteArrayInputStream baiss = new ByteArrayInputStream(
receivePacket.getData());
ais = new AudioInputStream(baiss, format, receivePacket.getLength());
toSpeaker(receivePacket.getData());
}
}
public static void toSpeaker(byte soundbytes[]) {
try {
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, format);
SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceDataLine.open(format);
FloatControl volumeControl = (FloatControl) sourceDataLine.getControl(FloatControl.Type.MASTER_GAIN);
volumeControl.setValue(6.0206f);
sourceDataLine.start();
sourceDataLine.open(format);
sourceDataLine.start();
System.out.println("format? :" + sourceDataLine.getFormat());
sourceDataLine.write(soundbytes, 0, soundbytes.length);
System.out.println(soundbytes.toString());
sourceDataLine.drain();
sourceDataLine.close();
} catch (Exception e) {
System.out.println("Not working in speakers...");
e.printStackTrace();
}
}
}
如果您想在您的 IDE 中测试该应用程序,则只需创建两个不同的项目,一个用于 Android 应用程序,一个用于服务器类。
在android应用中只需添加 native 的IP并在设备上运行应用,手机和电脑应属于同一网络。请将服务器类作为 Java 应用程序执行。
抖动会很明显且令人恼火,但声音会或多或少清晰。请建议我如何获得更清晰的输出。
最佳答案
您需要为实际流式传输提供一些编码支持。 除了发送数据报并希望最好的结果之外,还有更多需要考虑的事情。
真实的网络并不完美。
- 延迟:数据包需要时间
- 抖动:数据包在飞行中花费的时间不是恒定的
- 丢包:有时他们不会成功。
- 重新排序:有时数据包到达的顺序与发送顺序不同。
您应该阅读简单的媒体流协议(protocol)(如 RTP),或许可以使用为两端提供 RTP 的库。 RTP 通常位于 UDP 之上。
音频的 TCP 流式传输不如 UDP/RTP 有用,因为您必须关闭 Nagling。
您至少需要在接收器端有一个小缓冲区,以防止缓冲区空导致声音丢失。
关于java - 在 java 类和 android Activity 之间流式传输时音频不清晰,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/20193645/