Android WebRTC Tutorial, Part 1 (A Simple One-to-One Call)
Getting started with WebRTC on Android is actually quite simple, so don't let scattered blog posts throw you off. Several posts I read only hand you fragments of code that can't produce a working call. The right way to learn this is to understand the overall flow first, and only then work through the detailed steps.
This article briefly explains the overall flow of a WebRTC call and then gives the complete code, which you can copy and paste as-is.
1. For user A and user B to establish a connection with each other, the key is that both sides obtain each other's SDP and ICE information.
The SDP is a piece of text describing what the local device supports, for example whether it can encode H.264 and which transport protocol it uses.
ICE is also text; together with a STUN/TURN (NAT hole-punching) server it lets both sides learn each other's public IP and port, which makes peer-to-peer communication possible.
For a simple introductory example, don't try to understand every term in depth; it is enough to know roughly what each one is for. You can dig into the details once the basics work.
Put simply, SDP is a device-description text and ICE is an IP/port-description text.
2. Creating and using SDP and ICE
Both user A and user B need to call setLocalDescription and setRemoteDescription.
The caller creates its SDP with createOffer; the callee creates its SDP with createAnswer.
ICE candidates are requested automatically from the STUN/TURN servers after the PeerConnection is created and are delivered through a callback. The signaling messages the two sides exchange to carry this information are summarized in the sketch below.
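To make the flow concrete before diving in, here is a small illustrative sketch (my own summary, not part of the original project) of the JSON signaling messages the demo below sends over the WebSocket. The class name SignalMessages is made up for illustration; the cmd/uid/remoteUid/roomId/msg fields match the code later in this article, and org.json is used because it ships with Android. The server additionally pushes cmd_new_peer to existing room members when someone joins.
import org.json.JSONException;
import org.json.JSONObject;

public class SignalMessages {
    // Sent by a client when the user taps the join button.
    static JSONObject joinRoom(String uid, String roomId) throws JSONException {
        return new JSONObject().put("cmd", "cmd_join_room").put("uid", uid).put("roomId", roomId);
    }
    // Caller sends its offer SDP to the callee (relayed by the signaling server).
    static JSONObject offer(String uid, String remoteUid, String roomId, String sdp) throws JSONException {
        return new JSONObject().put("cmd", "cmd_offer").put("uid", uid)
                .put("remoteUid", remoteUid).put("roomId", roomId).put("msg", sdp);
    }
    // Callee answers with its own SDP.
    static JSONObject answer(String uid, String remoteUid, String roomId, String sdp) throws JSONException {
        return new JSONObject().put("cmd", "cmd_answer").put("uid", uid)
                .put("remoteUid", remoteUid).put("roomId", roomId).put("msg", sdp);
    }
    // Each side sends every ICE candidate it gathers to the other side.
    static JSONObject ice(String uid, String remoteUid, String roomId,
                          String sdpMid, int sdpMLineIndex, String sdp) throws JSONException {
        JSONObject candidate = new JSONObject().put("sdpMid", sdpMid)
                .put("sdpMLineIndex", sdpMLineIndex).put("sdp", sdp);
        return new JSONObject().put("cmd", "cmd_ice").put("uid", uid)
                .put("remoteUid", remoteUid).put("roomId", roomId).put("msg", candidate);
    }
}
The order is: caller sends cmd_offer, callee replies with cmd_answer, and both sides keep sending cmd_ice messages as candidates are gathered.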
Now let's implement it step by step.
XML layout
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:background="#000"
tools:context=".ui.SimpleMeetingActivity">
<!--Renders the remote video-->
<org.webrtc.SurfaceViewRenderer
android:layout_marginBottom="200dp"
android:id="@+id/ivRemoteRender"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
<!--Renders the local video-->
<org.webrtc.SurfaceViewRenderer
android:id="@+id/ivLocalRender"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:layout_width="150dp"
android:layout_height="200dp"/>
<!--Status text; the Activity below looks it up as R.id.ivStatus-->
<TextView
android:id="@+id/ivStatus"
android:textColor="#FFFFFF"
android:layout_marginBottom="140dp"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:layout_width="wrap_content"
android:layout_height="wrap_content"/>
<!--Join button-->
<View
android:id="@+id/ivJoin"
android:layout_marginBottom="20dp"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:background="#4CAF50"
android:layout_width="100dp"
android:layout_height="100dp"/>
</androidx.constraintlayout.widget.ConstraintLayout>
1. Add the WebRTC Android and WebSocket dependencies
implementation 'org.webrtc:google-webrtc:1.0.32006'
//WebSocket dependency
implementation 'org.java-websocket:Java-WebSocket:1.4.0'
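The article never shows it, but camera and microphone capture only work if the app holds the CAMERA and RECORD_AUDIO permissions (plus INTERNET for the WebSocket and ICE traffic) declared in the manifest. A minimal runtime-request sketch, assuming it is placed inside the Activity and called before step 6 starts capturing; REQUEST_MEDIA is an arbitrary request code of my choosing:
// Minimal permission check/request sketch (assumption: called from the Activity's onCreate
// before startCapture). REQUEST_MEDIA is an arbitrary request code.
private static final int REQUEST_MEDIA = 1001;

private boolean ensureMediaPermissions() {
    String[] perms = {android.Manifest.permission.CAMERA, android.Manifest.permission.RECORD_AUDIO};
    for (String p : perms) {
        if (androidx.core.content.ContextCompat.checkSelfPermission(this, p)
                != android.content.pm.PackageManager.PERMISSION_GRANTED) {
            androidx.core.app.ActivityCompat.requestPermissions(this, perms, REQUEST_MEDIA);
            return false; // wait for onRequestPermissionsResult before starting capture
        }
    }
    return true;
}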
2. Global initialization
//Before using PeerConnectionFactory for the first time, the static initialize() method must be called to do global initialization and load resources
PeerConnectionFactory.InitializationOptions initializationOptions =
PeerConnectionFactory.InitializationOptions
.builder(this)
.setEnableInternalTracer(true)// enable the internal tracer, which records some diagnostic data
.createInitializationOptions();
PeerConnectionFactory.initialize(initializationOptions);
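Note that the snippets from step 3 onward use mRootEGL. As in the full source at the end of the article, it is created once up front (and released in onDestroy):
//Shared EGL/OpenGL context used by the codec factories, the SurfaceTextureHelper and both SurfaceViewRenderers
EglBase mRootEGL = EglBase.create();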
3. Create the PeerConnectionFactory
//-----------Create the video encoder and decoder factories
VideoEncoderFactory encoderFactory = new DefaultVideoEncoderFactory(mRootEGL.getEglBaseContext(),
true, true);
VideoDecoderFactory decoderFactory = new DefaultVideoDecoderFactory(mRootEGL.getEglBaseContext());
//-----------Create the PeerConnectionFactory
AudioDeviceModule adm = JavaAudioDeviceModule.builder(this).createAudioDeviceModule();//audio module: the Java implementation is used here, a native one also exists
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
//options.disableEncryption : true disables media encryption
//options.disableNetworkMonitor : true disables the network monitor
factory = PeerConnectionFactory.builder()
.setOptions(options)//network options, defaults are used here
.setAudioDeviceModule(adm)//audio capture/playback configuration, currently Java's AudioTrack and AudioRecord
.setVideoEncoderFactory(encoderFactory)
.setVideoDecoderFactory(decoderFactory)
.createPeerConnectionFactory();
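As the comment above notes, JavaAudioDeviceModule is the Java-based audio module. Its builder exposes a few optional switches; a hedged variant (my addition, not what the article uses, and typically already the default when the hardware supports it) that explicitly enables the device's hardware echo canceller and noise suppressor:
//Optional: explicitly enable the hardware AEC/NS on devices that support them
AudioDeviceModule adm = JavaAudioDeviceModule.builder(this)
        .setUseHardwareAcousticEchoCanceler(true)
        .setUseHardwareNoiseSuppressor(true)
        .createAudioDeviceModule();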
4. Create the audio source
//Configure the audio constraints
MediaConstraints audioConstraints = new MediaConstraints();
//echo cancellation
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
//automatic gain control
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googAutoGainControl", "true"));
//high-pass filter
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googHighpassFilter", "true"));
//noise suppression
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
audioSource = factory.createAudioSource(audioConstraints);
//Create the audio track
localAudioTrack = factory.createAudioTrack("102", audioSource);
5. Create the video source
//ScreenCapturerAndroid captures the screen; FileVideoCapturer reads from a video file
//Here the video comes from the camera
CameraEnumerator enumerator = new Camera2Enumerator(this);
String[] deviceNames = enumerator.getDeviceNames();
//Iterate over all cameras and pick the front-facing one
for (String deviceName : deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
videoCapturer = enumerator.createCapturer(deviceName, null);
}
}
assert videoCapturer != null;
videoSource = factory.createVideoSource(videoCapturer.isScreencast());
//Create the video track
videoTrack = factory.createVideoTrack("103", videoSource);
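The import list in the full source also includes Camera1Enumerator. A small fallback sketch (my addition, not in the original code) for devices where the Camera2 API is not usable:
//Prefer Camera2 where the device supports it, otherwise fall back to the legacy Camera1 API
CameraEnumerator enumerator = Camera2Enumerator.isSupported(this)
        ? new Camera2Enumerator(this)
        : new Camera1Enumerator(true /* captureToTexture */);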
6. Render the local video
//OpenGL is used for rendering
surfaceTextureHelper = SurfaceTextureHelper.create("capture-thread", mRootEGL.getEglBaseContext());
videoCapturer.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
//Start capturing
videoCapturer.startCapture(
720,//width
1080,//height
25//frames per second
);
//Render the local video
localRender.init(mRootEGL.getEglBaseContext(), null);
//SCALE_ASPECT_FILL fills the view while keeping the aspect ratio; the picture may be cropped
//SCALE_ASPECT_FIT fits the whole picture into the view; black bars may appear
//SCALE_ASPECT_BALANCED a compromise between the two: keeps the aspect ratio and fills the view as much as possible while allowing only limited cropping
localRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
localRender.setMirror(true);//mirror the image
videoTrack.addSink(localRender);//finally attach the renderer
7. Initialize the remote renderer
//Initialize the remote renderer; because OpenGL is used, this must be done on the main thread
remoteRender.init(mRootEGL.getEglBaseContext(), null);
remoteRender.setMirror(true);//mirror the image
remoteRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
8. Set up the ICE (STUN/TURN) server
//Installing the coturn package on an Ubuntu server is enough to act as the ICE server
sudo apt-get install libssl-dev
sudo apt-get install libevent-dev
git clone https://github.com/coturn/coturn
cd coturn
./configure
make
sudo make install
//Start the server; wzp:123456 means username:password and can also be set in the config file
sudo nohup turnserver -L 0.0.0.0 -a -u wzp:123456 -v -f -r nort.text &
//Check the listening port; the default is 3478
sudo lsof -i:3478
9. Create the PeerConnection
//List of ICE servers
List<PeerConnection.IceServer> iceServers = new ArrayList<>();
//Add a TURN server. TURN is mainly used to relay the media when STUN hole-punching fails; you can add more than one
iceServers.add(
PeerConnection.IceServer.builder("turn:**.**.**.**:3478")//your server's address
.setUsername("wzp")//username
.setPassword("123456")//password
.createIceServer());
//Add a STUN server
iceServers.add(PeerConnection.IceServer.builder("stun:**.**.**.**:3478").createIceServer());
peerConnection = factory.createPeerConnection(iceServers, new PeerConnection.Observer() {
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
//Called when the ICE connection state changes
}
@Override
public void onIceConnectionReceivingChange(boolean b) {
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
}
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
//Called whenever a candidate has been gathered from the STUN/TURN servers
//Send the ICE candidate to the remote peer. The code below only forwards it over my WebSocket signaling channel; you can use any other transport. The server-side code is given at the end of the article.
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_ice);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
JSONObject msgObj = new JSONObject();
msgObj.put("sdpMid", iceCandidate.sdpMid);
msgObj.put("sdpMLineIndex", iceCandidate.sdpMLineIndex);
msgObj.put("sdp", iceCandidate.sdp);
sendObj.put("msg", msgObj);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
}
@Override
public void onAddStream(MediaStream mediaStream) {
//A remote media stream has been received
mediaStream.videoTracks.get(0).addSink(remoteRender);
mediaStream.audioTracks.get(0).setEnabled(true);
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
}
@Override
public void onDataChannel(DataChannel dataChannel) {
}
@Override
public void onRenegotiationNeeded() {
}
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
}
});
// List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
// peerConnection.addTrack(videoTrack,mediaStreamLabels);
// peerConnection.addTrack(localAudioTrack,mediaStreamLabels);
//Add the local stream to the peerConnection; the remote side will receive it in its onAddStream callback
MediaStream stream = factory.createLocalMediaStream("110");
stream.addTrack(videoTrack);
stream.addTrack(localAudioTrack);
peerConnection.addStream(stream);
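The commented-out addTrack lines above hint at the newer track-based API. If you prefer it over addStream, a rough sketch follows (my adaptation, not what the article uses; observer stands for the same PeerConnection.Observer shown above). Under Unified Plan the remote media then arrives in onAddTrack rather than onAddStream.
//Track-based alternative to addStream (sketch only)
PeerConnection.RTCConfiguration rtcConfig = new PeerConnection.RTCConfiguration(iceServers);
rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN;
peerConnection = factory.createPeerConnection(rtcConfig, observer);
List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
peerConnection.addTrack(videoTrack, mediaStreamLabels);
peerConnection.addTrack(localAudioTrack, mediaStreamLabels);
//In onAddTrack, attach the sink to the received video track, for example:
//if (rtpReceiver.track() instanceof VideoTrack) ((VideoTrack) rtpReceiver.track()).addSink(remoteRender);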
10. Establish the WebSocket connection. This is only meant to demonstrate the flow, so the implementation is kept deliberately simple; the important part is understanding the overall process.
URI uri = null;//signaling server address
try {
uri = new URI("ws://192.168.2.134:8090");
} catch (Exception e) {
}
socket = new WebSocketClient(uri) {
@Override
public void onOpen(ServerHandshake handshakedata) {
if (isDestroyed()) {
return;
}
Log.e(tag,"链接socket成功");
}
@Override
public void onMessage(String message) {
if (isDestroyed()) {
return;
}
try {
JSONObject msgObj = new JSONObject(message);
String cmd = msgObj.getString("cmd");
Log.e(tag,"收到消息:" + message);
if (cmd.equals(cmd_new_peer)) {
//有新人加入房间
handleNewPeer(msgObj);
return;
}
if (cmd.equals(cmd_offer)) {
//收到offer请求
handleOffer(msgObj);
return;
}
if (cmd.equals(cmd_answer)) {
//收到answer请求
handleAnswer(msgObj);
return;
}
if (cmd.equals(cmd_ice)) {
//收到ice信息
handleIce(msgObj);
}
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onClose(int code, String reason, boolean remote) {
if (isDestroyed()) {
return;
}
}
@Override
public void onError(Exception ex) {
if (isDestroyed()) {
return;
}
Log.e(tag,"socket错误" + ex.toString());
}
};
socket.connect();
11. The caller creates the offer, calls setLocalDescription, and finally sends the offer to the callee
private void handleNewPeer(JSONObject msgObj) {
/*A new peer has joined the room: create an offer and start the call*/
try {
//the callee's uid
remoteUid = msgObj.getString("uid");
MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
peerConnection.createOffer(new SdpObserver() {
private SessionDescription localSdp;
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
//Called when the offer has been created successfully
localSdp = sessionDescription;
peerConnection.setLocalDescription(this, sessionDescription);
}
@Override
public void onSetSuccess() {
//Called after setLocalDescription succeeds
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_offer);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
sendObj.put("msg", localSdp.description);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, constraints);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
12. After receiving the offer, the callee calls setRemoteDescription, creates an answer, calls setLocalDescription, and finally sends the answer to the caller
private void handleOffer(JSONObject msgObj) {
//Received an offer; we are the callee
try {
//the caller's uid
remoteUid = msgObj.getString("uid");
String sdpDescription = msgObj.getString("msg");
SessionDescription sdp = new SessionDescription(SessionDescription.Type.OFFER, sdpDescription);
peerConnection.setRemoteDescription(new SdpObserver() {
private boolean isCreateAnswer;
private String sdpDescription;
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
//Called when createAnswer has succeeded
Log.e(tag,"answer created");
sdpDescription = sessionDescription.description;
peerConnection.setLocalDescription(this, sessionDescription);
}
@Override
public void onSetSuccess() {
//Called when setRemoteDescription or setLocalDescription succeeds
if (!isCreateAnswer) {
Log.e(tag,"onSetSuccess1");
//The answer has not been created yet, so this is the setRemoteDescription callback
isCreateAnswer = true;
MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
peerConnection.createAnswer(this, constraints);
} else {
Log.e(tag,"onSetSuccess2");
//This is the setLocalDescription callback
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_answer);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
sendObj.put("msg", sdpDescription);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, sdp);
} catch (JSONException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
13. The caller receives the answer and calls setRemoteDescription
private void handleAnswer(JSONObject msgObj) {
//Received an answer; we are the caller
try {
String sdpDescription = msgObj.getString("msg");
SessionDescription sdp = new SessionDescription(SessionDescription.Type.ANSWER, sdpDescription);
peerConnection.setRemoteDescription(new SdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
}
@Override
public void onSetSuccess() {
// setRemoteDescription succeeded
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, sdp);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
14. Each side calls addIceCandidate when it receives the other side's ICE message
private void handleIce(JSONObject msgObj) {
//Received the remote peer's ICE candidate
try {
Log.e(tag,"adding remote ICE candidate");
JSONObject iceObj = msgObj.getJSONObject("msg");
IceCandidate iceCandidate = new IceCandidate(iceObj.getString("sdpMid"),
iceObj.getInt("sdpMLineIndex"),
iceObj.getString("sdp"));
peerConnection.addIceCandidate(iceCandidate);
} catch (JSONException e) {
Log.e(tag,"ice设置失败:" + e.getMessage());
throw new RuntimeException(e);
}
}
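One robustness detail the demo skips: if a cmd_ice message arrives before setRemoteDescription has completed, addIceCandidate can fail silently. A common refinement (my addition, not part of the original code) is to buffer candidates until the remote description is in place:
//Hypothetical helper: queue remote candidates until the remote description has been set
private final List<IceCandidate> pendingCandidates = new ArrayList<>();
private boolean remoteDescriptionSet = false;

private void addOrQueueCandidate(IceCandidate candidate) {
    if (remoteDescriptionSet) {
        peerConnection.addIceCandidate(candidate);
    } else {
        pendingCandidates.add(candidate);
    }
}

//Call this from onSetSuccess() of the setRemoteDescription observers in steps 12 and 13
private void drainPendingCandidates() {
    remoteDescriptionSet = true;
    for (IceCandidate candidate : pendingCandidates) {
        peerConnection.addIceCandidate(candidate);
    }
    pendingCandidates.clear();
}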
15. Clean-up
@Override
protected void onDestroy() {
if (socket != null && socket.isOpen()) {
socket.close();
}
socket = null;
if(peerConnection != null){
peerConnection.close();
peerConnection = null;
}
if(videoSource != null){
videoSource.dispose();
videoSource = null;
}
if (audioSource != null) {
audioSource.dispose();
audioSource = null;
}
if(videoCapturer != null){
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
videoCapturer = null;
}
if(surfaceTextureHelper != null){
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
localRender.release();
remoteRender.release();
if(factory != null){
factory.dispose();
factory = null;
}
if(mRootEGL != null){
mRootEGL.release();
mRootEGL = null;
}
super.onDestroy();
}
Finally, here is the complete Java source code.
package com.example.rtcmy.ui;
import androidx.appcompat.app.AppCompatActivity;
import androidx.databinding.BindingConversion;
import androidx.databinding.DataBindingUtil;
import androidx.databinding.ObservableField;
import android.database.Observable;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.TextView;
import com.example.rtcmy.R;
import org.java_websocket.client.WebSocketClient;
import org.java_websocket.handshake.ServerHandshake;
import org.json.JSONException;
import org.json.JSONObject;
import org.webrtc.AudioSource;
import org.webrtc.AudioTrack;
import org.webrtc.Camera1Enumerator;
import org.webrtc.Camera2Enumerator;
import org.webrtc.CameraEnumerator;
import org.webrtc.DataChannel;
import org.webrtc.DefaultVideoDecoderFactory;
import org.webrtc.DefaultVideoEncoderFactory;
import org.webrtc.EglBase;
import org.webrtc.IceCandidate;
import org.webrtc.Logging;
import org.webrtc.MediaConstraints;
import org.webrtc.MediaStream;
import org.webrtc.PeerConnection;
import org.webrtc.PeerConnectionFactory;
import org.webrtc.RendererCommon;
import org.webrtc.RtpReceiver;
import org.webrtc.ScreenCapturerAndroid;
import org.webrtc.SdpObserver;
import org.webrtc.SessionDescription;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.SurfaceViewRenderer;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoDecoder;
import org.webrtc.VideoDecoderFactory;
import org.webrtc.VideoEncoderFactory;
import org.webrtc.VideoSource;
import org.webrtc.VideoTrack;
import org.webrtc.audio.AudioDeviceModule;
import org.webrtc.audio.JavaAudioDeviceModule;
import java.net.URI;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
public class SimpleMeetingActivity extends AppCompatActivity {
private WebSocketClient socket;
private String roomId = "1";//room id
private String cmd_join_room = "cmd_join_room";//command: join a room
private String cmd_new_peer = "cmd_new_peer";//command: a new peer has joined the room
private String cmd_offer = "cmd_offer";
private String cmd_answer = "cmd_answer";
private String cmd_ice = "cmd_ice";
private SurfaceViewRenderer remoteRender;
private SurfaceViewRenderer localRender;
private TextView ivStatus;
private View ivJoin;
private EglBase mRootEGL;
private PeerConnectionFactory factory;
private VideoCapturer videoCapturer;
private AudioSource audioSource;
private AudioTrack localAudioTrack;
private VideoSource videoSource;
private VideoTrack videoTrack;
private List<PeerConnection.IceServer> iceServers = new ArrayList<>();
private SurfaceTextureHelper surfaceTextureHelper;
private PeerConnection peerConnection;
private String remoteUid;
private String uid = UUID.randomUUID().toString();
private String tag = "simpleWebrtc";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_simple_metting);
ivStatus = findViewById(R.id.ivStatus);
ivJoin = findViewById(R.id.ivJoin);
remoteRender = findViewById(R.id.ivRemoteRender);
localRender = findViewById(R.id.ivLocalRender);
//enable logging
iceServers.add(
PeerConnection.IceServer.builder("turn:123.60.151.37:3478")
.setUsername("wzp")
.setPassword("123456")
.createIceServer());
iceServers.add(PeerConnection.IceServer.builder("stun:123.60.151.37:3478").createIceServer());
mRootEGL = EglBase.create();
//Before using PeerConnectionFactory for the first time, the static initialize() method must be called to do global initialization and load resources
PeerConnectionFactory.InitializationOptions initializationOptions =
PeerConnectionFactory.InitializationOptions
.builder(this)
.setEnableInternalTracer(true)// enable the internal tracer, which records some diagnostic data
.createInitializationOptions();
PeerConnectionFactory.initialize(initializationOptions);
//-----------Create the video encoder and decoder factories
VideoEncoderFactory encoderFactory = new DefaultVideoEncoderFactory(mRootEGL.getEglBaseContext(),
true, true);
VideoDecoderFactory decoderFactory = new DefaultVideoDecoderFactory(mRootEGL.getEglBaseContext());
//-----------Create the PeerConnectionFactory
AudioDeviceModule adm = JavaAudioDeviceModule.builder(this).createAudioDeviceModule();//audio module: the Java implementation is used here, a native one also exists
PeerConnectionFactory.Options options = new PeerConnectionFactory.Options();
//options.disableEncryption : true disables media encryption
//options.disableNetworkMonitor : true disables the network monitor
factory = PeerConnectionFactory.builder()
.setOptions(options)//network options, defaults are used here
.setAudioDeviceModule(adm)//audio capture/playback configuration, currently Java's AudioTrack and AudioRecord
.setVideoEncoderFactory(encoderFactory)
.setVideoDecoderFactory(decoderFactory)
.createPeerConnectionFactory();
//-----------Create the audio source
//Configure the audio constraints
MediaConstraints audioConstraints = new MediaConstraints();
//echo cancellation
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googEchoCancellation", "true"));
//automatic gain control
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googAutoGainControl", "true"));
//high-pass filter
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googHighpassFilter", "true"));
//noise suppression
audioConstraints.mandatory.add(new MediaConstraints.KeyValuePair("googNoiseSuppression", "true"));
audioSource = factory.createAudioSource(audioConstraints);
localAudioTrack = factory.createAudioTrack("102", audioSource);
//----------Create the video source
//ScreenCapturerAndroid captures the screen; FileVideoCapturer reads from a video file
//Here the video comes from the camera
CameraEnumerator enumerator = new Camera2Enumerator(this);
String[] deviceNames = enumerator.getDeviceNames();
//Iterate over all cameras and pick the front-facing one
for (String deviceName : deviceNames) {
if (enumerator.isFrontFacing(deviceName)) {
videoCapturer = enumerator.createCapturer(deviceName, null);
}
}
assert videoCapturer != null;
videoSource = factory.createVideoSource(videoCapturer.isScreencast());
//Create the video track
videoTrack = factory.createVideoTrack("103", videoSource);
//OpenGL is used for rendering
surfaceTextureHelper = SurfaceTextureHelper.create("capture-thread", mRootEGL.getEglBaseContext());
videoCapturer.initialize(surfaceTextureHelper, this, videoSource.getCapturerObserver());
//Start capturing
videoCapturer.startCapture(
720,//width
1080,//height
25//frames per second
);
//Render the local video
localRender.init(mRootEGL.getEglBaseContext(), null);
//SCALE_ASPECT_FILL fills the view while keeping the aspect ratio; the picture may be cropped
//SCALE_ASPECT_FIT fits the whole picture into the view; black bars may appear
//SCALE_ASPECT_BALANCED a compromise between the two: keeps the aspect ratio and fills the view as much as possible while allowing only limited cropping
localRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
localRender.setMirror(true);//mirror the image
videoTrack.addSink(localRender);//finally attach the renderer
//Initialize the remote renderer; because OpenGL is used, this must be done on the main thread
remoteRender.init(mRootEGL.getEglBaseContext(), null);
remoteRender.setMirror(true);//mirror the image
remoteRender.setScalingType(RendererCommon.ScalingType.SCALE_ASPECT_FILL);
//Create the peerConnection
peerConnection = factory.createPeerConnection(iceServers, new PeerConnection.Observer() {
@Override
public void onSignalingChange(PeerConnection.SignalingState signalingState) {
}
@Override
public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectionState) {
//Called when the ICE connection state changes
}
@Override
public void onIceConnectionReceivingChange(boolean b) {
}
@Override
public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringState) {
}
@Override
public void onIceCandidate(IceCandidate iceCandidate) {
//Called whenever a candidate has been gathered from the STUN/TURN servers
//Send the ICE candidate to the remote peer
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_ice);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
JSONObject msgObj = new JSONObject();
msgObj.put("sdpMid", iceCandidate.sdpMid);
msgObj.put("sdpMLineIndex", iceCandidate.sdpMLineIndex);
msgObj.put("sdp", iceCandidate.sdp);
sendObj.put("msg", msgObj);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onIceCandidatesRemoved(IceCandidate[] iceCandidates) {
}
@Override
public void onAddStream(MediaStream mediaStream) {
//A remote media stream has been received
mediaStream.videoTracks.get(0).addSink(remoteRender);
mediaStream.audioTracks.get(0).setEnabled(true);
}
@Override
public void onRemoveStream(MediaStream mediaStream) {
}
@Override
public void onDataChannel(DataChannel dataChannel) {
}
@Override
public void onRenegotiationNeeded() {
}
@Override
public void onAddTrack(RtpReceiver rtpReceiver, MediaStream[] mediaStreams) {
}
});
// List<String> mediaStreamLabels = Collections.singletonList("ARDAMS");
// peerConnection.addTrack(videoTrack,mediaStreamLabels);
// peerConnection.addTrack(localAudioTrack,mediaStreamLabels);
//Add the local stream to the peerConnection; the remote side will receive it in its onAddStream callback
MediaStream stream = factory.createLocalMediaStream("110");
stream.addTrack(videoTrack);
stream.addTrack(localAudioTrack);
peerConnection.addStream(stream);
createSocket();
ivJoin.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (!socket.isOpen()) {
return;
}
ivStatus.setText("正在加入房间...");
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_join_room);
sendObj.put("uid", uid);
sendObj.put("roomId", roomId);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
}
private void createSocket() {
if (socket != null) {
socket.close();
}
URI uri = null;//signaling server address
try {
uri = new URI("ws://192.168.2.134:8090");
} catch (Exception e) {
}
socket = new WebSocketClient(uri) {
@Override
public void onOpen(ServerHandshake handshakedata) {
if (isDestroyed()) {
return;
}
Log.e(tag,"链接socket成功");
}
@Override
public void onMessage(String message) {
if (isDestroyed()) {
return;
}
try {
JSONObject msgObj = new JSONObject(message);
String cmd = msgObj.getString("cmd");
Log.e(tag,"收到消息:" + message);
if (cmd.equals(cmd_new_peer)) {
//有新人加入房间
handleNewPeer(msgObj);
return;
}
if (cmd.equals(cmd_offer)) {
//收到offer请求
handleOffer(msgObj);
return;
}
if (cmd.equals(cmd_answer)) {
//收到answer请求
handleAnswer(msgObj);
return;
}
if (cmd.equals(cmd_ice)) {
//收到ice信息
handleIce(msgObj);
}
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onClose(int code, String reason, boolean remote) {
if (isDestroyed()) {
return;
}
}
@Override
public void onError(Exception ex) {
if (isDestroyed()) {
return;
}
Log.e(tag,"socket错误" + ex.toString());
}
};
socket.connect();
}
private void handleIce(JSONObject msgObj) {
//Received the remote peer's ICE candidate
try {
Log.e(tag,"adding remote ICE candidate");
JSONObject iceObj = msgObj.getJSONObject("msg");
IceCandidate iceCandidate = new IceCandidate(iceObj.getString("sdpMid"),
iceObj.getInt("sdpMLineIndex"),
iceObj.getString("sdp"));
peerConnection.addIceCandidate(iceCandidate);
} catch (JSONException e) {
Log.e(tag,"ice设置失败:" + e.getMessage());
throw new RuntimeException(e);
}
}
private void handleAnswer(JSONObject msgObj) {
//Received an answer; we are the caller
try {
String sdpDescription = msgObj.getString("msg");
SessionDescription sdp = new SessionDescription(SessionDescription.Type.ANSWER, sdpDescription);
peerConnection.setRemoteDescription(new SdpObserver() {
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
}
@Override
public void onSetSuccess() {
// setRemoteDescription succeeded
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, sdp);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
private void handleOffer(JSONObject msgObj) {
//Received an offer; we are the callee
try {
//the caller's uid
remoteUid = msgObj.getString("uid");
String sdpDescription = msgObj.getString("msg");
SessionDescription sdp = new SessionDescription(SessionDescription.Type.OFFER, sdpDescription);
peerConnection.setRemoteDescription(new SdpObserver() {
private boolean isCreateAnswer;
private String sdpDescription;
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
//Called when createAnswer has succeeded
Log.e(tag,"answer created");
sdpDescription = sessionDescription.description;
peerConnection.setLocalDescription(this, sessionDescription);
}
@Override
public void onSetSuccess() {
//Called when setRemoteDescription or setLocalDescription succeeds
if (!isCreateAnswer) {
Log.e(tag,"onSetSuccess1");
//The answer has not been created yet, so this is the setRemoteDescription callback
isCreateAnswer = true;
MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
peerConnection.createAnswer(this, constraints);
} else {
Log.e(tag,"onSetSuccess2");
//This is the setLocalDescription callback
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_answer);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
sendObj.put("msg", sdpDescription);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, sdp);
} catch (JSONException e) {
e.printStackTrace();
throw new RuntimeException(e);
}
}
private void handleNewPeer(JSONObject msgObj) {
/*A new peer has joined the room: create an offer and start the call*/
try {
//the callee's uid
remoteUid = msgObj.getString("uid");
MediaConstraints constraints = new MediaConstraints();
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
peerConnection.createOffer(new SdpObserver() {
private SessionDescription localSdp;
@Override
public void onCreateSuccess(SessionDescription sessionDescription) {
//Called when the offer has been created successfully
localSdp = sessionDescription;
peerConnection.setLocalDescription(this, sessionDescription);
}
@Override
public void onSetSuccess() {
//Called after setLocalDescription succeeds
JSONObject sendObj = new JSONObject();
try {
sendObj.put("cmd", cmd_offer);
sendObj.put("uid", uid);
sendObj.put("remoteUid", remoteUid);
sendObj.put("roomId",roomId);
sendObj.put("msg", localSdp.description);
socket.send(sendObj.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
public void onCreateFailure(String s) {
}
@Override
public void onSetFailure(String s) {
}
}, constraints);
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@Override
protected void onDestroy() {
if (socket != null && socket.isOpen()) {
socket.close();
}
socket = null;
if(peerConnection != null){
peerConnection.close();
peerConnection = null;
}
if(videoSource != null){
videoSource.dispose();
videoSource = null;
}
if (audioSource != null) {
audioSource.dispose();
audioSource = null;
}
if(videoCapturer != null){
try {
videoCapturer.stopCapture();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
videoCapturer.dispose();
videoCapturer = null;
}
if(surfaceTextureHelper != null){
surfaceTextureHelper.dispose();
surfaceTextureHelper = null;
}
localRender.release();
remoteRender.release();
if(factory != null){
factory.dispose();
factory = null;
}
if(mRootEGL != null){
mRootEGL.release();
mRootEGL = null;
}
super.onDestroy();
}
}
Complete Node.js source for the signaling server
//Simple signaling server; install the dependency with: npm install nodejs-websocket
let ws = require("nodejs-websocket")
class Client {
constructor(uid,conn,roomId) {
this.uid = uid;
this.conn = conn;
this.conn.uid = this.uid;
this.roomId = roomId;
this.conn.roomId = roomId;
}
}
let roomMaps = new Map
function handleJoinRoom(receiveObj, conn) {
//someone joined the room
let uid = receiveObj.uid;
let roomId = receiveObj.roomId;
let room = roomMaps.get(roomId);
let client = new Client(uid,conn,roomId)
if(!room){
//create the room
room = new Map
}
if(room.get(uid)){
//already in this room
console.log("already in the room")
return
}
room.set(uid,client)
roomMaps.set(roomId,room);
console.log("加入房间了");
if(room.size > 1){
let clients = Array.from(room.keys())
clients.forEach(remoteUid => {
if(remoteUid !== uid){
//notify the others in the room that a new peer has joined
let sendObj = {
cmd: "cmd_new_peer",
uid: uid,
remoteUid
}
let remoteClient = room.get(remoteUid)
remoteClient.conn.sendText(JSON.stringify(sendObj))
console.log("新人发送成功");
}
})
}
}
function handleOffer(receiveObj) {
//forward the offer
let remoteUid = receiveObj.remoteUid
let roomId = receiveObj.roomId
let room = roomMaps.get(roomId)
let client = room.get(remoteUid)
client.conn.sendText(JSON.stringify(receiveObj))
}
function handleAnswer(receiveObj) {
//forward the answer
let remoteUid = receiveObj.remoteUid
let roomId = receiveObj.roomId
let room = roomMaps.get(roomId)
let client = room.get(remoteUid)
client.conn.sendText(JSON.stringify(receiveObj))
}
function handleIce(receiveObj) {
//forward the ICE candidate
let remoteUid = receiveObj.remoteUid
let roomId = receiveObj.roomId
let room = roomMaps.get(roomId)
let client = room.get(remoteUid)
client.conn.sendText(JSON.stringify(receiveObj))
}
function handleClose(conn) {
let uid = conn.uid
let roomId = conn.roomId
let room = roomMaps.get(roomId)
//guard: the connection may close before it ever joined a room
if (room) {
room.delete(uid)
}
}
ws.createServer(function (conn) {
//a client has connected
conn.on("text",function (str) {
//received a message
let receiveObj = JSON.parse(str);
console.log(str)
switch (receiveObj.cmd) {
case "cmd_join_room":
//someone joined a room
handleJoinRoom(receiveObj,conn)
break
case "cmd_offer":
//forward the offer
handleOffer(receiveObj)
break
case "cmd_answer":
handleAnswer(receiveObj);
break
case "cmd_ice":
handleIce(receiveObj);
break
}
})
conn.on("close",function (code,reason) {
console.log("链接关闭")
handleClose(conn);
});
conn.on("error",function (err){
console.log(err);
})
}).listen(8090)