Jianghu应用功能点
webRtc
WebRTC是一个免费的开源项目,它使得浏览器和移动应用程序之间可以进行实时的音频、视频和数据通信。它建立在WebRTC API之上,该API是一组标准化通信协议的规范,用于浏览器和其他终端之间的通信。WebRTC使用各种技术,包括Opus和VP8编解码器用于音频和视频,以及DataChannel API用于点对点数据传输。WebRTC的一个关键优势是它能够在不需要第三方插件或软件的情况下实现安全、低延迟的通信。这使得它成为视频会议、在线游戏、文件共享等应用的流行选择。
注意事项
- 使用了SRS服务端,与普通WebRtc使用不太一样
- 当web发起请求开麦或播放的时候要new一个webRtc实例,播放的时候,有几个播放流就要有几个webRtc实例。
引入方式
//pubspec.yaml 中声明
dependencies:
flutter_webrtc:
path: "../jianghu_packages/flutter_webrtc-0.8.6"
//引入
import 'package:flutter_webrtc/flutter_webrtc.dart' as webrtc;
工作流程
参考:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/SrsSdk.dart
- 通过webrtc.createPeerConnection()实例化RTCPeerConnection对象,用于建立实时通信。
loopbackConstraints = <String, dynamic>{
'optional': [
{'DtlsSrtpKeyAgreement': true}
]
};
webrtc.RTCPeerConnection? _pc = await webrtc.createPeerConnection({'sdpSemantics': "unified-plan"}, loopbackConstraints);
- 添加音视频通道配置,包括设置流方向,如RecvOnly(只接收)、SendOnly(只发送)等。
//音频
_pc!.addTransceiver(
kind: webrtc.RTCRtpMediaType.RTCRtpMediaTypeAudio,
init: webrtc.RTCRtpTransceiverInit(direction: direction),
);
//视频
_pc!.addTransceiver(
kind: webrtc.RTCRtpMediaType.RTCRtpMediaTypeVideo,
init: webrtc.RTCRtpTransceiverInit(direction: direction),
);
- 调用_pc!.createOffer()方法,生成当前设备媒体参数,确定是否可以接收音视频流。
offer = await _pc!.createOffer({
'mandatory': {'OfferToReceiveAudio': !isPublish, 'OfferToReceiveVideo': !isPublish},
});
- 将媒体参数放入系统,设置本地说明。
await _pc!.setLocalDescription(offer!);
- 调用getSrsAnswer()方法,将生成的媒体参数发送给服务器,在交换answer时,需要定义解码模式等详细信息。
//调用getSrsAnswer()方法
while (answer == null && !isClosed) {
EasyLoading.showToast(toast == true ? '通讯异常,正在重连${count > 1 ? '.$count.' : ''}' : '通讯连接中${count > 1 ? '.$count.' : ''}',
duration: const Duration(seconds: 30), toastPosition: EasyLoadingToastPosition.bottom);
answer = await getSrsAnswer(webRtcUrl!, offer!, isPublish ? 'publish' : 'play');
count++;
await Future.delayed(const Duration(milliseconds: 1500));
}
//getSrsAnswer 片段
//webRtcHostUri是应用域名。如果是推流,type=publish,如果是接收,type=play。
String api = "${Constants.webRtcHostUri}rtc/v1/$type/";
Response<dynamic> result = await DioUtil.dioClient
.post("${Constants.webRtcHostUri}rtc/v1/$type/", data: {"api": api, 'tid': Constants.random100_200, 'streamurl': webrtcUrl, 'clientip': null, 'sdp': offer.sdp});
- 接收到服务器返回的answer以后,调用以下方法,设置远程说明。
_pc!.setRemoteDescription(answer!)
- 如果需要录音,可根据需要获取本地录音流,并将其添加到_pc。在添加流时还可以禁用视频通道。
publishTask() async {
GetUserMediaSample userMedia = GetUserMediaSample();
userMedia.startPublish((webrtc.MediaStream? stream, String? type) async {
//拿到流
_localStream = stream;
//check声音
webrtc.Helper.setVolume(100, _localStream!.getAudioTracks().first);
//循环流
_localStream!.getTracks().forEach((track) {
//把视频通道禁用
if (track.kind == 'video') {
track.enabled = false;
}
//把流添加到_pc
_pc!.addTrack(track, _localStream!);
});
}, 'audio');
}
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/GetUserMediaSample.dart
void startPublish(OnStreamOK _onStreamOK, type) async {
onStreamOK = _onStreamOK;
//配置流参数,web端和app端配置尽量一样,如果参数不一样,会返回400 日志提示设备配型不对
final mediaConstraints = <String, dynamic>{
'audio': true,
'video': {
'mandatory': {
'minWidth': '320', // Provide your own width, height and frame rate here
'minHeight': '240',
'minFrameRate': '30',
},
'facingMode': 'user',
'optional': [],
}
};
try {
//拿到流
MediaStream stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
_localStream = stream;
//设置流
_onStreamOK(stream, type);
// 切换清晰度,保持声轨
// 可以新建个stream -> 移出旧的track(removeTrack) -> 再添加新的视频的track(addTrack)
} catch (e) {
debugPrint(e.toString());
}
_inCalling = true;
}
以上就是WebRTC的基本工作流程。
在具体的应用实现中,我们还需要根据具体的需求,调用不同的方法和参数,例如setVolume()方法、startPublish()方法等,来实现音视频录制、数据通道的建立等功能。另外,我们还可以对WebRTC进行优化,以提高实时音视频通信的质量和性能。
推流
- web端发起推流请求
//代码来源:jianghuAppBrowser/jianghuBrowser/lib/layout/WebViewHandler.dart
case 'senderPublish':
PublishInstance().create("${result['actionTime']}", result['url']);
break;
- 实例化SrsSdk,实例化的时候不需要new
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/PublishInstance.dart
create(String callBackId, String? url) {
publisher = SrsSdk(publish: true, callBackId: callBackId, url: url);
}
FLV (fijkPlayer)
使用tcp协议,只有播放功能,声音比较稳定。
//拦截web端Jessibuca播放功能,转发到app端处理
//代码来源:/jianghuAppBrowser/jianghuBrowser/assets/jianghuJsBridge.js
function initJessibucaProClass() {
if(window.JessibucaPro) {
window.JessibucaPro = function(option) {
this.play = (flvUrl) => {
this.flvUrl = flvUrl;
jianghuBridgePostMessage({ action: 'flvPlay', url: flvUrl}, (action, message) => {});
};
this.setVolume = (volume) => {};
this.destroy = () => {
jianghuBridgePostMessage({ action: 'flvDestroy', url: this.flvUrl}, (action, message) => {});
};
}
} else {
setTimeout(() => {
initJessibucaProClass();
}, 300)
}
}
// app端接收flv事件
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/layout/WebViewHandler.dart
case 'flvPlay':
FlvHandler().playFlv(false, result['url']);
break;
case 'flvDestroy':
FlvHandler().close();
//处理相关事件,网络变化重连
//参考:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/FlvHandler.dart
void playFlv(bool localMuted, String flvUrl) {
_flvUrl = flvUrl;
Map json = {"flvUrl": flvUrl};
if (flvJson.value == null || !flvJson.value!.containsKey('flvUrl') || flvJson.value!.containsKey('flvUrl') && flvJson.value!['flvUrl'] != flvUrl) {
flvJson.value = json;
RenderKey.needFlv.value = true;
}
RenderKey.connectivityResult.addListener(onNetworkChange);
}
void close() {
flvJson.value = null;
fijkState.value = FijkState.idle;
RenderKey.needFlv.value = false;
RenderKey.connectivityResult.removeListener(onNetworkChange);
}
void onNetworkChange() {
if (flvJson.value == null) return;
if (_flvUrl == null) return;
// 之前在播放,现在断开了,重连接网络后,重新播放
if (RenderKey.connectivityResult.value == ConnectivityResult.wifi || RenderKey.connectivityResult.value == ConnectivityResult.mobile) {
EasyLoading.showToast('通讯异常,正在重连', duration: const Duration(seconds: 300), toastPosition: EasyLoadingToastPosition.bottom);
flvJson.value = null;
fijkState.value = FijkState.idle;
RenderKey.needFlv.value = false;
Future.delayed(const Duration(milliseconds: 1500), () {
playFlv(flvLocalMuted.value, _flvUrl!);
});
} else {
print("当前没网络");
}
}
/*
* 使用fijkplayer包,实现音视频播放
* 参考:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/IjkPlayerContainer.dart
*/
import 'package:fijkplayer/fijkplayer.dart';
Future<void> playFlv() async {
if (FlvHandler().flvJson.value == null) {
print('FijkPlayer.release()');
if(fijkPlayer != null && fijkPlayer!.state == FijkState.started) {
await fijkPlayer!.stop();
}
FlvHandler().fijkState.value = FijkState.idle;
} else {
fijkPlayer ??= FijkPlayer();
if (!fijkPlayerInit) {
await initIjkPlayer();
}
FlvHandler().fijkState.value = FijkState.prepared;
print('flvJson: ${FlvHandler().flvJson}');
await fijkPlayer!.reset();
await fijkPlayer!.setDataSource(FlvHandler().flvJson.value!['flvUrl'], autoPlay: true);
print("fijkPlayer!.setDataSource");
EasyLoading.dismiss();
setState(() { });
}
}
websocketAudio
使用流程:
//核心代码:`/jh_websocket_audio/android/src/main/kotlin/org/fsll/socket_audio_app/SoundStreamPlugin.kt`
//拦截web端WSAudio相关的事件,转发到app端处理
//代码来源:/jianghuAppBrowser/jianghuBrowser/assets/jianghuJsBridge.js
function initWsAudio() {
if(window.WSAudio) {
window.WSAudio = function(option) {
this.startTalk = () => {
jianghuBridgePostMessage({ action: 'wsAudioStartTalk'}, (action, message) => {});
};
this.stopTalk = () => {
jianghuBridgePostMessage({ action: 'wsAudioStopTalk'}, (action, message) => {});
};
this.setPlayerMute = () => {
jianghuBridgePostMessage({ action: 'wsAudioSetPlayerMute'}, (action, message) => {});
};
this.setPlayerUnMute = () => {
jianghuBridgePostMessage({ action: 'wsAudioSetPlayerUnMute'}, (action, message) => {});
};
this.getPlayerMap = () => {
jianghuBridgePostMessage({ action: 'wsAudioGetPlayerMap'}, (action, message) => {});
};
this.destroy = () => {
jianghuBridgePostMessage({ action: 'wsAudioDestroy'}, (action, message) => {});
};
console.log('WSAudio init')
jianghuBridgePostMessage({ action: 'initWsAudio'}, (action, message) => {});
}
} else {
setTimeout(() => {
initWsAudio();
}, 300)
}
}
// app端接收wsAudio事件
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/layout/WebViewHandler.dart
case 'initWsAudio': // 进房间触发
WsAudioHandler().initWsAudio();
break;
case 'wsAudioStartTalk': // 开麦触发
WsAudioHandler().wsAudioStartTalk();
break;
case 'wsAudioStopTalk': // 关麦触发
WsAudioHandler().wsAudioStopTalk();
break;
case 'wsAudioSetPlayerMute':
WsAudioHandler().toggleLocalMuted(true);
break;
case 'wsAudioSetPlayerUnMute':
WsAudioHandler().toggleLocalMuted(false);
break;
case 'wsAudioGetPlayerMap':
break;
case 'wsAudioDestroy': // 离开房间触发
WsAudioHandler().wsAudioDestroyAll();
break;
//初始化wsAudio,并处理相关事件
//参考:/jianghuAppBrowser/jianghuBrowser/lib/wsAudio/WsAudioHandler.dart
//流程:初始化socket,判断网络连接状态 -> 定时任务定期检查socket连接状态 -> 监听网络变化,并重连-> player初始化、播放音频流,initPublish、推流
initWsAudio() {
// socket检查初始化
checkInstanceAndSocket();
timeTaskCheckSocket();
RenderKey.connectivityResult.addListener(onNetworkChange);
// player初始化
jhWebsocketAudioInstance!.initPlay();
// 发送者初始化
jhWebsocketAudioInstance!.initPublish();
}
/*
* wsAudio相关事件实现
* 参考:/jianghuAppBrowser/jianghu_packages/jh_websocket_audio/lib/jh_websocket_audio.dart
*/
//初始化Socket
initSocket() {
if(socket != null) socket!.destroy();
socket = IO.io(liveHost.toString(), {
'path': liveSocket,
'auth': requestBody,
'closeOnBeforeunload': true,
'transports': [ 'websocket' ],
'forceNew': true,
'timeout': 5000,
'pingInterval': 5000,
'secure': false // 是否支持SSL/TLS
});
// 设置持久化连接
socket!.on('connect', (_) {
socket!.emit('setPersistence', true);
});
//接收audio事件
socket!.on("audio", (msg) {
if (!audioMuted && initializePlayer) {
_player.writeChunk(msg['data'], msg['id']);
}
});
//audio关闭事件
socket!.on("audio-break", (msg) {
print("audio-break $msg");
_player.removePlayer(msgId: msg['id']);
});
}
// 10毫秒接收一次流,tcp网络有抖动,时间不准
initPlay() async {
await _player.initialize();
_playerStatus = _player.status.listen((status) {
print("_playerStatus$status");
});
initializePlayer = true;
}
//10毫秒发送一次流
Future<void> initPublish() async {
_recorderStatus = _recorder.status.listen((status) {
isRecording.value = status == SoundStreamStatus.Playing;
});
_audioStream = _recorder.audioStream.listen((data) {
socket!.emit('audio', {
"sts": DateTime.now().millisecondsSinceEpoch,
"dts": DateTime.now().millisecondsSinceEpoch,
"samplerate": "16000",
"data": data,
});
});
await _recorder.initialize();
initializeRecord = true;
}
openFileX
打开文件、图片,浏览器等。
//pubspec.yaml 中声明
dependencies:
open_filex:
path: "../jianghu_packages/open_file-master"
//引入
import 'package:open_filex/open_filex.dart';
//使用
OpenFilex.openBrowser(result['link']);
OpenFilex.open(file.path);
audioMode
web端发送过来的消息,可选择使用听筒、蓝牙、耳机、外放播放。
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/layout/WebViewHandler.dart
case 'changeAudioMode':
AudioMode().changeAudioMode(result);
break;
//通过_methodChannel给安卓或iOS系统发送消息,安卓或iOS接收到之后,切换设备。
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/audioMode.dart
changeAudioMode(Map result) async {
if(audioModeInitStatus != 'success') {
await initialize();
}
if (audioModeInitStatus == 'success') {
if(audioFocus.value == "AUDIOFOCUS_GAIN") {
await changeAudioModeChannel(audioMode: result['mode']);
} else {
await changeAudioModeChannel(audioMode: result['mode']);
}
}
}
Future<dynamic> changeAudioModeChannel({String audioMode = 'speaker'}) async {
requestAudioMode = audioMode;
_methodChannel.invokeMethod("changeAudioMode", {
"audioMode": audioMode,
});
}
//更换模式
//代码来源:/jianghuAppBrowser/jianghuBrowser/android/app/src/main/kotlin/org/fsll/jianghu_browser/AudioModeManagement.kt
@SuppressLint("MissingPermission")
private fun changeMode() {
if (audioFocus == AudioManager.AUDIOFOCUS_GAIN || audioFocus == AudioManager.AUDIOFOCUS_GAIN_TRANSIENT || audioFocus == AudioManager.AUDIOFOCUS_GAIN_TRANSIENT_MAY_DUCK) {
Log.i(this.javaClass.name, "changeMode")
when (playMode) {
//耳机
AudioMode.Headset -> {
if (mContext.registerReceiver(null, IntentFilter(Intent.ACTION_HEADSET_PLUG))
?.getIntExtra("state", 0) == 1
) {
changeToHeadset()
} else {
_audioDeviceEvent?.success("没有插入耳机")
}
}
//外放
AudioMode.Speaker -> {
Log.d(this.javaClass.name, "切换到外放")
changeToSpeaker()
}
//听筒
AudioMode.Receiver -> {
Log.d(this.javaClass.name, "切换到听筒")
changeToReceiver()
}
//蓝牙
AudioMode.Bluetooth -> {
Log.d(this.javaClass.name, "切换到蓝牙")
// 申请打开蓝牙
if (hasDeviceBluetooth) {
changeToBluetooth()
} else {
_audioDeviceEvent?.success("没有连接蓝牙设备")
}
}
}
} else {
_audioDeviceEvent?.success("没有声音控制权")
}
}
进入房间先申请设备声音控制权,离开房间释放控制权。
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/layout/WebViewHandler.dart
case 'enterLive':
await AudioMode().joinRoom();
break;
case 'leaveLive':
await AudioMode().outRoom();
break;
//代码来源:/jianghuAppBrowser/jianghuBrowser/lib/webRtc/audioMode.dart
Future<dynamic> joinRoom() async {
if(audioModeInitStatus != 'success') {
await initialize();
}
if(audioModeInitStatus == 'success') {
_methodChannel.invokeMethod("joinRoom", { });
}
}
Future<dynamic> outRoom() async {
if(audioModeInitStatus != 'success') {
await initialize();
}
if(audioModeInitStatus == 'success') {
_methodChannel.invokeMethod("outRoom", { });
}
}
musicPlayer
参考:音频播放
网络状况
connectivity插件允许 Flutter 应用程序发现网络连接状态并进行相应的配置。它可以区分蜂窝连接和 WiFi 连接。
参考:/jianghuAppBrowser/jianghuBrowser/lib/layout/JianghuConfigHandler.dart
//pubspec.yaml 中声明
dependencies:
connectivity:
//引入
import 'package:connectivity/connectivity.dart';
//获取当前网络状态,WiFi或者流量
RenderKey.connectivityResult.value = await Connectivity().checkConnectivity();
//监听
Connectivity().onConnectivityChanged.listen((ConnectivityResult event) {
RenderKey.connectivityResult.value = event;
});