From e107d8a1a43ca8b324c1bae65857ca64ef3b2e20 Mon Sep 17 00:00:00 2001
From: Zohaib Iqbal Kambrani <>
Date: Thu, 9 Jun 2022 15:41:37 +0300
Subject: [PATCH] no message

---
 ios/Podfile.lock | 2 +-
 lib/config/config.dart | 2 +-
 lib/models/LiveCare/IncomingCallData.dart | 3 +
 .../fragments/home_page_fragment2.dart | 2 +-
 lib/pages/landing/landing_page.dart | 42 ++
 lib/pages/livecare/incoming_call.dart | 22 +-
 .../js_wrapper/multistreammixer-wrapper.dart | 15 +
 .../utils/blob-extension.dart | 45 ++
 .../utils/device_info.dart | 15 +
 .../utils/device_info_web.dart | 11 +
 .../videocall-webrtc-rnd/utils/turn.dart | 19 +
 .../videocall-webrtc-rnd/utils/turn_web.dart | 13 +
 .../utils/upload-recording.dart | 97 ++++
 .../videocall-webrtc-rnd/utils/websocket.dart | 75 +++
 .../utils/websocket_web.dart | 48 ++
 .../videocall-webrtc-rnd/web_recorder.dart | 234 ++++++++
 .../webrtc/random_string.dart | 77 +++
 .../videocall-webrtc-rnd/webrtc/settings.dart | 35 ++
 .../webrtc/signaling.dart | 510 ++++++++++++++++++
 .../webrtc/start_video_call.dart | 240 +++++++++
 20 files changed, 1492 insertions(+), 15 deletions(-)
 create mode 100644 lib/pages/videocall-webrtc-rnd/js_wrapper/multistreammixer-wrapper.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/blob-extension.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/device_info.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/device_info_web.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/turn.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/turn_web.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/upload-recording.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/websocket.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/utils/websocket_web.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/web_recorder.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/webrtc/random_string.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/webrtc/settings.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/webrtc/signaling.dart
 create mode 100644 lib/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart

diff --git a/ios/Podfile.lock b/ios/Podfile.lock
index 3c59323c..2d3d257b 100644
--- a/ios/Podfile.lock
+++ b/ios/Podfile.lock
@@ -521,4 +521,4 @@ SPEC CHECKSUMS:
 
 PODFILE CHECKSUM: 87c85be30da5343f66afbb978b1a421a87b82b01
 
-COCOAPODS: 1.11.2
+COCOAPODS: 1.11.3
diff --git a/lib/config/config.dart b/lib/config/config.dart
index 0a4113ed..34ea5128 100644
--- a/lib/config/config.dart
+++ b/lib/config/config.dart
@@ -396,7 +396,7 @@ var UPDATE_COVID_QUESTIONNAIRE = 'Services/Doctors.svc/REST/COVID19_Questionnar
 var CHANNEL = 3;
 var GENERAL_ID = 'Cs2020@2016\$2958';
 var IP_ADDRESS = '10.20.10.20';
-var VERSION_ID = 7.7;
+var VERSION_ID = 9.0;
 var SETUP_ID = '91877';
 var LANGUAGE = 2;
 var PATIENT_OUT_SA = 0;
diff --git a/lib/models/LiveCare/IncomingCallData.dart b/lib/models/LiveCare/IncomingCallData.dart
index cfb1395d..507bcc85 100644
--- a/lib/models/LiveCare/IncomingCallData.dart
+++ b/lib/models/LiveCare/IncomingCallData.dart
@@ -25,6 +25,7 @@ class IncomingCallData {
   String token;
   String isCall;
   String sound;
+  String server;
 
   IncomingCallData(
       {this.msgID,
@@ -78,6 +79,7 @@
     picture = json['picture'];
     isCall = json['is_call'];
     sound = json['sound'];
+    server = json['server'];
   }
 
   Map toJson() {
@@ -106,6 +108,7 @@
     data['picture'] = this.picture;
     data['is_call'] = this.isCall;
     data['sound'] = this.sound;
+    data['server'] = this.server;
return data; } } diff --git a/lib/pages/landing/fragments/home_page_fragment2.dart b/lib/pages/landing/fragments/home_page_fragment2.dart index 6828b56f..4d95694d 100644 --- a/lib/pages/landing/fragments/home_page_fragment2.dart +++ b/lib/pages/landing/fragments/home_page_fragment2.dart @@ -284,7 +284,7 @@ class _HomePageFragment2State extends State { } Widget offersButton() { - final bypassPrivilageCheck = false; + final bypassPrivilageCheck = true; return Expanded( flex: 1, child: InkWell( diff --git a/lib/pages/landing/landing_page.dart b/lib/pages/landing/landing_page.dart index 5b4a6bd1..a939eea2 100644 --- a/lib/pages/landing/landing_page.dart +++ b/lib/pages/landing/landing_page.dart @@ -13,7 +13,9 @@ import 'package:diplomaticquarterapp/pages/BookAppointment/Search.dart'; import 'package:diplomaticquarterapp/pages/DrawerPages/family/my-family.dart'; import 'package:diplomaticquarterapp/pages/ToDoList/ToDo.dart'; import 'package:diplomaticquarterapp/pages/landing/home_page_2.dart'; +import 'package:diplomaticquarterapp/pages/livecare/incoming_call.dart'; import 'package:diplomaticquarterapp/pages/medical/medical_profile_page_new.dart'; +import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart'; import 'package:diplomaticquarterapp/services/authentication/auth_provider.dart'; import 'package:diplomaticquarterapp/services/clinic_services/get_clinic_service.dart'; import 'package:diplomaticquarterapp/services/family_files/family_files_provider.dart' as family; @@ -296,6 +298,9 @@ class _LandingPageState extends State with WidgetsBindingObserver { } login() async { + dummyCall(); + return; + var data = await sharedPref.getObject(IMEI_USER_DATA); sharedPref.remove(REGISTER_DATA_FOR_LOGIIN); if (data != null) { @@ -321,6 +326,43 @@ class _LandingPageState extends State with WidgetsBindingObserver { } } + + + dummyCall() async { + final json = { + "callerID": "s1", + "PatientID": "s2", + "msgID": "123", + "notfID": "123", + "notification_foreground": "true", + "count": "1", + + "message": "Doctor is calling ", + "AppointmentNo": "123", + "title": "Rayyan Hospital", + "ProjectID": "123", + "NotificationType": "10", + "background": "1", + "doctorname": "Dr Sulaiman Al Habib", + "clinicname": "ENT Clinic", + "speciality": "Speciality", + "appointmentdate": "Sun, 15th Dec, 2019", + "appointmenttime": "09:00", + "type": "video", + "session_id": + "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiIsImN0eSI6InR3aWxpby1mcGE7dj0xIn0.eyJqdGkiOiJTS2I2NjYyOWMzN2ZhOTM3YjFjNDI2Zjg1MTgyNWFmN2M0LTE1OTg3NzQ1MDYiLCJpc3MiOiJTS2I2NjYyOWMzN2ZhOTM3YjFjNDI2Zjg1MTgyNWFmN2M0Iiwic3ViIjoiQUNhYWQ1YTNmOGM2NGZhNjczNTY3NTYxNTc0N2YyNmMyYiIsImV4cCI6MTU5ODc3ODEwNiwiZ3JhbnRzIjp7ImlkZW50aXR5IjoiSGFyb29uMSIsInZpZGVvIjp7InJvb20iOiJTbWFsbERhaWx5U3RhbmR1cCJ9fX0.7XUS5uMQQJfkrBZu9EjQ6STL6R7iXkso6BtO1HmrQKk", + "identity": "Haroon1", + "name": "SmallDailyStandup", + "videoUrl": "video", + "picture": "video", + "is_call": "true", + "server": "192.168.8.104", + }; + + IncomingCallData incomingCallData = IncomingCallData.fromJson(json); + final result = await Navigator.push(context, MaterialPageRoute(builder: (context) => IncomingCall(incomingCallData: incomingCallData))); + } + getNotificationCount(token) async { if (await sharedPref.getObject(USER_PROFILE) != null) { var data = AuthenticatedUser.fromJson(await sharedPref.getObject(USER_PROFILE)); diff --git a/lib/pages/livecare/incoming_call.dart b/lib/pages/livecare/incoming_call.dart index 7f264712..66593df4 100644 --- a/lib/pages/livecare/incoming_call.dart 
+++ b/lib/pages/livecare/incoming_call.dart @@ -1,14 +1,10 @@ import 'dart:ui'; import 'package:camera/camera.dart'; -import 'package:diplomaticquarterapp/config/config.dart'; import 'package:diplomaticquarterapp/models/LiveCare/IncomingCallData.dart'; -import 'package:diplomaticquarterapp/models/LiveCare/room_model.dart'; -import 'package:diplomaticquarterapp/pages/conference/web_rtc/call_home_page_.dart'; import 'package:diplomaticquarterapp/pages/conference/widgets/platform_exception_alert_dialog.dart'; import 'package:diplomaticquarterapp/pages/landing/landing_page.dart'; -import 'package:diplomaticquarterapp/pages/livecare/video-call-web-page.dart'; -import 'package:diplomaticquarterapp/pages/webRTC/OpenTok/OpenTok.dart'; +import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart'; import 'package:diplomaticquarterapp/pages/webRTC/signaling.dart'; import 'package:diplomaticquarterapp/services/livecare_services/livecare_provider.dart'; import 'package:diplomaticquarterapp/uitl/translations_delegate_base.dart'; @@ -241,18 +237,20 @@ class _IncomingCallState extends State with SingleTickerProviderSt MaterialPageRoute( // fullscreenDialog: true, builder: (BuildContext context) { + final caller = widget.incomingCallData.callerID; + final receiver = widget.incomingCallData.receiverID; + final host = widget.incomingCallData.server; + return StartVideoCall(caller: caller, receiver: receiver, iAmCaller: false, host: host,); + // return VideoCallWebPage(receiverId: widget.incomingCallData.receiverID, callerId: widget.incomingCallData.callerID); // Web WebRTC VideoCall + // return CallHomePage(receiverId: widget.incomingCallData.receiverID, callerId: widget.incomingCallData.callerID); // App WebRTC VideoCall + // return OpenTokConnectCallPage( // apiKey: OPENTOK_API_KEY, - // sessionId: '1_MX40NjIwOTk2Mn5-MTY0NzE3MzcwNjA0MH55RUJoZnd0ZGh2U3BPc01ENVZBelQvT1Z-fg', - // token: 'T1==cGFydG5lcl9pZD00NjIwOTk2MiZzaWc9M2I4ODYxYTIzMGQ0ZDA3OTYyNDhkODIxNzI5ZjIzODM1NjY2YzExMzpzZXNzaW9uX2lkPTFfTVg0ME5qSXdPVGsyTW41LU1UWTBOekUzTXpjd05qQTBNSDU1UlVKb1puZDBaR2gyVTNCUGMwMUVOVlpCZWxRdlQxWi1mZyZjcmVhdGVfdGltZT0xNjQ3MTczNzA2Jm5vbmNlPTAuMjA3NDI0MzA1NzcwMzY4MiZyb2xlPW1vZGVyYXRvciZleHBpcmVfdGltZT0xNjQ3MjYwMTA2JmluaXRpYWxfbGF5b3V0X2NsYXNzX2xpc3Q9' + // sessionId: widget.incomingCallData.sessionId, + // token: widget.incomingCallData.token // ); - return OpenTokConnectCallPage( - apiKey: OPENTOK_API_KEY, - sessionId: widget.incomingCallData.sessionId, - token: widget.incomingCallData.token - ); }, ), ); diff --git a/lib/pages/videocall-webrtc-rnd/js_wrapper/multistreammixer-wrapper.dart b/lib/pages/videocall-webrtc-rnd/js_wrapper/multistreammixer-wrapper.dart new file mode 100644 index 00000000..b21a345d --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/js_wrapper/multistreammixer-wrapper.dart @@ -0,0 +1,15 @@ +@JS() +library streammixer.js; +import 'package:js/js.dart'; + + +@JS('multiStreamsMixerFlutter') +external initMultiStreamsMixer(arrayOfMediaStreams); + +@JS("MultiStreamsMixer") +class MultiStreamsMixer { + external MultiStreamsMixer(arrayOfMediaStreams, elementClass); + + external getMixedStream(); + external start(); +} \ No newline at end of file diff --git a/lib/pages/videocall-webrtc-rnd/utils/blob-extension.dart b/lib/pages/videocall-webrtc-rnd/utils/blob-extension.dart new file mode 100644 index 00000000..7f20a6f5 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/blob-extension.dart @@ -0,0 +1,45 @@ +// import 'dart:async'; +// import 'dart:html' as html; +// +// 
import 'dart:typed_data'; +// +// extension xHtmlBlob on html.Blob{ +// +// /* File Reader --- */ +// Future _fileDataUrlReader()async{ +// html.FileReader reader = html.FileReader(); +// reader.readAsDataUrl(this); +// return reader; +// } +// +// Future _fileByteReader()async{ +// html.FileReader reader = html.FileReader(); +// reader.readAsArrayBuffer(this); +// return reader; +// } +// /* --- File Reader */ +// +// Future readDataUrl() async{ +// Completer completer = Completer(); +// _fileDataUrlReader().then((reader){ +// reader.onLoadEnd.listen((e) async { +// dynamic data = reader.result; +// completer.complete(data); +// }); +// }); +// final data = await completer.future; +// return data; +// } +// +// Future readBytes() async{ +// Completer completer = Completer(); +// _fileByteReader().then((reader){ +// reader.onLoadEnd.listen((e) async { +// dynamic data = reader.result; +// completer.complete(data); +// }); +// }); +// final data = await completer.future; +// return data; +// } +// } \ No newline at end of file diff --git a/lib/pages/videocall-webrtc-rnd/utils/device_info.dart b/lib/pages/videocall-webrtc-rnd/utils/device_info.dart new file mode 100644 index 00000000..668f2f12 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/device_info.dart @@ -0,0 +1,15 @@ +import 'dart:io'; + +class DeviceInfo { + static String get label { + return 'Flutter ' + + Platform.operatingSystem + + '(' + + Platform.localHostname + + ")"; + } + + static String get userAgent { + return 'flutter-webrtc/' + 'Web' + '-plugin 0.0.1'; + } +} diff --git a/lib/pages/videocall-webrtc-rnd/utils/device_info_web.dart b/lib/pages/videocall-webrtc-rnd/utils/device_info_web.dart new file mode 100644 index 00000000..34fad372 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/device_info_web.dart @@ -0,0 +1,11 @@ +// ignore: avoid_web_libraries_in_flutter + +class DeviceInfo { + static String get label { + return 'Flutter Web'; + } + + static String get userAgent { + return 'flutter-webrtc/web-plugin 0.0.1 (Mobile)'; + } +} diff --git a/lib/pages/videocall-webrtc-rnd/utils/turn.dart b/lib/pages/videocall-webrtc-rnd/utils/turn.dart new file mode 100644 index 00000000..0d4a00f9 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/turn.dart @@ -0,0 +1,19 @@ +import 'dart:convert'; +import 'dart:async'; +import 'dart:io'; + +Future getTurnCredential(String host, int port) async { + HttpClient client = HttpClient(context: SecurityContext()); + client.badCertificateCallback = + (X509Certificate cert, String host, int port) { + print('getTurnCredential: Allow self-signed certificate => $host:$port. 
'); + return true; + }; + var url = 'https://$host:$port/api/turn?service=turn&username=flutter-webrtc'; + var request = await client.getUrl(Uri.parse(url)); + var response = await request.close(); + var responseBody = await response.transform(Utf8Decoder()).join(); + print('getTurnCredential:response => $responseBody.'); + Map data = JsonDecoder().convert(responseBody); + return data; + } diff --git a/lib/pages/videocall-webrtc-rnd/utils/turn_web.dart b/lib/pages/videocall-webrtc-rnd/utils/turn_web.dart new file mode 100644 index 00000000..bf2ea7ad --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/turn_web.dart @@ -0,0 +1,13 @@ +import 'dart:convert'; +import 'package:http/http.dart' as http; + +Future getTurnCredential(String host, int port) async { + var url = 'https://$host:$port/api/turn?service=turn&username=flutter-webrtc'; + final res = await http.get(Uri.parse(url)); + if (res.statusCode == 200) { + var data = json.decode(res.body); + print('getTurnCredential:response => $data.'); + return data; + } + return {}; +} diff --git a/lib/pages/videocall-webrtc-rnd/utils/upload-recording.dart b/lib/pages/videocall-webrtc-rnd/utils/upload-recording.dart new file mode 100644 index 00000000..53eb2230 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/upload-recording.dart @@ -0,0 +1,97 @@ +// import 'dart:convert'; +// import 'dart:html' as html; +// +// import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/utils/blob-extension.dart'; +// import 'package:flutter/cupertino.dart'; +// import 'package:http/http.dart' as http; +// +// // should run on port '65482' add to additional arguments --web-port=65482 +// class UploadRecording{ +// +// final url = "https://vcallapi.hmg.com/api/videocall/trackMeeting"; +// // final url = "https://10.20.200.186/api/videocall/trackMeeting"; +// upload(html.Blob blob, {@required Map params, @required Function completion, @required Function(double) onProgress}) async{ +// +// final bytes = await blob.readBytes(); +// final filename = "${DateTime.now().millisecondsSinceEpoch}.webm"; +// final file = http.MultipartFile.fromBytes('file', bytes ?? [], filename: filename); +// +// var request = http.MultipartRequest('POST', Uri.parse(url)); +// request.files.add(file); +// request.fields.addAll({'fileName':filename,...params}); +// +// print("Uploading video of '${blob.size}' bytes"); +// var response = await request.send().catchError((e){ +// print(e.toString()); +// completion(e.toString()); +// }); +// +// print(response.statusCode); +// if (response.statusCode == 200) { +// final bytes = await response.stream.toBytes(); +// final jsonString = utf8.decode(bytes); +// final json = jsonDecode(jsonString); +// if(json == null) +// completion('200: Invalid response format'); +// else +// completion(json); +// +// }else { +// print(response.reasonPhrase); +// completion(response.reasonPhrase); +// } +// } +// +// _upload(html.Blob blob, {@required Map params, @required Function completion, @required Function(double) onProgress}) async{ +// final blobBytes = await blob.readBytes(); +// +// Map body = {}; +// body.addAll(params); +// body['file'] = blobBytes ?? 
[]; +// body['fileName'] = "${DateTime.now().millisecondsSinceEpoch}.webm"; +// +// final json = jsonEncode(body); +// +// var request = http.StreamedRequest('POST', Uri.parse(url)); +// +// var totalLength = json.length;// Total length (to calculate upload progress) +// var transferredLength = 0;// Length transferred (to calculate upload progress) +// var uploadProgress = 0.0; // Upload progress (from 0.0 to 1.0) +// +// Stream.value(json).transform(utf8.encoder).listen((chunk) { +// +// transferredLength += chunk.length; +// uploadProgress = transferredLength / totalLength; +// // print("Chunk: ${chunk.length}, transferred: $transferredLength, progress: $uploadProgress"); +// request.sink.add(chunk); +// onProgress(uploadProgress*100); +// +// }, onDone: () { +// print("Done. Total: $totalLength, transferred: $transferredLength, progress: $uploadProgress"); +// request.sink.close(); +// onProgress(uploadProgress*100); +// }); +// +// +// print("Uploading video of '${blob.size}' bytes"); +// var response = await request.send(); +// +// print(response.statusCode); +// if (response.statusCode == 200){ +// +// final bytes = await response.stream.toBytes(); +// final jsonString = utf8.decode(bytes); +// final json = jsonDecode(jsonString); +// if(json == null) +// completion('200: Invalid response format'); +// else +// completion(json); +// +// +// }else { +// print("${response.statusCode}: ${response.reasonPhrase}"); +// completion(response.reasonPhrase); +// } +// +// } +// } \ No newline at end of file diff --git a/lib/pages/videocall-webrtc-rnd/utils/websocket.dart b/lib/pages/videocall-webrtc-rnd/utils/websocket.dart new file mode 100644 index 00000000..fc6b215c --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/websocket.dart @@ -0,0 +1,75 @@ +import 'dart:io'; +import 'dart:math'; +import 'dart:convert'; +import 'dart:async'; + +class SimpleWebSocket { + String _url; + var _socket; + Function() onOpen; + Function(dynamic msg) onMessage; + Function(int code, String reaso) onClose; + SimpleWebSocket(this._url); + + connect() async { + try { + //_socket = await WebSocket.connect(_url); + print('connecting to $_url'); + _socket = await _connectForSelfSignedCert(_url); + onOpen?.call(); + _socket.listen((data) { + onMessage?.call(data); + }, onDone: () { + onClose?.call(_socket.closeCode, _socket.closeReason); + }); + } catch (e) { + onClose?.call(500, e.toString()); + } + } + + send(data) { + if (_socket != null) { + _socket.add(data); + print('send: $data'); + } + } + + close() { + if (_socket != null) _socket.close(); + } + + Future _connectForSelfSignedCert(url) async { + try { + Random r = new Random(); + String key = base64.encode(List.generate(8, (_) => r.nextInt(255))); + HttpClient client = HttpClient(context: SecurityContext()); + client.badCertificateCallback = + (X509Certificate cert, String host, int port) { + print( + 'SimpleWebSocket: Allow self-signed certificate => $host:$port. 
'); + return true; + }; + + HttpClientRequest request = + await client.getUrl(Uri.parse(url)); // form the correct url here + request.headers.add('Connection', 'Upgrade'); + request.headers.add('Upgrade', 'websocket'); + request.headers.add( + 'Sec-WebSocket-Version', '13'); // insert the correct version here + request.headers.add('Sec-WebSocket-Key', key.toLowerCase()); + + HttpClientResponse response = await request.close(); + // ignore: close_sinks + Socket socket = await response.detachSocket(); + var webSocket = WebSocket.fromUpgradedSocket( + socket, + protocol: 'signaling', + serverSide: false, + ); + + return webSocket; + } catch (e) { + throw e; + } + } +} diff --git a/lib/pages/videocall-webrtc-rnd/utils/websocket_web.dart b/lib/pages/videocall-webrtc-rnd/utils/websocket_web.dart new file mode 100644 index 00000000..73cb3bf9 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/utils/websocket_web.dart @@ -0,0 +1,48 @@ +// ignore: avoid_web_libraries_in_flutter +import 'dart:html'; + +class SimpleWebSocket { + String _url; + var _socket; + Function() onOpen; + Function(dynamic msg) onMessage; + Function(int code, String reason) onClose; + + SimpleWebSocket(this._url) { + _url = _url.replaceAll('https:', 'wss:'); + } + + connect() async { + try { + _socket = WebSocket(_url); + _socket.onOpen.listen((e) { + onOpen?.call(); + }); + + _socket.onMessage.listen((e) { + onMessage?.call(e.data); + }); + + _socket.onClose.listen((e) { + onClose?.call(e.code, e.reason); + }); + } catch (e) { + onClose?.call(500, e.toString()); + } + } + + send(data) { + if (_socket != null && _socket.readyState == WebSocket.OPEN) { + _socket.send(data); + print('send: $data'); + } else { + print('WebSocket not connected, message $data not sent'); + } + } + + close() { + if (_socket != null) { + _socket.close(); + } + } +} diff --git a/lib/pages/videocall-webrtc-rnd/web_recorder.dart b/lib/pages/videocall-webrtc-rnd/web_recorder.dart new file mode 100644 index 00000000..99217d68 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/web_recorder.dart @@ -0,0 +1,234 @@ +// +// import 'dart:async'; +// import 'dart:collection'; +// import 'dart:html' as html; +// import 'dart:io'; +// import 'dart:js' as js; +// import 'dart:typed_data'; +// import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/js_wrapper/multistreammixer-wrapper.dart'; +// import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/utils/blob-extension.dart'; +// import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/utils/upload-recording.dart'; +// import 'package:flutter/cupertino.dart'; +// import 'package:flutter_webrtc/flutter_webrtc.dart'; +// +// final _recorder_options = { +// 'mimeType': 'video/webm;codecs=vp9,opus', +// 'bitsPerSecond': 128000, +// 'audioBitsPerSecond': 128000,// only for audio track +// 'videoBitsPerSecond': 128000// only for video track +// }; +// +// class WebRtcStreamRecorder{ +// html.MediaStream remoteStreamWeb; +// html.MediaStream localStreamWeb; +// +// html.MediaRecorder _local_recorder; +// html.MediaRecorder _mixed_recorder; +// html.MediaRecorder _remote_recorder; +// Duration chunkDuration; +// +// WebRtcStreamRecorder({@required MediaStream remoteStream, @required MediaStream localStream}){ +// remoteStreamWeb = (remoteStream as dynamic).jsStream; // MediaStreamWeb.jsStream +// localStreamWeb = (localStream as dynamic).jsStream; // MediaStreamWeb.jsStream +// } +// +// Function(html.Blob blob, bool isLast, html.MediaRecorder recorder) _blobChunkCallback; +// +// onBlobChunk(Duration 
chunkDuration,{@required Function(html.Blob blob, bool isLast, html.MediaRecorder recorder) onBlobChunk}){ +// _blobChunkCallback = onBlobChunk; +// this.chunkDuration = chunkDuration; +// } +// +// stop(){ +// _mixed_recorder?.stop(); +// _remote_recorder?.stop(); +// _local_recorder?.stop(); +// _sliceTimer?.cancel(); +// } +// +// WebRtcStreamRecorder start({bool withMixedStream = true}){ +// if(withMixedStream){ +// _recordMixedStream(); +// +// }else{ +// Future.wait([_startLocal(), _startRemote()]).then((value) async{ +// final localVideoData = await value[0].readBytes(); +// final remoteVideoData = await value[1].readBytes(); +// +// processAndSendClip(localVideoData, 'local-video'); +// processAndSendClip(remoteVideoData, 'remote-video'); +// print("\n\n\n"); +// print("------------------------------------------------------------------------------------------------------------------------"); +// print("Recordings Completed"); +// print("------------------------------------------------------------------------------------------------------------------------"); +// print(" Local: ${localVideoData == null ? 'not recorded' : 'recorded'}"); +// print("Remote: ${remoteVideoData == null ? 'not recorded' : 'recorded'}"); +// print("------------------------------------------------------------------------------------------------------------------------"); +// print("\n\n\n"); +// +// }); +// } +// return this; +// } +// +// _recordMixedStream(){ +// final mixer = initMultiStreamsMixer([remoteStreamWeb,localStreamWeb]); +// final jsMixedStream = mixer.getMixedStream(); +// _mixed_recorder = html.MediaRecorder(jsMixedStream, _recorder_options); +// _startRecording(_mixed_recorder).then((blob) async { +// final data = await blob?.readBytes(); +// // final url = await blob?.readDataUrl(); +// final params = { +// 'requesterId':'1', +// 'source':'ziktest4', +// 'target':'adeltest4', +// 'callStart':'2022-04-13 09:00', +// 'callEnd':'2022-04-13 10:00', +// 'archive':'1' +// }; +// +// print("Uploading called"); +// UploadRecording().upload(blob, params: params, completion: (response){ +// print("Response: $response"); +// }, onProgress: (progress){ +// print("Upload: ${progress.toInt()}/100"); +// }); +// +// // String file = await FileSaver.instance.saveFile("recorded", data!, "webm", mimeType: MimeType.WEBM).catchError((e){ +// // print(e); +// // }); +// // print(file); +// }); +// } +// +// Future _startRemote() async{ +// _remote_recorder = html.MediaRecorder(remoteStreamWeb, _recorder_options); +// return _startRecording(_remote_recorder); +// } +// +// Future _startLocal() async{ +// _local_recorder = html.MediaRecorder(localStreamWeb, _recorder_options); +// return _startRecording(_local_recorder); +// } +// +// processAndSendClip(Uint8List data, String filename) async{ +// if(data != null){ +// // String file = await FileSaver.instance.saveFile("$filename", data, "webm", mimeType: MimeType.OTHER).catchError((e){ +// // print(e); +// // }); +// // print(file); +// }else{ +// +// } +// } +// } +// +// Timer _sliceTimer; +// extension xWebRtcStreamRecorder on WebRtcStreamRecorder{ +// +// @override +// Future _startRecording(html.MediaRecorder recorder) async{ +// html.Blob _blob; +// bool isLast = false; +// if(recorder != null){ +// final completer = Completer(); +// recorder.addEventListener('dataavailable', (html.Event event) async{ +// _blob = js.JsObject.fromBrowserObject(event)['data']; +// isLast = recorder.state == 'inactive'; +// final data = await _blob?.readBytes(); +// 
print("dataavailable isLast:$isLast data:${data?.length}"); +// if(_blobChunkCallback != null) +// _blobChunkCallback(_blob, isLast, recorder); +// +// if(isLast) +// completer.complete(); +// }); +// recorder.start(); +// print("_startRecording Start"); +// _enableSliceChunks(recorder); +// +// await completer.future; +// } +// +// print("_startRecording Finish"); +// return _blob; +// } +// +// _enableSliceChunks(html.MediaRecorder recorder){ +// if(chunkDuration != null && recorder != null){ +// _sliceTimer = Timer.periodic(chunkDuration, (timer) { +// if(recorder.stream?.active == true){ +// recorder.requestData(); +// }else{ +// timer.cancel(); +// } +// }); +// } +// } +// } +// +// // class WebRecorder { +// // static bool isNotRecording = true; +// // static html.MediaRecorder? recorder; +// // +// // final Future Function() whenRecorderStart; // Function to call when recording starts +// // final Future Function() whenRecorderStop; // Function to call when recording finishs +// // final Future Function(dynamic) whenReceiveData; +// // +// // WebRecorder({ +// // required this.whenRecorderStart, +// // required this.whenRecorderStop, +// // required this.whenReceiveData +// // }); +// // +// // WebRecorder openRecorder({MediaStream? stream}){ +// // WebRecorder.isNotRecording = !WebRecorder.isNotRecording; +// // if(WebRecorder.isNotRecording) +// // stopRecoring().whenComplete(whenRecorderStop); +// // else +// // if(stream == null){ +// // html.window.navigator +// // .getUserMedia(audio: true) +// // .then((stream) { +// // recorder = html.MediaRecorder(stream); +// // recorder?.addEventListener('dataavailable', hundlerFunctionStream); +// // }) +// // .whenComplete((){ +// // startRecording().whenComplete(whenRecorderStart); +// // }) +// // .catchError((e)=> print); +// // }else{ +// // final htmlStream = html.MediaStream(stream.getTracks()); +// // recorder = html.MediaRecorder(htmlStream); +// // recorder?.addEventListener('dataavailable', hundlerFunctionStream); +// // } +// // +// // return this; +// // } +// // +// // Future startRecording(){ +// // WebRecorder.recorder?.start(); +// // return Future.value(true); +// // } +// // +// // Future stopRecoring() async{ +// // WebRecorder.recorder?.stop(); +// // return Future.value(true); +// // } +// // +// // hundlerFunctionStream(event) async{ +// // html.FileReader reader = html.FileReader(); +// // html.Blob blob = js.JsObject.fromBrowserObject(event)['data']; +// // reader.readAsArrayBuffer(blob); +// // reader.onLoadEnd.listen((e) async { +// // setData(reader.result); +// // }); +// // } +// // +// // setData(data) => whenReceiveData(data); +// // +// // dispose(){ +// // WebRecorder.recorder?.removeEventListener('dataavailable', hundlerFunctionStream); +// // WebRecorder.recorder = null; +// // } +// // } diff --git a/lib/pages/videocall-webrtc-rnd/webrtc/random_string.dart b/lib/pages/videocall-webrtc-rnd/webrtc/random_string.dart new file mode 100644 index 00000000..73c2f089 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/webrtc/random_string.dart @@ -0,0 +1,77 @@ +// Copyright (c) 2016, Damon Douglas. All rights reserved. Use of this source code +// is governed by a BSD-style license that can be found in the LICENSE file. + +/// Simple library for generating random ascii strings. +/// +/// More dartdocs go here. 
+/// +/// +/// A simple usage example: +/// +/// import 'package:random_string/random_string.dart' as random; +/// main() { +/// print(randomBetween(10,20)); // some integer between 10 and 20 +/// print(randomNumeric(4)); // sequence of 4 random numbers i.e. 3259 +/// print(randomString(10)); // random sequence of 10 characters i.e. e~f93(4l- +/// print(randomAlpha(5)); // random sequence of 5 alpha characters i.e. aRztC +/// print(randomAlphaNumeric(10)); // random sequence of 10 alpha numeric i.e. aRztC1y32B +/// } + +library random_string; + +import 'dart:math'; + +const ASCII_START = 33; +const ASCII_END = 126; +const NUMERIC_START = 48; +const NUMERIC_END = 57; +const LOWER_ALPHA_START = 97; +const LOWER_ALPHA_END = 122; +const UPPER_ALPHA_START = 65; +const UPPER_ALPHA_END = 90; + +/// Generates a random integer where [from] <= [to]. +int randomBetween(int from, int to) { + if (from > to) throw Exception('$from cannot be > $to'); + var rand = Random(); + return ((to - from) * rand.nextDouble()).toInt() + from; +} + +/// Generates a random string of [length] with characters +/// between ascii [from] to [to]. +/// Defaults to characters of ascii '!' to '~'. +String randomString(int length, {int from: ASCII_START, int to: ASCII_END}) { + return String.fromCharCodes( + List.generate(length, (index) => randomBetween(from, to))); +} + +/// Generates a random string of [length] with only numeric characters. +String randomNumeric(int length) => + randomString(length, from: NUMERIC_START, to: NUMERIC_END); +/* +/// Generates a random string of [length] with only alpha characters. +String randomAlpha(int length) { + var lowerAlphaLength = randomBetween(0, length); + var upperAlphaLength = length - lowerAlphaLength; + var lowerAlpha = randomString(lowerAlphaLength, + from: LOWER_ALPHA_START, to: LOWER_ALPHA_END); + var upperAlpha = randomString(upperAlphaLength, + from: UPPER_ALPHA_START, to: UPPER_ALPHA_END); + return randomMerge(lowerAlpha, upperAlpha); +} + +/// Generates a random string of [length] with alpha-numeric characters. +String randomAlphaNumeric(int length) { + var alphaLength = randomBetween(0, length); + var numericLength = length - alphaLength; + var alpha = randomAlpha(alphaLength); + var numeric = randomNumeric(numericLength); + return randomMerge(alpha, numeric); +} + +/// Merge [a] with [b] and scramble characters. 
+String randomMerge(String a, String b) { + var mergedCodeUnits = List.from("$a$b".codeUnits); + mergedCodeUnits.shuffle(); + return String.fromCharCodes(mergedCodeUnits); +}*/ diff --git a/lib/pages/videocall-webrtc-rnd/webrtc/settings.dart b/lib/pages/videocall-webrtc-rnd/webrtc/settings.dart new file mode 100644 index 00000000..fe9c9535 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/webrtc/settings.dart @@ -0,0 +1,35 @@ +import 'package:flutter/material.dart'; +import 'dart:core'; + +class CallSettings extends StatefulWidget { + static String tag = 'call_settings'; + + @override + _CallSettingsState createState() => _CallSettingsState(); +} + +class _CallSettingsState extends State { + @override + initState() { + super.initState(); + } + + @override + deactivate() { + super.deactivate(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: Text('Settings'), + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center(child: Text("settings")); + }, + ), + ); + } +} diff --git a/lib/pages/videocall-webrtc-rnd/webrtc/signaling.dart b/lib/pages/videocall-webrtc-rnd/webrtc/signaling.dart new file mode 100644 index 00000000..d6e0a071 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/webrtc/signaling.dart @@ -0,0 +1,510 @@ +import 'dart:convert'; +import 'dart:async'; +import 'package:flutter/cupertino.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +import '../utils/websocket.dart' if (dart.library.js) '../utils/websocket_web.dart'; +import '../utils/turn.dart' if (dart.library.js) '../utils/turn_web.dart'; + + +enum SignalingState { Open, Closed, Error } +enum CallState { Calling, Ringing, Invite, Connected, Bye } + + +const JsonEncoder _encoder = JsonEncoder(); +const JsonDecoder _decoder = JsonDecoder(); + +class SessionOneToOne { + String id; + SocketUser local_user; + SocketUser remote_user; + SessionOneToOne({@required this.id, @required this.local_user, @required this.remote_user}); + + + RTCPeerConnection pc; + RTCDataChannel dc; + List remoteCandidates = []; +} + +class SocketUser{ + String id; + String name; + String userAgent; + Map moreInfo; + + SocketUser({@required this.id, @required this.name, @required this.userAgent, @required this.moreInfo}); + + SocketUser.from(dynamic json){ + id = json['id']; + name = json['name']; + userAgent = json['user_agent']; + moreInfo = json['more_info']; + } + + Map toJson() => { + "id": id, + "name": name, + "user_agent": userAgent, + "more_info": moreInfo + }; +} + +class Signaling { + var _host; + var _port = 8086; + var _turnCredential; + + SimpleWebSocket _socket; + SessionOneToOne session; + + Signaling(this._host, {@required this.session}); + + MediaStream localStream; + final List remoteStreams = []; + + Function(SignalingState state) onSignalingStateChange; + Function(SessionOneToOne session, CallState state) onCallStateChange; + Function(MediaStream stream) onLocalStream; + Function(SessionOneToOne session, MediaStream stream) onAddRemoteStream; + Function(SessionOneToOne session, MediaStream stream) onRemoveRemoteStream; + Function(dynamic event) onPeersUpdate; + Function(dynamic event) onConnected; + Function(dynamic event) onRemoteConnected; + Function(SessionOneToOne session, RTCDataChannel dc, RTCDataChannelMessage data) onDataChannelMessage; + Function(SessionOneToOne session, RTCDataChannel dc) onDataChannel; + + String get sdpSemantics => WebRTC.platformIsWindows ? 
'plan-b' : 'unified-plan'; + + Map _iceServers = { + 'iceServers': [ + {'url': 'stun:stun.l.google.com:19302'}, + /* + * turn server configuration example. + { + 'url': 'turn:123.45.67.89:3478', + 'username': 'change_to_real_user', + 'credential': 'change_to_real_secret' + }, + */ + ] + }; + + final Map _config = { + 'mandatory': {}, + 'optional': [ + {'DtlsSrtpKeyAgreement': true}, + ] + }; + + final Map _dcConstraints = { + 'mandatory': { + 'OfferToReceiveAudio': false, + 'OfferToReceiveVideo': false, + }, + 'optional': [], + }; + + + close() async { + await finishSessions(); + _socket?.close(); + } + + void switchCamera() { + if (localStream != null) { + Helper.switchCamera(localStream .getVideoTracks()[0]); + } + } + + void muteMic() { + if (localStream != null) { + bool enabled = localStream .getAudioTracks()[0].enabled; + localStream .getAudioTracks()[0].enabled = !enabled; + } + } + + + callAccepted(SessionOneToOne session){ + _send('call_accepted', { + 'to': session.remote_user?.id, + 'from': session.local_user.id, + 'session_id': session.id, + }); + } + + void offer(String media, bool useScreen) async { + if(session == null) + return; + + if (media == 'data') { + _createDataChannel(session); + } + _createOffer(session, media); + onCallStateChange?.call(session, CallState.Calling); + } + + void bye(SessionOneToOne session) { + _send('bye', { + 'session_id': session.id, + 'from': session.local_user.id, + }); + _closeSession(session); + } + + void onMessage(message) async { + Map mapData = message; + var data = mapData['data']; + + switch (mapData['type']) { + case 'call_accepted': + { + onRemoteConnected?.call(data); + } + break; + + case 'connected': + { + if (onConnected != null) { + onConnected?.call(data); + } + } + break; + + case 'offer': + { + + var peerId = data['from']; + var description = data['description']; + var media = data['media']; + + await _initPeerConnection(session, media: media, screenSharing: false); + await session.pc?.setRemoteDescription(RTCSessionDescription(description['sdp'], description['type'])); + await _createAnswer(session, media); + + if (session.remoteCandidates.isNotEmpty) { + session.remoteCandidates.forEach((candidate) async { + await session.pc?.addCandidate(candidate); + }); + session.remoteCandidates.clear(); + } + onCallStateChange?.call(session, CallState.Calling); + } + break; + case 'answer': + { + + var description = data['description']; + var sessionId = data['session_id']; + session.pc?.setRemoteDescription( + RTCSessionDescription(description['sdp'], description['type'])); + } + break; + case 'candidate': + { + + var peerId = data['from']; + var candidateMap = data['candidate']; + var sessionId = data['session_id']; + RTCIceCandidate candidate = RTCIceCandidate(candidateMap['candidate'], + candidateMap['sdpMid'], candidateMap['sdpMLineIndex']); + + if (session != null) { + if (session.pc != null) { + await session.pc?.addCandidate(candidate); + } else { + session.remoteCandidates.add(candidate); + } + } else { + // _sessions[sessionId] = SessionOneToOne(pid: peerId, sid: sessionId) + // ..remoteCandidates.add(candidate); + } + } + break; + case 'leave': + { + var peerId = data as String; + _closeSessionById(peerId); + } + break; + case 'bye': + { + + var sessionId = data['session_id']; + print('bye: ' + sessionId); + if (session != null) { + onCallStateChange?.call(session, CallState.Bye); + _closeSession(session); + } + } + break; + case 'keepalive': + { + print('keepalive response!'); + } + break; + default: + break; + } + } + + 
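+  /// Fetches TURN credentials from the signaling host (keeping the default
+  /// STUN server if that request fails), opens the signaling WebSocket at
+  /// '$_host:$_port/ws', and announces the local user with a 'connect' message.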
Future connect() async { + _socket = SimpleWebSocket('https://$_host:$_port/ws'); + + if (_turnCredential == null) { + try { + _turnCredential = await getTurnCredential(_host, _port); + /* + { + "username": "1584195784:mbzrxpgjys", + "password": "isyl6FF6nqMTB9/ig5MrMRUXqZg", + "ttl": 86400, + "uris": ["turn:127.0.0.1:19302?transport=udp"] + } + */ + _iceServers = { + 'iceServers': [ + { + 'urls': _turnCredential['uris'][0], + 'username': _turnCredential['username'], + 'credential': _turnCredential['password'] + }, + ] + }; + } catch (e) {} + } + + _socket?.onOpen = () { + print('onOpen'); + onSignalingStateChange?.call(SignalingState.Open); + _send('connect', session.local_user.toJson()); + }; + + _socket?.onMessage = (message) { + print('Received data: ' + message); + onMessage(_decoder.convert(message)); + }; + + _socket?.onClose = (int code, String reason) { + print('Closed by server [$code => $reason]!'); + onSignalingStateChange?.call(SignalingState.Closed); + }; + + await _socket?.connect(); + } + + Future createStream(String media, bool userScreen) async { + final Map mediaConstraints = { + 'audio': userScreen ? false : true, + 'video': userScreen + ? true + : { + 'mandatory': { + 'minWidth': '640', // Provide your own width, height and frame rate here + 'minHeight': '480', + 'minFrameRate': '30', + }, + 'facingMode': 'user', + 'optional': [], + } + }; + + MediaStream stream = userScreen + ? await navigator.mediaDevices.getDisplayMedia(mediaConstraints) + : await navigator.mediaDevices.getUserMedia(mediaConstraints); + onLocalStream?.call(stream); + return stream; + } + + Future _initPeerConnection(SessionOneToOne session, {@required String media, @required bool screenSharing}) async { + + if (media != 'data') + localStream = await createStream(media, screenSharing); + print(_iceServers); + RTCPeerConnection pc = await createPeerConnection({ + ..._iceServers, + ...{'sdpSemantics': sdpSemantics} + }, _config); + if (media != 'data') { + switch (sdpSemantics) { + case 'plan-b': + pc.onAddStream = (MediaStream stream) { + onAddRemoteStream?.call(session, stream); + remoteStreams.add(stream); + }; + await pc.addStream(localStream); + break; + case 'unified-plan': + // Unified-Plan + pc.onTrack = (event) { + if (event.track.kind == 'video') { + onAddRemoteStream?.call(session, event.streams[0]); + } + }; + localStream .getTracks().forEach((track) { + pc.addTrack(track, localStream); + }); + break; + } + + // Unified-Plan: Simuclast + /* + await pc.addTransceiver( + track: _localStream.getAudioTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendOnly, streams: [_localStream]), + ); + + await pc.addTransceiver( + track: _localStream.getVideoTracks()[0], + init: RTCRtpTransceiverInit( + direction: TransceiverDirection.SendOnly, + streams: [ + _localStream + ], + sendEncodings: [ + RTCRtpEncoding(rid: 'f', active: true), + RTCRtpEncoding( + rid: 'h', + active: true, + scaleResolutionDownBy: 2.0, + maxBitrate: 150000, + ), + RTCRtpEncoding( + rid: 'q', + active: true, + scaleResolutionDownBy: 4.0, + maxBitrate: 100000, + ), + ]), + );*/ + /* + var sender = pc.getSenders().find(s => s.track.kind == "video"); + var parameters = sender.getParameters(); + if(!parameters) + parameters = {}; + parameters.encodings = [ + { rid: "h", active: true, maxBitrate: 900000 }, + { rid: "m", active: true, maxBitrate: 300000, scaleResolutionDownBy: 2 }, + { rid: "l", active: true, maxBitrate: 100000, scaleResolutionDownBy: 4 } + ]; + sender.setParameters(parameters); + */ + } + 
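+    // Relay each locally gathered ICE candidate to the remote peer as a
+    // 'candidate' message over the signaling socket; the other side adds it
+    // to its peer connection in onMessage above.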
pc.onIceCandidate = (candidate) async { + if (candidate == null) { + print('onIceCandidate: complete!'); + return; + } + // This delay is needed to allow enough time to try an ICE candidate + // before skipping to the next one. 1 second is just an heuristic value + // and should be thoroughly tested in your own environment. + await Future.delayed( + const Duration(seconds: 1), + () => _send('candidate', { + 'to': session.remote_user?.id, + 'from': session.local_user.id, + 'candidate': { + 'sdpMLineIndex': candidate.sdpMlineIndex, + 'sdpMid': candidate.sdpMid, + 'candidate': candidate.candidate, + }, + 'session_id': session.id, + })); + }; + + pc.onIceConnectionState = (state) {}; + + pc.onRemoveStream = (stream) { + onRemoveRemoteStream?.call(session, stream); + remoteStreams.removeWhere((it) { + return (it.id == stream.id); + }); + }; + + pc.onDataChannel = (channel) { + _addDataChannel(session, channel); + }; + + session.pc = pc; + return session; + } + + void _addDataChannel(SessionOneToOne session, RTCDataChannel channel) { + channel.onDataChannelState = (e) {}; + channel.onMessage = (RTCDataChannelMessage data) { + onDataChannelMessage?.call(session, channel, data); + }; + session.dc = channel; + onDataChannel?.call(session, channel); + } + + Future _createDataChannel(SessionOneToOne session, {label: 'fileTransfer'}) async { + RTCDataChannelInit dataChannelDict = RTCDataChannelInit() + ..maxRetransmits = 30; + RTCDataChannel channel = + await session.pc .createDataChannel(label, dataChannelDict); + _addDataChannel(session, channel); + } + + Future _createOffer(SessionOneToOne session, String media) async { + try { + RTCSessionDescription s = + await session.pc .createOffer(media == 'data' ? _dcConstraints : {}); + await session.pc .setLocalDescription(s); + _send('offer', { + 'to': session.remote_user?.id, + 'from': session.local_user.id, + 'description': {'sdp': s.sdp, 'type': s.type}, + 'session_id': session.id, + 'media': media, + }); + } catch (e) { + print(e.toString()); + } + } + + Future _createAnswer(SessionOneToOne session, String media) async { + try { + RTCSessionDescription s = + await session.pc .createAnswer(media == 'data' ? 
_dcConstraints : {}); + await session.pc .setLocalDescription(s); + _send('answer', { + 'to': session.remote_user?.id, + 'from': session.local_user.id, + 'description': {'sdp': s.sdp, 'type': s.type}, + 'session_id': session.id, + }); + } catch (e) { + print(e.toString()); + } + } + + _send(event, data) { + var request = Map(); + request["type"] = event; + request["data"] = data; + _socket?.send(_encoder.convert(request)); + } + + Future finishSessions() async { + _closeSessionById(session.id); + } + + void _closeSessionById(String sessionId) { + if (session != null && session.id == sessionId) { + _closeSession(session); + onCallStateChange?.call(session, CallState.Bye); + } + } + + Future _closeSession(SessionOneToOne session) async { + localStream?.getTracks()?.forEach((element) async { + await element.stop(); + }); + await localStream?.dispose(); + localStream = null; + + await session.pc?.close(); + await session.dc?.close(); + } +} diff --git a/lib/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart b/lib/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart new file mode 100644 index 00000000..ea3bbcb0 --- /dev/null +++ b/lib/pages/videocall-webrtc-rnd/webrtc/start_video_call.dart @@ -0,0 +1,240 @@ +import 'dart:async'; + +import 'package:diplomaticquarterapp/pages/conference/conference_button_bar.dart'; +import 'package:diplomaticquarterapp/pages/conference/web_rtc/widgets/cam_view_widget.dart'; +import 'package:diplomaticquarterapp/pages/videocall-webrtc-rnd/utils/device_info.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter/services.dart'; +import 'dart:core'; +import 'signaling.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class StartVideoCall extends StatefulWidget { + static String tag = 'webrtc'; + final String host; + + String caller; + String receiver; + bool iAmCaller; + StartVideoCall({@required this.caller, @required this.receiver, this.iAmCaller = false, @required this.host}); + + @override + StartVideoCallState createState() => StartVideoCallState(receiverId: receiver, callerId: caller, iAmCaller: iAmCaller); +} + +class StartVideoCallState extends State { + Signaling _signaling; + SessionOneToOne _session; + + StartVideoCallState({@required String callerId, @required String receiverId, @required bool iAmCaller}){ + + final self_role = iAmCaller ? "Caller" : "Receiver"; + final self_id = iAmCaller ? callerId : receiverId; + final self_user = SocketUser(id: self_id, name: "$self_role-$self_id", userAgent: DeviceInfo.userAgent, moreInfo: {}); + + final remote_role = !iAmCaller ? "Caller" : "Receiver"; + final remote_id = !iAmCaller ? 
callerId : receiverId; + final remote_user = SocketUser(id: remote_id, name: "$remote_role-$remote_id", userAgent: DeviceInfo.userAgent, moreInfo: {}); + + final session_id = "$callerId-$receiverId"; + _session = SessionOneToOne(id: session_id, local_user: self_user, remote_user: remote_user); + } + + bool showNoise = true; + final StreamController _audioButton = StreamController.broadcast(); + final StreamController _videoButton = StreamController.broadcast(); + final StreamController _onButtonBarVisibleStreamController = StreamController.broadcast(); + final StreamController _onButtonBarHeightStreamController = StreamController.broadcast(); + + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + final RTCVideoRenderer _remoteRenderer = RTCVideoRenderer(); + + MediaStream get localMediaStream => _signaling.localStream; + MediaStream get remoteMediaStream => _signaling.remoteStreams.first; + + @override + initState() { + super.initState(); + initRenderers(); + _connect(); + } + + initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + @override + deactivate() { + super.deactivate(); + _signaling?.close(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + } + + @override + Widget build(BuildContext context) { + return Scaffold( + // appBar: AppBar( + // title: Text('P2P Call Sample' + (_session?.local_user != null ? ' [Your ID (${_session?.local_user?.id})] ' : '')), + // actions: [ + // IconButton( + // icon: const Icon(Icons.settings), + // onPressed: (){ + // setState(() { + // }); + // }, + // tooltip: 'setup', + // ), + // ], + // ), + body: videoCanvasWidgets(), + ); + } + + LayoutBuilder videoCanvasWidgets() { + return LayoutBuilder( + builder: (BuildContext context, BoxConstraints constraints) { + return Stack( + children: [ + CamViewWidget( + localRenderer: _localRenderer, + remoteRenderer: _remoteRenderer, + constraints: constraints, + onButtonBarVisibleStreamController: _onButtonBarVisibleStreamController, + onButtonBarHeightStreamController: _onButtonBarHeightStreamController, + ), + ConferenceButtonBar( + audioEnabled: _audioButton.stream, + videoEnabled: _videoButton.stream, + onAudioEnabled: _onAudioEnable, + onVideoEnabled: _onVideoEnabled, + onSwitchCamera: _onSwitchCamera, + onHangup: _onHangup, + onPersonAdd: () {}, + onPersonRemove: () {}, + onHeight: _onHeightBar, + onShow: _onShowBar, + onHide: _onHideBar, + ), + ], + ); + }, + ); + } + + + void _connect() async { + if(_session == null) + return; + + _signaling ??= Signaling(widget.host, session: _session)..connect(); + + _signaling?.onSignalingStateChange = (SignalingState state) { + switch (state) { + case SignalingState.Closed: + case SignalingState.Error: + case SignalingState.Open: + break; + } + }; + + _signaling?.onCallStateChange = (SessionOneToOne session, CallState state) { + switch (state) { + case CallState.Calling: + setState(() {}); + break; + case CallState.Bye: + setState(() { + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + _session = null; + }); + break; + case CallState.Invite: + case CallState.Connected: + case CallState.Ringing: + } + }; + + _signaling?.onConnected = ((event) { + _signaling?.callAccepted(_session); + }); + + _signaling?.onRemoteConnected = ((event) { + + }); + + _signaling?.onLocalStream = ((stream) { + _localRenderer.srcObject = stream; + setState(() { + }); + }); + + _signaling?.onAddRemoteStream = ((_, stream) { + _remoteRenderer.srcObject = stream; + setState(() {}); + }); + + 
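+    // Clear the remote view when the far end's stream is removed.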
+    _signaling?.onRemoveRemoteStream = ((_, stream) {
+      setState(() {
+        _remoteRenderer.srcObject = null;
+      });
+    });
+  }
+
+  _hangUp() async {
+    if (_session != null) {
+      _signaling?.bye(_session);
+    }
+  }
+
+  _switchCamera() {
+    _signaling?.switchCamera();
+  }
+
+  _muteMic() {
+    _signaling?.muteMic();
+  }
+
+  void _onAudioEnable() {
+    final audioTrack = localMediaStream.getAudioTracks()[0];
+    final mute = audioTrack.muted;
+    Helper.setMicrophoneMute(!mute, audioTrack);
+    _audioButton.add(mute);
+  }
+
+  void _onVideoEnabled() {
+    final videoTrack = localMediaStream.getVideoTracks()[0];
+    bool videoEnabled = videoTrack.enabled;
+    localMediaStream.getVideoTracks()[0].enabled = !videoEnabled;
+    _videoButton.add(!videoEnabled);
+  }
+
+  void _onSwitchCamera() {
+    Helper.switchCamera(localMediaStream.getVideoTracks()[0]);
+  }
+
+  void _onShowBar() {
+    setState(() {});
+    _onButtonBarVisibleStreamController.add(true);
+  }
+
+  void _onHeightBar(double height) {
+    _onButtonBarHeightStreamController.add(height);
+  }
+
+  void _onHideBar() {
+    setState(() {
+      SystemChrome.setEnabledSystemUIMode(SystemUiMode.manual, overlays: [SystemUiOverlay.bottom]);
+    });
+    _onButtonBarVisibleStreamController.add(false);
+  }
+
+  Future _onHangup() async {
+    _signaling?.finishSessions();
+    print('onHangup');
+    Navigator.of(context).pop();
+  }
+}
\ No newline at end of file