diff --git a/assets/images/powerd-by.jpg b/assets/images/powerd-by.jpg index 93bdbb18..eda1688c 100644 Binary files a/assets/images/powerd-by.jpg and b/assets/images/powerd-by.jpg differ diff --git a/assets/payment_options/payment_options.png b/assets/payment_options/payment_options.png new file mode 100644 index 00000000..4bb83fec Binary files /dev/null and b/assets/payment_options/payment_options.png differ diff --git a/lib/core/model/prescriptions/prescription_report.dart b/lib/core/model/prescriptions/prescription_report.dart index 8c18df62..5545b25b 100644 --- a/lib/core/model/prescriptions/prescription_report.dart +++ b/lib/core/model/prescriptions/prescription_report.dart @@ -134,7 +134,7 @@ class PrescriptionReport { frequencyID = json['frequencyID']; routeID = json['routeID']; name = json['name']; - itemDescriptionN = json['itemDescriptionN']; + itemDescriptionN = json['ItemDescriptionN']; routeN = json['routeN']; frequencyN = json['frequencyN']; } diff --git a/lib/core/model/prescriptions/prescription_report_enh.dart b/lib/core/model/prescriptions/prescription_report_enh.dart index 203eaaff..01a34c47 100644 --- a/lib/core/model/prescriptions/prescription_report_enh.dart +++ b/lib/core/model/prescriptions/prescription_report_enh.dart @@ -15,6 +15,7 @@ class PrescriptionReportEnh { String imageThumbUrl; String isCovered; String itemDescription; + String itemDescriptionN; int itemID; String orderDate; int patientID; @@ -35,38 +36,39 @@ class PrescriptionReportEnh { PrescriptionReportEnh( {this.address, - this.appointmentNo, - this.clinic, - this.companyName, - this.days, - this.doctorName, - this.doseDailyQuantity, - this.frequency, - this.frequencyNumber, - this.image, - this.imageExtension, - this.imageSRCUrl, - this.imageString, - this.imageThumbUrl, - this.isCovered, - this.itemDescription, - this.itemID, - this.orderDate, - this.patientID, - this.patientName, - this.phoneOffice1, - this.prescriptionQR, - this.prescriptionTimes, - this.productImage, - this.productImageBase64, - this.productImageString, - this.projectID, - this.projectName, - this.remarks, - this.route, - this.sKU, - this.scaleOffset, - this.startDate}); + this.appointmentNo, + this.clinic, + this.companyName, + this.days, + this.doctorName, + this.doseDailyQuantity, + this.frequency, + this.frequencyNumber, + this.image, + this.imageExtension, + this.imageSRCUrl, + this.imageString, + this.imageThumbUrl, + this.isCovered, + this.itemDescription, + this.itemDescriptionN, + this.itemID, + this.orderDate, + this.patientID, + this.patientName, + this.phoneOffice1, + this.prescriptionQR, + this.prescriptionTimes, + this.productImage, + this.productImageBase64, + this.productImageString, + this.projectID, + this.projectName, + this.remarks, + this.route, + this.sKU, + this.scaleOffset, + this.startDate}); PrescriptionReportEnh.fromJson(Map json) { address = json['Address']; @@ -85,6 +87,7 @@ class PrescriptionReportEnh { imageThumbUrl = json['ImageThumbUrl']; isCovered = json['IsCovered']; itemDescription = json['ItemDescription']; + itemDescriptionN = json['ItemDescriptionN']; itemID = json['ItemID']; orderDate = json['OrderDate']; patientID = json['PatientID']; @@ -122,6 +125,7 @@ class PrescriptionReportEnh { data['ImageThumbUrl'] = this.imageThumbUrl; data['IsCovered'] = this.isCovered; data['ItemDescription'] = this.itemDescription; + data['ItemDescriptionN'] = this.itemDescriptionN; data['ItemID'] = this.itemID; data['OrderDate'] = this.orderDate; data['PatientID'] = this.patientID; diff --git 
a/lib/core/service/medical/prescriptions_service.dart b/lib/core/service/medical/prescriptions_service.dart index e9741a7f..af34f2ba 100644 --- a/lib/core/service/medical/prescriptions_service.dart +++ b/lib/core/service/medical/prescriptions_service.dart @@ -2,6 +2,7 @@ import 'package:diplomaticquarterapp/config/config.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/Prescriptions.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/perscription_pharmacy.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_report.dart'; +import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_report_inp.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_report_enh.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/prescriptions_order.dart'; import 'package:diplomaticquarterapp/core/model/prescriptions/request_get_list_pharmacy_for_prescriptions.dart'; @@ -15,7 +16,7 @@ import 'package:flutter/cupertino.dart'; class PrescriptionsService extends BaseService { List prescriptionsList = List(); - + List prescriptionReportListINP = List(); List prescriptionsOrderList = List(); Future getPrescriptions() async { @@ -56,32 +57,40 @@ class PrescriptionsService extends BaseService { appointmentNo: 0, isDentalAllowedBackend: false); List prescriptionReportList = List(); - Future getPrescriptionReport( - {Prescriptions prescriptions}) async { + Future getPrescriptionReport({Prescriptions prescriptions}) async { hasError = false; - _requestPrescriptionReport.dischargeNo = prescriptions.dischargeNo; + if (prescriptions.isInOutPatient == false) { + _requestPrescriptionReport.dischargeNo = prescriptions.dischargeNo; + } else { + _requestPrescriptionReport.dischargeNo = 0; + } _requestPrescriptionReport.projectID = prescriptions.projectID; _requestPrescriptionReport.clinicID = prescriptions.clinicID; _requestPrescriptionReport.setupID = prescriptions.setupID; _requestPrescriptionReport.episodeID = prescriptions.episodeID; _requestPrescriptionReport.appointmentNo = prescriptions.appointmentNo; - await baseAppClient.post(prescriptions.isInOutPatient? GET_PRESCRIPTION_REPORT_ENH : GET_PRESCRIPTION_REPORT, + await baseAppClient.post( + prescriptions.isInOutPatient + ? 
GET_PRESCRIPTION_REPORT_ENH + : GET_PRESCRIPTION_REPORT, onSuccess: (dynamic response, int statusCode) { prescriptionReportList.clear(); prescriptionReportEnhList.clear(); - if(prescriptions.isInOutPatient){ + if (prescriptions.isInOutPatient) { response['ListPRM'].forEach((prescriptions) { - prescriptionReportList.add(PrescriptionReport.fromJson(prescriptions)); - prescriptionReportEnhList.add(PrescriptionReportEnh.fromJson(prescriptions)); + prescriptionReportList + .add(PrescriptionReport.fromJson(prescriptions)); + prescriptionReportEnhList + .add(PrescriptionReportEnh.fromJson(prescriptions)); }); - }else{ + } else { + prescriptionReportListINP.clear(); response['INP_GetPrescriptionReport_List'].forEach((prescriptions) { - prescriptionReportList.add(PrescriptionReport.fromJson(prescriptions)); + prescriptionReportListINP + .add(PrescriptionReportINP.fromJson(prescriptions)); }); } - - }, onFailure: (String error, int statusCode) { hasError = true; super.error = error; @@ -90,35 +99,42 @@ class PrescriptionsService extends BaseService { RequestSendPrescriptionEmail _requestSendPrescriptionEmail = RequestSendPrescriptionEmail( - isDentalAllowedBackend: false,); + isDentalAllowedBackend: false, + ); Future sendPrescriptionEmail(String appointmentDate, int patientID, - String clinicName, String doctorName, int projectID) async { + String clinicName, String doctorName, int doctorID, int projectID) async { _requestSendPrescriptionEmail.listPrescriptions = prescriptionReportList; _requestSendPrescriptionEmail.appointmentDate = appointmentDate; _requestSendPrescriptionEmail.patientID = patientID; _requestSendPrescriptionEmail.clinicName = clinicName; _requestSendPrescriptionEmail.doctorName = doctorName; _requestSendPrescriptionEmail.projectID = projectID; - _requestSendPrescriptionEmail.to = user.emailAddress; - _requestSendPrescriptionEmail.dateofBirth = user.dateofBirth; - _requestSendPrescriptionEmail.patientIditificationNum = user.patientIdentificationNo; - _requestSendPrescriptionEmail.patientMobileNumber = user.mobileNumber; - _requestSendPrescriptionEmail.patientName = user.firstName +" "+ user.lastName; - _requestSendPrescriptionEmail.setupID = user.setupID; + _requestSendPrescriptionEmail.to = user.emailAddress; + _requestSendPrescriptionEmail.dateofBirth = user.dateofBirth; + _requestSendPrescriptionEmail.patientIditificationNum = + user.patientIdentificationNo; + _requestSendPrescriptionEmail.patientMobileNumber = user.mobileNumber; + _requestSendPrescriptionEmail.doctorID = doctorID; + _requestSendPrescriptionEmail.patientName = + user.firstName + " " + user.lastName; + _requestSendPrescriptionEmail.setupID = user.setupID; + _requestSendPrescriptionEmail.to = user.emailAddress; hasError = false; - await baseAppClient.post(SEND_PRESCRIPTION_EMAIL, onSuccess: (response, statusCode) {}, + await baseAppClient + .post(SEND_PRESCRIPTION_EMAIL, onSuccess: (response, statusCode) {}, onFailure: (String error, int statusCode) { hasError = true; super.error = error; }, body: _requestSendPrescriptionEmail.toJson()); } - RequestGetListPharmacyForPrescriptions requestGetListPharmacyForPrescriptions = RequestGetListPharmacyForPrescriptions( + RequestGetListPharmacyForPrescriptions + requestGetListPharmacyForPrescriptions = + RequestGetListPharmacyForPrescriptions( latitude: 0, longitude: 0, isDentalAllowedBackend: false, - ); List pharmacyPrescriptionsList = List(); @@ -139,18 +155,19 @@ class PrescriptionsService extends BaseService { } RequestPrescriptionReportEnh 
_requestPrescriptionReportEnh = - RequestPrescriptionReportEnh(isDentalAllowedBackend: false,); + RequestPrescriptionReportEnh( + isDentalAllowedBackend: false, + ); List prescriptionReportEnhList = List(); - Future getPrescriptionReportEnh({PrescriptionsOrder prescriptionsOrder}) async { - + Future getPrescriptionReportEnh( + {PrescriptionsOrder prescriptionsOrder}) async { ///This logic copy from the old app from class [order-history.component.ts] in line 45 bool isInPatient = false; prescriptionsList.forEach((element) { if (prescriptionsOrder.appointmentNo == "0") { if (element.dischargeNo == int.parse(prescriptionsOrder.dischargeID)) { - _requestPrescriptionReportEnh.appointmentNo = element.appointmentNo; _requestPrescriptionReportEnh.clinicID = element.clinicID; _requestPrescriptionReportEnh.projectID = element.projectID; @@ -160,46 +177,45 @@ class PrescriptionsService extends BaseService { isInPatient = element.isInOutPatient; } } else { - if (int.parse(prescriptionsOrder.appointmentNo) == element.appointmentNo) { + if (int.parse(prescriptionsOrder.appointmentNo) == + element.appointmentNo) { _requestPrescriptionReportEnh.appointmentNo = element.appointmentNo; _requestPrescriptionReportEnh.clinicID = element.clinicID; _requestPrescriptionReportEnh.projectID = element.projectID; _requestPrescriptionReportEnh.episodeID = element.episodeID; _requestPrescriptionReportEnh.setupID = element.setupID; _requestPrescriptionReportEnh.dischargeNo = element.dischargeNo; - isInPatient = element.isInOutPatient;///call inpGetPrescriptionReport + isInPatient = element.isInOutPatient; + + ///call inpGetPrescriptionReport } } }); hasError = false; - await baseAppClient.post(isInPatient? GET_PRESCRIPTION_REPORT_ENH : GET_PRESCRIPTION_REPORT, + await baseAppClient.post( + isInPatient ? 
GET_PRESCRIPTION_REPORT_ENH : GET_PRESCRIPTION_REPORT, onSuccess: (dynamic response, int statusCode) { prescriptionReportEnhList.clear(); - if(isInPatient){ + if (isInPatient) { response['ListPRM'].forEach((prescriptions) { - prescriptionReportEnhList.add(PrescriptionReportEnh.fromJson(prescriptions)); + prescriptionReportEnhList + .add(PrescriptionReportEnh.fromJson(prescriptions)); }); - }else{ + } else { response['INP_GetPrescriptionReport_List'].forEach((prescriptions) { - - PrescriptionReportEnh reportEnh = PrescriptionReportEnh.fromJson(prescriptions); - reportEnh.itemDescription = prescriptions['ItemDescriptionN']; + PrescriptionReportEnh reportEnh = + PrescriptionReportEnh.fromJson(prescriptions); + reportEnh.itemDescription = prescriptions['ItemDescriptionN']; prescriptionReportEnhList.add(reportEnh); - }); - } - - }, onFailure: (String error, int statusCode) { hasError = true; super.error = error; }, body: _requestPrescriptionReportEnh.toJson()); - - } Future updatePressOrder({@required int presOrderID}) async { @@ -211,12 +227,10 @@ class PrescriptionsService extends BaseService { body['PresOrderStatus'] = 4; body['isDentalAllowedBackend'] = false; await baseAppClient.post(UPDATE_PRESS_ORDER, - onSuccess: (dynamic response, int statusCode) { - - }, onFailure: (String error, int statusCode) { - hasError = true; - super.error = error; - }, body: body); + onSuccess: (dynamic response, int statusCode) {}, + onFailure: (String error, int statusCode) { + hasError = true; + super.error = error; + }, body: body); } - } diff --git a/lib/core/viewModels/medical/prescriptions_view_model.dart b/lib/core/viewModels/medical/prescriptions_view_model.dart index 2c32a081..3ad9df99 100644 --- a/lib/core/viewModels/medical/prescriptions_view_model.dart +++ b/lib/core/viewModels/medical/prescriptions_view_model.dart @@ -4,7 +4,7 @@ import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_repor import 'package:diplomaticquarterapp/core/model/prescriptions/prescriptions_order.dart'; import 'package:diplomaticquarterapp/uitl/app_toast.dart'; import 'package:flutter/cupertino.dart'; - +import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_report_inp.dart'; import '../../../core/enum/filter_type.dart'; import '../../../core/enum/viewstate.dart'; import '../../../core/model/prescriptions/Prescriptions.dart'; @@ -19,17 +19,20 @@ class PrescriptionsViewModel extends BaseViewModel { List _prescriptionsOrderListClinic = List(); List _prescriptionsOrderListHospital = List(); - List get prescriptionReportList => _prescriptionsService.prescriptionReportList; + List get prescriptionReportListINP => + _prescriptionsService.prescriptionReportListINP; + List get prescriptionsList => _prescriptionsService.prescriptionsList; - List get prescriptionsHistory => _prescriptionsService.prescriptionsOrderList; - - List get pharmacyPrescriptionsList => _prescriptionsService.pharmacyPrescriptionsList ; + List get prescriptionsHistory => + _prescriptionsService.prescriptionsOrderList; + List get pharmacyPrescriptionsList => + _prescriptionsService.pharmacyPrescriptionsList; List get prescriptionsOrderList => filterType == FilterType.Clinic @@ -105,7 +108,8 @@ class PrescriptionsViewModel extends BaseViewModel { getPrescriptionReport({Prescriptions prescriptions}) async { setState(ViewState.Busy); - await _prescriptionsService.getPrescriptionReport(prescriptions: prescriptions); + await _prescriptionsService.getPrescriptionReport( + prescriptions: prescriptions); if 
(_prescriptionsService.hasError) { error = _prescriptionsService.error; setState(ViewState.ErrorLocal); @@ -119,11 +123,12 @@ class PrescriptionsViewModel extends BaseViewModel { int patientID, String clinicName, String doctorName, - String mes, + int doctorID, + String mes, int projectID}) async { setState(ViewState.BusyLocal); - await _prescriptionsService.sendPrescriptionEmail( - appointmentDate, patientID, clinicName, doctorName, projectID); + await _prescriptionsService.sendPrescriptionEmail(appointmentDate, + patientID, clinicName, doctorName, doctorID, projectID); if (_prescriptionsService.hasError) { error = _prescriptionsService.error; setState(ViewState.ErrorLocal); @@ -145,12 +150,13 @@ class PrescriptionsViewModel extends BaseViewModel { } } - - List get prescriptionReportEnhList => _prescriptionsService.prescriptionReportEnhList; + List get prescriptionReportEnhList => + _prescriptionsService.prescriptionReportEnhList; getPrescriptionReportEnh({PrescriptionsOrder prescriptionsOrder}) async { setState(ViewState.Busy); - await _prescriptionsService.getPrescriptionReportEnh(prescriptionsOrder: prescriptionsOrder); + await _prescriptionsService.getPrescriptionReportEnh( + prescriptionsOrder: prescriptionsOrder); if (_prescriptionsService.hasError) { error = _prescriptionsService.error; setState(ViewState.Error); @@ -159,16 +165,14 @@ class PrescriptionsViewModel extends BaseViewModel { } } - - Future updatePressOrder({@required int presOrderID}) async { + Future updatePressOrder({@required int presOrderID}) async { setState(ViewState.Busy); await _prescriptionsService.updatePressOrder(presOrderID: presOrderID); if (_prescriptionsService.hasError) { error = _prescriptionsService.error; setState(ViewState.Error); } else { - await getPrescriptions(); + await getPrescriptions(); } } - } diff --git a/lib/pages/AlHabibMedicalService/E-Referral/New_E_Referral/new_e_referral_step_one_page.dart b/lib/pages/AlHabibMedicalService/E-Referral/New_E_Referral/new_e_referral_step_one_page.dart index b6cddf18..5a3271f2 100644 --- a/lib/pages/AlHabibMedicalService/E-Referral/New_E_Referral/new_e_referral_step_one_page.dart +++ b/lib/pages/AlHabibMedicalService/E-Referral/New_E_Referral/new_e_referral_step_one_page.dart @@ -292,7 +292,9 @@ class MobileNumberTextFiled extends StatelessWidget { padding: EdgeInsets.all(5), decoration: BoxDecoration( borderRadius: BorderRadius.circular(15), color: Colors.white), - child: Row(children: [ + child: Row( + textDirection: TextDirection.ltr, + children: [ Expanded( flex: 1, child: Icon( diff --git a/lib/pages/AlHabibMedicalService/h2o/month_page.dart b/lib/pages/AlHabibMedicalService/h2o/month_page.dart index e0118495..8867fb77 100644 --- a/lib/pages/AlHabibMedicalService/h2o/month_page.dart +++ b/lib/pages/AlHabibMedicalService/h2o/month_page.dart @@ -1,5 +1,6 @@ import 'package:diplomaticquarterapp/core/enum/viewstate.dart'; import 'package:diplomaticquarterapp/core/viewModels/AlHabibMedicalService/H2O_view_model.dart'; +import 'package:diplomaticquarterapp/core/viewModels/project_view_model.dart'; import 'package:diplomaticquarterapp/pages/base/base_view.dart'; import 'package:diplomaticquarterapp/uitl/translations_delegate_base.dart'; import 'package:diplomaticquarterapp/widgets/charts/app_bar_chart.dart'; @@ -8,15 +9,17 @@ import 'package:diplomaticquarterapp/widgets/others/app_scaffold_widget.dart'; import 'package:diplomaticquarterapp/widgets/progress_indicator/app_circular_progress_Indeicator.dart'; import 'package:flutter/cupertino.dart'; import 
'package:flutter/material.dart'; +import 'package:provider/provider.dart'; class MonthPage extends StatelessWidget { @override Widget build(BuildContext context) { + ProjectViewModel projectViewModel = Provider.of(context); return BaseView( onModelReady: (model) => model.getUserProgressForMonthData(), builder: (_, model, widget) => AppScaffold( isShowAppBar: false, - appBarTitle: TranslationBase.of(context).h2o, + appBarTitle: TranslationBase.of(context).h2o, baseViewModel: model, body: Padding( padding: EdgeInsets.all(8.0), @@ -46,7 +49,7 @@ class MonthPage extends StatelessWidget { ], ), // SizedBox(height: 8), - AppBarChart(seriesList: model.userProgressForMonthDataSeries), + AppBarChart(seriesList: model.userProgressForMonthDataSeries, isArabic: projectViewModel.isArabic), ], ), ), diff --git a/lib/pages/AlHabibMedicalService/health_calculator/ovulation_period/ovulation_result_page.dart b/lib/pages/AlHabibMedicalService/health_calculator/ovulation_period/ovulation_result_page.dart index 6669bf75..6ced14b2 100644 --- a/lib/pages/AlHabibMedicalService/health_calculator/ovulation_period/ovulation_result_page.dart +++ b/lib/pages/AlHabibMedicalService/health_calculator/ovulation_period/ovulation_result_page.dart @@ -1,6 +1,8 @@ +import 'package:diplomaticquarterapp/pages/AlHabibMedicalService/health_calculator/doctor_list.dart'; import 'package:diplomaticquarterapp/widgets/buttons/button.dart'; import 'package:diplomaticquarterapp/widgets/data_display/text.dart'; import 'package:diplomaticquarterapp/widgets/others/app_scaffold_widget.dart'; +import 'package:diplomaticquarterapp/widgets/transitions/fade_page.dart'; import 'package:flutter/material.dart'; import 'package:intl/intl.dart'; @@ -9,8 +11,7 @@ class OvulationResult extends StatelessWidget { var dateTo; var conceivedDate; var deliveryDue; - OvulationResult( - {this.dateFrom, this.dateTo, this.deliveryDue, this.conceivedDate}); + OvulationResult({this.dateFrom, this.dateTo, this.deliveryDue, this.conceivedDate}); //var newFormat = DateFormat("yy-MM-dd"); @override @@ -84,6 +85,12 @@ class OvulationResult extends StatelessWidget { width: 350, child: Button( label: 'See List Of Doctors', + onTap: () { + Navigator.push( + context, + FadePage(page: DoctorList()), + ); + }, ), ), ], diff --git a/lib/pages/AlHabibMedicalService/my_web_view.dart b/lib/pages/AlHabibMedicalService/my_web_view.dart index 59163c15..c4841a24 100644 --- a/lib/pages/AlHabibMedicalService/my_web_view.dart +++ b/lib/pages/AlHabibMedicalService/my_web_view.dart @@ -8,8 +8,7 @@ class MyWebView extends StatelessWidget { final String title; final String selectedUrl; - final Completer _controller = - Completer(); + final Completer _controller = Completer(); MyWebView({ @required this.title, @@ -21,17 +20,12 @@ class MyWebView extends StatelessWidget { return AppScaffold( isShowAppBar: true, appBarTitle: title, - - body: - WebView( + body: WebView( initialUrl: selectedUrl, javascriptMode: JavascriptMode.unrestricted, onWebViewCreated: (WebViewController webViewController) { _controller.complete(webViewController); }, - ) - - - ); + )); } -} \ No newline at end of file +} diff --git a/lib/pages/AlHabibMedicalService/parking_page.dart b/lib/pages/AlHabibMedicalService/parking_page.dart index 82c390ea..19f06a56 100644 --- a/lib/pages/AlHabibMedicalService/parking_page.dart +++ b/lib/pages/AlHabibMedicalService/parking_page.dart @@ -8,7 +8,7 @@ import 'package:diplomaticquarterapp/widgets/others/app_scaffold_widget.dart'; import 'package:flutter/cupertino.dart'; import 
'package:flutter/material.dart'; import 'package:maps_launcher/maps_launcher.dart'; - +import 'package:diplomaticquarterapp/core/model/ImagesInfo.dart'; import '../../d_q_icons_icons.dart'; class ParkingPage extends StatelessWidget { @@ -19,6 +19,13 @@ class ParkingPage extends StatelessWidget { builder: (_, model, widget) => AppScaffold( isShowAppBar: true, appBarTitle: TranslationBase.of(context).parking, + description: TranslationBase.of(context).parkingDescription, + imagesInfo: [ + ImagesInfo( + imageAr: 'assets/images/bc_parking.png', + imageEn: 'assets/images/bc_parking.png', + isAsset: true) + ], body: SingleChildScrollView( padding: EdgeInsets.all(12), child: !model.isSavePark diff --git a/lib/pages/DrawerPages/family/my-family.dart b/lib/pages/DrawerPages/family/my-family.dart index baac5c63..e4e582bb 100644 --- a/lib/pages/DrawerPages/family/my-family.dart +++ b/lib/pages/DrawerPages/family/my-family.dart @@ -351,9 +351,21 @@ class _MyFamily extends State with TickerProviderStateMixin { Padding( padding: EdgeInsets.only(left: 10, right: 10), child: Row(children: [ - Expanded(flex: 3, child: AppText('Name')), - Expanded(flex: 1, child: AppText('Allow')), - Expanded(flex: 1, child: AppText('Reject')), + Expanded( + flex: 3, + child: AppText( + TranslationBase.of(context) + .name)), + Expanded( + flex: 1, + child: AppText( + TranslationBase.of(context) + .allow)), + Expanded( + flex: 1, + child: AppText( + TranslationBase.of(context) + .reject)), ])), Column( children: familyFileProvider.allSharedRecordsByStatusResponse.getAllSharedRecordsByStatusList.map((result) { @@ -487,8 +499,16 @@ class _MyFamily extends State with TickerProviderStateMixin { Padding( padding: EdgeInsets.only(left: 10, right: 10), child: Row(children: [ - Expanded(flex: 3, child: AppText('Name')), - Expanded(flex: 1, child: AppText('Delete')), + Expanded( + flex: 3, + child: AppText( + TranslationBase.of(context) + .name)), + Expanded( + flex: 1, + child: AppText( + TranslationBase.of(context) + .delete)), ])), Column( children: familyFileProvider.allSharedRecordsByStatusResponse.getAllSharedRecordsByStatusList.map((result) { diff --git a/lib/pages/ErService/NearestEr.dart b/lib/pages/ErService/NearestEr.dart index eaf0d4ef..cbafacce 100644 --- a/lib/pages/ErService/NearestEr.dart +++ b/lib/pages/ErService/NearestEr.dart @@ -31,6 +31,7 @@ class NearestEr extends StatelessWidget { : (model) => model.getProjectAvgERWaitingTimeOrders(), builder: (_, mode, widget) => AppScaffold( isShowAppBar: true, + isShowDecPage: false, appBarTitle: TranslationBase.of(context).NearestEr, baseViewModel: mode, body: mode.ProjectAvgERWaitingTimeModeList.length > 0 diff --git a/lib/pages/medical/balance/my_balance_page.dart b/lib/pages/medical/balance/my_balance_page.dart index 7639bb2f..f097bda5 100644 --- a/lib/pages/medical/balance/my_balance_page.dart +++ b/lib/pages/medical/balance/my_balance_page.dart @@ -8,7 +8,6 @@ import 'package:diplomaticquarterapp/widgets/others/app_scaffold_widget.dart'; import 'package:diplomaticquarterapp/widgets/transitions/fade_page.dart'; import 'package:flutter/cupertino.dart'; import 'package:flutter/material.dart'; -import 'package:hexcolor/hexcolor.dart'; import 'advance_payment_page.dart'; @@ -105,12 +104,12 @@ class MyBalancePage extends StatelessWidget { ), ), bottomSheet: Container( - height: MediaQuery.of(context).size.height * 0.12, + // height: MediaQuery.of(context).size.height * 0.12, + height: 70.0, color: Colors.white, width: double.infinity, padding: EdgeInsets.all(12), child: 
SecondaryButton( - // color: Colors.grey[900], textColor: Colors.white, label: TranslationBase.of(context).createAdvancedPayment, onTap: () { diff --git a/lib/pages/medical/balance/new_text_Field.dart b/lib/pages/medical/balance/new_text_Field.dart index 3e20dc00..ec3eaf3b 100644 --- a/lib/pages/medical/balance/new_text_Field.dart +++ b/lib/pages/medical/balance/new_text_Field.dart @@ -42,39 +42,39 @@ final _mobileFormatter = NumberTextInputFormatter(); class NewTextFields extends StatefulWidget { NewTextFields( {Key key, - this.type, - this.hintText, - this.suffixIcon, - this.autoFocus, - this.onChanged, - this.initialValue, - this.minLines, - this.maxLines, - this.inputFormatters, - this.padding, - this.focus = false, - this.maxLengthEnforced = true, - this.suffixIconColor, - this.inputAction, - this.onSubmit, - this.keepPadding = true, - this.textCapitalization = TextCapitalization.none, - this.controller, - this.keyboardType, - this.validator, - this.borderOnlyError = false, - this.onSaved, - this.onSuffixTap, - this.readOnly: false, - this.maxLength, - this.prefixIcon, - this.bare = false, - this.onTap, - this.fontSize = 16.0, - this.fontWeight = FontWeight.w700, - this.autoValidate = false, - this.hintColor, - this.isEnabled = true,this.counterText=""}) + this.type, + this.hintText, + this.suffixIcon, + this.autoFocus, + this.onChanged, + this.initialValue, + this.minLines, + this.maxLines, + this.inputFormatters, + this.padding, + this.focus = false, + this.maxLengthEnforced = true, + this.suffixIconColor, + this.inputAction, + this.onSubmit, + this.keepPadding = true, + this.textCapitalization = TextCapitalization.none, + this.controller, + this.keyboardType, + this.validator, + this.borderOnlyError = false, + this.onSaved, + this.onSuffixTap, + this.readOnly: false, + this.maxLength, + this.prefixIcon, + this.bare = false, + this.onTap, + this.fontSize = 16.0, + this.fontWeight = FontWeight.w700, + this.autoValidate = false, + this.hintColor, + this.isEnabled = true,this.counterText=""}) : super(key: key); final String hintText; @@ -171,10 +171,10 @@ class _NewTextFieldsState extends State { textCapitalization: widget.textCapitalization, onFieldSubmitted: widget.inputAction == TextInputAction.next ? (widget.onSubmit != null - ? widget.onSubmit - : (val) { - _focusNode.nextFocus(); - }) + ? widget.onSubmit + : (val) { + _focusNode.nextFocus(); + }) : widget.onSubmit, textInputAction: widget.inputAction, minLines: widget.minLines ?? 1, @@ -194,15 +194,15 @@ class _NewTextFieldsState extends State { fontSize: widget.fontSize, fontWeight: widget.fontWeight), inputFormatters: widget.keyboardType == TextInputType.phone ? 
[ - WhitelistingTextInputFormatter.digitsOnly, - _mobileFormatter, - ] + WhitelistingTextInputFormatter.digitsOnly, + _mobileFormatter, + ] : widget.inputFormatters, decoration: InputDecoration( labelText: widget.hintText, counterText: widget.counterText, labelStyle: - TextStyle(color: Theme.of(context).textTheme.bodyText1.color), + TextStyle(color: Theme.of(context).textTheme.bodyText1.color), errorBorder: OutlineInputBorder( borderSide: BorderSide( color: Theme.of(context).errorColor.withOpacity(0.5), diff --git a/lib/pages/medical/my_trackers/my_trackers.dart b/lib/pages/medical/my_trackers/my_trackers.dart index 5e5c4527..c133c67a 100644 --- a/lib/pages/medical/my_trackers/my_trackers.dart +++ b/lib/pages/medical/my_trackers/my_trackers.dart @@ -15,6 +15,7 @@ class MyTrackers extends StatelessWidget { return AppScaffold( appBarTitle: TranslationBase.of(context).myTracker, isShowAppBar: true, + isShowDecPage: false, body: SingleChildScrollView( child: Container( padding: EdgeInsets.all(12), @@ -27,22 +28,24 @@ class MyTrackers extends StatelessWidget { children: [ Expanded( child: InkWell( - onTap: ()=> Navigator.push(context, FadePage(page: BloodSugarHomePage())), + onTap: () => Navigator.push(context, FadePage(page: BloodSugarHomePage())), child: Container( margin: EdgeInsets.all(5), - width: MediaQuery.of(context).size.width*0.35, - height:MediaQuery.of(context).size.width*0.35 , + width: MediaQuery.of(context).size.width * 0.35, + height: MediaQuery.of(context).size.width * 0.35, decoration: BoxDecoration( - shape: BoxShape.rectangle, - borderRadius: BorderRadius.circular(8), - color: Colors.white - ), + shape: BoxShape.rectangle, borderRadius: BorderRadius.circular(8), color: Colors.white), child: Column( crossAxisAlignment: CrossAxisAlignment.center, mainAxisAlignment: MainAxisAlignment.center, children: [ - Image.asset('assets/tracker/blood-suger.png',width: 60.0,), - SizedBox(height: 15,), + Image.asset( + 'assets/tracker/blood-suger.png', + width: 60.0, + ), + SizedBox( + height: 15, + ), Texts(TranslationBase.of(context).bloodSugar), ], ), @@ -51,22 +54,24 @@ class MyTrackers extends StatelessWidget { ), Expanded( child: InkWell( - onTap: ()=> Navigator.push(context, FadePage(page: BloodPressureHomePage())), + onTap: () => Navigator.push(context, FadePage(page: BloodPressureHomePage())), child: Container( margin: EdgeInsets.all(5), - width: MediaQuery.of(context).size.width*0.35, - height:MediaQuery.of(context).size.width*0.35 , + width: MediaQuery.of(context).size.width * 0.35, + height: MediaQuery.of(context).size.width * 0.35, decoration: BoxDecoration( - shape: BoxShape.rectangle, - borderRadius: BorderRadius.circular(8), - color: Colors.white - ), + shape: BoxShape.rectangle, borderRadius: BorderRadius.circular(8), color: Colors.white), child: Column( crossAxisAlignment: CrossAxisAlignment.center, mainAxisAlignment: MainAxisAlignment.center, children: [ - Image.asset('assets/tracker/blood-pressure.png',width: 60.0,), - SizedBox(height: 15,), + Image.asset( + 'assets/tracker/blood-pressure.png', + width: 60.0, + ), + SizedBox( + height: 15, + ), Texts(TranslationBase.of(context).bloodPressure), ], ), @@ -80,22 +85,24 @@ class MyTrackers extends StatelessWidget { children: [ Expanded( child: InkWell( - onTap: ()=> Navigator.push(context, FadePage(page: WeightHomePage())), + onTap: () => Navigator.push(context, FadePage(page: WeightHomePage())), child: Container( margin: EdgeInsets.all(5), - width: MediaQuery.of(context).size.width*0.35, - 
height:MediaQuery.of(context).size.width*0.35 , + width: MediaQuery.of(context).size.width * 0.35, + height: MediaQuery.of(context).size.width * 0.35, decoration: BoxDecoration( - shape: BoxShape.rectangle, - borderRadius: BorderRadius.circular(8), - color: Colors.white - ), + shape: BoxShape.rectangle, borderRadius: BorderRadius.circular(8), color: Colors.white), child: Column( crossAxisAlignment: CrossAxisAlignment.center, mainAxisAlignment: MainAxisAlignment.center, children: [ - Image.asset('assets/tracker/weight.png',width: 60.0,), - SizedBox(height: 15,), + Image.asset( + 'assets/tracker/weight.png', + width: 60.0, + ), + SizedBox( + height: 15, + ), Texts(TranslationBase.of(context).weight), ], ), diff --git a/lib/pages/medical/my_trackers/widget/MonthCurvedChartBloodPressure.dart b/lib/pages/medical/my_trackers/widget/MonthCurvedChartBloodPressure.dart index 135d0a95..ee7e00af 100644 --- a/lib/pages/medical/my_trackers/widget/MonthCurvedChartBloodPressure.dart +++ b/lib/pages/medical/my_trackers/widget/MonthCurvedChartBloodPressure.dart @@ -21,7 +21,7 @@ class MonthCurvedChartBloodPressure extends StatelessWidget { Widget build(BuildContext context) { getXaxix(); return AspectRatio( - aspectRatio: 1.1, + aspectRatio: 1.0, child: Container( decoration: const BoxDecoration( borderRadius: BorderRadius.all(Radius.circular(18)), @@ -137,19 +137,21 @@ class MonthCurvedChartBloodPressure extends StatelessWidget { ), leftTitles: SideTitles( showTitles: true, + interval:getMaxY() - getMinY() <=500?50:getMaxY() - getMinY() <=1000?100:200, + getTextStyles: (value) => const TextStyle( color: Colors.black, fontWeight: FontWeight.bold, fontSize: 10, ), - getTitles: (value) { - if (value.toInt() == 0) - return '${value.toInt()}'; - else if (value.toInt() % horizontalInterval == 0) - return '${value.toInt()}'; - else - return ''; - }, + // getTitles: (value) { + // // if (value.toInt() == 0) + // // return '${value.toInt()}'; + // // else if (value.toInt() % horizontalInterval == 0) + // // return '${value.toInt()}'; + // // else + // return '${value.toInt()}'; + // }, margin: 12, ), ), diff --git a/lib/pages/medical/my_trackers/widget/MonthLineChartCurved.dart b/lib/pages/medical/my_trackers/widget/MonthLineChartCurved.dart index 607f3ab0..5ef71f5b 100644 --- a/lib/pages/medical/my_trackers/widget/MonthLineChartCurved.dart +++ b/lib/pages/medical/my_trackers/widget/MonthLineChartCurved.dart @@ -10,11 +10,7 @@ class MonthLineChartCurved extends StatelessWidget { final int indexes; final double horizontalInterval; - MonthLineChartCurved( - {this.title, - this.timeSeries, - this.indexes, - this.horizontalInterval = 15.0}); + MonthLineChartCurved({this.title, this.timeSeries, this.indexes, this.horizontalInterval = 15.0}); List xAxixs = List(); List yAxixs = List(); @@ -40,8 +36,7 @@ class MonthLineChartCurved extends StatelessWidget { ), Text( title, - style: TextStyle( - color: Colors.black, fontSize: 15, letterSpacing: 2), + style: TextStyle(color: Colors.black, fontSize: 15, letterSpacing: 2), textAlign: TextAlign.center, ), SizedBox( @@ -49,8 +44,7 @@ class MonthLineChartCurved extends StatelessWidget { ), Expanded( child: Padding( - padding: const EdgeInsets.only( - right: 18.0, left: 16.0, top: 15, bottom: 15), + padding: const EdgeInsets.only(right: 18.0, left: 16.0, top: 15, bottom: 15), child: LineChart( sampleData1(context), swapAnimationDuration: const Duration(milliseconds: 250), @@ -97,10 +91,7 @@ class MonthLineChartCurved extends StatelessWidget { handleBuiltInTouches: true, ), 
gridData: FlGridData( - horizontalInterval: horizontalInterval, - show: true, - drawVerticalLine: true, - drawHorizontalLine: true), + horizontalInterval: horizontalInterval, show: true, drawVerticalLine: true, drawHorizontalLine: true), titlesData: FlTitlesData( bottomTitles: SideTitles( showTitles: true, @@ -120,14 +111,16 @@ class MonthLineChartCurved extends StatelessWidget { fontWeight: FontWeight.bold, fontSize: 10, ), - getTitles: (value) { - if (value.toInt() == 0) - return '${value.toInt()}'; - else if (value.toInt() % horizontalInterval == 0) - return '${value.toInt()}'; - else - return ''; - }, + interval:getMaxY() - getMinY() <=500?50:getMaxY() - getMinY() <=1000?100:200, + + // getTitles: (value) { + // if (value.toInt() == 0) + // return '${value.toInt()}'; + // else if (value.toInt() % horizontalInterval == 0) + // return '${value.toInt()}'; + // else + // return ''; + // }, margin: 12, ), ), @@ -180,6 +173,9 @@ class MonthLineChartCurved extends StatelessWidget { List getData(context) { List spots = List(); + if (timeSeries.length == 0) { + spots.add(FlSpot(0, 0)); + } for (int index = 0; index < timeSeries.length; index++) { spots.add(FlSpot(index.toDouble(), timeSeries[index].sales)); } diff --git a/lib/pages/medical/prescriptions/pharmacy_for_prescriptions_page.dart b/lib/pages/medical/prescriptions/pharmacy_for_prescriptions_page.dart index 96227fba..8224e61d 100644 --- a/lib/pages/medical/prescriptions/pharmacy_for_prescriptions_page.dart +++ b/lib/pages/medical/prescriptions/pharmacy_for_prescriptions_page.dart @@ -10,92 +10,94 @@ import 'package:maps_launcher/maps_launcher.dart'; import 'package:url_launcher/url_launcher.dart'; class PharmacyForPrescriptionsPage extends StatelessWidget { - final PrescriptionReport prescriptionReport; + final itemID; - PharmacyForPrescriptionsPage({Key key, this.prescriptionReport}); + PharmacyForPrescriptionsPage({Key key, this.itemID}); @override Widget build(BuildContext context) { - return BaseView( - onModelReady: (model) => model.getListPharmacyForPrescriptions(itemId: prescriptionReport.itemID), - builder: (_, model, widget) => AppScaffold( - isShowAppBar: true, - appBarTitle: TranslationBase.of(context).availability, - baseViewModel: model, - body: ListView.builder( - itemBuilder: (context, index) => Container( - width: double.infinity, - margin: EdgeInsets.only(top: 10, left: 10, right: 10), - padding: EdgeInsets.all(8.0), - decoration: BoxDecoration( - color: Colors.white, - borderRadius: BorderRadius.all( - Radius.circular(10.0), - ), - border: Border.all(color: Colors.grey[200], width: 0.5), + return BaseView( + onModelReady: (model) => + model.getListPharmacyForPrescriptions(itemId: itemID), + builder: (_, model, widget) => AppScaffold( + isShowAppBar: true, + appBarTitle: TranslationBase.of(context).availability, + baseViewModel: model, + body: ListView.builder( + itemBuilder: (context, index) => Container( + width: double.infinity, + margin: EdgeInsets.only(top: 10, left: 10, right: 10), + padding: EdgeInsets.all(8.0), + decoration: BoxDecoration( + color: Colors.white, + borderRadius: BorderRadius.all( + Radius.circular(10.0), ), - child: Row( - children: [ - ClipRRect( - borderRadius: BorderRadius.all(Radius.circular(5)), - child: Image.network( - model.pharmacyPrescriptionsList[index].projectImageURL, - fit: BoxFit.cover, - width: 60, - height: 70, - ), + border: Border.all(color: Colors.grey[200], width: 0.5), + ), + child: Row( + children: [ + ClipRRect( + borderRadius: BorderRadius.all(Radius.circular(5)), + 
child: Image.network( + model.pharmacyPrescriptionsList[index].projectImageURL, + fit: BoxFit.cover, + width: 60, + height: 70, ), - Expanded( - child: Padding( - padding: const EdgeInsets.all(8.0), - child: Column( - crossAxisAlignment: CrossAxisAlignment.start, - children: [ - Texts(model.pharmacyPrescriptionsList[index] - .locationDescription), - SizedBox( - height: 5, - ), - Texts(model.pharmacyPrescriptionsList[index].cityName), - ], - ), + ), + Expanded( + child: Padding( + padding: const EdgeInsets.all(8.0), + child: Column( + crossAxisAlignment: CrossAxisAlignment.start, + children: [ + Texts(model.pharmacyPrescriptionsList[index] + .locationDescription), + SizedBox( + height: 5, + ), + Texts(model.pharmacyPrescriptionsList[index].cityName), + ], ), ), - InkWell( - onTap: () { - MapsLauncher.launchCoordinates( - double.parse( - model.pharmacyPrescriptionsList[index].latitude), - double.parse( - model.pharmacyPrescriptionsList[index].longitude)); - }, + ), + InkWell( + onTap: () { + MapsLauncher.launchCoordinates( + double.parse( + model.pharmacyPrescriptionsList[index].latitude), + double.parse( + model.pharmacyPrescriptionsList[index].longitude)); + }, + child: Icon( + Icons.pin_drop, + size: 18, + color: Colors.red[900], + ), + ), + SizedBox( + width: 15, + ), + InkWell( + onTap: Feedback.wrapForTap(() { + launch( + "tel://${model.pharmacyPrescriptionsList[index].phoneNumber}"); + }, context), + child: Container( child: Icon( - Icons.pin_drop, + Icons.call, size: 18, color: Colors.red[900], ), ), - SizedBox( - width: 15, - ), - InkWell( - onTap: Feedback.wrapForTap((){ - launch("tel://${model.pharmacyPrescriptionsList[index].phoneNumber}"); - },context), - child: Container( - child: Icon( - Icons.call, - size: 18, - color: Colors.red[900], - ), - ), - ) - ], - ), + ) + ], ), - itemCount: model.pharmacyPrescriptionsList.length, ), + itemCount: model.pharmacyPrescriptionsList.length, ), - ); + ), + ); } } diff --git a/lib/pages/medical/prescriptions/prescription_details_page.dart b/lib/pages/medical/prescriptions/prescription_details_page.dart index 72e830cf..218d7ac6 100644 --- a/lib/pages/medical/prescriptions/prescription_details_page.dart +++ b/lib/pages/medical/prescriptions/prescription_details_page.dart @@ -53,7 +53,7 @@ class PrescriptionDetailsPage extends StatelessWidget { child: Texts( prescriptionReport.itemDescription.isNotEmpty ? prescriptionReport.itemDescription - : prescriptionReport.itemDescriptionN), + : prescriptionReport.itemDescriptionN ?? 
''), ), ), ) @@ -70,7 +70,7 @@ class PrescriptionDetailsPage extends StatelessWidget { context, FadePage( page: PharmacyForPrescriptionsPage( - prescriptionReport: prescriptionReport), + itemID: prescriptionReport.itemID), ), ), child: Center( @@ -118,14 +118,19 @@ class PrescriptionDetailsPage extends StatelessWidget { height: 40, width: double.infinity, child: Center( - child: Texts(TranslationBase.of(context).route, fontSize: 14,))), + child: Texts( + TranslationBase.of(context).route, + fontSize: 14, + ))), Container( color: Colors.white, height: 40, width: double.infinity, child: Center( - child: - Texts(TranslationBase.of(context).frequency, fontSize: 14,))), + child: Texts( + TranslationBase.of(context).frequency, + fontSize: 14, + ))), Container( color: Colors.white, height: 40, @@ -133,14 +138,18 @@ class PrescriptionDetailsPage extends StatelessWidget { padding: EdgeInsets.symmetric(horizontal: 4), child: Center( child: Texts( - "${TranslationBase.of(context).dailyDoses}", fontSize: 14,))), + "${TranslationBase.of(context).dailyDoses}", + fontSize: 14, + ))), Container( color: Colors.white, height: 40, width: double.infinity, child: Center( - child: - Texts(TranslationBase.of(context).duration, fontSize: 14,))), + child: Texts( + TranslationBase.of(context).duration, + fontSize: 14, + ))), ], ), TableRow( @@ -149,8 +158,8 @@ class PrescriptionDetailsPage extends StatelessWidget { color: Colors.white, height: 50, width: double.infinity, - child: - Center(child: Text(prescriptionReport.routeN))), + child: Center( + child: Text(prescriptionReport.routeN ?? ''))), Container( color: Colors.white, height: 50, diff --git a/lib/pages/medical/prescriptions/prescription_items_page.dart b/lib/pages/medical/prescriptions/prescription_items_page.dart index c6581493..e47b8ea7 100644 --- a/lib/pages/medical/prescriptions/prescription_items_page.dart +++ b/lib/pages/medical/prescriptions/prescription_items_page.dart @@ -4,6 +4,7 @@ import 'package:diplomaticquarterapp/core/model/prescriptions/prescription_repor import 'package:diplomaticquarterapp/core/viewModels/medical/prescriptions_view_model.dart'; import 'package:diplomaticquarterapp/core/viewModels/project_view_model.dart'; import 'package:diplomaticquarterapp/pages/base/base_view.dart'; +import 'package:diplomaticquarterapp/pages/medical/prescriptions/prescription_details_inp.dart'; import 'package:diplomaticquarterapp/pages/medical/prescriptions/prescription_details_page.dart'; import 'package:diplomaticquarterapp/uitl/translations_delegate_base.dart'; import 'package:diplomaticquarterapp/widgets/buttons/button.dart'; @@ -24,7 +25,7 @@ class PrescriptionItemsPage extends StatelessWidget { //TODO @override Widget build(BuildContext context) { - ProjectViewModel projectViewModel = Provider.of(context); + ProjectViewModel projectViewModel = Provider.of(context); return BaseView( onModelReady: (model) => model.getPrescriptionReport(prescriptions: prescriptions), @@ -38,14 +39,14 @@ class PrescriptionItemsPage extends StatelessWidget { children: [ if (!prescriptions.isInOutPatient) ...List.generate( - model.prescriptionReportList.length, + model.prescriptionReportListINP.length, (index) => InkWell( onTap: () => Navigator.push( context, FadePage( - page: PrescriptionDetailsPage( + page: PrescriptionDetailsPageINP( prescriptionReport: - model.prescriptionReportList[index], + model.prescriptionReportListINP[index], ), ), ), @@ -68,7 +69,7 @@ class PrescriptionItemsPage extends StatelessWidget { borderRadius: BorderRadius.all(Radius.circular(5)), 
child: Image.network( - model.prescriptionReportList[index] + model.prescriptionReportListINP[index] .imageSRCUrl, fit: BoxFit.cover, width: 60, @@ -83,13 +84,17 @@ class PrescriptionItemsPage extends StatelessWidget { padding: const EdgeInsets.all(8.0), child: Center( child: Texts(model - .prescriptionReportList[index] + .prescriptionReportListINP[index] .itemDescription .isNotEmpty - ? model.prescriptionReportList[index] + ? model + .prescriptionReportListINP[index] .itemDescription - : model.prescriptionReportList[index] - .itemDescriptionN)), + : model + .prescriptionReportListINP[ + index] + .itemDescriptionN ?? + '')), )), Icon( Icons.arrow_forward_ios, @@ -163,7 +168,12 @@ class PrescriptionItemsPage extends StatelessWidget { crossAxisAlignment: CrossAxisAlignment.start, children: [ Texts(model.prescriptionReportEnhList[index] - .itemDescription), + .itemDescription.isNotEmpty + ? model.prescriptionReportEnhList[index] + .itemDescription + : model.prescriptionReportEnhList[index] + .itemDescriptionN ?? + ''), ], ), ), @@ -188,36 +198,37 @@ class PrescriptionItemsPage extends StatelessWidget { child: Column( children: [ Divider(), - if(projectViewModel.havePrivilege(13)) - Container( - width: MediaQuery.of(context).size.width * 0.8, - child: Button( - label: TranslationBase.of(context).sendCopy, - onTap: () { - showConfirmMessage(context,model); - }, - loading: model.state == ViewState.BusyLocal, - ), - ), + if (projectViewModel.havePrivilege(13)) Container( - width: MediaQuery.of(context).size.width * 0.8, - child: Button( - label: TranslationBase.of(context).resendOrder, - backgroundColor: Colors.green[800], - onTap: (){ - - Navigator.push( - context, - FadePage( - page: PrescriptionDeliveryAddressPage( - prescriptions: prescriptions, - prescriptionReportList: model.prescriptionReportList, - prescriptionReportEnhList: model.prescriptionReportEnhList, - ), + width: MediaQuery.of(context).size.width * 0.8, + child: Button( + label: TranslationBase.of(context).sendCopy, + onTap: () { + showConfirmMessage(context, model); + }, + loading: model.state == ViewState.BusyLocal, + ), + ), + Container( + width: MediaQuery.of(context).size.width * 0.8, + child: Button( + label: TranslationBase.of(context).resendOrder, + backgroundColor: Colors.green[800], + onTap: () { + Navigator.push( + context, + FadePage( + page: PrescriptionDeliveryAddressPage( + prescriptions: prescriptions, + prescriptionReportList: + model.prescriptionReportList, + prescriptionReportEnhList: + model.prescriptionReportEnhList, ), - ); - }, - )) + ), + ); + }, + )) ], ), ), @@ -236,6 +247,7 @@ class PrescriptionItemsPage extends StatelessWidget { patientID: prescriptions.patientID, clinicName: prescriptions.companyName, doctorName: prescriptions.doctorName, + doctorID: prescriptions.doctorID, mes: TranslationBase.of(context).sendSuc, projectID: prescriptions.projectID); }, diff --git a/lib/pages/medical/reports/monthly_reports.dart b/lib/pages/medical/reports/monthly_reports.dart index 4484bd5f..80f6ac76 100644 --- a/lib/pages/medical/reports/monthly_reports.dart +++ b/lib/pages/medical/reports/monthly_reports.dart @@ -19,15 +19,24 @@ class MonthlyReportsPage extends StatefulWidget { } class _MonthlyReportsPageState extends State { - bool isAgree = false; - bool isSummary = false; - String email = ""; + bool isAgree; + bool isSummary; + String email; final formKey = GlobalKey(); @override Widget build(BuildContext context) { - return BaseView( - builder: (_, model, w) => AppScaffold( + return BaseView(builder: (_, model, 
w) { + if (isSummary == null) { + isSummary = model.receiveHealthSummaryReport; + } + if (isAgree == null) { + isAgree = model.receiveHealthSummaryReport; + } + if (email == null) { + email = model?.user?.emailAddress ?? ""; + } + return AppScaffold( isShowAppBar: true, appBarTitle: TranslationBase.of(context).monthlyReports, body: SingleChildScrollView( @@ -41,15 +50,10 @@ class _MonthlyReportsPageState extends State { Container( padding: EdgeInsets.all(9), height: 55, - decoration: BoxDecoration( - color: Colors.white, - borderRadius: BorderRadius.all(Radius.circular(8)), - shape: BoxShape.rectangle, - border: Border.all(color: Colors.grey)), + decoration: BoxDecoration(color: Colors.white, borderRadius: BorderRadius.all(Radius.circular(8)), shape: BoxShape.rectangle, border: Border.all(color: Colors.grey)), child: Row( mainAxisAlignment: MainAxisAlignment.spaceBetween, children: [ - Texts( TranslationBase.of(context).patientHealthSummaryReport, bold: true, @@ -62,12 +66,12 @@ class _MonthlyReportsPageState extends State { setState(() { isSummary = !isSummary; }); - if(!isSummary) { + if (!isSummary) { GifLoaderDialogUtils.showMyDialog(context); - await model.updatePatientHealthSummaryReport( - message: TranslationBase - .of(context) - .updateSuccessfully, isSummary: isSummary); + await model.updatePatientHealthSummaryReport(message: TranslationBase.of(context).updateSuccessfully, isSummary: isSummary); + model.receiveHealthSummaryReport = isSummary; + isAgree = isSummary; + model.user.emailAddress = ""; GifLoaderDialogUtils.hideDialog(context); } }, @@ -80,11 +84,11 @@ class _MonthlyReportsPageState extends State { ), Container( margin: EdgeInsets.all(8), - child: TextFields( + child: TextFields( fillColor: Colors.red, hintText: 'email@email.com', fontSize: 20, - initialValue: model.user.emailAddress, + initialValue: email, fontWeight: FontWeight.w600, onChanged: (text) { email = text; @@ -111,8 +115,7 @@ class _MonthlyReportsPageState extends State { crossAxisAlignment: CrossAxisAlignment.center, children: [ Expanded( - child: Texts(TranslationBase.of(context) - .toViewTheTermsAndConditions), + child: Texts(TranslationBase.of(context).toViewTheTermsAndConditions), ), InkWell( onTap: () { @@ -156,19 +159,15 @@ class _MonthlyReportsPageState extends State { child: SecondaryButton( textColor: Colors.white, label: TranslationBase.of(context).save, - disabled: (!isAgree || !isSummary ), + disabled: (!isAgree || !isSummary), onTap: () async { final form = formKey.currentState; if (form.validate()) { GifLoaderDialogUtils.showMyDialog(context); await model.updatePatientHealthSummaryReport( - message: TranslationBase - .of(context) - .updateSuccessfully, - isSummary: isSummary, - isUpdateEmail: true, - email: email.isNotEmpty ? email : model.user - .emailAddress); + message: TranslationBase.of(context).updateSuccessfully, isSummary: isSummary, isUpdateEmail: true, email: email.isNotEmpty ? email : model.user.emailAddress); + model.receiveHealthSummaryReport = isSummary; + model.user.emailAddress = email.isNotEmpty ? 
email : model.user.emailAddress; GifLoaderDialogUtils.hideDialog(context); } }, @@ -177,8 +176,7 @@ class _MonthlyReportsPageState extends State { Padding( padding: const EdgeInsets.all(5.0), child: Texts( - TranslationBase.of(context) - .instructionAgree, + TranslationBase.of(context).instructionAgree, fontWeight: FontWeight.normal, ), ), @@ -191,7 +189,7 @@ class _MonthlyReportsPageState extends State { ), ), ), - ), - ); + ); + }); } } diff --git a/lib/pages/vaccine/my_vaccines_screen.dart b/lib/pages/vaccine/my_vaccines_screen.dart index fb723a1f..27ce8399 100644 --- a/lib/pages/vaccine/my_vaccines_screen.dart +++ b/lib/pages/vaccine/my_vaccines_screen.dart @@ -30,173 +30,190 @@ class _MyVaccinesState extends State { onModelReady: (model) => model.getVaccine(), builder: (BuildContext context, VaccineViewModel model, Widget child) => AppScaffold( - isShowAppBar: true, - appBarTitle: TranslationBase.of(context).myVaccines, - baseViewModel: model, - body: Container( - margin: EdgeInsets.only( - left: SizeConfig.screenWidth * 0.004, - right: SizeConfig.screenWidth * 0.004, - top: SizeConfig.screenWidth * 0.04, - ), - child: Column( - children: [ - RoundedContainer( - backgroundColor: Colors.white, - child: ExpansionTile( - title: Container( - height: 65.0, - child: Text('2018'), - ), - children: [ - Container( - child: ListView.builder( - scrollDirection: Axis.vertical, - shrinkWrap: true, - itemCount: model.vaccineList == null - ? 0 - : model.vaccineList.length, - itemBuilder: (BuildContext context, int index) { - return Column( - children: [ - RoundedContainer( - child: Column( - children: [ - Row( + isShowAppBar: true, + baseViewModel: model, + appBarTitle: TranslationBase.of(context).myVaccines, + body: Container( + margin: EdgeInsets.only( + left: SizeConfig.screenWidth * 0.004, + right: SizeConfig.screenWidth * 0.004, + top: SizeConfig.screenWidth * 0.04, + ), + child: Column( + children: [ + RoundedContainer( + backgroundColor: Colors.white, + child: ExpansionTile( + title: Container( + height: 65.0, + child: Text('2018'), + ), + children: [ + Container( + child: ListView.builder( + scrollDirection: Axis.vertical, + shrinkWrap: true, + itemCount: model.vaccineList == null + ? 
0 + : model.vaccineList.length, + itemBuilder: (BuildContext context, int index) { + return Column( + children: [ + RoundedContainer( + child: Column( children: [ - Expanded( - child: Column( - children: [ - Padding( - padding: EdgeInsets.symmetric( - horizontal: 20.0, - vertical: 20.0), - child: Image.network( - model.vaccineList[index] - .doctorImageURL, - height: SizeConfig + Row( + children: [ + Expanded( + child: Column( + children: [ + Padding( + padding: EdgeInsets + .symmetric( + horizontal: + 20.0, + vertical: 20.0), + child: Image.network( + model + .vaccineList[ + index] + .doctorImageURL, + height: SizeConfig .imageSizeMultiplier * - 23, - width: SizeConfig + 23, + width: SizeConfig .imageSizeMultiplier * - 20, - fit: BoxFit.fill, - ), + 20, + fit: BoxFit.fill, + ), + ), + ], ), - ], - ), - flex: 2, - ), - Expanded( - child: Container( - child: Column( - crossAxisAlignment: - CrossAxisAlignment.start, - children: [ - Text( - model.vaccineList[index] + flex: 2, + ), + Expanded( + child: Container( + child: Column( + crossAxisAlignment: + CrossAxisAlignment + .start, + children: [ + Text( + model + .vaccineList[ + index] .doctorTitle + - model.vaccineList[index] - .doctorName, - style: TextStyle( - fontWeight: + model + .vaccineList[ + index] + .doctorName, + style: TextStyle( + fontWeight: FontWeight.w900, - fontSize: 16.6, - ), - ), - SizedBox(height: 7.0), - Text( - model.vaccineList[index] - .projectName, - style: TextStyle( - fontSize: 17.0, - letterSpacing: 0.5, - ), - ), - SizedBox(height: 7.0), - Text( - model.vaccineList[index] - .vaccineName, - style: TextStyle( - fontSize: 17.0, - ), - ), - SizedBox(height: 7.0), - Text( - 'Date Taken ' + - convertDateFormat(model - .vaccineList[index] - .invoiceDate), - style: TextStyle( - fontSize: 17.0), + fontSize: 16.6, + ), + ), + SizedBox(height: 7.0), + Text( + model + .vaccineList[ + index] + .projectName, + style: TextStyle( + fontSize: 17.0, + letterSpacing: 0.5, + ), + ), + SizedBox(height: 7.0), + Text( + model + .vaccineList[ + index] + .vaccineName, + style: TextStyle( + fontSize: 17.0, + ), + ), + SizedBox(height: 7.0), + Text( + 'Date Taken ' + + convertDateFormat(model + .vaccineList[ + index] + .invoiceDate), + style: TextStyle( + fontSize: 17.0), + ), + ], ), - ], + ), + flex: 5, ), - ), - flex: 5, + ], ), ], ), - ], - ), - ), - ], - ); - }), - ) - ], - ), - ), - // SpaceBetweenTexts(space: 165.0), - ], - ), - ), - bottomSheet: Container( - color: Theme.of(context).scaffoldBackgroundColor, - padding: EdgeInsets.all(12), - height: MediaQuery.of(context).size.height * 0.25, - width: double.infinity, - child: Column( - children: [ - Divider( - height: 2, - thickness: 1, - ), - SizedBox( - height: 6, - ), - Container( - width: double.infinity, - // height: 80.0, - child: Button( - disabled: true, - label: TranslationBase.of(context).checkVaccineAvailability, - backgroundColor: Color(0xff9EA3A4), - onTap: () => Navigator.push( - context, FadePage(page: MyVaccinesItemPage())), - ), + ), + ], + ); + }), + ) + ], + ), + ), + // SpaceBetweenTexts(space: 165.0), + ], ), - if (projectViewModel.havePrivilege(27)) - Container( - width: double.infinity, - // height: 80.0, - child: SecondaryButton( - label: TranslationBase.of(context).sendEmail, - color: Color(0xffF62426), - textColor: Colors.white, - disabled: model.vaccineList.length == 0, - loading: model.state == ViewState.BusyLocal, - onTap: () async { - model.sendEmail( - message: TranslationBase.of(context) - .emailSentSuccessfully); - }, + ), + bottomSheet: model.state == 
ViewState.Busy?Container(height: 0,): model.vaccineList.length > 0 + ? Container( + color: Theme.of(context).scaffoldBackgroundColor, + padding: EdgeInsets.all(12), + height: MediaQuery.of(context).size.height * 0.25, + width: double.infinity, + child: Column( + children: [ + Divider( + height: 2, + thickness: 1, + ), + SizedBox( + height: 6, ), - ), - ], + Container( + width: double.infinity, + // height: 80.0, + child: Button( + disabled: true, + label: TranslationBase.of(context) + .checkVaccineAvailability, + backgroundColor: Color(0xff9EA3A4), + onTap: () => Navigator.push( + context, FadePage(page: MyVaccinesItemPage())), + ), + ), + if (projectViewModel.havePrivilege(27)) + Container( + width: double.infinity, + // height: 80.0, + child: SecondaryButton( + label: TranslationBase.of(context).sendEmail, + color: Color(0xffF62426), + textColor: Colors.white, + disabled: model.vaccineList.length == 0, + loading: model.state == ViewState.BusyLocal, + onTap: () async { + model.sendEmail( + message: TranslationBase.of(context) + .emailSentSuccessfully); + }, + ), + ), + ], + ), + ) + : Container(height: 0,), ), - ), - ), ); } diff --git a/lib/services/permission/permission_service.dart b/lib/services/permission/permission_service.dart index ea408ce2..d88ec599 100644 --- a/lib/services/permission/permission_service.dart +++ b/lib/services/permission/permission_service.dart @@ -10,9 +10,12 @@ import 'package:flutter/services.dart'; import 'package:localstorage/localstorage.dart'; import 'package:permission_handler/permission_handler.dart'; import 'package:vibration/vibration.dart'; +import 'package:geolocator/geolocator.dart' as geo; +// import 'package:open_settings/open_settings.dart'; + class PermissionService extends BaseService { final LocalStorage storage = new LocalStorage("permission"); - + geo.LocationPermission locationPermission; AppGlobal appGlobal = new AppGlobal(); setVibrationPermission(flag) async { @@ -20,23 +23,24 @@ class PermissionService extends BaseService { } isVibrationEnabled() { - return (storage.getItem('isVibration') ==null) || (storage.getItem('isVibration')) ==false ? false :true; + return (storage.getItem('isVibration') == null) || + (storage.getItem('isVibration')) == false + ? false + : true; } - vibrate(callback, context) async{ - if (callback == null) - return null; - if(isVibrationEnabled() ==true){ - if (await Vibration.hasVibrator()) { - Vibration.vibrate(duration: 100); - callback(); + vibrate(callback, context) async { + if (callback == null) return null; + if (isVibrationEnabled() == true) { + if (await Vibration.hasVibrator()) { + Vibration.vibrate(duration: 100); + callback(); } - }else{ + } else { callback(); } } - setTheme(flag) async { storage.setItem('isTheme', flag); } @@ -44,23 +48,36 @@ class PermissionService extends BaseService { isThemeEnabled() { return storage.getItem('isTheme'); } - cameraPermission() async{ - Map statuses = await [ - Permission.camera, - ].request(); - } - isCameraEnabled() async{ - print(await Permission.camera.status); - return await Permission.camera.status == PermissionStatus.granted ? true : false; + + cameraPermission() async { + Map statuses = await [ + Permission.camera, + ].request(); + } + + isCameraEnabled() async { + return await Permission.camera.isGranted; } - setCameraLocationPermission(context) async{ + + setCameraLocationPermission(context) async { Navigator.pop(context); openAppSettings(); } - isLocationEnabled() async{ - return await Permission.location.status == PermissionStatus.granted ? 
true : false; + + isLocationEnabled() async { + var permission = await geo.Geolocator.checkPermission(); + if (permission == geo.LocationPermission.denied) { + return false; + } else { + return true; + } } - openSettings() async{ + + openSettings() async { openAppSettings(); } + + openAccessbility() { + // OpenSettings.openAppSetting(); + } } diff --git a/lib/services/pharmacy_services/pharmacyAddress_service.dart b/lib/services/pharmacy_services/pharmacyAddress_service.dart index 592359ca..2909f18d 100644 --- a/lib/services/pharmacy_services/pharmacyAddress_service.dart +++ b/lib/services/pharmacy_services/pharmacyAddress_service.dart @@ -89,7 +89,7 @@ class PharmacyAddressService extends BaseService { Map body = Map(); body["customer"] = customerObject; - await baseAppClient.post("$url", onSuccess: (response, statusCode) async { + await baseAppClient.postPharmacy("$url", onSuccess: (response, statusCode) async { addresses.clear(); response['customers'][0]['addresses'].forEach((item) { addresses.add(Addresses.fromJson(item)); diff --git a/lib/widgets/nfc/nfc_reader_sheet.dart b/lib/widgets/nfc/nfc_reader_sheet.dart new file mode 100644 index 00000000..067c0c9a --- /dev/null +++ b/lib/widgets/nfc/nfc_reader_sheet.dart @@ -0,0 +1,196 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:nfc_in_flutter/nfc_in_flutter.dart'; + +void showNfcReader(BuildContext context, {Function onNcfScan}) { + showModalBottomSheet( + context: context, + enableDrag: false, + isDismissible: false, + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.only( + topLeft: Radius.circular(12), topRight: Radius.circular(12)), + ), + backgroundColor: Colors.white, + builder: (context) { + return NfcLayout( + onNcfScan: onNcfScan, + ); + }); +} + +class NfcLayout extends StatefulWidget { + Function onNcfScan; + + NfcLayout({this.onNcfScan}); + + @override + _NfcLayoutState createState() => _NfcLayoutState(); +} + +class _NfcLayoutState extends State { + StreamSubscription _stream; + bool _reading = false; + Widget mainWidget; + String nfcId; + + @override + void initState() { + super.initState(); + + setState(() { + // _reading = true; + // Start reading using NFC.readNDEF() + _stream = NFC + .readNDEF( + once: false, + throwOnUserCancel: false, + readerMode: NFCDispatchReaderMode()) + .listen((NDEFMessage message) { + setState(() { + _reading = true; + mainWidget = doneNfc(); + }); + Future.delayed(const Duration(milliseconds: 500), () { + _stream?.cancel(); + widget.onNcfScan(nfcId); + Navigator.pop(context); + }); + print("read NDEF id: ${message.id}"); + // widget.onNcfScan(message.id); + nfcId = message.id; + }, onError: (e) { + // Check error handling guide below + }); + }); + } + + @override + Widget build(BuildContext context) { + (mainWidget == null && !_reading) + ? 
mainWidget = scanNfc() + : mainWidget = doneNfc(); + return AnimatedSwitcher( + duration: Duration(milliseconds: 500), child: mainWidget); + } + + Widget scanNfc() { + return Container( + key: ValueKey(1), + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + SizedBox( + height: 30, + ), + Text( + "Ready To Scan", + style: TextStyle( + fontWeight: FontWeight.bold, + fontSize: 24, + ), + ), + SizedBox( + height: 30, + ), + Image.asset( + "assets/images/nfc/ic_nfc.png", + height: MediaQuery.of(context).size.width / 3, + ), + SizedBox( + height: 30, + ), + Text( + "Approach an NFC Tag", + style: TextStyle( + fontSize: 18, + ), + ), + SizedBox( + height: 30, + ), + ButtonTheme( + minWidth: MediaQuery.of(context).size.width / 1.2, + height: 45.0, + buttonColor: Colors.grey[300], + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(6), + ), + child: RaisedButton( + onPressed: () { + _stream?.cancel(); + Navigator.pop(context); + }, + child: Text("CANCEL"), + ), + ), + SizedBox( + height: 30, + ), + ], + ), + ); + } + + Widget doneNfc() { + return Container( + key: ValueKey(2), + child: Column( + mainAxisSize: MainAxisSize.min, + children: [ + SizedBox( + height: 30, + ), + Text( + "Successfully Scanned", + style: TextStyle( + fontWeight: FontWeight.bold, + fontSize: 24, + ), + ), + SizedBox( + height: 30, + ), + Image.asset( + "assets/images/nfc/ic_done.png", + height: MediaQuery.of(context).size.width / 3, + ), + SizedBox( + height: 30, + ), + Text( + "Approach an NFC Tag", + style: TextStyle( + fontSize: 18, + ), + ), + SizedBox( + height: 30, + ), + ButtonTheme( + minWidth: MediaQuery.of(context).size.width / 1.2, + height: 45.0, + buttonColor: Colors.grey[300], + shape: RoundedRectangleBorder( + borderRadius: BorderRadius.circular(6), + ), + child: RaisedButton( + // onPressed: () { + // _stream?.cancel(); + // widget.onNcfScan(nfcId); + // Navigator.pop(context); + // }, + onPressed: null, + + child: Text("DONE"), + ), + ), + SizedBox( + height: 30, + ), + ], + ), + ); + } +} diff --git a/lib/widgets/others/not_auh_page.dart b/lib/widgets/others/not_auh_page.dart index d247c9f5..9bef623b 100644 --- a/lib/widgets/others/not_auh_page.dart +++ b/lib/widgets/others/not_auh_page.dart @@ -71,6 +71,34 @@ class _NotAutPageState extends State { widget.description, style: TextStyle(fontSize: 14, fontFamily: "Poppins", fontWeight: FontWeight.w600, color: Color(0xff2B353E), letterSpacing: -0.56, height: 21 / 14), ), + SizedBox( + height: 14, + ), + if (!projectViewModel.isInternetConnection) + Center( + child: SizedBox( + height: MediaQuery.of(context).size.height * 0.55, + width: MediaQuery.of(context).size.width * 0.50, + child: Image.asset(projectViewModel.isArabic ? 'assets/images/Wifi-AR.png' : 'assets/images/wifi-EN.png'), + ), + ), + if (projectViewModel.isInternetConnection && widget.imagesInfo != null) + CarouselSlider( + items: widget.imagesInfo.map((image) { + return Builder( + builder: (BuildContext context) { + return SizedBox( + width: MediaQuery.of(context).size.width * 0.50, + child: image.isAsset ? Image.asset(projectViewModel.isArabic ? image.imageAr : image.imageEn) : Image.network(projectViewModel.isArabic ? 
image.imageAr : image.imageEn)); + }, + ); + }).toList(), + options: CarouselOptions( + height: MediaQuery.of(context).size.height * 0.55, + autoPlay: widget.imagesInfo.length > 1, + viewportFraction: 1.0, + ), + ), ], ), ), diff --git a/pubspec.yaml b/pubspec.yaml index f7064e86..9c1d1042 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -15,7 +15,7 @@ dependencies: # Localizations flutter_localizations: sdk: flutter - intl: ^0.16.1 + intl: ^0.16.0 # web view webview_flutter: ^0.3.24 # http client @@ -52,6 +52,7 @@ dependencies: shared_preferences: ^0.5.8 flutter_flexible_toast: ^0.1.4 firebase_messaging: ^7.0.3 + firebase_analytics: ^6.3.0 cloud_firestore: ^0.14.3 android_intent: ^0.3.7+7 # Progress bar @@ -162,7 +163,7 @@ dependencies: # recase: ^3.0.0 wakelock: ^0.1.4 after_layout: ^1.0.7 - twilio_programmable_video: ^0.5.0+3 + twilio_programmable_video: ^0.6.2 cached_network_image: ^2.4.1 flutter_tts: path: flutter_tts-voice_enhancement @@ -171,12 +172,13 @@ dependencies: wifi: ^0.1.5 vibration: ^1.7.3 + nfc_in_flutter: ^2.0.5 speech_to_text: path: speech_to_text badges: ^1.1.4 - +# open_settings: ^2.0.1 # Dep by Zohaib shimmer: ^1.1.2 carousel_slider: ^2.3.1 @@ -217,10 +219,14 @@ flutter: - assets/images/svg/ - assets/tracker/ - assets/images/AlHabibMedicalService/ + - assets/images/AlHabibMedicalService/health_calculator/ - assets/images/pharmacy_module/ - assets/images/gif/ - assets/images/pharmacy_module/payment/ - assets/images/pharmacy_module/lakum/ + - assets/images/nfc/ + - assets/payment_options/ + - assets/images/new/ - assets/images/new/bottom_nav/ - assets/images/new/services/ diff --git a/speech_to_text_/example/assets/sounds/speech_to_text_cancel.m4r b/speech_to_text_/example/assets/sounds/speech_to_text_cancel.m4r new file mode 100644 index 00000000..ccb3afe3 Binary files /dev/null and b/speech_to_text_/example/assets/sounds/speech_to_text_cancel.m4r differ diff --git a/speech_to_text_/example/assets/sounds/speech_to_text_listening.m4r b/speech_to_text_/example/assets/sounds/speech_to_text_listening.m4r new file mode 100644 index 00000000..3131d60f Binary files /dev/null and b/speech_to_text_/example/assets/sounds/speech_to_text_listening.m4r differ diff --git a/speech_to_text_/example/assets/sounds/speech_to_text_stop.m4r b/speech_to_text_/example/assets/sounds/speech_to_text_stop.m4r new file mode 100644 index 00000000..8817f01b Binary files /dev/null and b/speech_to_text_/example/assets/sounds/speech_to_text_stop.m4r differ diff --git a/speech_to_text_/example/pubspec.yaml b/speech_to_text_/example/pubspec.yaml new file mode 100644 index 00000000..d2bfcff7 --- /dev/null +++ b/speech_to_text_/example/pubspec.yaml @@ -0,0 +1,33 @@ +name: speech_to_text_example +description: Demonstrates how to use the speech_to_text plugin. +version: 1.1.0 +publish_to: 'none' + +environment: + sdk: ">=2.1.0 <3.0.0" + +dependencies: + flutter: + sdk: flutter + + cupertino_icons: ^0.1.2 + permission_handler: ^5.0.1+1 + + provider: + +dev_dependencies: + flutter_test: + sdk: flutter + + speech_to_text: + path: ../ + +# The following section is specific to Flutter. 
+flutter: + + uses-material-design: true + + assets: + - assets/sounds/speech_to_text_listening.m4r + - assets/sounds/speech_to_text_cancel.m4r + - assets/sounds/speech_to_text_stop.m4r diff --git a/speech_to_text_/lib/speech_recognition_error.dart b/speech_to_text_/lib/speech_recognition_error.dart new file mode 100644 index 00000000..2ab6cd4d --- /dev/null +++ b/speech_to_text_/lib/speech_recognition_error.dart @@ -0,0 +1,44 @@ +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_error.g.dart'; + +/// A single error returned from the underlying speech services. +/// +/// Errors are either transient or permanent. Permanent errors +/// block speech recognition from continuing and must be +/// addressed before recogntion will work. Transient errors +/// cause individual recognition sessions to fail but subsequent +/// attempts may well succeed. +@JsonSerializable() +class SpeechRecognitionError { + /// Use this to differentiate the various error conditions. + /// + /// Not meant for display to the user. + final String errorMsg; + + /// True means that recognition cannot continue until + /// the error is resolved. + final bool permanent; + + SpeechRecognitionError(this.errorMsg, this.permanent); + + factory SpeechRecognitionError.fromJson(Map json) => + _$SpeechRecognitionErrorFromJson(json); + Map toJson() => _$SpeechRecognitionErrorToJson(this); + + @override + String toString() { + return "SpeechRecognitionError msg: $errorMsg, permanent: $permanent"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionError && + errorMsg == other.errorMsg && + permanent == other.permanent; + } + + @override + int get hashCode => errorMsg.hashCode; +} diff --git a/speech_to_text_/lib/speech_recognition_error.g.dart b/speech_to_text_/lib/speech_recognition_error.g.dart new file mode 100644 index 00000000..65299f6d --- /dev/null +++ b/speech_to_text_/lib/speech_recognition_error.g.dart @@ -0,0 +1,22 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_error.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionError _$SpeechRecognitionErrorFromJson( + Map json) { + return SpeechRecognitionError( + json['errorMsg'] as String, + json['permanent'] as bool, + ); +} + +Map _$SpeechRecognitionErrorToJson( + SpeechRecognitionError instance) => + { + 'errorMsg': instance.errorMsg, + 'permanent': instance.permanent, + }; diff --git a/speech_to_text_/lib/speech_recognition_event.dart b/speech_to_text_/lib/speech_recognition_event.dart new file mode 100644 index 00000000..71729365 --- /dev/null +++ b/speech_to_text_/lib/speech_recognition_event.dart @@ -0,0 +1,30 @@ +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum SpeechRecognitionEventType { + finalRecognitionEvent, + partialRecognitionEvent, + errorEvent, + statusChangeEvent, + soundLevelChangeEvent, +} + +/// A single event in a stream of speech recognition events. +/// +/// Use [eventType] to determine what type of event it is and depending on that +/// use the other properties to get information about it. 
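// Illustrative usage sketch (not part of this changeset): one way a consumer
// might branch on [eventType]; the event is assumed to come from the
// SpeechToTextProvider stream introduced later in this diff.
void handleEvent(SpeechRecognitionEvent event) {
  switch (event.eventType) {
    case SpeechRecognitionEventType.finalRecognitionEvent:
    case SpeechRecognitionEventType.partialRecognitionEvent:
      // Recognition events carry the transcription result.
      print(event.recognitionResult.recognizedWords);
      break;
    case SpeechRecognitionEventType.errorEvent:
      // Error events carry the SpeechRecognitionError.
      print(event.error.errorMsg);
      break;
    case SpeechRecognitionEventType.statusChangeEvent:
      // Status events report whether the recognizer is still listening.
      print('listening: ${event.isListening}');
      break;
    case SpeechRecognitionEventType.soundLevelChangeEvent:
      // Sound level events carry the current input level.
      print('sound level: ${event.level}');
      break;
  }
}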
+class SpeechRecognitionEvent { + final SpeechRecognitionEventType eventType; + final SpeechRecognitionError _error; + final SpeechRecognitionResult _result; + final bool _listening; + final double _level; + + SpeechRecognitionEvent( + this.eventType, this._result, this._error, this._listening, this._level); + + bool get isListening => _listening; + double get level => _level; + SpeechRecognitionResult get recognitionResult => _result; + SpeechRecognitionError get error => _error; +} diff --git a/speech_to_text_/lib/speech_recognition_result.dart b/speech_to_text_/lib/speech_recognition_result.dart new file mode 100644 index 00000000..38509f65 --- /dev/null +++ b/speech_to_text_/lib/speech_recognition_result.dart @@ -0,0 +1,140 @@ +import 'dart:collection'; + +import 'package:json_annotation/json_annotation.dart'; + +part 'speech_recognition_result.g.dart'; + +/// A sequence of recognized words from the speech recognition +/// service. +/// +/// Depending on the platform behaviour the words may come in all +/// at once at the end or as partial results as each word is +/// recognized. Use the [finalResult] flag to determine if the +/// result is considered final by the platform. +@JsonSerializable(explicitToJson: true) +class SpeechRecognitionResult { + List _alternates; + + /// Returns a list of possible transcriptions of the speech. + /// + /// The first value is always the same as the [recognizedWords] + /// value. Use the confidence for each alternate transcription + /// to determine how likely it is. Note that not all platforms + /// do a good job with confidence, there are convenience methods + /// on [SpeechRecogntionWords] to work with possibly missing + /// confidence values. + List get alternates => + UnmodifiableListView(_alternates); + + /// The sequence of words that is the best transcription of + /// what was said. + /// + /// This is the same as the first value of [alternates]. + String get recognizedWords => + _alternates.isNotEmpty ? _alternates.first.recognizedWords : ""; + + /// False means the words are an interim result, true means + /// they are the final recognition. + final bool finalResult; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. -1 + /// means that the confidence value was not available. + double get confidence => + _alternates.isNotEmpty ? _alternates.first.confidence : 0; + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal + /// [threshold]. If [threshold] is not provided it defaults to 0.8. + bool isConfident( + {double threshold = SpeechRecognitionWords.confidenceThreshold}) => + _alternates.isNotEmpty + ? _alternates.first.isConfident(threshold: threshold) + : false; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => + _alternates.isNotEmpty ? 
_alternates.first.hasConfidenceRating : false; + + SpeechRecognitionResult(this._alternates, this.finalResult); + + @override + String toString() { + return "SpeechRecognitionResult words: $_alternates, final: $finalResult"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionResult && + recognizedWords == other.recognizedWords && + finalResult == other.finalResult; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionResult.fromJson(Map json) => + _$SpeechRecognitionResultFromJson(json); + Map toJson() => _$SpeechRecognitionResultToJson(this); +} + +/// A set of words recognized in a [SpeechRecognitionResult]. +/// +/// Each result will have one or more [SpeechRecognitionWords] +/// with a varying degree of confidence about each set of words. +@JsonSerializable() +class SpeechRecognitionWords { + /// The sequence of words recognized + final String recognizedWords; + + /// The confidence that the [recognizedWords] are correct. + /// + /// Confidence is expressed as a value between 0 and 1. 0 + /// means that the confidence value was not available. Use + /// [isConfident] which will ignore 0 values automatically. + final double confidence; + + static const double confidenceThreshold = 0.8; + static const double missingConfidence = -1; + + const SpeechRecognitionWords(this.recognizedWords, this.confidence); + + /// true if there is confidence in this recognition, false otherwise. + /// + /// There are two separate ways for there to be confidence, the first + /// is if the confidence is missing, which is indicated by a value of + /// -1. The second is if the confidence is greater than or equal + /// [threshold]. If [threshold] is not provided it defaults to 0.8. + bool isConfident({double threshold = confidenceThreshold}) => + confidence == missingConfidence || confidence >= threshold; + + /// true if [confidence] is not the [missingConfidence] value, false + /// otherwise. + bool get hasConfidenceRating => confidence != missingConfidence; + + @override + String toString() { + return "SpeechRecognitionWords words: $recognizedWords, confidence: $confidence"; + } + + @override + bool operator ==(Object other) { + return identical(this, other) || + other is SpeechRecognitionWords && + recognizedWords == other.recognizedWords && + confidence == other.confidence; + } + + @override + int get hashCode => recognizedWords.hashCode; + + factory SpeechRecognitionWords.fromJson(Map json) => + _$SpeechRecognitionWordsFromJson(json); + Map toJson() => _$SpeechRecognitionWordsToJson(this); +} diff --git a/speech_to_text_/lib/speech_recognition_result.g.dart b/speech_to_text_/lib/speech_recognition_result.g.dart new file mode 100644 index 00000000..023e5485 --- /dev/null +++ b/speech_to_text_/lib/speech_recognition_result.g.dart @@ -0,0 +1,41 @@ +// GENERATED CODE - DO NOT MODIFY BY HAND + +part of 'speech_recognition_result.dart'; + +// ************************************************************************** +// JsonSerializableGenerator +// ************************************************************************** + +SpeechRecognitionResult _$SpeechRecognitionResultFromJson( + Map json) { + return SpeechRecognitionResult( + (json['alternates'] as List) + ?.map((e) => e == null + ? 
null + : SpeechRecognitionWords.fromJson(e as Map)) + ?.toList(), + json['finalResult'] as bool, + ); +} + +Map _$SpeechRecognitionResultToJson( + SpeechRecognitionResult instance) => + { + 'alternates': instance.alternates?.map((e) => e?.toJson())?.toList(), + 'finalResult': instance.finalResult, + }; + +SpeechRecognitionWords _$SpeechRecognitionWordsFromJson( + Map json) { + return SpeechRecognitionWords( + json['recognizedWords'] as String, + (json['confidence'] as num)?.toDouble(), + ); +} + +Map _$SpeechRecognitionWordsToJson( + SpeechRecognitionWords instance) => + { + 'recognizedWords': instance.recognizedWords, + 'confidence': instance.confidence, + }; diff --git a/speech_to_text_/lib/speech_to_text.dart b/speech_to_text_/lib/speech_to_text.dart new file mode 100644 index 00000000..343706e6 --- /dev/null +++ b/speech_to_text_/lib/speech_to_text.dart @@ -0,0 +1,511 @@ +import 'dart:async'; +import 'dart:convert'; +import 'dart:math'; + +import 'package:clock/clock.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/services.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +enum ListenMode { + deviceDefault, + dictation, + search, + confirmation, +} + +/// Notified as words are recognized with the current set of recognized words. +/// +/// See the [onResult] argument on the [listen] method for use. +typedef SpeechResultListener = void Function(SpeechRecognitionResult result); + +/// Notified if errors occur during recognition or intialization. +/// +/// Possible errors per the Android docs are described here: +/// https://developer.android.com/reference/android/speech/SpeechRecognizer +/// "error_audio_error" +/// "error_client" +/// "error_permission" +/// "error_network" +/// "error_network_timeout" +/// "error_no_match" +/// "error_busy" +/// "error_server" +/// "error_speech_timeout" +/// See the [onError] argument on the [initialize] method for use. +typedef SpeechErrorListener = void Function( + SpeechRecognitionError errorNotification); + +/// Notified when recognition status changes. +/// +/// See the [onStatus] argument on the [initialize] method for use. +typedef SpeechStatusListener = void Function(String status); + +/// Notified when the sound level changes during a listen method. +/// +/// [level] is a measure of the decibels of the current sound on +/// the recognition input. See the [onSoundLevelChange] argument on +/// the [listen] method for use. +typedef SpeechSoundLevelChange = Function(double level); + +/// An interface to device specific speech recognition services. +/// +/// The general flow of a speech recognition session is as follows: +/// ```Dart +/// SpeechToText speech = SpeechToText(); +/// bool isReady = await speech.initialize(); +/// if ( isReady ) { +/// await speech.listen( resultListener: resultListener ); +/// } +/// ... 
+/// // At some point later +/// speech.stop(); +/// ``` +class SpeechToText { + static const String listenMethod = 'listen'; + static const String textRecognitionMethod = 'textRecognition'; + static const String notifyErrorMethod = 'notifyError'; + static const String notifyStatusMethod = 'notifyStatus'; + static const String soundLevelChangeMethod = "soundLevelChange"; + static const String notListeningStatus = "notListening"; + static const String listeningStatus = "listening"; + + static const MethodChannel speechChannel = + const MethodChannel('plugin.csdcorp.com/speech_to_text'); + static final SpeechToText _instance = + SpeechToText.withMethodChannel(speechChannel); + bool _initWorked = false; + bool _recognized = false; + bool _listening = false; + bool _cancelOnError = false; + bool _partialResults = false; + int _listenStartedAt = 0; + int _lastSpeechEventAt = 0; + Duration _pauseFor; + Duration _listenFor; + + /// True if not listening or the user called cancel / stop, false + /// if cancel/stop were invoked by timeout or error condition. + bool _userEnded = false; + String _lastRecognized = ""; + String _lastStatus = ""; + double _lastSoundLevel = 0; + Timer _listenTimer; + LocaleName _systemLocale; + SpeechRecognitionError _lastError; + SpeechResultListener _resultListener; + SpeechErrorListener errorListener; + SpeechStatusListener statusListener; + SpeechSoundLevelChange _soundLevelChange; + + final MethodChannel channel; + factory SpeechToText() => _instance; + + @visibleForTesting + SpeechToText.withMethodChannel(this.channel); + + /// True if words have been recognized during the current [listen] call. + /// + /// Goes false as soon as [cancel] is called. + bool get hasRecognized => _recognized; + + /// The last set of recognized words received. + /// + /// This is maintained across [cancel] calls but cleared on the next + /// [listen]. + String get lastRecognizedWords => _lastRecognized; + + /// The last status update received, see [initialize] to register + /// an optional listener to be notified when this changes. + String get lastStatus => _lastStatus; + + /// The last sound level received during a listen event. + /// + /// The sound level is a measure of how loud the current + /// input is during listening. Use the [onSoundLevelChange] + /// argument in the [listen] method to get notified of + /// changes. + double get lastSoundLevel => _lastSoundLevel; + + /// True if [initialize] succeeded + bool get isAvailable => _initWorked; + + /// True if [listen] succeeded and [stop] or [cancel] has not been called. + /// + /// Also goes false when listening times out if listenFor was set. + bool get isListening => _listening; + bool get isNotListening => !isListening; + + /// The last error received or null if none, see [initialize] to + /// register an optional listener to be notified of errors. + SpeechRecognitionError get lastError => _lastError; + + /// True if an error has been received, see [lastError] for details + bool get hasError => null != lastError; + + /// Returns true if the user has already granted permission to access the + /// microphone, does not prompt the user. + /// + /// This method can be called before [initialize] to check if permission + /// has already been granted. If this returns false then the [initialize] + /// call will prompt the user for permission if it is allowed to do so. + /// Note that applications cannot ask for permission again if the user has + /// denied them permission in the past. 
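// Illustrative usage sketch (not part of this changeset): checking
// hasPermission before initialize(), e.g. to decide whether to show an
// explanatory dialog before the platform permission prompt appears.
Future<bool> initSpeech(SpeechToText speech) async {
  final alreadyGranted = await speech.hasPermission;
  if (!alreadyGranted) {
    print('speech permission not granted yet; initialize() may prompt');
  }
  // Safe to call repeatedly; returns false if permission was denied.
  return await speech.initialize();
}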
+ Future get hasPermission async { + bool hasPermission = await channel.invokeMethod('has_permission'); + return hasPermission; + } + + /// Initialize speech recognition services, returns true if + /// successful, false if failed. + /// + /// This method must be called before any other speech functions. + /// If this method returns false no further [SpeechToText] methods + /// should be used. Should only be called once if successful but does protect + /// itself if called repeatedly. False usually means that the user has denied + /// permission to use speech. The usual option in that case is to give them + /// instructions on how to open system settings and grant permission. + /// + /// [onError] is an optional listener for errors like + /// timeout, or failure of the device speech recognition. + /// [onStatus] is an optional listener for status changes from + /// listening to not listening. + /// [debugLogging] controls whether there is detailed logging from the underlying + /// plugins. It is off by default, usually only useful for troubleshooting issues + /// with a paritcular OS version or device, fairly verbose + Future initialize( + {SpeechErrorListener onError, + SpeechStatusListener onStatus, + debugLogging = false}) async { + if (_initWorked) { + return Future.value(_initWorked); + } + errorListener = onError; + statusListener = onStatus; + channel.setMethodCallHandler(_handleCallbacks); + _initWorked = await channel + .invokeMethod('initialize', {"debugLogging": debugLogging}); + return _initWorked; + } + + /// Stops the current listen for speech if active, does nothing if not. + /// + /// Stopping a listen session will cause a final result to be sent. Each + /// listen session should be ended with either [stop] or [cancel], for + /// example in the dispose method of a Widget. [cancel] is automatically + /// invoked by a permanent error if [cancelOnError] is set to true in the + /// [listen] call. + /// + /// *Note:* Cannot be used until a successful [initialize] call. Should + /// only be used after a successful [listen] call. + Future stop() async { + _userEnded = true; + return _stop(); + } + + Future _stop() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('stop'); + } + + /// Cancels the current listen for speech if active, does nothing if not. + /// + /// Canceling means that there will be no final result returned from the + /// recognizer. Each listen session should be ended with either [stop] or + /// [cancel], for example in the dispose method of a Widget. [cancel] is + /// automatically invoked by a permanent error if [cancelOnError] is set + /// to true in the [listen] call. + /// + /// *Note* Cannot be used until a successful [initialize] call. Should only + /// be used after a successful [listen] call. + Future cancel() async { + _userEnded = true; + return _cancel(); + } + + Future _cancel() async { + if (!_initWorked) { + return; + } + _shutdownListener(); + await channel.invokeMethod('cancel'); + } + + /// Starts a listening session for speech and converts it to text, + /// invoking the provided [onResult] method as words are recognized. + /// + /// Cannot be used until a successful [initialize] call. There is a + /// time limit on listening imposed by both Android and iOS. The time + /// depends on the device, network, etc. Android is usually quite short, + /// especially if there is no active speech event detected, on the order + /// of ten seconds or so. 
+ ///
+ /// When listening is done always invoke either [cancel] or [stop] to
+ /// end the session, even if it times out. [cancelOnError] provides an
+ /// automatic way to ensure this happens.
+ ///
+ /// [onResult] is an optional listener that is notified when words
+ /// are recognized.
+ ///
+ /// [listenFor] sets the maximum duration that it will listen for, after
+ /// which it automatically stops the listen for you.
+ ///
+ /// [pauseFor] sets the maximum duration of a pause in speech with no words
+ /// detected, after which it automatically stops the listen for you.
+ ///
+ /// [localeId] is an optional locale that can be used to listen in a language
+ /// other than the current system default. See [locales] to find the list of
+ /// supported languages for listening.
+ ///
+ /// [onSoundLevelChange] is an optional listener that is notified when the
+ /// sound level of the input changes. Use this to update the UI in response to
+ /// more or less input. The values currently differ between Android and iOS;
+ /// it has not yet been possible to determine from the Android documentation
+ /// what the value means. On iOS the value returned is in decibels.
+ ///
+ /// [cancelOnError] if true then listening is automatically canceled on a
+ /// permanent error. This defaults to false. When false, cancel should be
+ /// called from the error handler.
+ ///
+ /// [partialResults] if true the listen reports results as they are recognized,
+ /// when false only final results are reported. Defaults to true.
+ ///
+ /// [onDevice] if true the listen attempts to recognize locally with speech never
+ /// leaving the device. If it cannot do this the listen attempt will fail. This is
+ /// usually only needed for sensitive content where privacy or security is a concern.
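// Illustrative usage sketch (not part of this changeset): a listen() call
// exercising the options documented above; the durations and locale are
// arbitrary example values.
Future<void> listenOnce(SpeechToText speech) async {
  await speech.listen(
    onResult: (result) => print(result.recognizedWords),
    listenFor: Duration(seconds: 30),
    pauseFor: Duration(seconds: 3),
    localeId: 'en_US',
    onSoundLevelChange: (level) => print('level: $level'),
    cancelOnError: true,
    partialResults: true,
    listenMode: ListenMode.dictation,
  );
}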
+ Future listen( + {SpeechResultListener onResult, + Duration listenFor, + Duration pauseFor, + String localeId, + SpeechSoundLevelChange onSoundLevelChange, + cancelOnError = false, + partialResults = true, + onDevice = false, + ListenMode listenMode = ListenMode.confirmation}) async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + _userEnded = false; + _cancelOnError = cancelOnError; + _recognized = false; + _resultListener = onResult; + _soundLevelChange = onSoundLevelChange; + _partialResults = partialResults; + Map listenParams = { + "partialResults": partialResults || null != pauseFor, + "onDevice": onDevice, + "listenMode": listenMode.index, + }; + if (null != localeId) { + listenParams["localeId"] = localeId; + } + try { + bool started = await channel.invokeMethod(listenMethod, listenParams); + if (started) { + _listenStartedAt = clock.now().millisecondsSinceEpoch; + _setupListenAndPause(pauseFor, listenFor); + } + } on PlatformException catch (e) { + throw ListenFailedException(e.details); + } + } + + void _setupListenAndPause(Duration pauseFor, Duration listenFor) { + _pauseFor = null; + _listenFor = null; + if (null == pauseFor && null == listenFor) { + return; + } + var minDuration; + if (null == pauseFor) { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + minDuration = listenFor; + } else if (null == listenFor) { + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + minDuration = pauseFor; + } else { + _listenFor = Duration(milliseconds: listenFor.inMilliseconds); + _pauseFor = Duration(milliseconds: pauseFor.inMilliseconds); + var minMillis = min(listenFor.inMilliseconds - _elapsedListenMillis, + pauseFor.inMilliseconds); + minDuration = Duration(milliseconds: minMillis); + } + _listenTimer = Timer(minDuration, _stopOnPauseOrListen); + } + + int get _elapsedListenMillis => + clock.now().millisecondsSinceEpoch - _listenStartedAt; + int get _elapsedSinceSpeechEvent => + clock.now().millisecondsSinceEpoch - _lastSpeechEventAt; + + void _stopOnPauseOrListen() { + if (null != _listenFor && + _elapsedListenMillis >= _listenFor.inMilliseconds) { + _stop(); + } else if (null != _pauseFor && + _elapsedSinceSpeechEvent >= _pauseFor.inMilliseconds) { + _stop(); + } else { + _setupListenAndPause(_pauseFor, _listenFor); + } + } + + /// returns the list of speech locales available on the device. + /// + /// This method is useful to find the identifier to use + /// for the [listen] method, it is the [localeId] member of the + /// [LocaleName]. + /// + /// Each [LocaleName] in the returned list has the + /// identifier for the locale as well as a name for + /// display. The name is localized for the system locale on + /// the device. + Future> locales() async { + if (!_initWorked) { + throw SpeechToTextNotInitializedException(); + } + final List locales = await channel.invokeMethod('locales'); + List filteredLocales = locales + .map((locale) { + var components = locale.split(":"); + if (components.length != 2) { + return null; + } + return LocaleName(components[0], components[1]); + }) + .where((item) => item != null) + .toList(); + if (filteredLocales.isNotEmpty) { + _systemLocale = filteredLocales.first; + } else { + _systemLocale = null; + } + filteredLocales.sort((ln1, ln2) => ln1.name.compareTo(ln2.name)); + return filteredLocales; + } + + /// returns the locale that will be used if no localeId is passed + /// to the [listen] method. 
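// Illustrative usage sketch (not part of this changeset): resolving a localeId
// for listen() from the device's available speech locales, falling back to the
// first reported locale when no system locale is available.
Future<String> pickLocaleId(SpeechToText speech) async {
  final available = await speech.locales();
  final system = await speech.systemLocale();
  final chosen = system ?? (available.isNotEmpty ? available.first : null);
  return chosen?.localeId;
}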
+ Future systemLocale() async { + if (null == _systemLocale) { + await locales(); + } + return Future.value(_systemLocale); + } + + Future _handleCallbacks(MethodCall call) async { + // print("SpeechToText call: ${call.method} ${call.arguments}"); + switch (call.method) { + case textRecognitionMethod: + if (call.arguments is String) { + _onTextRecognition(call.arguments); + } + break; + case notifyErrorMethod: + if (call.arguments is String) { + await _onNotifyError(call.arguments); + } + break; + case notifyStatusMethod: + if (call.arguments is String) { + _onNotifyStatus(call.arguments); + } + break; + case soundLevelChangeMethod: + if (call.arguments is double) { + _onSoundLevelChange(call.arguments); + } + break; + default: + } + } + + void _onTextRecognition(String resultJson) { + _lastSpeechEventAt = clock.now().millisecondsSinceEpoch; + Map resultMap = jsonDecode(resultJson); + SpeechRecognitionResult speechResult = + SpeechRecognitionResult.fromJson(resultMap); + if (!_partialResults && !speechResult.finalResult) { + return; + } + _recognized = true; + // print("Recognized text $resultJson"); + + _lastRecognized = speechResult.recognizedWords; + if (null != _resultListener) { + _resultListener(speechResult); + } + } + + Future _onNotifyError(String errorJson) async { + if (isNotListening && _userEnded) { + return; + } + Map errorMap = jsonDecode(errorJson); + SpeechRecognitionError speechError = + SpeechRecognitionError.fromJson(errorMap); + _lastError = speechError; + if (null != errorListener) { + errorListener(speechError); + } + if (_cancelOnError && speechError.permanent) { + await _cancel(); + } + } + + void _onNotifyStatus(String status) { + _lastStatus = status; + _listening = status == listeningStatus; + // print(status); + if (null != statusListener) { + statusListener(status); + } + } + + void _onSoundLevelChange(double level) { + if (isNotListening) { + return; + } + _lastSoundLevel = level; + if (null != _soundLevelChange) { + _soundLevelChange(level); + } + } + + _shutdownListener() { + _listening = false; + _recognized = false; + _listenTimer?.cancel(); + _listenTimer = null; + } + + @visibleForTesting + Future processMethodCall(MethodCall call) async { + return await _handleCallbacks(call); + } +} + +/// A single locale with a [name], localized to the current system locale, +/// and a [localeId] which can be used in the [listen] method to choose a +/// locale for speech recognition. +class LocaleName { + final String localeId; + final String name; + LocaleName(this.localeId, this.name); +} + +/// Thrown when a method is called that requires successful +/// initialization first. 
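// Illustrative usage sketch (not part of this changeset): handling the two
// exception types declared just below when starting a listen session.
Future<void> listenSafely(SpeechToText speech) async {
  try {
    await speech.listen(onResult: (result) => print(result.recognizedWords));
  } on SpeechToTextNotInitializedException {
    // listen() was called before a successful initialize().
    print('call initialize() before listen()');
  } on ListenFailedException catch (e) {
    // The platform rejected the listen request; details come from the plugin.
    print('listen failed: ${e.details}');
  }
}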
+class SpeechToTextNotInitializedException implements Exception {} + +/// Thrown when listen fails to properly start a speech listening session +/// on the device +class ListenFailedException implements Exception { + final String details; + ListenFailedException(this.details); +} diff --git a/speech_to_text_/lib/speech_to_text_provider.dart b/speech_to_text_/lib/speech_to_text_provider.dart new file mode 100644 index 00000000..91adf3b4 --- /dev/null +++ b/speech_to_text_/lib/speech_to_text_provider.dart @@ -0,0 +1,201 @@ +import 'dart:async'; + +import 'package:flutter/material.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; +import 'package:speech_to_text/speech_to_text.dart'; + +/// Simplifies interaction with [SpeechToText] by handling all the callbacks and notifying +/// listeners as events happen. +/// +/// Here's an example of using the [SpeechToTextProvider] +/// ``` +/// var speechProvider = SpeechToTextProvider( SpeechToText()); +/// var available = await speechProvider.initialize(); +/// StreamSubscription _subscription; +/// _subscription = speechProvider.recognitionController.stream.listen((recognitionEvent) { +/// if (recognitionEvent.eventType == SpeechRecognitionEventType.finalRecognitionEvent ) { +/// print("I heard: ${recognitionEvent.recognitionResult.recognizedWords}"); +/// } +/// }); +/// speechProvider.addListener(() { +/// var words = speechProvider.lastWords; +/// }); +class SpeechToTextProvider extends ChangeNotifier { + final StreamController _recognitionController = + StreamController.broadcast(); + final SpeechToText _speechToText; + SpeechRecognitionResult _lastResult; + double _lastLevel = 0; + List _locales = []; + LocaleName _systemLocale; + + /// Only construct one instance in an application. + /// + /// Do not call `initialize` on the [SpeechToText] that is passed as a parameter, instead + /// call the [initialize] method on this class. + SpeechToTextProvider(this._speechToText); + + Stream get stream => _recognitionController.stream; + + /// Returns the last result received, may be null. + SpeechRecognitionResult get lastResult => _lastResult; + + /// Returns the last error received, may be null. + SpeechRecognitionError get lastError => _speechToText.lastError; + + /// Returns the last sound level received. + /// + /// Note this is only available when the `soundLevel` is set to true on + /// a call to [listen], will be 0 at all other times. + double get lastLevel => _lastLevel; + + /// Initializes the provider and the contained [SpeechToText] instance. + /// + /// Returns true if [SpeechToText] was initialized successful and can now + /// be used, false otherwse. + Future initialize() async { + if (isAvailable) { + return isAvailable; + } + bool availableBefore = _speechToText.isAvailable; + bool available = + await _speechToText.initialize(onStatus: _onStatus, onError: _onError); + if (available) { + _locales = []; + _locales.addAll(await _speechToText.locales()); + _systemLocale = await _speechToText.systemLocale(); + } + if (availableBefore != available) { + notifyListeners(); + } + return available; + } + + /// Returns true if the provider has been initialized and can be used to recognize speech. 
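// Illustrative usage sketch (not part of this changeset): gating listen() on
// the availability flags exposed below, initializing lazily on first use.
Future<void> startListening(SpeechToTextProvider speechProvider) async {
  if (speechProvider.isNotAvailable) {
    final ok = await speechProvider.initialize();
    if (!ok) {
      // Speech recognition is not available (or permission was denied).
      return;
    }
  }
  speechProvider.listen(partialResults: true, pauseFor: Duration(seconds: 3));
}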
+ bool get isAvailable => _speechToText.isAvailable; + + /// Returns true if the provider cannot be used to recognize speech, either because it has not + /// yet been initialized or because initialization failed. + bool get isNotAvailable => !_speechToText.isAvailable; + + /// Returns true if [SpeechToText] is listening for new speech. + bool get isListening => _speechToText.isListening; + + /// Returns true if [SpeechToText] is not listening for new speech. + bool get isNotListening => _speechToText.isNotListening; + + /// Returns true if [SpeechToText] has a previous error. + bool get hasError => _speechToText.hasError; + + /// Returns true if [lastResult] has a last result. + bool get hasResults => null != _lastResult; + + /// Returns the list of locales that are available on the device for speech recognition. + List get locales => _locales; + + /// Returns the locale that is currently set as active on the device. + LocaleName get systemLocale => _systemLocale; + + /// Start listening for new events, set [partialResults] to true to receive interim + /// recognition results. + /// + /// [soundLevel] set to true to be notified on changes to the input sound level + /// on the microphone. + /// + /// [listenFor] sets the maximum duration that it will listen for, after + /// that it automatically stops the listen for you. + /// + /// [pauseFor] sets the maximum duration of a pause in speech with no words + /// detected, after that it automatically stops the listen for you. + /// + /// Call this only after a successful [initialize] call + void listen( + {bool partialResults = false, + bool soundLevel = false, + Duration listenFor, + Duration pauseFor}) { + _lastLevel = 0; + _lastResult = null; + if (soundLevel) { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult, + // onSoundLevelChange: _onSoundLevelChange); + ); + } else { + _speechToText.listen( + partialResults: partialResults, + listenFor: listenFor, + pauseFor: pauseFor, + cancelOnError: true, + onResult: _onListenResult); + } + } + + /// Stops a current active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and return the current result as final. + void stop() { + _speechToText.stop(); + notifyListeners(); + } + + /// Cancel a current active listening session. + /// + /// Call this after calling [listen] to stop the recognizer from listening further + /// and ignore any results recognized so far. + void cancel() { + _speechToText.cancel(); + notifyListeners(); + } + + void _onError(SpeechRecognitionError errorNotification) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.errorEvent, + null, + errorNotification, + isListening, + null)); + notifyListeners(); + } + + void _onStatus(String status) { + _recognitionController.add(SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, + null, + null, + isListening, + null)); + notifyListeners(); + } + + void _onListenResult(SpeechRecognitionResult result) { + _lastResult = result; + _recognitionController.add(SpeechRecognitionEvent( + result.finalResult + ? 
SpeechRecognitionEventType.finalRecognitionEvent + : SpeechRecognitionEventType.partialRecognitionEvent, + result, + null, + isListening, + null)); + notifyListeners(); + } + + // void _onSoundLevelChange(double level) { + // _lastLevel = level; + // _recognitionController.add(SpeechRecognitionEvent( + // SpeechRecognitionEventType.soundLevelChangeEvent, + // null, + // null, + // null, + // level)); + // notifyListeners(); + // } +} diff --git a/speech_to_text_/pubspec.yaml b/speech_to_text_/pubspec.yaml new file mode 100644 index 00000000..34b3da29 --- /dev/null +++ b/speech_to_text_/pubspec.yaml @@ -0,0 +1,31 @@ +name: speech_to_text +description: A Flutter plugin that exposes device specific speech to text recognition capability. + + + +environment: + sdk: ">=2.1.0 <3.0.0" + flutter: ">=1.10.0" + +dependencies: + flutter: + sdk: flutter + json_annotation: ^3.0.0 + clock: ^1.0.1 + +dev_dependencies: + flutter_test: + sdk: flutter + build_runner: ^1.0.0 + json_serializable: ^3.0.0 + fake_async: ^1.0.1 + +flutter: + plugin: + platforms: + android: + package: com.csdcorp.speech_to_text + pluginClass: SpeechToTextPlugin + ios: + pluginClass: SpeechToTextPlugin + diff --git a/speech_to_text_/test/speech_recognition_error_test.dart b/speech_to_text_/test/speech_recognition_error_test.dart new file mode 100644 index 00000000..202ae4cd --- /dev/null +++ b/speech_to_text_/test/speech_recognition_error_test.dart @@ -0,0 +1,65 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; + +void main() { + const String msg1 = "msg1"; + + setUp(() {}); + + group('properties', () { + test('equals true for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error, error); + }); + test('equals true for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError(msg1, false); + expect(error1, error2); + }); + test('equals false for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1, isNot(error2)); + }); + test('hash same for same object', () { + SpeechRecognitionError error = SpeechRecognitionError(msg1, false); + expect(error.hashCode, error.hashCode); + }); + test('hash same for different object same values', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError(msg1, false); + expect(error1.hashCode, error2.hashCode); + }); + test('hash different for different object', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + SpeechRecognitionError error2 = SpeechRecognitionError("msg2", false); + expect(error1.hashCode, isNot(error2.hashCode)); + }); + test('toString as expected', () { + SpeechRecognitionError error1 = SpeechRecognitionError(msg1, false); + expect(error1.toString(), + "SpeechRecognitionError msg: $msg1, permanent: false"); + }); + }); + group('json', () { + test('loads properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + expect(error.errorMsg, msg1); + expect(error.permanent, isTrue); + json = jsonDecode('{"errorMsg":"$msg1","permanent":false}'); + error = SpeechRecognitionError.fromJson(json); + expect(error.permanent, 
isFalse); + }); + test('roundtrips properly', () { + var json = jsonDecode('{"errorMsg":"$msg1","permanent":true}'); + SpeechRecognitionError error = SpeechRecognitionError.fromJson(json); + var roundtripJson = error.toJson(); + SpeechRecognitionError roundtripError = + SpeechRecognitionError.fromJson(roundtripJson); + expect(error, roundtripError); + }); + }); +} diff --git a/speech_to_text_/test/speech_recognition_event_test.dart b/speech_to_text_/test/speech_recognition_event_test.dart new file mode 100644 index 00000000..ceaaab8a --- /dev/null +++ b/speech_to_text_/test/speech_recognition_event_test.dart @@ -0,0 +1,42 @@ +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_event.dart'; + +import 'test_speech_channel_handler.dart'; + +void main() { + group('properties', () { + test('status listening matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.statusChangeEvent, null, null, true, null); + expect(event.eventType, SpeechRecognitionEventType.statusChangeEvent); + expect(event.isListening, isTrue); + }); + test('result matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.finalRecognitionEvent, + TestSpeechChannelHandler.firstRecognizedResult, + null, + null, + null); + expect(event.eventType, SpeechRecognitionEventType.finalRecognitionEvent); + expect(event.recognitionResult, + TestSpeechChannelHandler.firstRecognizedResult); + }); + test('error matches', () { + var event = SpeechRecognitionEvent(SpeechRecognitionEventType.errorEvent, + null, TestSpeechChannelHandler.firstError, null, null); + expect(event.eventType, SpeechRecognitionEventType.errorEvent); + expect(event.error, TestSpeechChannelHandler.firstError); + }); + test('sound level matches', () { + var event = SpeechRecognitionEvent( + SpeechRecognitionEventType.soundLevelChangeEvent, + null, + null, + null, + TestSpeechChannelHandler.level1); + expect(event.eventType, SpeechRecognitionEventType.soundLevelChangeEvent); + expect(event.level, TestSpeechChannelHandler.level1); + }); + }); +} diff --git a/speech_to_text_/test/speech_recognition_result_test.dart b/speech_to_text_/test/speech_recognition_result_test.dart new file mode 100644 index 00000000..1516779a --- /dev/null +++ b/speech_to_text_/test/speech_recognition_result_test.dart @@ -0,0 +1,134 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"alternates":[{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}],"finalResult":false}'; + final String secondRecognizedJson = + '{"alternates":[{"recognizedWords":"$secondRecognizedWords","confidence":$secondConfidence}],"finalResult":false}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + group('recognizedWords', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.recognizedWords, isEmpty); + }); + test('matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, 
secondWords], true); + expect(result.recognizedWords, firstRecognizedWords); + }); + }); + group('alternates', () { + test('empty if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.alternates, isEmpty); + }); + test('expected contents', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.alternates, contains(firstWords)); + expect(result.alternates, contains(secondWords)); + }); + test('in order', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.alternates.first, firstWords); + }); + }); + group('confidence', () { + test('0 if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.confidence, 0); + }); + test('isConfident false if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.isConfident(), isFalse); + }); + test('isConfident matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.isConfident(), firstWords.isConfident()); + }); + test('hasConfidenceRating false if no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect(result.hasConfidenceRating, isFalse); + }); + test('hasConfidenceRating matches first alternate', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords, secondWords], true); + expect(result.hasConfidenceRating, firstWords.hasConfidenceRating); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + expect(result.recognizedWords, firstRecognizedWords); + expect(result.confidence, firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionResult result = SpeechRecognitionResult.fromJson(json); + var roundTripJson = result.toJson(); + SpeechRecognitionResult roundtripResult = + SpeechRecognitionResult.fromJson(roundTripJson); + expect(result, roundtripResult); + }); + }); + group('overrides', () { + test('toString works with no alternates', () { + SpeechRecognitionResult result = SpeechRecognitionResult([], true); + expect( + result.toString(), "SpeechRecognitionResult words: [], final: true"); + }); + test('toString works with alternates', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result.toString(), + "SpeechRecognitionResult words: [SpeechRecognitionWords words: hello, confidence: 0.85], final: true"); + }); + test('hash same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result.hashCode, result.hashCode); + }); + test('hash differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1.hashCode, isNot(result2.hashCode)); + }); + test('equals same for same object', () { + SpeechRecognitionResult result = + SpeechRecognitionResult([firstWords], true); + expect(result, result); + }); + test('equals same for different object same values', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result1a = + 
SpeechRecognitionResult([firstWords], true); + expect(result1, result1a); + }); + test('equals differs for different objects', () { + SpeechRecognitionResult result1 = + SpeechRecognitionResult([firstWords], true); + SpeechRecognitionResult result2 = + SpeechRecognitionResult([secondWords], true); + expect(result1, isNot(result2)); + }); + }); +} diff --git a/speech_to_text_/test/speech_recognitions_words_test.dart b/speech_to_text_/test/speech_recognitions_words_test.dart new file mode 100644 index 00000000..36a9ef0e --- /dev/null +++ b/speech_to_text_/test/speech_recognitions_words_test.dart @@ -0,0 +1,86 @@ +import 'dart:convert'; + +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; + +void main() { + final String firstRecognizedWords = 'hello'; + final String secondRecognizedWords = 'hello there'; + final double firstConfidence = 0.85; + final double secondConfidence = 0.62; + final String firstRecognizedJson = + '{"recognizedWords":"$firstRecognizedWords","confidence":$firstConfidence}'; + final SpeechRecognitionWords firstWords = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + final SpeechRecognitionWords secondWords = + SpeechRecognitionWords(secondRecognizedWords, secondConfidence); + + setUp(() {}); + + group('properties', () { + test('words', () { + expect(firstWords.recognizedWords, firstRecognizedWords); + expect(secondWords.recognizedWords, secondRecognizedWords); + }); + test('confidence', () { + expect(firstWords.confidence, firstConfidence); + expect(secondWords.confidence, secondConfidence); + expect(firstWords.hasConfidenceRating, isTrue); + }); + test('equals true for same object', () { + expect(firstWords, firstWords); + }); + test('equals true for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords, firstWordsA); + }); + test('equals false for different results', () { + expect(firstWords, isNot(secondWords)); + }); + test('hash same for same object', () { + expect(firstWords.hashCode, firstWords.hashCode); + }); + test('hash same for different object with same values', () { + SpeechRecognitionWords firstWordsA = + SpeechRecognitionWords(firstRecognizedWords, firstConfidence); + expect(firstWords.hashCode, firstWordsA.hashCode); + }); + test('hash different for different results', () { + expect(firstWords.hashCode, isNot(secondWords.hashCode)); + }); + }); + group('isConfident', () { + test('true when >= 0.8', () { + expect(firstWords.isConfident(), isTrue); + }); + test('false when < 0.8', () { + expect(secondWords.isConfident(), isFalse); + }); + test('respects threshold', () { + expect(secondWords.isConfident(threshold: 0.5), isTrue); + }); + test('true when missing', () { + SpeechRecognitionWords words = SpeechRecognitionWords( + firstRecognizedWords, SpeechRecognitionWords.missingConfidence); + expect(words.isConfident(), isTrue); + expect(words.hasConfidenceRating, isFalse); + }); + }); + group('json', () { + test('loads correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + expect(words.recognizedWords, firstRecognizedWords); + expect(words.confidence, firstConfidence); + }); + test('roundtrips correctly', () { + var json = jsonDecode(firstRecognizedJson); + SpeechRecognitionWords words = SpeechRecognitionWords.fromJson(json); + var roundTripJson = words.toJson(); + SpeechRecognitionWords 
roundtripWords = + SpeechRecognitionWords.fromJson(roundTripJson); + expect(words, roundtripWords); + }); + }); +} diff --git a/speech_to_text_/test/speech_to_text_provider_test.dart b/speech_to_text_/test/speech_to_text_provider_test.dart new file mode 100644 index 00000000..25366b6f --- /dev/null +++ b/speech_to_text_/test/speech_to_text_provider_test.dart @@ -0,0 +1,196 @@ +import 'package:fake_async/fake_async.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_to_text.dart'; +import 'package:speech_to_text/speech_to_text_provider.dart'; + +import 'test_speech_channel_handler.dart'; +import 'test_speech_listener.dart'; + +void main() { + SpeechToTextProvider provider; + SpeechToText speechToText; + TestSpeechChannelHandler speechHandler; + TestSpeechListener speechListener; + + TestWidgetsFlutterBinding.ensureInitialized(); + + setUp(() { + speechToText = SpeechToText.withMethodChannel(SpeechToText.speechChannel); + speechHandler = TestSpeechChannelHandler(speechToText); + speechToText.channel + .setMockMethodCallHandler(speechHandler.methodCallHandler); + provider = SpeechToTextProvider(speechToText); + speechListener = TestSpeechListener(provider); + provider.addListener(speechListener.onNotify); + }); + + tearDown(() { + speechToText.channel.setMockMethodCallHandler(null); + }); + + group('delegates', () { + test('isListening matches delegate defaults', () { + expect(provider.isListening, speechToText.isListening); + expect(provider.isNotListening, speechToText.isNotListening); + }); + test('isAvailable matches delegate defaults', () { + expect(provider.isAvailable, speechToText.isAvailable); + expect(provider.isNotAvailable, !speechToText.isAvailable); + }); + test('isAvailable matches delegate after init', () async { + expect(await provider.initialize(), isTrue); + expect(provider.isAvailable, speechToText.isAvailable); + expect(provider.isNotAvailable, !speechToText.isAvailable); + }); + test('hasError matches delegate after error', () async { + expect(await provider.initialize(), isTrue); + expect(provider.hasError, speechToText.hasError); + }); + }); + group('listening', () { + test('notifies on initialize', () async { + fakeAsync((fa) { + provider.initialize(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isAvailable, isTrue); + }); + }); + test('notifies on listening', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isTrue); + expect(provider.hasResults, isFalse); + }); + }); + test('notifies on final words', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + speechListener.reset(); + speechHandler.notifyFinalWords(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(provider.hasResults, isTrue); + var result = speechListener.recognitionResult; + expect(result.recognizedWords, + TestSpeechChannelHandler.secondRecognizedWords); + expect(result.finalResult, isTrue); + }); + }); + test('hasResult false after listening before new results', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener); + speechHandler.notifyFinalWords(); + provider.stop(); + setupForListen(provider, fa, speechListener); + fa.flushMicrotasks(); + expect(provider.hasResults, isFalse); + }); + }); + test('notifies on partial words', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, partialResults: true); + 
speechListener.reset(); + speechHandler.notifyPartialWords(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(provider.hasResults, isTrue); + var result = speechListener.recognitionResult; + expect(result.recognizedWords, + TestSpeechChannelHandler.firstRecognizedWords); + expect(result.finalResult, isFalse); + }); + }); + }); + group('soundLevel', () { + test('notifies when requested', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, + partialResults: true, soundLevel: true); + speechListener.reset(); + speechHandler.notifySoundLevel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.soundLevel, TestSpeechChannelHandler.level2); + }); + }); + test('no notification by default', () async { + fakeAsync((fa) { + setupForListen(provider, fa, speechListener, + partialResults: true, soundLevel: false); + speechListener.reset(); + speechHandler.notifySoundLevel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isFalse); + expect(speechListener.soundLevel, 0); + }); + }); + }); + group('stop/cancel', () { + test('notifies on stop', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + provider.stop(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isFalse); + }); + }); + test('notifies on cancel', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + provider.cancel(); + fa.flushMicrotasks(); + expect(speechListener.notified, isTrue); + expect(speechListener.isListening, isFalse); + }); + }); + }); + group('error handling', () { + test('hasError matches delegate default', () async { + expect(await provider.initialize(), isTrue); + expect(provider.hasError, speechToText.hasError); + }); + test('notifies on error', () async { + fakeAsync((fa) { + provider.initialize(); + setupForListen(provider, fa, speechListener); + speechListener.reset(); + speechHandler.notifyPermanentError(); + expect(speechListener.notified, isTrue); + expect(speechListener.hasError, isTrue); + }); + }); + }); + group('locale', () { + test('locales empty before init', () async { + expect(provider.systemLocale, isNull); + expect(provider.locales, isEmpty); + }); + test('set from SpeechToText after init', () async { + fakeAsync((fa) { + speechHandler.setupLocales(); + provider.initialize(); + fa.flushMicrotasks(); + expect( + provider.systemLocale.localeId, TestSpeechChannelHandler.localeId1); + expect(provider.locales, hasLength(speechHandler.locales.length)); + }); + }); + }); +} + +void setupForListen(SpeechToTextProvider provider, FakeAsync fa, + TestSpeechListener speechListener, + {bool partialResults = false, bool soundLevel = false}) { + provider.initialize(); + fa.flushMicrotasks(); + speechListener.reset(); + provider.listen(partialResults: partialResults, soundLevel: soundLevel); + fa.flushMicrotasks(); +} diff --git a/speech_to_text_/test/speech_to_text_test.dart b/speech_to_text_/test/speech_to_text_test.dart new file mode 100644 index 00000000..7b4701ff --- /dev/null +++ b/speech_to_text_/test/speech_to_text_test.dart @@ -0,0 +1,425 @@ +import 'package:fake_async/fake_async.dart'; +import 'package:flutter/services.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:speech_to_text/speech_recognition_error.dart'; +import 'package:speech_to_text/speech_recognition_result.dart'; 
+import 'package:speech_to_text/speech_to_text.dart'; + +import 'test_speech_channel_handler.dart'; + +void main() { + TestWidgetsFlutterBinding.ensureInitialized(); + + TestSpeechListener listener; + TestSpeechChannelHandler speechHandler; + SpeechToText speech; + + setUp(() { + listener = TestSpeechListener(); + speech = SpeechToText.withMethodChannel(SpeechToText.speechChannel); + speechHandler = TestSpeechChannelHandler(speech); + speech.channel.setMockMethodCallHandler(speechHandler.methodCallHandler); + }); + + tearDown(() { + speech.channel.setMockMethodCallHandler(null); + }); + + group('hasPermission', () { + test('true if platform reports true', () async { + expect(await speech.hasPermission, true); + }); + test('false if platform reports false', () async { + speechHandler.hasPermissionResult = false; + expect(await speech.hasPermission, false); + }); + }); + group('init', () { + test('succeeds on platform success', () async { + expect(await speech.initialize(), true); + expect(speechHandler.initInvoked, true); + expect(speech.isAvailable, true); + }); + test('only invokes once', () async { + expect(await speech.initialize(), true); + speechHandler.initInvoked = false; + expect(await speech.initialize(), true); + expect(speechHandler.initInvoked, false); + }); + test('fails on platform failure', () async { + speechHandler.initResult = false; + expect(await speech.initialize(), false); + expect(speech.isAvailable, false); + }); + }); + + group('listen', () { + test('fails with exception if not initialized', () async { + try { + await speech.listen(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('fails with exception if init fails', () async { + try { + speechHandler.initResult = false; + await speech.initialize(); + await speech.listen(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('invokes listen after successful init', () async { + await speech.initialize(); + await speech.listen(); + expect(speechHandler.listenLocale, isNull); + expect(speechHandler.listenInvoked, true); + }); + test('converts platformException to listenFailed', () async { + await speech.initialize(); + speechHandler.listenException = true; + try { + await speech.listen(); + fail("Should have thrown"); + } on ListenFailedException catch (e) { + expect(e.details, TestSpeechChannelHandler.listenExceptionDetails); + } catch (wrongE) { + fail("Should have been ListenFailedException"); + } + }); + test('stops listen after listenFor duration', () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(listenFor: Duration(seconds: 2)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 2)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after listenFor duration even with speech event', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(listenFor: Duration(seconds: 1)); + speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after pauseFor duration with no speech', () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(pauseFor: Duration(seconds: 2)); + 
fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 2)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after pauseFor with longer listenFor duration', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen( + pauseFor: Duration(seconds: 1), listenFor: Duration(seconds: 5)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('stops listen after listenFor with longer pauseFor duration', + () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen( + listenFor: Duration(seconds: 1), pauseFor: Duration(seconds: 5)); + fa.flushMicrotasks(); + expect(speech.isListening, isTrue); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isFalse); + }); + }); + test('keeps listening after pauseFor with speech event', () async { + fakeAsync((fa) { + speech.initialize(); + fa.flushMicrotasks(); + speech.listen(pauseFor: Duration(seconds: 2)); + fa.flushMicrotasks(); + fa.elapse(Duration(seconds: 1)); + speech.processMethodCall(MethodCall(SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + fa.flushMicrotasks(); + fa.elapse(Duration(seconds: 1)); + expect(speech.isListening, isTrue); + }); + }); + test('uses localeId if provided', () async { + await speech.initialize(); + await speech.listen(localeId: TestSpeechChannelHandler.localeId1); + expect(speechHandler.listenInvoked, true); + expect(speechHandler.listenLocale, TestSpeechChannelHandler.localeId1); + }); + test('calls speech listener', () async { + await speech.initialize(); + await speech.listen(onResult: listener.onSpeechResult); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + expect(listener.speechResults, 1); + expect( + listener.results, [TestSpeechChannelHandler.firstRecognizedResult]); + expect(speech.lastRecognizedWords, + TestSpeechChannelHandler.firstRecognizedWords); + }); + test('calls speech listener with multiple', () async { + await speech.initialize(); + await speech.listen(onResult: listener.onSpeechResult); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.firstRecognizedJson)); + await speech.processMethodCall(MethodCall( + SpeechToText.textRecognitionMethod, + TestSpeechChannelHandler.secondRecognizedJson)); + expect(listener.speechResults, 2); + expect(listener.results, [ + TestSpeechChannelHandler.firstRecognizedResult, + TestSpeechChannelHandler.secondRecognizedResult + ]); + expect(speech.lastRecognizedWords, + TestSpeechChannelHandler.secondRecognizedWords); + }); + }); + + group('status callback', () { + test('invoked on listen', () async { + await speech.initialize( + onError: listener.onSpeechError, onStatus: listener.onSpeechStatus); + await speech.processMethodCall(MethodCall( + SpeechToText.notifyStatusMethod, SpeechToText.listeningStatus)); + expect(listener.speechStatus, 1); + expect(listener.statuses.contains(SpeechToText.listeningStatus), true); + }); + }); + + group('soundLevel callback', () { + test('invoked on listen', () async { + await speech.initialize(); + await speech.listen(onSoundLevelChange: listener.onSoundLevel); + await speech.processMethodCall(MethodCall( + SpeechToText.soundLevelChangeMethod, + TestSpeechChannelHandler.level1)); + expect(listener.soundLevel, 1); + 
expect(listener.soundLevels, contains(TestSpeechChannelHandler.level1)); + }); + test('sets lastLevel', () async { + await speech.initialize(); + await speech.listen(onSoundLevelChange: listener.onSoundLevel); + await speech.processMethodCall(MethodCall( + SpeechToText.soundLevelChangeMethod, + TestSpeechChannelHandler.level1)); + expect(speech.lastSoundLevel, TestSpeechChannelHandler.level1); + }); + }); + + group('cancel', () { + test('does nothing if not initialized', () async { + speech.cancel(); + expect(speechHandler.cancelInvoked, false); + }); + test('cancels an active listen', () async { + await speech.initialize(); + await speech.listen(); + await speech.cancel(); + expect(speechHandler.cancelInvoked, true); + expect(speech.isListening, isFalse); + }); + }); + group('stop', () { + test('does nothing if not initialized', () async { + speech.stop(); + expect(speechHandler.stopInvoked, false); + }); + test('stops an active listen', () async { + await speech.initialize(); + speech.listen(); + speech.stop(); + expect(speechHandler.stopInvoked, true); + }); + }); + group('error', () { + test('notifies handler with transient', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.transientErrorJson)); + expect(listener.speechErrors, 1); + expect(listener.errors.first.permanent, isFalse); + }); + test('notifies handler with permanent', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(listener.speechErrors, 1); + expect(listener.errors.first.permanent, isTrue); + }); + test('continues listening on transient', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.transientErrorJson)); + expect(speech.isListening, isTrue); + }); + test('continues listening on permanent if cancel not explicitly requested', + () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isTrue); + }); + test('stops listening on permanent if cancel explicitly requested', + () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(cancelOnError: true); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + }); + test('Error not sent after cancel', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(); + await speech.cancel(); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + expect(listener.speechErrors, 0); + }); + test('Error still sent after implicit cancel', () async { + await speech.initialize(onError: listener.onSpeechError); + await speech.listen(cancelOnError: true); + await speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + await
speech.processMethodCall(MethodCall(SpeechToText.notifyErrorMethod, + TestSpeechChannelHandler.permanentErrorJson)); + expect(speech.isListening, isFalse); + expect(listener.speechErrors, 2); + }); + }); + group('locales', () { + test('fails with exception if not initialized', () async { + try { + await speech.locales(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + // This is a good result + } + }); + test('system locale null if not initialized', () async { + LocaleName current; + try { + current = await speech.systemLocale(); + fail("Expected an exception."); + } on SpeechToTextNotInitializedException { + expect(current, isNull); + } + }); + test('handles an empty list', () async { + await speech.initialize(onError: listener.onSpeechError); + List localeNames = await speech.locales(); + expect(speechHandler.localesInvoked, isTrue); + expect(localeNames, isEmpty); + }); + test('returns expected locales', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + speechHandler.locales.add(TestSpeechChannelHandler.locale2); + List localeNames = await speech.locales(); + expect(localeNames, hasLength(speechHandler.locales.length)); + expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1); + expect(localeNames[0].name, TestSpeechChannelHandler.name1); + expect(localeNames[1].localeId, TestSpeechChannelHandler.localeId2); + expect(localeNames[1].name, TestSpeechChannelHandler.name2); + }); + test('skips incorrect locales', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add("InvalidJunk"); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + List localeNames = await speech.locales(); + expect(localeNames, hasLength(1)); + expect(localeNames[0].localeId, TestSpeechChannelHandler.localeId1); + expect(localeNames[0].name, TestSpeechChannelHandler.name1); + }); + test('system locale matches first returned locale', () async { + await speech.initialize(onError: listener.onSpeechError); + speechHandler.locales.add(TestSpeechChannelHandler.locale1); + speechHandler.locales.add(TestSpeechChannelHandler.locale2); + LocaleName current = await speech.systemLocale(); + expect(current.localeId, TestSpeechChannelHandler.localeId1); + }); + }); + group('status', () { + test('recognized false at start', () async { + expect(speech.hasRecognized, isFalse); + }); + test('listening false at start', () async { + expect(speech.isListening, isFalse); + }); + }); + test('available false at start', () async { + expect(speech.isAvailable, isFalse); + }); + test('hasError false at start', () async { + expect(speech.hasError, isFalse); + }); + test('lastError null at start', () async { + expect(speech.lastError, isNull); + }); + test('status empty at start', () async { + expect(speech.lastStatus, isEmpty); + }); +} + +class TestSpeechListener { + int speechResults = 0; + List results = []; + int speechErrors = 0; + List errors = []; + int speechStatus = 0; + List statuses = []; + int soundLevel = 0; + List soundLevels = []; + + void onSpeechResult(SpeechRecognitionResult result) { + ++speechResults; + results.add(result); + } + + void onSpeechError(SpeechRecognitionError errorResult) { + ++speechErrors; + errors.add(errorResult); + } + + void onSpeechStatus(String status) { + ++speechStatus; + statuses.add(status); + } + + void onSoundLevel(double level) { + ++soundLevel; + soundLevels.add(level); + } +}