WD: vital sign camera screen added

dev_v3.13.6_vital_sign
taha.alam 1 year ago
parent 4bfaf0a0d8
commit ff7afe5997

@@ -0,0 +1,27 @@
package com.cloud.diplomaticquarterapp.whatsapp
import android.content.Context
import android.content.Intent
import com.whatsapp.otp.android.sdk.WhatsAppOtpHandler
import com.whatsapp.otp.android.sdk.WhatsAppOtpIncomingIntentHandler
import java.lang.ref.WeakReference
object WhatsApp {
val whatsAppOtpHandler = WhatsAppOtpHandler()
inline fun handleOTP(intent: Intent, crossinline validateOTP: (code: String) -> Unit) =
WhatsAppOtpIncomingIntentHandler().processOtpCode(
intent,
// call your function to validate the received code
{ code -> validateOTP(code) },
{ _, _ -> })
fun performHandShake(context: WeakReference<Context>) = whatsAppOtpHandler.sendOtpIntentToWhatsApp(context.get()!!)
fun isWhatsAppInstalled(context: WeakReference<Context>): Boolean = whatsAppOtpHandler.isWhatsAppInstalled(context.get()!!)
}

@@ -0,0 +1,32 @@
import 'dart:async';
import 'package:flutter/services.dart';
class WhatsappMethodChannel {
static const MethodChannel _channel = MethodChannel('whats_app_otp');
FutureOr<void> handleHandShake() async {
try {
await _channel.invokeMethod("performHandShake");
} on PlatformException catch (e) {
print("Failed to launch PenguinIn: '${e.message}'.");
}
}
Future<bool> isWhatsAppInstalled() async {
try {
return await _channel.invokeMethod("isWhatsAppInstalled");
} catch (e) {
return false;
}
}
Future<String> startListening() async {
try {
return await _channel.invokeMethod("startListening");
} catch (e) {
return "";
}
}
}
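
The Android-side handler for the 'whats_app_otp' channel used by WhatsappMethodChannel above is not part of this diff. Purely as a hedged sketch (the class name WhatsAppOtpBridge and its plugin registration are assumptions, not code from this commit), the channel could be bridged to the WhatsApp helper object roughly like this:

package com.cloud.diplomaticquarterapp.whatsapp
import android.content.Context
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import java.lang.ref.WeakReference
// Hypothetical bridge; not part of this commit.
class WhatsAppOtpBridge : FlutterPlugin, MethodChannel.MethodCallHandler {
    private lateinit var channel: MethodChannel
    private lateinit var context: Context
    override fun onAttachedToEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        context = binding.applicationContext
        channel = MethodChannel(binding.binaryMessenger, "whats_app_otp")
        channel.setMethodCallHandler(this)
    }
    override fun onDetachedFromEngine(binding: FlutterPlugin.FlutterPluginBinding) {
        channel.setMethodCallHandler(null)
    }
    override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
        when (call.method) {
            "performHandShake" -> {
                WhatsApp.performHandShake(WeakReference(context))
                result.success(null)
            }
            "isWhatsAppInstalled" ->
                result.success(WhatsApp.isWhatsAppInstalled(WeakReference(context)))
            // "startListening" would wait for the incoming OTP intent and resolve
            // with the code via WhatsApp.handleOTP; omitted in this sketch.
            else -> result.notImplemented()
        }
    }
}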

@@ -0,0 +1,9 @@
*.iml
.gradle
/local.properties
/.idea/workspace.xml
/.idea/libraries
.DS_Store
/build
/captures
.cxx

@@ -0,0 +1,69 @@
group 'ai.panoptic.vital_sign_camera'
version '1.0-SNAPSHOT'
buildscript {
ext.kotlin_version = '1.5.20'
repositories {
google()
mavenCentral()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.1.2'
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
}
}
allprojects {
repositories {
google()
mavenCentral()
}
}
apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
android {
compileSdkVersion 34
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
kotlinOptions {
jvmTarget = '1.8'
}
sourceSets {
main.java.srcDirs += 'src/main/kotlin'
}
defaultConfig {
minSdkVersion 24
}
}
dependencies {
implementation 'androidx.preference:preference-ktx:1.2.1'
implementation 'com.google.mlkit:face-detection:16.1.2'
implementation 'com.android.volley:volley:1.2.1'
implementation fileTree(dir: 'libs', include: ['*.jar'])
implementation project(":vital-sign-engine")
implementation "androidx.camera:camera-core:1.1.0-beta02"
implementation "androidx.camera:camera-camera2:1.1.0-beta02"
implementation "androidx.camera:camera-lifecycle:1.1.0-beta02"
implementation "androidx.camera:camera-video:1.1.0-beta02"
implementation "androidx.camera:camera-view:1.1.0-beta02"
implementation "androidx.camera:camera-extensions:1.1.0-beta02"
implementation "androidx.exifinterface:exifinterface:1.3.3"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-guava:1.5.2"
implementation "org.jetbrains.kotlinx:kotlinx-coroutines-android:1.5.2"
}

@@ -0,0 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

@@ -0,0 +1,234 @@
#!/bin/sh
#
# Copyright © 2015-2021 the original authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
##############################################################################
#
# Gradle start up script for POSIX generated by Gradle.
#
# Important for running:
#
# (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
# noncompliant, but you have some other compliant shell such as ksh or
# bash, then to run this script, type that shell name before the whole
# command line, like:
#
# ksh Gradle
#
# Busybox and similar reduced shells will NOT work, because this script
# requires all of these POSIX shell features:
# * functions;
# * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
# «${var#prefix}», «${var%suffix}», and «$( cmd )»;
# * compound commands having a testable exit status, especially «case»;
# * various built-in commands including «command», «set», and «ulimit».
#
# Important for patching:
#
# (2) This script targets any POSIX shell, so it avoids extensions provided
# by Bash, Ksh, etc; in particular arrays are avoided.
#
# The "traditional" practice of packing multiple parameters into a
# space-separated string is a well documented source of bugs and security
# problems, so this is (mostly) avoided, by progressively accumulating
# options in "$@", and eventually passing that to Java.
#
# Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
# and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
# see the in-line comments for details.
#
# There are tweaks for specific operating systems such as AIX, CygWin,
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
#
##############################################################################
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
app_path=$0
# Need this for daisy-chained symlinks.
while
APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
[ -h "$app_path" ]
do
ls=$( ls -ld "$app_path" )
link=${ls#*' -> '}
case $link in #(
/*) app_path=$link ;; #(
*) app_path=$APP_HOME$link ;;
esac
done
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
APP_NAME="Gradle"
APP_BASE_NAME=${0##*/}
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
warn () {
echo "$*"
} >&2
die () {
echo
echo "$*"
echo
exit 1
} >&2
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
nonstop=false
case "$( uname )" in #(
CYGWIN* ) cygwin=true ;; #(
Darwin* ) darwin=true ;; #(
MSYS* | MINGW* ) msys=true ;; #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD=$JAVA_HOME/jre/sh/java
else
JAVACMD=$JAVA_HOME/bin/java
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
fi
# Collect all arguments for the java command, stacking in reverse order:
# * args from the command line
# * the main class name
# * -classpath
# * -D...appname settings
# * --module-path (only if needed)
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
# For Cygwin or MSYS, switch paths to Windows format before running java
if "$cygwin" || "$msys" ; then
APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
JAVACMD=$( cygpath --unix "$JAVACMD" )
# Now convert the arguments - kludge to limit ourselves to /bin/sh
for arg do
if
case $arg in #(
-*) false ;; # don't mess with options #(
/?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
[ -e "$t" ] ;; #(
*) false ;;
esac
then
arg=$( cygpath --path --ignore --mixed "$arg" )
fi
# Roll the args list around exactly as many times as the number of
# args, so each arg winds up back in the position where it started, but
# possibly modified.
#
# NB: a `for` loop captures its iteration list before it begins, so
# changing the positional parameters here affects neither the number of
# iterations, nor the values presented in `arg`.
shift # remove old arg
set -- "$@" "$arg" # push replacement arg
done
fi
# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
"$@"
# Use "xargs" to parse quoted args.
#
# With -n1 it outputs one arg per line, with the quotes and backslashes removed.
#
# In Bash we could simply go:
#
# readarray ARGS < <( xargs -n1 <<<"$var" ) &&
# set -- "${ARGS[@]}" "$@"
#
# but POSIX shell has neither arrays nor command substitution, so instead we
# post-process each arg (as a line of input to sed) to backslash-escape any
# character that might be a shell metacharacter, then use eval to reverse
# that process (while maintaining the separation between arguments), and wrap
# the whole thing up as a single "set" statement.
#
# This will of course break if any of these variables contains a newline or
# an unmatched quote.
#
eval "set -- $(
printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
xargs -n1 |
sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
tr '\n' ' '
)" '"$@"'
exec "$JAVACMD" "$@"

@@ -0,0 +1,89 @@
@rem
@rem Copyright 2015 the original author or authors.
@rem
@rem Licensed under the Apache License, Version 2.0 (the "License");
@rem you may not use this file except in compliance with the License.
@rem You may obtain a copy of the License at
@rem
@rem https://www.apache.org/licenses/LICENSE-2.0
@rem
@rem Unless required by applicable law or agreed to in writing, software
@rem distributed under the License is distributed on an "AS IS" BASIS,
@rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Resolve any "." and ".." in APP_HOME to make it shorter.
for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto execute
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto execute
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

@@ -0,0 +1,2 @@
configurations.maybeCreate("default")
artifacts.add("default", file('vital-sign-engine-debug.aar'))

@@ -0,0 +1,3 @@
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="ai.panoptic.vital_sign_camera">
</manifest>

@@ -0,0 +1,108 @@
package ai.panoptic.vital_sign_camera
import ai.panoptic.wvs.camera.*
import ai.panoptic.wvs.engine.Converter
import android.content.Context
import android.os.Build
import android.os.Handler
import android.os.Looper
import android.view.View
import androidx.annotation.RequiresApi
import androidx.lifecycle.Lifecycle
import io.flutter.plugin.common.BinaryMessenger
import io.flutter.plugin.common.EventChannel
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.platform.PlatformView
@RequiresApi(Build.VERSION_CODES.O)
class FlutterVitalSignCamera internal constructor(
context: Context,
messenger: BinaryMessenger,
id: Int,
props: Map<String, Any>,
lifecycle: Lifecycle
) : PlatformView, MethodChannel.MethodCallHandler, EventChannel.StreamHandler, VitalSignCameraEventHandler {
private val camera: VitalSignCamera
private val methodChannel: MethodChannel
private var eventSink: EventChannel.EventSink? = null
private val mainThread = Handler(Looper.getMainLooper())
override fun getView(): View {
return camera
}
init {
camera = VitalSignCamera(context, MLKitFaceDetector(), lifecycle, this)
methodChannel = MethodChannel(messenger, "ai.panoptic/channel/flutter_vital_sign_camera_$id")
methodChannel.setMethodCallHandler(this)
val eventChannel = EventChannel(messenger, "ai.panoptic/channel/flutter_vital_sign_camera_processed_frame_event_$id")
eventChannel.setStreamHandler(this)
camera.setProps(props)
}
override fun onMethodCall(methodCall: MethodCall, result: MethodChannel.Result) {
when (methodCall.method) {
"startScanning" -> {
camera.startStartScanning()
.thenAccept { result.success(null) }
.exceptionally {
result.error("-1", it.message, it.cause)
return@exceptionally null
}
}
"stopScanning" -> {
camera.stopScanning()
.thenAccept { result.success(null) }
.exceptionally {
result.error("-2", it.message, it.cause)
return@exceptionally null
}
}
"configure" -> {
val arguments = methodCall.arguments as? Map<String, Any>
arguments?.let {
camera.setProps(it)
}
}
else -> {
result.notImplemented()
}
}
}
override fun dispose() {
// camera.destroy()
}
override fun onVideoFrameProcessed(result: ai.panoptic.wvs.engine.ProcessFrameResult): kotlin.Unit {
// Already handled in onEvent.
// No need to handle here.
}
override fun onEvent(cameraView: VitalSignCamera, eventName: String, event: Map<String,Any>?) {
when(eventName) {
"videoFrameProcessed" -> {
eventSink?.let {
mainThread.post {
it.success(event)
}
}
}
}
}
override fun onListen(arguments: Any?, events: EventChannel.EventSink?) {
eventSink = events
}
override fun onCancel(arguments: Any?) {
eventSink = null
}
}
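
VitalSignCameraPlugin (later in this commit) registers a VitalSignCameraFactory for the view type "ai.panoptic/flutter_vital_sign_camera", but the Android factory class itself is not included in the diff. As a hedged illustration only (not the committed code), a minimal Android factory mirroring the iOS VitalSignCameraFactory.swift further down, and reusing the FlutterVitalSignCamera constructor above, might look like this:

package ai.panoptic.vital_sign_camera
import android.content.Context
import android.os.Build
import androidx.annotation.RequiresApi
import androidx.lifecycle.Lifecycle
import io.flutter.plugin.common.BinaryMessenger
import io.flutter.plugin.common.StandardMessageCodec
import io.flutter.plugin.platform.PlatformView
import io.flutter.plugin.platform.PlatformViewFactory
// Hypothetical sketch; the real factory is not shown in this diff.
class VitalSignCameraFactory(
    private val messenger: BinaryMessenger,
    private val lifecycle: Lifecycle
) : PlatformViewFactory(StandardMessageCodec.INSTANCE) {
    @RequiresApi(Build.VERSION_CODES.O)
    override fun create(context: Context, viewId: Int, args: Any?): PlatformView {
        @Suppress("UNCHECKED_CAST")
        val props = (args as? Map<String, Any>).orEmpty()
        return FlutterVitalSignCamera(context, messenger, viewId, props, lifecycle)
    }
}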

@@ -0,0 +1,165 @@
package ai.panoptic.vital_sign_camera;
import static com.google.mlkit.vision.face.FaceContour.FACE;
import android.graphics.PointF;
import android.media.Image;
import android.util.ArraySet;
import com.google.android.gms.tasks.Task;
import com.google.mlkit.vision.common.InputImage;
import com.google.mlkit.vision.face.Face;
import com.google.mlkit.vision.face.FaceContour;
import com.google.mlkit.vision.face.FaceDetection;
import com.google.mlkit.vision.face.FaceDetector;
import com.google.mlkit.vision.face.FaceDetectorOptions;
import java.util.List;
import java.util.Set;
import ai.panoptic.wvs.engine.EyesOpenProbability;
import ai.panoptic.wvs.engine.FaceDetectorFeature;
import ai.panoptic.wvs.engine.HeadEulerAngle;
import ai.panoptic.wvs.engine.NormalizedFacebox;
import ai.panoptic.wvs.engine.VitalSignFaceDetector;
public class MLKitFaceDetector implements VitalSignFaceDetector {
ai.panoptic.wvs.engine.Face face = null;
FaceDetectorOptions options =
new FaceDetectorOptions.Builder()
.setPerformanceMode(FaceDetectorOptions.PERFORMANCE_MODE_FAST)
.setContourMode(FaceDetectorOptions.CONTOUR_MODE_NONE)
.setClassificationMode(FaceDetectorOptions.CLASSIFICATION_MODE_ALL)
.setMinFaceSize(0.15f)
.build();
FaceDetector faceDetector = FaceDetection.getClient(options);
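// Note: ML Kit processes the image asynchronously. detectFace() starts a new
// detection task and immediately returns the face from the most recently
// completed task (null until the first detection finishes).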
public ai.panoptic.wvs.engine.Face detectFace(Image mediaImage, int rotationDegrees) {
if (mediaImage == null) {
return null;
}
float width = mediaImage.getWidth();
float height = mediaImage.getHeight();
if (rotationDegrees == 270 || rotationDegrees == 90) {
width = mediaImage.getHeight();
height = mediaImage.getWidth();
}
final float _width = width;
final float _height = height;
InputImage image = InputImage.fromMediaImage(mediaImage, rotationDegrees);
Task<List<Face>> task = faceDetector.process(image);
task.addOnCompleteListener( result -> {
if (result.isCanceled()) {
return;
}
if (!result.isSuccessful()) {
return;
}
if (!result.isComplete()) {
return;
}
try {
List<Face> faces = result.getResult();
if (faces.isEmpty()) {
this.face = null;
return;
}
Face face = faces.get(0);
this.face = createFace(face, _width, _height);
} catch (Exception e) {
e.printStackTrace();
this.face = null;
}
});
return face;
}
static private ai.panoptic.wvs.engine.Face createFace(Face face, float _width, float _height) {
FaceContour contour = face.getContour(FACE);
NormalizedFacebox facebox = new NormalizedFacebox();
float minX = _width;
float minY = _height;
float maxX = 0;
float maxY = 0;
if (contour != null) {
for (PointF p : contour.getPoints()) {
if (p.x < minX) {
minX = p.x;
}
if (p.y < minY) {
minY = p.y;
}
if (p.x > maxX) {
maxX = p.x;
}
if (p.y > maxY) {
maxY = p.y;
}
}
} else {
minX = face.getBoundingBox().left;
minY = face.getBoundingBox().top;
maxX = face.getBoundingBox().right;
maxY = face.getBoundingBox().bottom;
}
facebox.xCenter = (minX + (maxX - minX) / 2.0f) / _width;
facebox.yCenter = (minY + (maxY - minY) / 2.0f) / _height;
facebox.width = (maxX - minX) / _width;
facebox.height = (maxY - minY) / _height;
ai.panoptic.wvs.engine.Face result = new ai.panoptic.wvs.engine.Face(facebox);
result.setEulerAngle(
new HeadEulerAngle(
face.getHeadEulerAngleX(),
face.getHeadEulerAngleY(),
face.getHeadEulerAngleZ()
)
);
if (face.getLeftEyeOpenProbability() != null && face.getRightEyeOpenProbability() != null) {
result.setEyesOpenProbability(
new EyesOpenProbability(
face.getLeftEyeOpenProbability(),
face.getRightEyeOpenProbability()
)
);
}
return result;
}
@Override
public Set<FaceDetectorFeature> supportedFeatures() {
ArraySet<FaceDetectorFeature> features = new ArraySet<FaceDetectorFeature>();
features.add(FaceDetectorFeature.eyeOpenProbability);
features.add(FaceDetectorFeature.eulerAngle);
return features;
}
@Override
public NormalizedFacebox detect(Image mediaImage, int rotationDegrees) {
detectFace(mediaImage, rotationDegrees);
if (face == null) {
return null;
}
return face.getFacebox();
}
}

@@ -0,0 +1,145 @@
package ai.panoptic.vital_sign_camera
import ai.panoptic.wvs.camera.availableCameraDevices
import android.Manifest
import android.app.Activity
import android.content.Context
import android.content.pm.PackageManager
import android.os.Build
import android.util.Log
import androidx.annotation.NonNull
import androidx.core.content.ContextCompat
import androidx.lifecycle.*
import io.flutter.embedding.engine.plugins.FlutterPlugin
import io.flutter.embedding.engine.plugins.activity.ActivityAware
import io.flutter.embedding.engine.plugins.activity.ActivityPluginBinding
import io.flutter.embedding.engine.plugins.lifecycle.FlutterLifecycleAdapter
import io.flutter.plugin.common.MethodCall
import io.flutter.plugin.common.MethodChannel
import io.flutter.plugin.common.MethodChannel.MethodCallHandler
import io.flutter.plugin.common.MethodChannel.Result
import io.flutter.plugin.common.PluginRegistry
import io.flutter.plugin.common.PluginRegistry.RequestPermissionsResultListener
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.launch
/** VitalSignCameraPlugin */
class VitalSignCameraPlugin: FlutterPlugin, MethodCallHandler, ActivityAware,
RequestPermissionsResultListener {
companion object {
const val TAG = "VitalSignCameraPlugin"
fun parsePermissionStatus(status: Int): String {
return when (status) {
PackageManager.PERMISSION_DENIED -> "denied"
PackageManager.PERMISSION_GRANTED -> "authorized"
else -> "not-determined"
}
}
}
/// The MethodChannel that will handle the communication between Flutter and native Android
///
/// This local reference serves to register the plugin with the Flutter Engine and unregister it
/// when the Flutter Engine is detached from the Activity
private lateinit var channel : MethodChannel
private lateinit var pluginBinding: FlutterPlugin.FlutterPluginBinding
private lateinit var context: Context
private lateinit var activity: Activity
private lateinit var activityBinding:ActivityPluginBinding
private val coroutineScope = CoroutineScope(Dispatchers.Main)
private var requestCode = 1
private var permissionRequests = HashMap<Int, Result>()
override fun onAttachedToEngine(@NonNull flutterPluginBinding: FlutterPlugin.FlutterPluginBinding) {
Log.d("VitalSignCameraPlugin", "onAttachedToEngine")
channel = MethodChannel(flutterPluginBinding.binaryMessenger, "vital_sign_camera_plugin")
channel.setMethodCallHandler(this)
pluginBinding = flutterPluginBinding
context = flutterPluginBinding.applicationContext
}
override fun onDetachedFromEngine(@NonNull binding: FlutterPlugin.FlutterPluginBinding) {
channel.setMethodCallHandler(null)
}
override fun onAttachedToActivity(binding: ActivityPluginBinding) {
Log.d(TAG, "onAttachedToActivity")
val lifecycle: Lifecycle = FlutterLifecycleAdapter.getActivityLifecycle(binding)
pluginBinding.platformViewRegistry.registerViewFactory(
"ai.panoptic/flutter_vital_sign_camera",
VitalSignCameraFactory(pluginBinding.binaryMessenger, lifecycle)
)
activity = binding.activity
activityBinding = binding
activityBinding.addRequestPermissionsResultListener(this)
}
override fun onDetachedFromActivityForConfigChanges() {}
override fun onReattachedToActivityForConfigChanges(binding: ActivityPluginBinding) {}
override fun onDetachedFromActivity() {
activityBinding.removeRequestPermissionsResultListener(this)
}
override fun onMethodCall(@NonNull call: MethodCall, @NonNull result: Result) {
when (call.method) {
"availableCameraDevices" -> {
coroutineScope.launch {
val devices = availableCameraDevices(context)
val returnValue = devices.map { it.map }
Log.d(TAG, "$returnValue")
result.success(returnValue)
}
}
"getCameraPermissionStatus" -> {
Log.d(TAG, "getCameraPermissionStatus")
val status =
parsePermissionStatus(
ContextCompat.checkSelfPermission(context, Manifest.permission.CAMERA))
result.success(status)
}
"requestCameraPermission" -> {
Log.d(TAG, "requestCameraPermission")
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
// Below API 23 the camera permission is granted at install time.
result.success("authorized")
return
}
activity.requestPermissions(arrayOf(Manifest.permission.CAMERA), requestCode)
permissionRequests[requestCode] = result
requestCode += 1
}
}
}
override fun onRequestPermissionsResult(
requestCode: Int,
permissions: Array<out String>,
grantResults: IntArray
): Boolean {
permissionRequests[requestCode]?.let {
val permissionStatus = if (grantResults.isNotEmpty()) grantResults[0] else PackageManager.PERMISSION_DENIED
it.success(parsePermissionStatus(permissionStatus))
permissionRequests.remove(requestCode)
return true
}
return false
}
}

@@ -0,0 +1,218 @@
//
// MLKitFaceDetector.swift
// VitalSignEngineCore
//
// Created by Eddie Lau on 24/8/2022.
//
import Foundation
import MLKitFaceDetection
import MLKitVision
import VitalSignEngineCore
public class MLKitFaceDetector : VitalSignFaceDetector {
public typealias Face = MLKitFace
public let supportedFeatures: Set<VitalSignEngineCore.FaceDetectorFeature>
= [.eyesOpenProbability, .eularAngle]
public init() {
}
public let maxDetectionHeight:CGFloat = 240
let fps = Fps(name: "MLKitFaceDetector")
lazy var faceDetector:FaceDetector = {
let option = FaceDetectorOptions()
option.contourMode = .all
option.classificationMode = .all
option.landmarkMode = .none
option.performanceMode = .fast
return FaceDetector.faceDetector(options: option)
}()
var face:Face? {
didSet {
fps.tick()
}
}
public var frameRate: Double? { fps.lastReportedAverage }
public var maxConcurrency = 2
var jobCount = 0
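// Note: detection is asynchronous. detect(from:) schedules a new ML Kit job
// (capped by maxConcurrency) and returns the most recently completed face.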
public func detect(from frame: UIImage) -> Face? {
guard jobCount < maxConcurrency else {
return face
}
jobCount += 1
DispatchQueue.global().async {
let scaledFrame = self.scaled(videoFrame: frame)
let frameSize = scaledFrame.size
let image = VisionImage(image: scaledFrame)
image.orientation = scaledFrame.imageOrientation
DispatchQueue.main.async {
self.faceDetector.process(image) { faces, error in
guard let face = faces?.first else {
self.face = nil
self.jobCount -= 1
return
}
self.face = MLKitFace(face: face, frameSize: frameSize)
self.jobCount -= 1
}
}
}
return face
}
func scaled(videoFrame frame:UIImage) -> UIImage {
var scale = CGFloat(1.0)
if frame.size.height > maxDetectionHeight {
scale = maxDetectionHeight / frame.size.height
}
let scaledSize = CGSize(width: frame.size.width * scale, height: frame.size.height * scale)
return frame.resizedImage(size: scaledSize)!
}
}
public struct MLKitFaceLandmarks: FaceLandmarks {
public typealias ContourType = FaceContour
let mlkitFace:MLKitFaceDetection.Face
public let videoFrameSize: CGSize
init(mlkitFace: MLKitFaceDetection.Face, videoFrameSize: CGSize) {
self.mlkitFace = mlkitFace
self.videoFrameSize = videoFrameSize
}
public var faceOval: FaceContour? { mlkitFace.contour(ofType: .face) }
public func contour(of type: VitalSignEngineCore.FaceLandmarkType) -> FaceContour? {
guard let contourType = type.asContourType else { return nil }
return mlkitFace.contour(ofType: contourType)
}
}
public class MLKitFace : FaceProtocol {
public typealias ContourType = FaceContour
let mlkitFace:MLKitFaceDetection.Face
public let faceBox: NormalizedFaceBox
public let faceLandmarks: MLKitFaceLandmarks
public let eulerAngle: (x: CGFloat, y: CGFloat, z: CGFloat)?
public let eyesOpenProbability: (leftEye:CGFloat, rightEye:CGFloat)?
init?(face:MLKitFaceDetection.Face, frameSize:CGSize) {
self.mlkitFace = face
self.faceLandmarks = MLKitFaceLandmarks(mlkitFace: face, videoFrameSize: frameSize)
guard let faceContour = face.contour(ofType: FaceContourType.face) else {
return nil
}
let eyesOpenProbability: (leftEye: CGFloat, rightEye: CGFloat)?
if face.hasLeftEyeOpenProbability && face.hasRightEyeOpenProbability {
eyesOpenProbability = (face.leftEyeOpenProbability, face.rightEyeOpenProbability)
} else {
eyesOpenProbability = nil
}
self.eyesOpenProbability = eyesOpenProbability
let eulerAngle: (x:CGFloat, y:CGFloat, z:CGFloat)?
if face.hasHeadEulerAngleX && face.hasHeadEulerAngleY && face.hasHeadEulerAngleZ {
eulerAngle = (face.headEulerAngleX, face.headEulerAngleY, face.headEulerAngleZ)
} else {
eulerAngle = nil
}
self.eulerAngle = eulerAngle
self.faceBox = faceContour.toFacebox(frameSize: frameSize)
}
}
fileprivate extension MLKitFaceDetection.FaceContour {
func toFacebox(frameSize:CGSize) -> NormalizedFaceBox {
var minX = CGFloat.greatestFiniteMagnitude
var minY = CGFloat.greatestFiniteMagnitude
var maxX = -CGFloat.greatestFiniteMagnitude
var maxY = -CGFloat.greatestFiniteMagnitude
points.forEach { point in
minX = min(minX, point.x)
minY = min(minY, point.y)
maxX = max(maxX, point.x)
maxY = max(maxY, point.y)
}
return NormalizedFaceBox(
xCenter: ((maxX - minX)/2 + minX) / frameSize.width,
yCenter: ((maxY - minY)/2 + minY) / frameSize.height,
width: (maxX - minX) / frameSize.width,
height: (maxY - minY) / frameSize.height
)
}
}
fileprivate extension UIImage {
func resizedImage(size: CGSize) -> UIImage? {
UIGraphicsBeginImageContextWithOptions(size, false, scale)
defer { UIGraphicsEndImageContext() }
draw(in: CGRect(origin: .zero, size: size))
let image = UIGraphicsGetImageFromCurrentImageContext()
return image?.data.flatMap(UIImage.init) // Copied from the MLKit sample; not sure why this round-trip through encoded data is needed
}
private var data: Data? {
return pngData() ?? jpegData(compressionQuality: Constant.jpegCompressionQuality)
}
private enum Constant {
static let jpegCompressionQuality: CGFloat = 0.8
}
}
extension VisionPoint : FaceContourPoint {
}
extension FaceContour : Contour {
}
extension VitalSignEngineCore.FaceLandmarkType {
var asContourType:MLKitFaceDetection.FaceContourType? {
switch self {
case .faceOval: return .face
case .leftEyebrowTop: return .leftEyebrowTop
case .leftEyebrowBottom: return .leftEyebrowBottom
case .rightEyebrowTop: return .rightEyebrowTop
case .rightEyebrowBottom: return .rightEyebrowBottom
case .leftEye: return .leftEye
case .rightEye: return .rightEye
case .upperLipTop: return .upperLipTop
case .upperLipBottom: return .upperLipBottom
case .lowerLipTop: return .lowerLipTop
case .lowerLipBottom: return .lowerLipBottom
case .noseBridge: return .noseBridge
case .noseBottom: return .noseBottom
case .leftCheek: return .leftCheek
case .rightCheek: return .rightCheek
@unknown default: return nil
}
}
}

@@ -0,0 +1,91 @@
import Flutter
import UIKit
import VitalSignEngineCore
public class SwiftVitalSignCameraPlugin: NSObject, FlutterPlugin {
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "vital_sign_camera_plugin", binaryMessenger: registrar.messenger())
let instance = SwiftVitalSignCameraPlugin()
registrar.addMethodCallDelegate(instance, channel: channel)
registrar.register(VitalSignCameraFactory(messenger: registrar.messenger()), withId: "ai.panoptic/flutter_vital_sign_camera")
}
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
if call.method == "availableCameraDevices" {
let devices = VitalSignCamera.availableCameraDevices()
result(devices.map({ $0.flutterCompatibleDict }))
}
if call.method == "requestCameraPermission" {
VitalSignCamera.requestCameraPermission().done { status in
result(status.rawValue)
}.catch { error in
result(FlutterError(code: "\(error.code)", message: error.message, details: nil))
}
}
if call.method == "getCameraPermissionStatus" {
let status = VitalSignCamera.getCameraPermissionStatus()
result(status.rawValue)
}
}
}
fileprivate extension CameraDevice {
var flutterCompatibleDict:[String:Any] {
[
"id": id,
"devices": devices.map { $0.rawValue },
"position": position.rawValue,
"name": name,
"hasFlash": NSNumber(value: hasFlash),
"hasTorch": NSNumber(value: hasTorch),
"minZoom": NSNumber(value: Double(minZoom)),
"neutralZoom": NSNumber(value: Double(neutralZoom)),
"maxZoom": NSNumber(value: Double(maxZoom)),
"isMultiCam": NSNumber(value: isMultiCam),
"supportsParallelVideoProcessing": NSNumber(value: supportsParallelVideoProcessing),
"supportsDepthCapture": NSNumber(value: supportsDepthCapture),
"supportsRawCapture": NSNumber(value: supportsRawCapture),
"supportsLowLightBoost": NSNumber(value: supportsLowLightBoost),
"supportsFocus": NSNumber(value: supportsFocus),
"formats": formats.map { format -> [String: Any] in
format.flutterCompatibleDict
},
]
}
}
fileprivate extension CameraCaptureFormat {
var flutterCompatibleDict:[String:Any] {
var dict:[String:Any] = [
"videoStabilizationModes": videoStabilizationModes.map { $0.rawValue },
"autoFocusSystem": autoFocusSystem.rawValue,
"photoHeight": NSNumber(value: Double(photoHeight)),
"photoWidth": NSNumber(value: Double(photoWidth)),
"videoHeight": NSNumber(value: Double(videoHeight)),
"videoWidth": NSNumber(value: Double(videoWidth)),
"maxISO": NSNumber(value: Int(maxISO)),
"minISO": NSNumber(value: Int(minISO)),
"fieldOfView": NSNumber(value: Double(fieldOfView)),
"maxZoom": NSNumber(value: Double(maxZoom)),
"colorSpaces": colorSpaces.map { $0.rawValue },
"supportsVideoHDR": NSNumber(value: supportsVideoHDR),
"supportsPhotoHDR": NSNumber(value: supportsPhotoHDR),
"frameRateRanges": frameRateRanges.map {
[
"minFrameRate": NSNumber(value: Int($0.minFrameRate)),
"maxFrameRate": NSNumber(value: Int($0.maxFrameRate)),
]
},
"pixelFormat": pixelFormat.rawValue,
]
if let isHighestPhotoQualitySupported = isHighestPhotoQualitySupported {
dict["isHighestPhotoQualitySupported"] = isHighestPhotoQualitySupported
}
return dict
}
}

@@ -0,0 +1,37 @@
//
// VitalSignCameraFactory.swift
// vital_sign_camera
//
// Created by Eddie Lau on 24/1/2023.
//
import Foundation
import Flutter
import UIKit
class VitalSignCameraFactory: NSObject, FlutterPlatformViewFactory {
private var messenger: FlutterBinaryMessenger
init(messenger: FlutterBinaryMessenger) {
self.messenger = messenger
super.init()
}
func create(
withFrame frame: CGRect,
viewIdentifier viewId: Int64,
arguments args: Any?
) -> FlutterPlatformView {
return FlutterVitalSignCamera(
frame: frame,
viewIdentifier: viewId,
arguments: args,
binaryMessenger: messenger)
}
func createArgsCodec() -> FlutterMessageCodec & NSObjectProtocol {
FlutterStandardMessageCodec.sharedInstance()
}
}

@@ -0,0 +1,4 @@
#import <Flutter/Flutter.h>
@interface VitalSignCameraPlugin : NSObject<FlutterPlugin>
@end

@@ -0,0 +1,15 @@
#import "VitalSignCameraPlugin.h"
#if __has_include(<vital_sign_camera/vital_sign_camera-Swift.h>)
#import <vital_sign_camera/vital_sign_camera-Swift.h>
#else
// Support project import fallback if the generated compatibility header
// is not copied when this plugin is created as a library.
// https://forums.swift.org/t/swift-static-libraries-dont-copy-generated-objective-c-header/19816
#import "vital_sign_camera-Swift.h"
#endif
@implementation VitalSignCameraPlugin
+ (void)registerWithRegistrar:(NSObject<FlutterPluginRegistrar>*)registrar {
[SwiftVitalSignCameraPlugin registerWithRegistrar:registrar];
}
@end

@@ -0,0 +1,411 @@
#if 0
#elif defined(__arm64__) && __arm64__
// Generated by Apple Swift version 5.10 (swiftlang-5.10.0.13 clang-1500.3.9.4)
#ifndef VITALSIGNENGINECORE_SWIFT_H
#define VITALSIGNENGINECORE_SWIFT_H
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wgcc-compat"
#if !defined(__has_include)
# define __has_include(x) 0
#endif
#if !defined(__has_attribute)
# define __has_attribute(x) 0
#endif
#if !defined(__has_feature)
# define __has_feature(x) 0
#endif
#if !defined(__has_warning)
# define __has_warning(x) 0
#endif
#if __has_include(<swift/objc-prologue.h>)
# include <swift/objc-prologue.h>
#endif
#pragma clang diagnostic ignored "-Wauto-import"
#if defined(__OBJC__)
#include <Foundation/Foundation.h>
#endif
#if defined(__cplusplus)
#include <cstdint>
#include <cstddef>
#include <cstdbool>
#include <cstring>
#include <stdlib.h>
#include <new>
#include <type_traits>
#else
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#include <string.h>
#endif
#if defined(__cplusplus)
#if defined(__arm64e__) && __has_include(<ptrauth.h>)
# include <ptrauth.h>
#else
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wreserved-macro-identifier"
# ifndef __ptrauth_swift_value_witness_function_pointer
# define __ptrauth_swift_value_witness_function_pointer(x)
# endif
# ifndef __ptrauth_swift_class_method_pointer
# define __ptrauth_swift_class_method_pointer(x)
# endif
#pragma clang diagnostic pop
#endif
#endif
#if !defined(SWIFT_TYPEDEFS)
# define SWIFT_TYPEDEFS 1
# if __has_include(<uchar.h>)
# include <uchar.h>
# elif !defined(__cplusplus)
typedef uint_least16_t char16_t;
typedef uint_least32_t char32_t;
# endif
typedef float swift_float2 __attribute__((__ext_vector_type__(2)));
typedef float swift_float3 __attribute__((__ext_vector_type__(3)));
typedef float swift_float4 __attribute__((__ext_vector_type__(4)));
typedef double swift_double2 __attribute__((__ext_vector_type__(2)));
typedef double swift_double3 __attribute__((__ext_vector_type__(3)));
typedef double swift_double4 __attribute__((__ext_vector_type__(4)));
typedef int swift_int2 __attribute__((__ext_vector_type__(2)));
typedef int swift_int3 __attribute__((__ext_vector_type__(3)));
typedef int swift_int4 __attribute__((__ext_vector_type__(4)));
typedef unsigned int swift_uint2 __attribute__((__ext_vector_type__(2)));
typedef unsigned int swift_uint3 __attribute__((__ext_vector_type__(3)));
typedef unsigned int swift_uint4 __attribute__((__ext_vector_type__(4)));
#endif
#if !defined(SWIFT_PASTE)
# define SWIFT_PASTE_HELPER(x, y) x##y
# define SWIFT_PASTE(x, y) SWIFT_PASTE_HELPER(x, y)
#endif
#if !defined(SWIFT_METATYPE)
# define SWIFT_METATYPE(X) Class
#endif
#if !defined(SWIFT_CLASS_PROPERTY)
# if __has_feature(objc_class_property)
# define SWIFT_CLASS_PROPERTY(...) __VA_ARGS__
# else
# define SWIFT_CLASS_PROPERTY(...)
# endif
#endif
#if !defined(SWIFT_RUNTIME_NAME)
# if __has_attribute(objc_runtime_name)
# define SWIFT_RUNTIME_NAME(X) __attribute__((objc_runtime_name(X)))
# else
# define SWIFT_RUNTIME_NAME(X)
# endif
#endif
#if !defined(SWIFT_COMPILE_NAME)
# if __has_attribute(swift_name)
# define SWIFT_COMPILE_NAME(X) __attribute__((swift_name(X)))
# else
# define SWIFT_COMPILE_NAME(X)
# endif
#endif
#if !defined(SWIFT_METHOD_FAMILY)
# if __has_attribute(objc_method_family)
# define SWIFT_METHOD_FAMILY(X) __attribute__((objc_method_family(X)))
# else
# define SWIFT_METHOD_FAMILY(X)
# endif
#endif
#if !defined(SWIFT_NOESCAPE)
# if __has_attribute(noescape)
# define SWIFT_NOESCAPE __attribute__((noescape))
# else
# define SWIFT_NOESCAPE
# endif
#endif
#if !defined(SWIFT_RELEASES_ARGUMENT)
# if __has_attribute(ns_consumed)
# define SWIFT_RELEASES_ARGUMENT __attribute__((ns_consumed))
# else
# define SWIFT_RELEASES_ARGUMENT
# endif
#endif
#if !defined(SWIFT_WARN_UNUSED_RESULT)
# if __has_attribute(warn_unused_result)
# define SWIFT_WARN_UNUSED_RESULT __attribute__((warn_unused_result))
# else
# define SWIFT_WARN_UNUSED_RESULT
# endif
#endif
#if !defined(SWIFT_NORETURN)
# if __has_attribute(noreturn)
# define SWIFT_NORETURN __attribute__((noreturn))
# else
# define SWIFT_NORETURN
# endif
#endif
#if !defined(SWIFT_CLASS_EXTRA)
# define SWIFT_CLASS_EXTRA
#endif
#if !defined(SWIFT_PROTOCOL_EXTRA)
# define SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_ENUM_EXTRA)
# define SWIFT_ENUM_EXTRA
#endif
#if !defined(SWIFT_CLASS)
# if __has_attribute(objc_subclassing_restricted)
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# else
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# endif
#endif
#if !defined(SWIFT_RESILIENT_CLASS)
# if __has_attribute(objc_class_stub)
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME) __attribute__((objc_class_stub))
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_class_stub)) SWIFT_CLASS_NAMED(SWIFT_NAME)
# else
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME)
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) SWIFT_CLASS_NAMED(SWIFT_NAME)
# endif
#endif
#if !defined(SWIFT_PROTOCOL)
# define SWIFT_PROTOCOL(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
# define SWIFT_PROTOCOL_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_EXTENSION)
# define SWIFT_EXTENSION(M) SWIFT_PASTE(M##_Swift_, __LINE__)
#endif
#if !defined(OBJC_DESIGNATED_INITIALIZER)
# if __has_attribute(objc_designated_initializer)
# define OBJC_DESIGNATED_INITIALIZER __attribute__((objc_designated_initializer))
# else
# define OBJC_DESIGNATED_INITIALIZER
# endif
#endif
#if !defined(SWIFT_ENUM_ATTR)
# if __has_attribute(enum_extensibility)
# define SWIFT_ENUM_ATTR(_extensibility) __attribute__((enum_extensibility(_extensibility)))
# else
# define SWIFT_ENUM_ATTR(_extensibility)
# endif
#endif
#if !defined(SWIFT_ENUM)
# define SWIFT_ENUM(_type, _name, _extensibility) enum _name : _type _name; enum SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# if __has_feature(generalized_swift_name)
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) enum _name : _type _name SWIFT_COMPILE_NAME(SWIFT_NAME); enum SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# else
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) SWIFT_ENUM(_type, _name, _extensibility)
# endif
#endif
#if !defined(SWIFT_UNAVAILABLE)
# define SWIFT_UNAVAILABLE __attribute__((unavailable))
#endif
#if !defined(SWIFT_UNAVAILABLE_MSG)
# define SWIFT_UNAVAILABLE_MSG(msg) __attribute__((unavailable(msg)))
#endif
#if !defined(SWIFT_AVAILABILITY)
# define SWIFT_AVAILABILITY(plat, ...) __attribute__((availability(plat, __VA_ARGS__)))
#endif
#if !defined(SWIFT_WEAK_IMPORT)
# define SWIFT_WEAK_IMPORT __attribute__((weak_import))
#endif
#if !defined(SWIFT_DEPRECATED)
# define SWIFT_DEPRECATED __attribute__((deprecated))
#endif
#if !defined(SWIFT_DEPRECATED_MSG)
# define SWIFT_DEPRECATED_MSG(...) __attribute__((deprecated(__VA_ARGS__)))
#endif
#if !defined(SWIFT_DEPRECATED_OBJC)
# if __has_feature(attribute_diagnose_if_objc)
# define SWIFT_DEPRECATED_OBJC(Msg) __attribute__((diagnose_if(1, Msg, "warning")))
# else
# define SWIFT_DEPRECATED_OBJC(Msg) SWIFT_DEPRECATED_MSG(Msg)
# endif
#endif
#if defined(__OBJC__)
#if !defined(IBSegueAction)
# define IBSegueAction
#endif
#endif
#if !defined(SWIFT_EXTERN)
# if defined(__cplusplus)
# define SWIFT_EXTERN extern "C"
# else
# define SWIFT_EXTERN extern
# endif
#endif
#if !defined(SWIFT_CALL)
# define SWIFT_CALL __attribute__((swiftcall))
#endif
#if !defined(SWIFT_INDIRECT_RESULT)
# define SWIFT_INDIRECT_RESULT __attribute__((swift_indirect_result))
#endif
#if !defined(SWIFT_CONTEXT)
# define SWIFT_CONTEXT __attribute__((swift_context))
#endif
#if !defined(SWIFT_ERROR_RESULT)
# define SWIFT_ERROR_RESULT __attribute__((swift_error_result))
#endif
#if defined(__cplusplus)
# define SWIFT_NOEXCEPT noexcept
#else
# define SWIFT_NOEXCEPT
#endif
#if !defined(SWIFT_C_INLINE_THUNK)
# if __has_attribute(always_inline)
# if __has_attribute(nodebug)
# define SWIFT_C_INLINE_THUNK inline __attribute__((always_inline)) __attribute__((nodebug))
# else
# define SWIFT_C_INLINE_THUNK inline __attribute__((always_inline))
# endif
# else
# define SWIFT_C_INLINE_THUNK inline
# endif
#endif
#if defined(_WIN32)
#if !defined(SWIFT_IMPORT_STDLIB_SYMBOL)
# define SWIFT_IMPORT_STDLIB_SYMBOL __declspec(dllimport)
#endif
#else
#if !defined(SWIFT_IMPORT_STDLIB_SYMBOL)
# define SWIFT_IMPORT_STDLIB_SYMBOL
#endif
#endif
#if defined(__OBJC__)
#if __has_feature(objc_modules)
#if __has_warning("-Watimport-in-framework-header")
#pragma clang diagnostic ignored "-Watimport-in-framework-header"
#endif
@import AVFoundation;
@import CoreFoundation;
@import CoreMedia;
@import Dispatch;
@import ObjectiveC;
@import UIKit;
#endif
#endif
#pragma clang diagnostic ignored "-Wproperty-attribute-mismatch"
#pragma clang diagnostic ignored "-Wduplicate-method-arg"
#if __has_warning("-Wpragma-clang-attribute")
# pragma clang diagnostic ignored "-Wpragma-clang-attribute"
#endif
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wnullability"
#pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
#if __has_attribute(external_source_symbol)
# pragma push_macro("any")
# undef any
# pragma clang attribute push(__attribute__((external_source_symbol(language="Swift", defined_in="VitalSignEngineCore",generated_declaration))), apply_to=any(function,enum,objc_interface,objc_category,objc_protocol))
# pragma pop_macro("any")
#endif
#if defined(__OBJC__)
SWIFT_CLASS("_TtC19VitalSignEngineCore12CameraQueues")
@interface CameraQueues : NSObject
/// The serial execution queue for the camera preview layer (input stream) as well as output processing of photos.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull cameraQueue;)
+ (dispatch_queue_t _Nonnull)cameraQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of videos for recording.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;)
+ (dispatch_queue_t _Nonnull)videoQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of videos for frame processing.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull frameProcessorQueue;)
+ (dispatch_queue_t _Nonnull)frameProcessorQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of audio buffers.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull audioQueue;)
+ (dispatch_queue_t _Nonnull)audioQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of audio buffers.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull recordingQueue;)
+ (dispatch_queue_t _Nonnull)recordingQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of audio buffers.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull simulatorQueue;)
+ (dispatch_queue_t _Nonnull)simulatorQueue SWIFT_WARN_UNUSED_RESULT;
- (nonnull instancetype)init OBJC_DESIGNATED_INITIALIZER;
@end
@class NSCoder;
SWIFT_CLASS("_TtC19VitalSignEngineCore10CameraView")
@interface CameraView : UIView
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly) Class _Nonnull layerClass;)
+ (Class _Nonnull)layerClass SWIFT_WARN_UNUSED_RESULT;
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)_ OBJC_DESIGNATED_INITIALIZER SWIFT_UNAVAILABLE;
- (void)willMoveToSuperview:(UIView * _Nullable)newSuperview;
@end
@class UIPinchGestureRecognizer;
@interface CameraView (SWIFT_EXTENSION(VitalSignEngineCore))
- (void)onPinch:(UIPinchGestureRecognizer * _Nonnull)gesture;
- (void)zoomWithFactor:(CGFloat)factor animated:(BOOL)animated;
@end
@class AVCaptureOutput;
@class AVCaptureConnection;
@interface CameraView (SWIFT_EXTENSION(VitalSignEngineCore)) <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
- (void)captureOutput:(AVCaptureOutput * _Nonnull)captureOutput didOutputSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer fromConnection:(AVCaptureConnection * _Nonnull)_;
@end
SWIFT_CLASS("_TtC19VitalSignEngineCore15VitalSignCamera")
@interface VitalSignCamera : UIView
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)coder SWIFT_UNAVAILABLE;
- (void)layoutSubviews;
@end
#endif
#if __has_attribute(external_source_symbol)
# pragma clang attribute pop
#endif
#if defined(__cplusplus)
#endif
#pragma clang diagnostic pop
#endif
#else
#error unsupported Swift architecture
#endif

@@ -0,0 +1,4 @@
framework module VitalSignEngineCore {
header "VitalSignEngineCore-Swift.h"
requires objc
}

@@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "Continuously monitors exposure levels and automatically adjusts exposure when necessary.";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "White Balance";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "Front";
/* Class = "UITableViewController"; title = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "Preview Layer Gravity";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds.";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "Continuous Auto";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "White Balance";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "Preview Gravity";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "Lens Aperture";
/* Class = "UINavigationItem"; title = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "Preview Gravity";
/* Class = "UINavigationItem"; title = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "Camera";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "Medium";
/* Class = "UITableViewSection"; headerTitle = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "Camera";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "Face Detector";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "A mode that locks the white balance state.";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "Duration";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "Exposure";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "Resolution";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "Frame Rate";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "Video Size";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "Locked";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "Frame Rate";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "Resize Aspect";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "Exposure";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "Camera";
/* Class = "UITableViewController"; title = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "White Balance";
/* Class = "UINavigationItem"; title = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "Settings";
/* Class = "UIBarButtonItem"; title = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "Close";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "Locked";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "Format";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "Locked";
/* Class = "UINavigationItem"; title = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "Format";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "Camera On/Off";
/* Class = "UINavigationItem"; title = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "White Balance";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; title = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "Exposure";
/* Class = "UIButton"; configuration.title = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "Test";
/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "Test";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[0] = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "sec";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[1] = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "ms";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[2] = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "μs";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "Locked";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "Auto (10ms)";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "Camera Name";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "Unit";
/* Class = "UINavigationItem"; title = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "Camera Test";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8ms";
/* Class = "UITableViewController"; title = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "Camera";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "Resize Aspect Fill";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "Exposure Duration";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "持續監測曝光水平並在必要時自動調整曝光。";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "白平衡";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "前置";
/* Class = "UITableViewController"; text = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "按比例顯示相機預覽,並使其限制在圖層的範圍內。";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "連續自動";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "白平衡";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "鏡頭光圈";
/* Class = "UINavigationItem"; text = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "相機預覽展示形式";
/* Class = "UINavigationItem"; text = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "相機";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "中等品質";
/* Class = "UITableViewSection"; text = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "相機";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "人臉辨識";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "一種鎖定白平衡狀態的模式。";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "持續時間";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "曝光";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "解析度";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "幀率";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "視頻尺寸";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "鎖定";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "幀率";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "按比例調整";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "曝光";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "相機";
/* Class = "UITableViewController"; text = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "白平衡";
/* Class = "UINavigationItem"; text = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "設定";
/* Class = "UIBarButtonItem"; text = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "關閉";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "鎖定";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "格式";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "鎖定";
/* Class = "UINavigationItem"; text = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "格式";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "相機 開/關";
/* Class = "UINavigationItem"; text = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "白平衡";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; text = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "曝光";
/* Class = "UIButton"; text = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "測試";
/* Class = "UIButton"; text = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "測試";
/* Class = "UISegmentedControl"; text = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "秒";
/* Class = "UISegmentedControl"; text = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "毫秒";
/* Class = "UISegmentedControl"; text = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "微秒";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "鎖定";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "自動10毫秒";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "相機名稱";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "單位";
/* Class = "UINavigationItem"; text = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "相機測試";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8毫秒";
/* Class = "UITableViewController"; text = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "相機";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "按比例填充";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "曝光時間";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "持续监测曝光水平并在必要时自动调整曝光。";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "白平衡";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "前置";
/* Class = "UITableViewController"; text = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "相机预览展示形式";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "按比例显示相机预览,并使其限制在图层的范围内。";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "连续自动";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "白平衡";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "相机预览展示形式";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "镜头光圈";
/* Class = "UINavigationItem"; text = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "相机预览展示形式";
/* Class = "UINavigationItem"; text = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "相机";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "中等品质";
/* Class = "UITableViewSection"; text = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "相机";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "人脸识别";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "一种锁定白平衡状态的模式。";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "持续时间";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "曝光";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "分辨率";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "帧率";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "视频尺寸";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "锁定";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "帧率";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "按比例调整";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "曝光";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "相机";
/* Class = "UITableViewController"; text = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "白平衡";
/* Class = "UINavigationItem"; text = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "设定";
/* Class = "UIBarButtonItem"; text = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "关闭";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "鎖定";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "格式";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "鎖定";
/* Class = "UINavigationItem"; text = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "格式";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "相机 开/关";
/* Class = "UINavigationItem"; text = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "白平衡";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; text = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "曝光";
/* Class = "UIButton"; text = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "测试";
/* Class = "UIButton"; text = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "测试";
/* Class = "UISegmentedControl"; text = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "秒";
/* Class = "UISegmentedControl"; text = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "毫秒";
/* Class = "UISegmentedControl"; text = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "微秒";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "鎖定";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "自动10毫秒";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "相机名称";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "单位";
/* Class = "UINavigationItem"; text = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "相机测试";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8毫秒";
/* Class = "UITableViewController"; text = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "相机";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "按比例填充";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "曝光时间";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "持續監測曝光水平並在必要時自動調整曝光。";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "白平衡";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "前置";
/* Class = "UITableViewController"; text = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "按比例顯示相機預覽,並使其限制在圖層的範圍內。";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "連續自動";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "白平衡";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "鏡頭光圈";
/* Class = "UINavigationItem"; text = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "相機預覽展示形式";
/* Class = "UINavigationItem"; text = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "相機";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "中等品質";
/* Class = "UITableViewSection"; text = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "相機";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "人臉辨識";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "一種鎖定白平衡狀態的模式。";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "持續時間";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "曝光";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "解析度";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "幀率";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "視頻尺寸";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "鎖定";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "幀率";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "按比例調整";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "曝光";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "相機";
/* Class = "UITableViewController"; text = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "白平衡";
/* Class = "UINavigationItem"; text = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "設定";
/* Class = "UIBarButtonItem"; text = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "關閉";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "鎖定";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "格式";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "鎖定";
/* Class = "UINavigationItem"; text = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "格式";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "相機 開/關";
/* Class = "UINavigationItem"; text = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "白平衡";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; text = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "曝光";
/* Class = "UIButton"; text = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "測試";
/* Class = "UIButton"; text = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "測試";
/* Class = "UISegmentedControl"; text = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "秒";
/* Class = "UISegmentedControl"; text = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "毫秒";
/* Class = "UISegmentedControl"; text = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "微秒";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "鎖定";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "自動10毫秒";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "相機名稱";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "單位";
/* Class = "UINavigationItem"; text = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "相機測試";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8毫秒";
/* Class = "UITableViewController"; text = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "相機";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "按比例填充";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "曝光時間";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleIdentifier</key>
<string>com.apple.xcode.dsym.com.demo.panoptic</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>dSYM</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleShortVersionString</key>
<string>4.8.1</string>
<key>CFBundleVersion</key>
<string>1</string>
</dict>
</plist>

@ -0,0 +1,411 @@
#if 0
#elif defined(__x86_64__) && __x86_64__
// Generated by Apple Swift version 5.10 (swiftlang-5.10.0.13 clang-1500.3.9.4)
#ifndef VITALSIGNENGINECORE_SWIFT_H
#define VITALSIGNENGINECORE_SWIFT_H
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wgcc-compat"
#if !defined(__has_include)
# define __has_include(x) 0
#endif
#if !defined(__has_attribute)
# define __has_attribute(x) 0
#endif
#if !defined(__has_feature)
# define __has_feature(x) 0
#endif
#if !defined(__has_warning)
# define __has_warning(x) 0
#endif
#if __has_include(<swift/objc-prologue.h>)
# include <swift/objc-prologue.h>
#endif
#pragma clang diagnostic ignored "-Wauto-import"
#if defined(__OBJC__)
#include <Foundation/Foundation.h>
#endif
#if defined(__cplusplus)
#include <cstdint>
#include <cstddef>
#include <cstdbool>
#include <cstring>
#include <stdlib.h>
#include <new>
#include <type_traits>
#else
#include <stdint.h>
#include <stddef.h>
#include <stdbool.h>
#include <string.h>
#endif
#if defined(__cplusplus)
#if defined(__arm64e__) && __has_include(<ptrauth.h>)
# include <ptrauth.h>
#else
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wreserved-macro-identifier"
# ifndef __ptrauth_swift_value_witness_function_pointer
# define __ptrauth_swift_value_witness_function_pointer(x)
# endif
# ifndef __ptrauth_swift_class_method_pointer
# define __ptrauth_swift_class_method_pointer(x)
# endif
#pragma clang diagnostic pop
#endif
#endif
#if !defined(SWIFT_TYPEDEFS)
# define SWIFT_TYPEDEFS 1
# if __has_include(<uchar.h>)
# include <uchar.h>
# elif !defined(__cplusplus)
typedef uint_least16_t char16_t;
typedef uint_least32_t char32_t;
# endif
typedef float swift_float2 __attribute__((__ext_vector_type__(2)));
typedef float swift_float3 __attribute__((__ext_vector_type__(3)));
typedef float swift_float4 __attribute__((__ext_vector_type__(4)));
typedef double swift_double2 __attribute__((__ext_vector_type__(2)));
typedef double swift_double3 __attribute__((__ext_vector_type__(3)));
typedef double swift_double4 __attribute__((__ext_vector_type__(4)));
typedef int swift_int2 __attribute__((__ext_vector_type__(2)));
typedef int swift_int3 __attribute__((__ext_vector_type__(3)));
typedef int swift_int4 __attribute__((__ext_vector_type__(4)));
typedef unsigned int swift_uint2 __attribute__((__ext_vector_type__(2)));
typedef unsigned int swift_uint3 __attribute__((__ext_vector_type__(3)));
typedef unsigned int swift_uint4 __attribute__((__ext_vector_type__(4)));
#endif
#if !defined(SWIFT_PASTE)
# define SWIFT_PASTE_HELPER(x, y) x##y
# define SWIFT_PASTE(x, y) SWIFT_PASTE_HELPER(x, y)
#endif
#if !defined(SWIFT_METATYPE)
# define SWIFT_METATYPE(X) Class
#endif
#if !defined(SWIFT_CLASS_PROPERTY)
# if __has_feature(objc_class_property)
# define SWIFT_CLASS_PROPERTY(...) __VA_ARGS__
# else
# define SWIFT_CLASS_PROPERTY(...)
# endif
#endif
#if !defined(SWIFT_RUNTIME_NAME)
# if __has_attribute(objc_runtime_name)
# define SWIFT_RUNTIME_NAME(X) __attribute__((objc_runtime_name(X)))
# else
# define SWIFT_RUNTIME_NAME(X)
# endif
#endif
#if !defined(SWIFT_COMPILE_NAME)
# if __has_attribute(swift_name)
# define SWIFT_COMPILE_NAME(X) __attribute__((swift_name(X)))
# else
# define SWIFT_COMPILE_NAME(X)
# endif
#endif
#if !defined(SWIFT_METHOD_FAMILY)
# if __has_attribute(objc_method_family)
# define SWIFT_METHOD_FAMILY(X) __attribute__((objc_method_family(X)))
# else
# define SWIFT_METHOD_FAMILY(X)
# endif
#endif
#if !defined(SWIFT_NOESCAPE)
# if __has_attribute(noescape)
# define SWIFT_NOESCAPE __attribute__((noescape))
# else
# define SWIFT_NOESCAPE
# endif
#endif
#if !defined(SWIFT_RELEASES_ARGUMENT)
# if __has_attribute(ns_consumed)
# define SWIFT_RELEASES_ARGUMENT __attribute__((ns_consumed))
# else
# define SWIFT_RELEASES_ARGUMENT
# endif
#endif
#if !defined(SWIFT_WARN_UNUSED_RESULT)
# if __has_attribute(warn_unused_result)
# define SWIFT_WARN_UNUSED_RESULT __attribute__((warn_unused_result))
# else
# define SWIFT_WARN_UNUSED_RESULT
# endif
#endif
#if !defined(SWIFT_NORETURN)
# if __has_attribute(noreturn)
# define SWIFT_NORETURN __attribute__((noreturn))
# else
# define SWIFT_NORETURN
# endif
#endif
#if !defined(SWIFT_CLASS_EXTRA)
# define SWIFT_CLASS_EXTRA
#endif
#if !defined(SWIFT_PROTOCOL_EXTRA)
# define SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_ENUM_EXTRA)
# define SWIFT_ENUM_EXTRA
#endif
#if !defined(SWIFT_CLASS)
# if __has_attribute(objc_subclassing_restricted)
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_subclassing_restricted)) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# else
# define SWIFT_CLASS(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# define SWIFT_CLASS_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_CLASS_EXTRA
# endif
#endif
#if !defined(SWIFT_RESILIENT_CLASS)
# if __has_attribute(objc_class_stub)
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME) __attribute__((objc_class_stub))
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) __attribute__((objc_class_stub)) SWIFT_CLASS_NAMED(SWIFT_NAME)
# else
# define SWIFT_RESILIENT_CLASS(SWIFT_NAME) SWIFT_CLASS(SWIFT_NAME)
# define SWIFT_RESILIENT_CLASS_NAMED(SWIFT_NAME) SWIFT_CLASS_NAMED(SWIFT_NAME)
# endif
#endif
#if !defined(SWIFT_PROTOCOL)
# define SWIFT_PROTOCOL(SWIFT_NAME) SWIFT_RUNTIME_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
# define SWIFT_PROTOCOL_NAMED(SWIFT_NAME) SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_PROTOCOL_EXTRA
#endif
#if !defined(SWIFT_EXTENSION)
# define SWIFT_EXTENSION(M) SWIFT_PASTE(M##_Swift_, __LINE__)
#endif
#if !defined(OBJC_DESIGNATED_INITIALIZER)
# if __has_attribute(objc_designated_initializer)
# define OBJC_DESIGNATED_INITIALIZER __attribute__((objc_designated_initializer))
# else
# define OBJC_DESIGNATED_INITIALIZER
# endif
#endif
#if !defined(SWIFT_ENUM_ATTR)
# if __has_attribute(enum_extensibility)
# define SWIFT_ENUM_ATTR(_extensibility) __attribute__((enum_extensibility(_extensibility)))
# else
# define SWIFT_ENUM_ATTR(_extensibility)
# endif
#endif
#if !defined(SWIFT_ENUM)
# define SWIFT_ENUM(_type, _name, _extensibility) enum _name : _type _name; enum SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# if __has_feature(generalized_swift_name)
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) enum _name : _type _name SWIFT_COMPILE_NAME(SWIFT_NAME); enum SWIFT_COMPILE_NAME(SWIFT_NAME) SWIFT_ENUM_ATTR(_extensibility) SWIFT_ENUM_EXTRA _name : _type
# else
# define SWIFT_ENUM_NAMED(_type, _name, SWIFT_NAME, _extensibility) SWIFT_ENUM(_type, _name, _extensibility)
# endif
#endif
#if !defined(SWIFT_UNAVAILABLE)
# define SWIFT_UNAVAILABLE __attribute__((unavailable))
#endif
#if !defined(SWIFT_UNAVAILABLE_MSG)
# define SWIFT_UNAVAILABLE_MSG(msg) __attribute__((unavailable(msg)))
#endif
#if !defined(SWIFT_AVAILABILITY)
# define SWIFT_AVAILABILITY(plat, ...) __attribute__((availability(plat, __VA_ARGS__)))
#endif
#if !defined(SWIFT_WEAK_IMPORT)
# define SWIFT_WEAK_IMPORT __attribute__((weak_import))
#endif
#if !defined(SWIFT_DEPRECATED)
# define SWIFT_DEPRECATED __attribute__((deprecated))
#endif
#if !defined(SWIFT_DEPRECATED_MSG)
# define SWIFT_DEPRECATED_MSG(...) __attribute__((deprecated(__VA_ARGS__)))
#endif
#if !defined(SWIFT_DEPRECATED_OBJC)
# if __has_feature(attribute_diagnose_if_objc)
# define SWIFT_DEPRECATED_OBJC(Msg) __attribute__((diagnose_if(1, Msg, "warning")))
# else
# define SWIFT_DEPRECATED_OBJC(Msg) SWIFT_DEPRECATED_MSG(Msg)
# endif
#endif
#if defined(__OBJC__)
#if !defined(IBSegueAction)
# define IBSegueAction
#endif
#endif
#if !defined(SWIFT_EXTERN)
# if defined(__cplusplus)
# define SWIFT_EXTERN extern "C"
# else
# define SWIFT_EXTERN extern
# endif
#endif
#if !defined(SWIFT_CALL)
# define SWIFT_CALL __attribute__((swiftcall))
#endif
#if !defined(SWIFT_INDIRECT_RESULT)
# define SWIFT_INDIRECT_RESULT __attribute__((swift_indirect_result))
#endif
#if !defined(SWIFT_CONTEXT)
# define SWIFT_CONTEXT __attribute__((swift_context))
#endif
#if !defined(SWIFT_ERROR_RESULT)
# define SWIFT_ERROR_RESULT __attribute__((swift_error_result))
#endif
#if defined(__cplusplus)
# define SWIFT_NOEXCEPT noexcept
#else
# define SWIFT_NOEXCEPT
#endif
#if !defined(SWIFT_C_INLINE_THUNK)
# if __has_attribute(always_inline)
# if __has_attribute(nodebug)
# define SWIFT_C_INLINE_THUNK inline __attribute__((always_inline)) __attribute__((nodebug))
# else
# define SWIFT_C_INLINE_THUNK inline __attribute__((always_inline))
# endif
# else
# define SWIFT_C_INLINE_THUNK inline
# endif
#endif
#if defined(_WIN32)
#if !defined(SWIFT_IMPORT_STDLIB_SYMBOL)
# define SWIFT_IMPORT_STDLIB_SYMBOL __declspec(dllimport)
#endif
#else
#if !defined(SWIFT_IMPORT_STDLIB_SYMBOL)
# define SWIFT_IMPORT_STDLIB_SYMBOL
#endif
#endif
#if defined(__OBJC__)
#if __has_feature(objc_modules)
#if __has_warning("-Watimport-in-framework-header")
#pragma clang diagnostic ignored "-Watimport-in-framework-header"
#endif
@import AVFoundation;
@import CoreFoundation;
@import CoreMedia;
@import Dispatch;
@import ObjectiveC;
@import UIKit;
#endif
#endif
#pragma clang diagnostic ignored "-Wproperty-attribute-mismatch"
#pragma clang diagnostic ignored "-Wduplicate-method-arg"
#if __has_warning("-Wpragma-clang-attribute")
# pragma clang diagnostic ignored "-Wpragma-clang-attribute"
#endif
#pragma clang diagnostic ignored "-Wunknown-pragmas"
#pragma clang diagnostic ignored "-Wnullability"
#pragma clang diagnostic ignored "-Wdollar-in-identifier-extension"
#if __has_attribute(external_source_symbol)
# pragma push_macro("any")
# undef any
# pragma clang attribute push(__attribute__((external_source_symbol(language="Swift", defined_in="VitalSignEngineCore",generated_declaration))), apply_to=any(function,enum,objc_interface,objc_category,objc_protocol))
# pragma pop_macro("any")
#endif
#if defined(__OBJC__)
SWIFT_CLASS("_TtC19VitalSignEngineCore12CameraQueues")
@interface CameraQueues : NSObject
/// The serial execution queue for the camera preview layer (input stream) as well as output processing of photos.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull cameraQueue;)
+ (dispatch_queue_t _Nonnull)cameraQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of videos for recording.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull videoQueue;)
+ (dispatch_queue_t _Nonnull)videoQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of videos for frame processing.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull frameProcessorQueue;)
+ (dispatch_queue_t _Nonnull)frameProcessorQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for output processing of audio buffers.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull audioQueue;)
+ (dispatch_queue_t _Nonnull)audioQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue for recording output processing.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull recordingQueue;)
+ (dispatch_queue_t _Nonnull)recordingQueue SWIFT_WARN_UNUSED_RESULT;
/// The serial execution queue used when running on the simulator.
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly, strong) dispatch_queue_t _Nonnull simulatorQueue;)
+ (dispatch_queue_t _Nonnull)simulatorQueue SWIFT_WARN_UNUSED_RESULT;
- (nonnull instancetype)init OBJC_DESIGNATED_INITIALIZER;
@end
@class NSCoder;
SWIFT_CLASS("_TtC19VitalSignEngineCore10CameraView")
@interface CameraView : UIView
SWIFT_CLASS_PROPERTY(@property (nonatomic, class, readonly) Class _Nonnull layerClass;)
+ (Class _Nonnull)layerClass SWIFT_WARN_UNUSED_RESULT;
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)_ OBJC_DESIGNATED_INITIALIZER SWIFT_UNAVAILABLE;
- (void)willMoveToSuperview:(UIView * _Nullable)newSuperview;
@end
@class UIPinchGestureRecognizer;
@interface CameraView (SWIFT_EXTENSION(VitalSignEngineCore))
- (void)onPinch:(UIPinchGestureRecognizer * _Nonnull)gesture;
- (void)zoomWithFactor:(CGFloat)factor animated:(BOOL)animated;
@end
@class AVCaptureOutput;
@class AVCaptureConnection;
@interface CameraView (SWIFT_EXTENSION(VitalSignEngineCore)) <AVCaptureAudioDataOutputSampleBufferDelegate, AVCaptureVideoDataOutputSampleBufferDelegate>
- (void)captureOutput:(AVCaptureOutput * _Nonnull)captureOutput didOutputSampleBuffer:(CMSampleBufferRef _Nonnull)sampleBuffer fromConnection:(AVCaptureConnection * _Nonnull)_;
@end
SWIFT_CLASS("_TtC19VitalSignEngineCore15VitalSignCamera")
@interface VitalSignCamera : UIView
- (nonnull instancetype)initWithFrame:(CGRect)frame OBJC_DESIGNATED_INITIALIZER;
- (nullable instancetype)initWithCoder:(NSCoder * _Nonnull)coder SWIFT_UNAVAILABLE;
- (void)layoutSubviews;
@end
#endif
#if __has_attribute(external_source_symbol)
# pragma clang attribute pop
#endif
#if defined(__cplusplus)
#endif
#pragma clang diagnostic pop
#endif
#else
#error unsupported Swift architecture
#endif

@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "Continuously monitors exposure levels and automatically adjusts exposure when necessary.";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "White Balance";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "Front";
/* Class = "UITableViewController"; title = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "Preview Layer Gravity";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds.";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "Continuous Auto";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "White Balance";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "Preview Gravity";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "Lens Aperture";
/* Class = "UINavigationItem"; title = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "Preview Gravity";
/* Class = "UINavigationItem"; title = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "Camera";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "Medium";
/* Class = "UITableViewSection"; headerTitle = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "Camera";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "Face Detector";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "A mode that locks the white balance state.";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "Duration";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "Exposure";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "Resolution";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "Frame Rate";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "Video Size";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "Locked";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "Frame Rate";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "Resize Aspect";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "Exposure";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "Camera";
/* Class = "UITableViewController"; title = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "White Balance";
/* Class = "UINavigationItem"; title = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "Settings";
/* Class = "UIBarButtonItem"; title = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "Close";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "Locked";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "Format";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "Locked";
/* Class = "UINavigationItem"; title = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "Format";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "Camera On/Off";
/* Class = "UINavigationItem"; title = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "White Balance";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; title = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "Exposure";
/* Class = "UIButton"; configuration.title = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "Test";
/* Class = "UIButton"; normalTitle = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "Test";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[0] = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "sec";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[1] = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "ms";
/* Class = "UISegmentedControl"; fSu-H9-Cz9.segmentTitles[2] = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "μs";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "Locked";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "Auto (10ms)";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "Camera Name";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "Unit";
/* Class = "UINavigationItem"; title = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "Camera Test";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8ms";
/* Class = "UITableViewController"; title = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "Camera";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "Resize Aspect Fill";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "Exposure Duration";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "持續監測曝光水平並在必要時自動調整曝光。";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "白平衡";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "前置";
/* Class = "UITableViewController"; text = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "按比例顯示相機預覽,並使其限制在圖層的範圍內。";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "連續自動";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "白平衡";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "相機預覽展示形式";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "鏡頭光圈";
/* Class = "UINavigationItem"; text = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "相機預覽展示形式";
/* Class = "UINavigationItem"; text = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "相機";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "中等品質";
/* Class = "UITableViewSection"; text = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "相機";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "人臉辨識";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "一種鎖定白平衡狀態的模式。";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "持續時間";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "曝光";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "解析度";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "幀率";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "視頻尺寸";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "鎖定";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "幀率";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "按比例調整";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "曝光";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "相機";
/* Class = "UITableViewController"; text = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "白平衡";
/* Class = "UINavigationItem"; text = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "設定";
/* Class = "UIBarButtonItem"; text = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "關閉";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "鎖定";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "格式";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "鎖定";
/* Class = "UINavigationItem"; text = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "格式";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "相機 開/關";
/* Class = "UINavigationItem"; text = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "白平衡";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; text = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "曝光";
/* Class = "UIButton"; text = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "測試";
/* Class = "UIButton"; text = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "測試";
/* Class = "UISegmentedControl"; text = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "秒";
/* Class = "UISegmentedControl"; text = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "毫秒";
/* Class = "UISegmentedControl"; text = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "微秒";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "鎖定";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "自動10毫秒";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "相機名稱";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "單位";
/* Class = "UINavigationItem"; text = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "相機測試";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8毫秒";
/* Class = "UITableViewController"; text = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "相機";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "按比例填充";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "曝光時間";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,183 @@
/* Class = "UILabel"; text = "Continuously monitors exposure levels and automatically adjusts exposure when necessary."; ObjectID = "2ad-p5-WH5"; */
"2ad-p5-WH5.text" = "持续监测曝光水平并在必要时自动调整曝光。";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "4ie-UM-xw1"; */
"4ie-UM-xw1.text" = "白平衡";
/* Class = "UILabel"; text = "Front"; ObjectID = "55X-XR-Xoe"; */
"55X-XR-Xoe.text" = "前置";
/* Class = "UITableViewController"; text = "Preview Layer Gravity"; ObjectID = "5qf-l9-Bpt"; */
"5qf-l9-Bpt.title" = "相机预览展示形式";
/* Class = "UILabel"; text = "Show the camera preview by preserving its aspect ratio and fits it within the layer's bounds."; ObjectID = "6I3-ak-vEx"; */
"6I3-ak-vEx.text" = "按比例显示相机预览,并使其限制在图层的范围内。";
/* Class = "UILabel"; text = "Continuous Auto"; ObjectID = "6yD-61-CEf"; */
"6yD-61-CEf.text" = "连续自动";
/* Class = "UILabel"; text = "White Balance"; ObjectID = "77e-kN-xI5"; */
"77e-kN-xI5.text" = "白平衡";
/* Class = "UILabel"; text = "Preview Gravity"; ObjectID = "7AW-UQ-WfA"; */
"7AW-UQ-WfA.text" = "相机预览展示形式";
/* Class = "UILabel"; text = "Lens Aperture"; ObjectID = "7ZB-LO-AYF"; */
"7ZB-LO-AYF.text" = "镜头光圈";
/* Class = "UINavigationItem"; text = "Preview Gravity"; ObjectID = "7aV-Vw-b2w"; */
"7aV-Vw-b2w.title" = "相机预览展示形式";
/* Class = "UINavigationItem"; text = "Camera"; ObjectID = "8D6-fu-CJu"; */
"8D6-fu-CJu.title" = "相机";
/* Class = "UILabel"; text = "Medium"; ObjectID = "9xa-bl-6eJ"; */
"9xa-bl-6eJ.text" = "中等品质";
/* Class = "UITableViewSection"; text = "Camera"; ObjectID = "AKs-Pq-lcR"; */
"AKs-Pq-lcR.headerTitle" = "相机";
/* Class = "UILabel"; text = "640x480"; ObjectID = "B6a-KT-T4m"; */
"B6a-KT-T4m.text" = "640x480";
/* Class = "UILabel"; text = "Face Detector"; ObjectID = "Byq-a3-CTH"; */
"Byq-a3-CTH.text" = "人脸识别";
/* Class = "UILabel"; text = "1920x1080"; ObjectID = "Dp9-9b-EEt"; */
"Dp9-9b-EEt.text" = "1920x1080";
/* Class = "UILabel"; text = "A mode that locks the white balance state."; ObjectID = "FOH-2X-R88"; */
"FOH-2X-R88.text" = "一种锁定白平衡状态的模式。";
/* Class = "UILabel"; text = "Duration"; ObjectID = "FPh-cx-ffz"; */
"FPh-cx-ffz.text" = "持续时间";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Hmc-Mc-AME"; */
"Hmc-Mc-AME.text" = "曝光";
/* Class = "UILabel"; text = "Resolution"; ObjectID = "J12-De-6Ah"; */
"J12-De-6Ah.text" = "分辨率";
/* Class = "UITextField"; text = "100"; ObjectID = "J2A-ot-v0j"; */
"J2A-ot-v0j.text" = "100";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "J5t-dB-TfS"; */
"J5t-dB-TfS.text" = "30 fps";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "JgS-aW-Prv"; */
"JgS-aW-Prv.text" = "帧率";
/* Class = "UILabel"; text = "Video Size"; ObjectID = "KJ4-Uw-4AI"; */
"KJ4-Uw-4AI.text" = "视频尺寸";
/* Class = "UILabel"; text = "Locked"; ObjectID = "Nuf-3J-OLu"; */
"Nuf-3J-OLu.text" = "锁定";
/* Class = "UILabel"; text = "Frame Rate"; ObjectID = "RKd-LF-IM5"; */
"RKd-LF-IM5.text" = "帧率";
/* Class = "UILabel"; text = "Resize Aspect"; ObjectID = "Rz5-6b-y0h"; */
"Rz5-6b-y0h.text" = "按比例调整";
/* Class = "UILabel"; text = "f1.0"; ObjectID = "SxX-36-5NN"; */
"SxX-36-5NN.text" = "f1.0";
/* Class = "UILabel"; text = "Exposure"; ObjectID = "Szo-Pj-jno"; */
"Szo-Pj-jno.text" = "曝光";
/* Class = "UILabel"; text = "ISO"; ObjectID = "TTb-bW-pMN"; */
"TTb-bW-pMN.text" = "ISO";
/* Class = "UILabel"; text = "Camera"; ObjectID = "WaU-wB-ecv"; */
"WaU-wB-ecv.text" = "相机";
/* Class = "UITableViewController"; text = "White Balance"; ObjectID = "X40-UA-27w"; */
"X40-UA-27w.title" = "白平衡";
/* Class = "UINavigationItem"; text = "Settings"; ObjectID = "Xje-kQ-L0P"; */
"Xje-kQ-L0P.title" = "设定";
/* Class = "UIBarButtonItem"; text = "Close"; ObjectID = "XvT-YW-xBL"; */
"XvT-YW-xBL.title" = "关闭";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YGU-l8-ZMR"; */
"YGU-l8-ZMR.text" = "鎖定";
/* Class = "UILabel"; text = "Format"; ObjectID = "YPv-nc-7nK"; */
"YPv-nc-7nK.text" = "格式";
/* Class = "UILabel"; text = "Locked"; ObjectID = "YrA-Y0-l1z"; */
"YrA-Y0-l1z.text" = "鎖定";
/* Class = "UINavigationItem"; text = "Format"; ObjectID = "Z09-mH-385"; */
"Z09-mH-385.title" = "格式";
/* Class = "UILabel"; text = "Camera On/Off"; ObjectID = "ZiN-q3-8Cs"; */
"ZiN-q3-8Cs.text" = "相机 开/关";
/* Class = "UINavigationItem"; text = "White Balance"; ObjectID = "b1A-7E-zBH"; */
"b1A-7E-zBH.title" = "白平衡";
/* Class = "UILabel"; text = "100"; ObjectID = "b1L-VE-boz"; */
"b1L-VE-boz.text" = "100";
/* Class = "UINavigationItem"; text = "Exposure"; ObjectID = "bwF-Fn-XeY"; */
"bwF-Fn-XeY.title" = "曝光";
/* Class = "UIButton"; text = "Test"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.configuration.title" = "测试";
/* Class = "UIButton"; text = "Button"; ObjectID = "cC0-N9-hxt"; */
"cC0-N9-hxt.normalTitle" = "测试";
/* Class = "UISegmentedControl"; text = "sec"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[0]" = "秒";
/* Class = "UISegmentedControl"; text = "ms"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[1]" = "毫秒";
/* Class = "UISegmentedControl"; text = "μs"; ObjectID = "fSu-H9-Cz9"; */
"fSu-H9-Cz9.segmentTitles[2]" = "微秒";
/* Class = "UILabel"; text = "Locked"; ObjectID = "fty-cZ-huK"; */
"fty-cZ-huK.text" = "鎖定";
/* Class = "UILabel"; text = "Auto (10ms)"; ObjectID = "hVI-M7-yVe"; */
"hVI-M7-yVe.text" = "自动10毫秒";
/* Class = "UILabel"; text = "Camera Name"; ObjectID = "if3-5B-xPF"; */
"if3-5B-xPF.text" = "相机名称";
/* Class = "UILabel"; text = "Unit"; ObjectID = "l6D-7r-DH5"; */
"l6D-7r-DH5.text" = "单位";
/* Class = "UINavigationItem"; text = "Camera Test"; ObjectID = "lZ7-W3-MRS"; */
"lZ7-W3-MRS.title" = "相机测试";
/* Class = "UILabel"; text = "8ms"; ObjectID = "lct-8a-Lqk"; */
"lct-8a-Lqk.text" = "8毫秒";
/* Class = "UITableViewController"; text = "Camera"; ObjectID = "lvC-jH-Oee"; */
"lvC-jH-Oee.title" = "相机";
/* Class = "UILabel"; text = "Resize Aspect Fill"; ObjectID = "mod-K3-RQU"; */
"mod-K3-RQU.text" = "按比例填充";
/* Class = "UILabel"; text = "HD 1920x1080"; ObjectID = "nbm-4k-ihb"; */
"nbm-4k-ihb.text" = "HD 1920x1080";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "pKS-rd-Dx6"; */
"pKS-rd-Dx6.text" = "30 fps";
/* Class = "UILabel"; text = "Exposure Duration"; ObjectID = "soE-ah-Y4f"; */
"soE-ah-Y4f.text" = "曝光时间";
/* Class = "UILabel"; text = "30 fps"; ObjectID = "u82-lR-lTP"; */
"u82-lR-lTP.text" = "30 fps";
/* Class = "UILabel"; text = "100"; ObjectID = "y9H-1I-dYX"; */
"y9H-1I-dYX.text" = "100";
/* Class = "UILabel"; text = "ISO"; ObjectID = "yOn-QG-KiO"; */
"yOn-QG-KiO.text" = "ISO";

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleIdentifier</key>
<string>com.apple.xcode.dsym.com.demo.panoptic</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>dSYM</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleShortVersionString</key>
<string>4.8.1</string>
<key>CFBundleVersion</key>
<string>1</string>
</dict>
</plist>

@ -0,0 +1,47 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>AvailableLibraries</key>
<array>
<dict>
<key>BinaryPath</key>
<string>VitalSignObjCFramework.framework/VitalSignObjCFramework</string>
<key>DebugSymbolsPath</key>
<string>dSYMs</string>
<key>LibraryIdentifier</key>
<string>ios-x86_64-simulator</string>
<key>LibraryPath</key>
<string>VitalSignObjCFramework.framework</string>
<key>SupportedArchitectures</key>
<array>
<string>x86_64</string>
</array>
<key>SupportedPlatform</key>
<string>ios</string>
<key>SupportedPlatformVariant</key>
<string>simulator</string>
</dict>
<dict>
<key>BinaryPath</key>
<string>VitalSignObjCFramework.framework/VitalSignObjCFramework</string>
<key>DebugSymbolsPath</key>
<string>dSYMs</string>
<key>LibraryIdentifier</key>
<string>ios-arm64</string>
<key>LibraryPath</key>
<string>VitalSignObjCFramework.framework</string>
<key>SupportedArchitectures</key>
<array>
<string>arm64</string>
</array>
<key>SupportedPlatform</key>
<string>ios</string>
</dict>
</array>
<key>CFBundlePackageType</key>
<string>XFWK</string>
<key>XCFrameworkFormatVersion</key>
<string>1.0</string>
</dict>
</plist>

@ -0,0 +1,17 @@
//
// VitalSignObjCFramework.h
// VitalSignObjCFramework
//
// Created by Eddie Lau on 8/6/2023.
//
#import <Foundation/Foundation.h>
//! Project version number for VitalSignObjCFramework.
FOUNDATION_EXPORT double VitalSignObjCFrameworkVersionNumber;
//! Project version string for VitalSignObjCFramework.
FOUNDATION_EXPORT const unsigned char VitalSignObjCFrameworkVersionString[];
// In this header, you should import all the public headers of your framework using statements like #import <VitalSignObjCFramework/PublicHeader.h>

@ -0,0 +1,6 @@
framework module VitalSignObjCFramework {
umbrella header "VitalSignObjCFramework.h"
export *
module * { export * }
}

@ -0,0 +1,124 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>files</key>
<dict>
<key>Headers/VitalSignObjCFramework.h</key>
<data>
CdA62sXuLiyZ24FIv/0cHywdeT4=
</data>
<key>Info.plist</key>
<data>
G44lW0TrsCTFA8x2pXw6EX0I/tI=
</data>
<key>Modules/module.modulemap</key>
<data>
fCoLhLRYL6DmWGrSLEWWmXOz2gM=
</data>
</dict>
<key>files2</key>
<dict>
<key>Headers/VitalSignObjCFramework.h</key>
<dict>
<key>hash2</key>
<data>
RYvZ8Aw+PynSAoB6GHYJWogvO3tOweT93JrLAL4ASCo=
</data>
</dict>
<key>Modules/module.modulemap</key>
<dict>
<key>hash2</key>
<data>
9ytJ/YRg/NYVEdrK/5XnW2GCvMh+wwM0BRYfPdiKK9A=
</data>
</dict>
</dict>
<key>rules</key>
<dict>
<key>^.*</key>
<true/>
<key>^.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^version.plist$</key>
<true/>
</dict>
<key>rules2</key>
<dict>
<key>.*\.dSYM($|/)</key>
<dict>
<key>weight</key>
<real>11</real>
</dict>
<key>^(.*/)?\.DS_Store$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>2000</real>
</dict>
<key>^.*</key>
<true/>
<key>^.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^Info\.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^PkgInfo$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^embedded\.provisionprofile$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
<key>^version\.plist$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
</dict>
</dict>
</plist>

@ -0,0 +1,6 @@
framework module VitalSignObjCFramework {
umbrella header "VitalSignObjCFramework.h"
export *
module * { export * }
}

@ -0,0 +1,124 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>files</key>
<dict>
<key>Headers/VitalSignObjCFramework.h</key>
<data>
CdA62sXuLiyZ24FIv/0cHywdeT4=
</data>
<key>Info.plist</key>
<data>
e47bkbWmbhiOYpQ2/SHW48rW73c=
</data>
<key>Modules/module.modulemap</key>
<data>
fCoLhLRYL6DmWGrSLEWWmXOz2gM=
</data>
</dict>
<key>files2</key>
<dict>
<key>Headers/VitalSignObjCFramework.h</key>
<dict>
<key>hash2</key>
<data>
RYvZ8Aw+PynSAoB6GHYJWogvO3tOweT93JrLAL4ASCo=
</data>
</dict>
<key>Modules/module.modulemap</key>
<dict>
<key>hash2</key>
<data>
9ytJ/YRg/NYVEdrK/5XnW2GCvMh+wwM0BRYfPdiKK9A=
</data>
</dict>
</dict>
<key>rules</key>
<dict>
<key>^.*</key>
<true/>
<key>^.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^version.plist$</key>
<true/>
</dict>
<key>rules2</key>
<dict>
<key>.*\.dSYM($|/)</key>
<dict>
<key>weight</key>
<real>11</real>
</dict>
<key>^(.*/)?\.DS_Store$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>2000</real>
</dict>
<key>^.*</key>
<true/>
<key>^.*\.lproj/</key>
<dict>
<key>optional</key>
<true/>
<key>weight</key>
<real>1000</real>
</dict>
<key>^.*\.lproj/locversion.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>1100</real>
</dict>
<key>^Base\.lproj/</key>
<dict>
<key>weight</key>
<real>1010</real>
</dict>
<key>^Info\.plist$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^PkgInfo$</key>
<dict>
<key>omit</key>
<true/>
<key>weight</key>
<real>20</real>
</dict>
<key>^embedded\.provisionprofile$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
<key>^version\.plist$</key>
<dict>
<key>weight</key>
<real>20</real>
</dict>
</dict>
</dict>
</plist>

@ -0,0 +1,20 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleIdentifier</key>
<string>com.apple.xcode.dsym.ai.panoptic.VitalSignObjCFramework</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundlePackageType</key>
<string>dSYM</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleShortVersionString</key>
<string>1.0</string>
<key>CFBundleVersion</key>
<string>1</string>
</dict>
</plist>

@ -0,0 +1,18 @@
/// Indicates a format's autofocus system.
///
/// * `none`: Indicates that autofocus is not available
/// * `contrastDetection`: Indicates that autofocus is achieved by contrast detection. Contrast detection performs a focus scan to find the optimal position
/// * `phaseDetection`: Indicates that autofocus is achieved by phase detection. Phase detection has the ability to achieve focus in many cases without a focus scan. Phase detection autofocus is typically less visually intrusive than contrast detection autofocus
enum AutoFocusSystem { contrastDetection, phaseDetection, none }
AutoFocusSystem autoFocusSystemFromString(String string) {
switch (string) {
case 'contrast-detection':
return AutoFocusSystem.contrastDetection;
case 'phase-detection':
return AutoFocusSystem.phaseDetection;
case 'none':
default:
return AutoFocusSystem.none;
}
}

@ -0,0 +1,87 @@
import 'pixel_format.dart';
import 'video_stabilization_mode.dart';
import 'auto_focus_system.dart';
import 'color_space.dart';
/// The range of the frame rate.
class FrameRateRange {
/// The minimum frame rate.
final int minFrameRate;
/// The maximum frame rate.
final int maxFrameRate;
const FrameRateRange(this.minFrameRate, this.maxFrameRate);
factory FrameRateRange.fromMap(Map map) {
return FrameRateRange(map['minFrameRate'], map['maxFrameRate']);
}
}
/// The capture format of the camera device.
class CameraCaptureFormat {
final double photoHeight;
final double photoWidth;
final double? videoHeight;
final double? videoWidth;
final bool isHighestPhotoQualitySupported;
final int? maxISO;
final int? minISO;
final double? fieldOfView;
final double? maxZoom;
final List<ColorSpace> colorSpaces;
final bool supportsVideoHDR;
final bool supportsPhotoHDR;
final List<FrameRateRange> frameRateRanges;
final AutoFocusSystem autoFocusSystem;
final List<VideoStabilizationMode> videoStabilizationModes;
final PixelFormat pixelFormat;
const CameraCaptureFormat(
this.photoHeight,
this.photoWidth,
this.videoHeight,
this.videoWidth,
this.isHighestPhotoQualitySupported,
this.maxISO,
this.minISO,
this.fieldOfView,
this.maxZoom,
this.colorSpaces,
this.supportsVideoHDR,
this.supportsPhotoHDR,
this.frameRateRanges,
this.autoFocusSystem,
this.videoStabilizationModes,
this.pixelFormat);
factory CameraCaptureFormat.fromMap(Map map) {
return CameraCaptureFormat(
map['photoHeight'],
map['photoWidth'],
map['videoHeight'],
map['videoWidth'],
map['isHighestPhotoQualitySupported'] ?? false,
map['maxISO'],
map['minISO'],
map['fieldOfView'],
map['maxZoom'],
(map['colorSpaces'] as List<Object?>)
.where((e) => e != null)
.map((e) => e as String)
.map(colorSpaceFromString)
.toList(),
map['supportsVideoHDR'],
map['supportsPhotoHDR'],
(map['frameRateRanges'] as List<Object?>)
.where((element) => element != null)
.map((e) => FrameRateRange.fromMap(e! as Map))
.toList(),
autoFocusSystemFromString(map['autoFocusSystem']),
(map['videoStabilizationModes'] as List<Object?>)
.where((element) => element != null)
.map((e) => e! as String)
.map(videoStabilizationModeFromString)
.toList(),
pixelFormatFromString(map['pixelFormat']));
}
}
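// Editor's note: a small helper sketch, not part of the original commit. It shows how the
// frameRateRanges of a CameraCaptureFormat can be checked against a target frame rate
// (30 fps below is only an illustrative figure, not a requirement stated in this commit).
import 'camera_capture_format.dart';

/// Returns true when any of the format's ranges brackets the requested rate.
bool supportsFrameRate(CameraCaptureFormat format, int fps) {
  return format.frameRateRanges
      .any((range) => range.minFrameRate <= fps && fps <= range.maxFrameRate);
}

// Example: supportsFrameRate(format, 30)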

@ -0,0 +1,129 @@
import 'camera_device_type.dart';
import 'camera_position.dart';
import 'camera_capture_format.dart';
/// Represents a camera device discovered by the [availableCameraDevices()] function
class CameraDevice {
/// The native ID of the camera device instance.
final String id;
/// The physical devices this `CameraDevice` contains.
///
/// If this camera device is a **logical camera** (combination of multiple physical cameras), there are multiple cameras in this list.
/// If this camera device is a **physical camera**, there is only a single element in this list.
///
/// You can check if the camera is a logical multi-camera by using the [isMultiCam] property.
final List<CameraDeviceType> devices;
/// Specifies the physical position of this camera. (back or front)
final CameraPosition position;
/// A friendly localized name describing the camera.
final String name;
/// Specifies whether this camera supports enabling flash for photo capture.
final bool hasFlash;
/// Specifies whether this camera supports continuously enabling the flash to act like a torch (flash with video capture)
final bool hasTorch;
/// A property indicating whether the device is a virtual multi-camera consisting of multiple combined physical cameras.
///
/// Examples:
/// The Dual Camera, which supports seamlessly switching between a wide and telephoto camera while zooming and generating depth data from the disparities between the different points of view of the physical cameras.
/// The TrueDepth Camera, which generates depth data from disparities between a YUV camera and an Infrared camera pointed in the same direction.
final bool isMultiCam;
/// Whether this camera device supports using Video Recordings (`video={true}`) and Frame Processors (`frameProcessor={...}`) at the same time. See ["The `supportsParallelVideoProcessing` prop"](https://mrousavy.github.io/react-native-vision-camera/docs/guides/devices#the-supportsparallelvideoprocessing-prop) for more information.
///
/// If this property is `false`, you can only enable `video` or add a `frameProcessor`, but not both.
/// On iOS this value is always `true`.
/// On newer Android devices this value is always `true`.
/// On older Android devices this value is `false` if the Camera's hardware level is `LEGACY` or `LIMITED`, `true` otherwise. (See [`INFO_SUPPORTED_HARDWARE_LEVEL`](https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics#INFO_SUPPORTED_HARDWARE_LEVEL) or [the tables at "Regular capture"](https://developer.android.com/reference/android/hardware/camera2/CameraDevice#regular-capture))
final bool supportsParallelVideoProcessing;
/// Whether this camera supports taking photos in RAW format
///
/// **! Work in Progress !**
final bool supportsRawCapture;
/// Whether this camera supports taking photos with depth data.
///
/// **! Work in Progress !**
final bool supportsDepthCapture;
/// Whether this camera device supports low light boost.
final bool supportsLowLightBoost;
/// Specifies whether this device supports focusing.
final bool supportsFocus;
/// Minimum available zoom factor. (e.g. `1`)
final double minZoom;
/// Maximum available zoom factor. (e.g. `128`)
final double maxZoom;
/// The zoom factor where the camera is "neutral".
///
/// For single-physical cameras this property is always `1.0`.
/// For multi cameras this property is a value between `minZoom` and `maxZoom`, where the camera is in _wide-angle_ mode and hasn't switched to the _ultra-wide-angle_ ("fish-eye") or telephoto camera yet.
///
/// Use this value as an initial value for the zoom property if you implement custom zoom. (e.g. reanimated shared value should be initially set to this value)
/// ```
/// const device = ...
/// const zoom = useSharedValue(device.neutralZoom) // <-- initial value so it doesn't start at ultra-wide
/// const cameraProps = useAnimatedProps(() => ({
///   zoom: zoom.value
/// }))
/// ```
final double neutralZoom;
/// All available formats for this camera device. Use this to find the best format for your use case and set it to the [CameraDevice.formats] property.
final List<CameraCaptureFormat> formats;
const CameraDevice(
this.id,
this.devices,
this.position,
this.name,
this.hasFlash,
this.hasTorch,
this.isMultiCam,
this.supportsParallelVideoProcessing,
this.supportsRawCapture,
this.supportsDepthCapture,
this.supportsLowLightBoost,
this.supportsFocus,
this.minZoom,
this.maxZoom,
this.neutralZoom,
this.formats);
factory CameraDevice.fromMap(Map map) {
return CameraDevice(
map['id'],
(map['devices'] as List<Object?>)
.where((element) => element != null)
.map((e) => e as String)
.map(cameraDeviceTypeFromString)
.toList(),
cameraPositionFromString(map['position']),
map['name'],
map['hasFlash'],
map['hasTorch'],
map['isMultiCam'],
map['supportsParallelVideoProcessing'],
map['supportsRawCapture'],
map['supportsDepthCapture'],
map['supportsLowLightBoost'],
map['supportsFocus'],
map['minZoom'],
map['maxZoom'],
map['neutralZoom'],
(map['formats'] as List<Object?>)
.where((element) => element != null)
.map((e) => e!)
.map((e) => CameraCaptureFormat.fromMap(e as Map))
.toList());
}
}
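// Editor's note: a usage sketch, not part of the original commit. It picks the capture
// format with the largest photo area from `CameraDevice.formats`, as one concrete reading
// of the "find the best format for your use case" guidance in the doc comment above.
import 'camera_capture_format.dart';
import 'camera_device.dart';

CameraCaptureFormat? largestPhotoFormat(CameraDevice device) {
  if (device.formats.isEmpty) return null;
  return device.formats.reduce((best, candidate) {
    final bestArea = best.photoWidth * best.photoHeight;
    final candidateArea = candidate.photoWidth * candidate.photoHeight;
    return candidateArea > bestArea ? candidate : best;
  });
}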

@ -0,0 +1,34 @@
/// Identifiers for a physical camera (one that actually exists on the back/front of the device)
///
/// * `ultraWideAngleCamera`: A built-in camera with a shorter focal length than that of a wide-angle camera. (focal length below 24mm)
/// * `wideAngleCamera`: A built-in wide-angle camera. (focal length between 24mm and 35mm)
/// * `telephotoCamera`: A built-in camera device with a longer focal length than a wide-angle camera. (focal length above 85mm)
///
/// Identifiers for a logical camera (combinations of multiple physical cameras into a single logical camera).
///
/// * `dualCamera`: A combination of wide-angle and telephoto cameras that creates a capture device.
/// * `dualWideCamera`: A device that consists of two cameras of fixed focal length, one ultrawide angle and one wide angle.
/// * `tripleCamera`: A device that consists of three cameras of fixed focal length, one ultrawide angle, one wide angle, and one telephoto.
enum CameraDeviceType {
// Physical camera type
ultraWideAngleCamera,
wideAngleCamera,
telephotoCamera,
// Logical camera type, for query only
dualCamera,
dualWideCamera,
tripleCamera
}
CameraDeviceType cameraDeviceTypeFromString(String string) {
switch (string) {
case 'ultra-wide-angle-camera':
return CameraDeviceType.ultraWideAngleCamera;
case 'wide-angle-camera':
return CameraDeviceType.wideAngleCamera;
case 'telephoto-camera':
return CameraDeviceType.telephotoCamera;
default:
return CameraDeviceType.wideAngleCamera;
}
}

@ -0,0 +1,33 @@
/// The permission status of a camera.
enum CameraPermissionStatus {
/// The permission is authorized by the user.
authorized,
/// The permission has not been determined yet.
notDetermined,
/// The permission is denied by the user.
denied,
/// The permission is restricted by the system.
restricted
}
/// Parse the camera permission from a string value.
CameraPermissionStatus cameraPermissionFromString(String? value) {
if (value == null) {
return CameraPermissionStatus.denied;
}
switch (value) {
case "authorized":
return CameraPermissionStatus.authorized;
case "not-determined":
return CameraPermissionStatus.notDetermined;
case "denied":
return CameraPermissionStatus.denied;
case "restricted":
return CameraPermissionStatus.restricted;
default:
return CameraPermissionStatus.denied;
}
}

@ -0,0 +1,26 @@
/// Represents the camera device position.
///
/// * `back`: Indicates that the device is physically located on the back of the system hardware
/// * `front`: Indicates that the device is physically located on the front of the system hardware
///
/// #### iOS only
/// * `unspecified`: Indicates that the device's position relative to the system hardware is unspecified
///
/// #### Android only
/// * `external`: The camera device is an external camera and has no fixed facing relative to the device's screen.
enum CameraPosition { front, back, unspecified, external }
CameraPosition cameraPositionFromString(String string) {
switch (string) {
case 'front':
return CameraPosition.front;
case 'back':
return CameraPosition.back;
case 'unspecified':
return CameraPosition.unspecified;
case 'external':
return CameraPosition.external;
default:
return CameraPosition.unspecified;
}
}

@ -0,0 +1,68 @@
/// Indicates a format's color space.
///
/// #### The following colorspaces are available on iOS:
/// * `srgb`: The sRGB color space.
/// * `p3d65`: The P3 D65 wide color space which uses Illuminant D65 as the white point
/// * `hlgBt2020`: The BT2020 wide color space which uses Illuminant D65 as the white point and Hybrid Log-Gamma as the transfer function
///
/// > See ["AVCaptureColorSpace"](https://developer.apple.com/documentation/avfoundation/avcapturecolorspace) for more information.
///
/// #### The following colorspaces are available on Android:
/// * `yuv`: The Multi-plane Android YCbCr color space. (YUV 420_888, 422_888 or 444_888)
/// * `jpeg`: The compressed JPEG color space.
/// * `jpegDepth`: The compressed JPEG color space including depth data.
/// * `raw`: The Camera's RAW sensor color space. (Single-channel Bayer-mosaic image, usually 16 bit)
/// * `heic`: The compressed HEIC color space.
/// * `private`: The Android private opaque image format. (The choices of the actual format and pixel data layout are entirely up to the device-specific and framework internal implementations, and may vary depending on use cases even for the same device. These buffers are not directly accessible to the application)
/// * `depth16`: The Android dense depth image format (16 bit)
/// * `unknown`: Placeholder for an unknown image/pixel format. [Edit this file](https://github.com/mrousavy/react-native-vision-camera/edit/main/android/src/main/java/com/mrousavy/camera/parsers/ImageFormat+String.kt) to add a name for the unknown format.
///
/// > See ["Android Color Formats"](https://jbit.net/Android_Colors/) for more information.
enum ColorSpace {
// ios
hlgBt2020,
p3d65,
srgb,
// android
yuv,
jpeg,
jpegDepth,
raw,
heic,
private,
depth16,
unknown
}
ColorSpace colorSpaceFromString(String? string) {
if (string == null) {
return ColorSpace.unknown;
}
switch (string) {
// ios
case 'hlg-bt2020':
return ColorSpace.hlgBt2020;
case 'p3-d65':
return ColorSpace.p3d65;
case 'srgb':
return ColorSpace.srgb;
// android
case 'yuv':
return ColorSpace.yuv;
case 'jpeg':
return ColorSpace.jpeg;
case 'jpeg-depth':
return ColorSpace.jpegDepth;
case 'raw':
return ColorSpace.raw;
case 'heic':
return ColorSpace.heic;
case 'private':
return ColorSpace.private;
case 'depth-16':
return ColorSpace.depth16;
case 'unknown':
default:
return ColorSpace.unknown;
}
}

@ -0,0 +1 @@
/// How the camera preview is scaled within its layer: `resizeAspectFill` fills the layer (cropping if needed), `resizeAspect` fits the whole preview inside it.
enum PreviewLayerGravity { resizeAspectFill, resizeAspect }

@ -0,0 +1,63 @@
import 'camera_device.dart';
import 'camera_device_type.dart';
import 'camera_position.dart';
import 'vital_sign_camera.dart';
/// Queries the best available [CameraDevice] for each [CameraPosition] matching the given [CameraDeviceType].
Future<Map<CameraPosition, CameraDevice?>> queryCameraDevices(
CameraDeviceType type) async {
final devices = await availableCameraDevices();
final result = devices.where((element) {
if (element.devices.length == 1) {
return element.devices[0] == type;
}
final hasWide = element.devices.contains(CameraDeviceType.wideAngleCamera);
final hasUltra =
element.devices.contains(CameraDeviceType.ultraWideAngleCamera);
final hasTele = element.devices.contains(CameraDeviceType.telephotoCamera);
if (hasTele &&
hasWide &&
hasUltra &&
type == CameraDeviceType.tripleCamera) {
return true;
}
if (hasWide && hasUltra && type == CameraDeviceType.dualWideCamera) {
return true;
}
if (hasWide && hasTele && type == CameraDeviceType.dualCamera) {
return true;
}
return false;
});
CameraDevice? deviceWithPosition(CameraPosition position) {
final list = result.where((element) => element.position == position);
return list.isNotEmpty ? list.first : null;
}
return <CameraPosition, CameraDevice?>{
CameraPosition.back: deviceWithPosition(CameraPosition.back),
CameraPosition.front: deviceWithPosition(CameraPosition.front),
CameraPosition.external: deviceWithPosition(CameraPosition.external),
CameraPosition.unspecified: deviceWithPosition(CameraPosition.unspecified)
};
}
/// Queries the camera device for the given [CameraPosition], trying other device types when no wide-angle camera is available.
Future<CameraDevice?> queryCameraDevice(CameraPosition position) async {
var device =
(await queryCameraDevices(CameraDeviceType.wideAngleCamera))[position];
if (device == null) {
for (var type in CameraDeviceType.values) {
device = (await queryCameraDevices(type))[position];
if (device != null) {
return device;
}
}
}
return device;
}
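// Editor's note: a usage sketch, not part of the original commit. It assumes the
// queryCameraDevice helper defined above is importable (its file name is not shown
// in this diff). For a face-based vital-sign scan the front camera is preferred,
// with the back camera as a fallback.
import 'camera_device.dart';
import 'camera_position.dart';
// import '<file defining queryCameraDevice>'; // path not shown in this diff

Future<CameraDevice?> pickScanCamera() async {
  return await queryCameraDevice(CameraPosition.front) ??
      await queryCameraDevice(CameraPosition.back);
}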

@ -0,0 +1,38 @@
/// The conditions required to start a scan.
class ScanConditions {
/// Whether the lighting condition is fulfilled.
final bool lighting;
/// Whether the face-distance condition is fulfilled.
final bool distance;
/// Whether the face is centered.
final bool centered;
/// Whether the face is staying still.
final bool movement;
/// Whether the frame rate of the camera is fast enough.
final bool frameRate;
/// Whether the server is ready.
final bool serverReady;
const ScanConditions(
{required this.lighting,
required this.distance,
required this.centered,
required this.movement,
required this.frameRate,
required this.serverReady});
factory ScanConditions.fromMap(Map map) {
return ScanConditions(
lighting: map['lighting'],
distance: map['distance'],
centered: map['centered'],
movement: map['movement'],
frameRate: map['frameRate'],
serverReady: map['serverReady']);
}
}
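// Editor's note: a convenience sketch, not part of the original commit (the file name
// scan_conditions.dart is assumed from the class name). A scan can presumably start only
// once every flag reported by the engine is true; these helpers fold the flags into a
// single readiness check and list whichever conditions are still failing.
import 'scan_conditions.dart'; // assumed file name

bool isReadyToScan(ScanConditions c) =>
    c.lighting &&
    c.distance &&
    c.centered &&
    c.movement &&
    c.frameRate &&
    c.serverReady;

List<String> failingConditions(ScanConditions c) => [
      if (!c.lighting) 'lighting',
      if (!c.distance) 'distance',
      if (!c.centered) 'centered',
      if (!c.movement) 'movement',
      if (!c.frameRate) 'frameRate',
      if (!c.serverReady) 'serverReady',
    ];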

@ -0,0 +1,29 @@
enum Gender { male, female }
/// The information about the user performing the scan.
class UserInfo {
late double age;
late Gender gender;
double? weight;
double? height;
double? waistCircumference;
bool? smoker;
bool? hypertension;
bool? bpMedication;
bool? diabetic;
String? userId;
String? appUserId;
UserInfo(
{required this.age,
required this.gender,
this.weight,
this.height,
this.waistCircumference,
this.smoker,
this.hypertension,
this.bpMedication,
this.diabetic,
this.userId,
this.appUserId});
}
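// Editor's note: a construction sketch, not part of the original commit (the file name
// user_info.dart is assumed from the class name, and the cm/kg units are assumptions).
// Only age and gender are required; the optional clinical fields can be supplied when known.
import 'user_info.dart'; // assumed file name

final demoUser = UserInfo(
  age: 35,            // years
  gender: Gender.female,
  height: 170,        // assumed to be centimetres
  weight: 65,         // assumed to be kilograms
  smoker: false,
  hypertension: false,
);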

Some files were not shown because too many files have changed in this diff.