J. A. Messias 2024-09-20 17:23:58 -03:00
parent 8d829ca997
commit 472b9d244b
7 changed files with 979 additions and 3 deletions

View File

@@ -93,6 +93,11 @@ dependencies {
implementation 'androidx.window:window:1.0.0'
implementation 'androidx.window:window-java:1.0.0'
implementation ('com.google.firebase:firebase-messaging:24.0.0') {
exclude group: 'com.google.firebase', module: 'firebase-iid'
}
}
apply plugin: 'com.google.gms.google-services'

View File

@@ -86,6 +86,7 @@
<meta-data
android:name="com.google.firebase.messaging.default_notification_icon"
android:resource="@drawable/notification_icon" />
<!-- Don't delete the meta-data below.
This is used by the Flutter tool to generate GeneratedPluginRegistrant.java -->

View File

@@ -100,7 +100,7 @@ class MenuComponentModel extends FlutterFlowModel<MenuComponentWidget> {
.then((value) => value.toString() == 'true') as bool;
if (isProvisional == true) {
context.push(
'/provisionalSchedule',
'/face',
extra: <String, dynamic>{
kTransitionInfoKey: const TransitionInfo(
hasTransition: true,

View File

@ -10,6 +10,7 @@ import 'package:hub/pages/pets_page/pets_page_widget.dart';
import 'package:hub/pages/provisional_schedule_page/provisional_schedule_widget.dart';
import 'package:hub/pages/reception_page/reception_page_widget.dart';
import 'package:hub/pages/reservation_page/reservation_page_widget.dart';
import 'package:hub/test/face_detector_screen.dart';
import 'package:provider/provider.dart';
import '/backend/schema/structs/index.dart';
@@ -87,6 +88,10 @@ GoRouter createRouter(AppStateNotifier appStateNotifier) => GoRouter(
);
},
),
FFRoute(
name: 'face',
path: '/face',
builder: (context, params) => FaceDetectorView()),
FFRoute(
name: 'receptionPage',
path: '/receptionPage',

View File

@@ -0,0 +1,899 @@
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'package:flutter/services.dart';
import 'package:image_picker/image_picker.dart';
import 'package:path/path.dart';
import 'package:path_provider/path_provider.dart';
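// Translates an x coordinate from ML Kit's image space to canvas space,
// scaling for the current rotation and mirroring the result where the
// rotation/lens combination requires it.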
double translateX(
double x,
Size canvasSize,
Size imageSize,
InputImageRotation rotation,
CameraLensDirection cameraLensDirection,
) {
switch (rotation) {
case InputImageRotation.rotation90deg:
return x *
canvasSize.width /
(Platform.isIOS ? imageSize.width : imageSize.height);
case InputImageRotation.rotation270deg:
return canvasSize.width -
x *
canvasSize.width /
(Platform.isIOS ? imageSize.width : imageSize.height);
case InputImageRotation.rotation0deg:
case InputImageRotation.rotation180deg:
switch (cameraLensDirection) {
case CameraLensDirection.back:
return x * canvasSize.width / imageSize.width;
default:
return canvasSize.width - x * canvasSize.width / imageSize.width;
}
}
}
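// Translates a y coordinate from image space to canvas space, scaling for the current rotation.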
double translateY(
double y,
Size canvasSize,
Size imageSize,
InputImageRotation rotation,
CameraLensDirection cameraLensDirection,
) {
switch (rotation) {
case InputImageRotation.rotation90deg:
case InputImageRotation.rotation270deg:
return y *
canvasSize.height /
(Platform.isIOS ? imageSize.height : imageSize.width);
case InputImageRotation.rotation0deg:
case InputImageRotation.rotation180deg:
return y * canvasSize.height / imageSize.height;
}
}
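// Paints detection results over the camera preview: a red bounding box and red
// contour points for each face, plus green dots for the recognized landmarks.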
class FaceDetectorPainter extends CustomPainter {
FaceDetectorPainter(
this.faces,
this.imageSize,
this.rotation,
this.cameraLensDirection,
);
final List<Face> faces;
final Size imageSize;
final InputImageRotation rotation;
final CameraLensDirection cameraLensDirection;
@override
void paint(Canvas canvas, Size size) {
final Paint paint1 = Paint()
..style = PaintingStyle.stroke
..strokeWidth = 1.0
..color = Colors.red;
final Paint paint2 = Paint()
..style = PaintingStyle.fill
..strokeWidth = 1.0
..color = Colors.green;
for (final Face face in faces) {
final left = translateX(
face.boundingBox.left,
size,
imageSize,
rotation,
cameraLensDirection,
);
final top = translateY(
face.boundingBox.top,
size,
imageSize,
rotation,
cameraLensDirection,
);
final right = translateX(
face.boundingBox.right,
size,
imageSize,
rotation,
cameraLensDirection,
);
final bottom = translateY(
face.boundingBox.bottom,
size,
imageSize,
rotation,
cameraLensDirection,
);
canvas.drawRect(
Rect.fromLTRB(left, top, right, bottom),
paint1,
);
void paintContour(FaceContourType type) {
final contour = face.contours[type];
if (contour?.points != null) {
for (final Point point in contour!.points) {
canvas.drawCircle(
Offset(
translateX(
point.x.toDouble(),
size,
imageSize,
rotation,
cameraLensDirection,
),
translateY(
point.y.toDouble(),
size,
imageSize,
rotation,
cameraLensDirection,
),
),
1,
paint1);
}
}
}
void paintLandmark(FaceLandmarkType type) {
final landmark = face.landmarks[type];
if (landmark?.position != null) {
canvas.drawCircle(
Offset(
translateX(
landmark!.position.x.toDouble(),
size,
imageSize,
rotation,
cameraLensDirection,
),
translateY(
landmark.position.y.toDouble(),
size,
imageSize,
rotation,
cameraLensDirection,
),
),
2,
paint2);
}
}
for (final type in FaceContourType.values) {
paintContour(type);
}
for (final type in FaceLandmarkType.values) {
paintLandmark(type);
}
}
}
@override
bool shouldRepaint(FaceDetectorPainter oldDelegate) {
return oldDelegate.imageSize != imageSize || oldDelegate.faces != faces;
}
}
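// Screen registered under the '/face' route: runs ML Kit face detection on the
// frames supplied by DetectorView and overlays the painter above.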
class FaceDetectorView extends StatefulWidget {
@override
State<FaceDetectorView> createState() => _FaceDetectorViewState();
}
class _FaceDetectorViewState extends State<FaceDetectorView> {
final FaceDetector _faceDetector = FaceDetector(
options: FaceDetectorOptions(
enableContours: true,
enableLandmarks: true,
),
);
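// _canProcess is cleared on dispose and _isBusy drops frames that arrive while
// the detector is still working on the previous one.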
bool _canProcess = true;
bool _isBusy = false;
CustomPaint? _customPaint;
String? _text;
var _cameraLensDirection = CameraLensDirection.front;
@override
void dispose() {
_canProcess = false;
_faceDetector.close();
super.dispose();
}
@override
Widget build(BuildContext context) {
return DetectorView(
title: 'Face Detector',
customPaint: _customPaint,
text: _text,
onImage: _processImage,
initialCameraLensDirection: _cameraLensDirection,
onCameraLensDirectionChanged: (value) => _cameraLensDirection = value,
);
}
Future<void> _processImage(InputImage inputImage) async {
if (!_canProcess) return;
if (_isBusy) return;
_isBusy = true;
setState(() {
_text = '';
});
final faces = await _faceDetector.processImage(inputImage);
if (inputImage.metadata?.size != null &&
inputImage.metadata?.rotation != null) {
final painter = FaceDetectorPainter(
faces,
inputImage.metadata!.size,
inputImage.metadata!.rotation,
_cameraLensDirection,
);
_customPaint = CustomPaint(painter: painter);
} else {
String text = 'Faces found: ${faces.length}\n\n';
for (final face in faces) {
text += 'face: ${face.boundingBox}\n\n';
}
_text = text;
// TODO: set _customPaint to draw boundingRect on top of image
_customPaint = null;
}
_isBusy = false;
if (mounted) {
setState(() {});
}
}
}
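// DetectorView switches between a live camera feed (CameraView) and a picked
// image (GalleryView); both forward InputImages to the onImage callback.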
enum DetectorViewMode { liveFeed, gallery }
class DetectorView extends StatefulWidget {
DetectorView({
Key? key,
required this.title,
required this.onImage,
this.customPaint,
this.text,
this.initialDetectionMode = DetectorViewMode.liveFeed,
this.initialCameraLensDirection = CameraLensDirection.back,
this.onCameraFeedReady,
this.onDetectorViewModeChanged,
this.onCameraLensDirectionChanged,
}) : super(key: key);
final String title;
final CustomPaint? customPaint;
final String? text;
final DetectorViewMode initialDetectionMode;
final Function(InputImage inputImage) onImage;
final Function()? onCameraFeedReady;
final Function(DetectorViewMode mode)? onDetectorViewModeChanged;
final Function(CameraLensDirection direction)? onCameraLensDirectionChanged;
final CameraLensDirection initialCameraLensDirection;
@override
State<DetectorView> createState() => _DetectorViewState();
}
class _DetectorViewState extends State<DetectorView> {
late DetectorViewMode _mode;
@override
void initState() {
_mode = widget.initialDetectionMode;
super.initState();
}
@override
Widget build(BuildContext context) {
return _mode == DetectorViewMode.liveFeed
? CameraView(
customPaint: widget.customPaint,
onImage: widget.onImage,
onCameraFeedReady: widget.onCameraFeedReady,
onDetectorViewModeChanged: _onDetectorViewModeChanged,
initialCameraLensDirection: widget.initialCameraLensDirection,
onCameraLensDirectionChanged: widget.onCameraLensDirectionChanged,
)
: GalleryView(
title: widget.title,
text: widget.text,
onImage: widget.onImage,
onDetectorViewModeChanged: _onDetectorViewModeChanged);
}
void _onDetectorViewModeChanged() {
if (_mode == DetectorViewMode.liveFeed) {
_mode = DetectorViewMode.gallery;
} else {
_mode = DetectorViewMode.liveFeed;
}
if (widget.onDetectorViewModeChanged != null) {
widget.onDetectorViewModeChanged!(_mode);
}
setState(() {});
}
}
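// Live camera preview with back, mode-toggle, lens-switch, zoom and exposure
// controls; every frame is converted to an InputImage and handed to onImage.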
class CameraView extends StatefulWidget {
CameraView(
{Key? key,
required this.customPaint,
required this.onImage,
this.onCameraFeedReady,
this.onDetectorViewModeChanged,
this.onCameraLensDirectionChanged,
this.initialCameraLensDirection = CameraLensDirection.back})
: super(key: key);
final CustomPaint? customPaint;
final Function(InputImage inputImage) onImage;
final VoidCallback? onCameraFeedReady;
final VoidCallback? onDetectorViewModeChanged;
final Function(CameraLensDirection direction)? onCameraLensDirectionChanged;
final CameraLensDirection initialCameraLensDirection;
@override
State<CameraView> createState() => _CameraViewState();
}
class _CameraViewState extends State<CameraView> {
static List<CameraDescription> _cameras = [];
CameraController? _controller;
int _cameraIndex = -1;
double _currentZoomLevel = 1.0;
double _minAvailableZoom = 1.0;
double _maxAvailableZoom = 1.0;
double _minAvailableExposureOffset = 0.0;
double _maxAvailableExposureOffset = 0.0;
double _currentExposureOffset = 0.0;
bool _changingCameraLens = false;
late BuildContext buildContext;
@override
void initState() {
super.initState();
_initialize();
}
void _initialize() async {
if (_cameras.isEmpty) {
_cameras = await availableCameras();
}
for (var i = 0; i < _cameras.length; i++) {
if (_cameras[i].lensDirection == widget.initialCameraLensDirection) {
_cameraIndex = i;
break;
}
}
if (_cameraIndex != -1) {
_startLiveFeed();
}
}
@override
void dispose() {
_stopLiveFeed();
super.dispose();
}
@override
Widget build(BuildContext context) {
buildContext = context;
return Scaffold(body: _liveFeedBody());
}
Widget _liveFeedBody() {
if (_cameras.isEmpty) return Container();
if (_controller == null) return Container();
if (_controller?.value.isInitialized == false) return Container();
return ColoredBox(
color: Colors.black,
child: Stack(
fit: StackFit.expand,
children: <Widget>[
Center(
child: _changingCameraLens
? Center(
child: const Text('Changing camera lens'),
)
: CameraPreview(
_controller!,
child: widget.customPaint,
),
),
_backButton(),
_switchLiveCameraToggle(),
_detectionViewModeToggle(),
_zoomControl(),
_exposureControl(),
],
),
);
}
Widget _backButton() => Positioned(
top: 40,
left: 8,
child: SizedBox(
height: 50.0,
width: 50.0,
child: FloatingActionButton(
heroTag: Object(),
onPressed: () => Navigator.of(buildContext).pop(),
backgroundColor: Colors.black54,
child: Icon(
Icons.arrow_back_ios_outlined,
size: 20,
),
),
),
);
Widget _detectionViewModeToggle() => Positioned(
bottom: 8,
left: 8,
child: SizedBox(
height: 50.0,
width: 50.0,
child: FloatingActionButton(
heroTag: Object(),
onPressed: widget.onDetectorViewModeChanged,
backgroundColor: Colors.black54,
child: Icon(
Icons.photo_library_outlined,
size: 25,
),
),
),
);
Widget _switchLiveCameraToggle() => Positioned(
bottom: 8,
right: 8,
child: SizedBox(
height: 50.0,
width: 50.0,
child: FloatingActionButton(
heroTag: Object(),
onPressed: _switchLiveCamera,
backgroundColor: Colors.black54,
child: Icon(
Platform.isIOS
? Icons.flip_camera_ios_outlined
: Icons.flip_camera_android_outlined,
size: 25,
),
),
),
);
Widget _zoomControl() => Positioned(
bottom: 16,
left: 0,
right: 0,
child: Align(
alignment: Alignment.bottomCenter,
child: SizedBox(
width: 250,
child: Row(
mainAxisAlignment: MainAxisAlignment.center,
crossAxisAlignment: CrossAxisAlignment.center,
children: [
Expanded(
child: Slider(
value: _currentZoomLevel,
min: _minAvailableZoom,
max: _maxAvailableZoom,
activeColor: Colors.white,
inactiveColor: Colors.white30,
onChanged: (value) async {
setState(() {
_currentZoomLevel = value;
});
await _controller?.setZoomLevel(value);
},
),
),
Container(
width: 50,
decoration: BoxDecoration(
color: Colors.black54,
borderRadius: BorderRadius.circular(10.0),
),
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Center(
child: Text(
'${_currentZoomLevel.toStringAsFixed(1)}x',
style: TextStyle(color: Colors.white),
),
),
),
),
],
),
),
),
);
Widget _exposureControl() => Positioned(
top: 40,
right: 8,
child: ConstrainedBox(
constraints: BoxConstraints(
maxHeight: 250,
),
child: Column(children: [
Container(
width: 55,
decoration: BoxDecoration(
color: Colors.black54,
borderRadius: BorderRadius.circular(10.0),
),
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Center(
child: Text(
'${_currentExposureOffset.toStringAsFixed(1)}x',
style: TextStyle(color: Colors.white),
),
),
),
),
Expanded(
child: RotatedBox(
quarterTurns: 3,
child: SizedBox(
height: 30,
child: Slider(
value: _currentExposureOffset,
min: _minAvailableExposureOffset,
max: _maxAvailableExposureOffset,
activeColor: Colors.white,
inactiveColor: Colors.white30,
onChanged: (value) async {
setState(() {
_currentExposureOffset = value;
});
await _controller?.setExposureOffset(value);
},
),
),
),
)
]),
),
);
Future _startLiveFeed() async {
final camera = _cameras[_cameraIndex];
_controller = CameraController(
camera,
// Use ResolutionPreset.high. Do NOT use ResolutionPreset.max, because it does not work on some phones.
ResolutionPreset.high,
enableAudio: false,
imageFormatGroup: Platform.isAndroid
? ImageFormatGroup.nv21
: ImageFormatGroup.bgra8888,
);
_controller?.initialize().then((_) {
if (!mounted) {
return;
}
_controller?.getMinZoomLevel().then((value) {
_currentZoomLevel = value;
_minAvailableZoom = value;
});
_controller?.getMaxZoomLevel().then((value) {
_maxAvailableZoom = value;
});
_currentExposureOffset = 0.0;
_controller?.getMinExposureOffset().then((value) {
_minAvailableExposureOffset = value;
});
_controller?.getMaxExposureOffset().then((value) {
_maxAvailableExposureOffset = value;
});
_controller?.startImageStream(_processCameraImage).then((value) {
if (widget.onCameraFeedReady != null) {
widget.onCameraFeedReady!();
}
if (widget.onCameraLensDirectionChanged != null) {
widget.onCameraLensDirectionChanged!(camera.lensDirection);
}
});
setState(() {});
});
}
Future _stopLiveFeed() async {
await _controller?.stopImageStream();
await _controller?.dispose();
_controller = null;
}
Future _switchLiveCamera() async {
setState(() => _changingCameraLens = true);
_cameraIndex = (_cameraIndex + 1) % _cameras.length;
await _stopLiveFeed();
await _startLiveFeed();
setState(() => _changingCameraLens = false);
}
void _processCameraImage(CameraImage image) {
final inputImage = _inputImageFromCameraImage(image);
if (inputImage == null) return;
widget.onImage(inputImage);
}
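// Device orientation expressed in degrees, used below to compute the Android
// rotation compensation.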
final _orientations = {
DeviceOrientation.portraitUp: 0,
DeviceOrientation.landscapeLeft: 90,
DeviceOrientation.portraitDown: 180,
DeviceOrientation.landscapeRight: 270,
};
InputImage? _inputImageFromCameraImage(CameraImage image) {
if (_controller == null) return null;
// get image rotation
// on Android, it is used to convert the InputImage from Dart to Java: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/android/src/main/java/com/google_mlkit_commons/InputImageConverter.java
// on iOS, `rotation` is not used when converting the InputImage from Dart to Obj-C: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/google_mlkit_commons/ios/Classes/MLKVisionImage%2BFlutterPlugin.m
// on both platforms, `rotation` and `camera.lensDirection` can be used to compensate the `x` and `y` coordinates on a canvas: https://github.com/flutter-ml/google_ml_kit_flutter/blob/master/packages/example/lib/vision_detector_views/painters/coordinates_translator.dart
final camera = _cameras[_cameraIndex];
final sensorOrientation = camera.sensorOrientation;
// print(
// 'lensDirection: ${camera.lensDirection}, sensorOrientation: $sensorOrientation, ${_controller?.value.deviceOrientation} ${_controller?.value.lockedCaptureOrientation} ${_controller?.value.isCaptureOrientationLocked}');
InputImageRotation? rotation;
if (Platform.isIOS) {
rotation = InputImageRotationValue.fromRawValue(sensorOrientation);
} else if (Platform.isAndroid) {
var rotationCompensation =
_orientations[_controller!.value.deviceOrientation];
if (rotationCompensation == null) return null;
if (camera.lensDirection == CameraLensDirection.front) {
// front-facing
rotationCompensation = (sensorOrientation + rotationCompensation) % 360;
} else {
// back-facing
rotationCompensation =
(sensorOrientation - rotationCompensation + 360) % 360;
}
rotation = InputImageRotationValue.fromRawValue(rotationCompensation);
// print('rotationCompensation: $rotationCompensation');
}
if (rotation == null) return null;
// print('final rotation: $rotation');
// get image format
final format = InputImageFormatValue.fromRawValue(image.format.raw);
// validate format depending on platform
// only supported formats:
// * nv21 for Android
// * bgra8888 for iOS
if (format == null ||
(Platform.isAndroid && format != InputImageFormat.nv21) ||
(Platform.isIOS && format != InputImageFormat.bgra8888)) return null;
// since the format is constrained to nv21 or bgra8888, both of which have a single plane
if (image.planes.length != 1) return null;
final plane = image.planes.first;
// compose InputImage using bytes
return InputImage.fromBytes(
bytes: plane.bytes,
metadata: InputImageMetadata(
size: Size(image.width.toDouble(), image.height.toDouble()),
rotation: rotation, // used only in Android
format: format, // used only in iOS
bytesPerRow: plane.bytesPerRow, // used only in iOS
),
);
}
}
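// Gallery mode: lets the user pick an image from bundled assets, the photo
// gallery, or the camera, then runs detection on the resulting file.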
class GalleryView extends StatefulWidget {
GalleryView(
{Key? key,
required this.title,
this.text,
required this.onImage,
required this.onDetectorViewModeChanged})
: super(key: key);
final String title;
final String? text;
final Function(InputImage inputImage) onImage;
final Function()? onDetectorViewModeChanged;
@override
State<GalleryView> createState() => _GalleryViewState();
}
class _GalleryViewState extends State<GalleryView> {
File? _image;
String? _path;
ImagePicker? _imagePicker;
late BuildContext buildContext;
@override
void initState() {
super.initState();
_imagePicker = ImagePicker();
}
@override
Widget build(BuildContext context) {
buildContext = this.context;
return Scaffold(
appBar: AppBar(
title: Text(widget.title),
actions: [
Padding(
padding: EdgeInsets.only(right: 20.0),
child: GestureDetector(
onTap: widget.onDetectorViewModeChanged,
child: Icon(
Platform.isIOS ? Icons.camera_alt_outlined : Icons.camera,
),
),
),
],
),
body: _galleryBody());
}
Widget _galleryBody() {
return ListView(shrinkWrap: true, children: [
_image != null
? SizedBox(
height: 400,
width: 400,
child: Stack(
fit: StackFit.expand,
children: <Widget>[
Image.file(_image!),
],
),
)
: Icon(
Icons.image,
size: 200,
),
Padding(
padding: EdgeInsets.symmetric(horizontal: 16),
child: ElevatedButton(
onPressed: _getImageAsset,
child: Text('From Assets'),
),
),
Padding(
padding: EdgeInsets.symmetric(horizontal: 16),
child: ElevatedButton(
child: Text('From Gallery'),
onPressed: () => _getImage(ImageSource.gallery),
),
),
Padding(
padding: EdgeInsets.symmetric(horizontal: 16),
child: ElevatedButton(
child: Text('Take a picture'),
onPressed: () => _getImage(ImageSource.camera),
),
),
if (_image != null)
Padding(
padding: const EdgeInsets.all(16.0),
child: Text(
'${_path == null ? '' : 'Image path: $_path'}\n\n${widget.text ?? ''}'),
),
]);
}
Future _getImage(ImageSource source) async {
setState(() {
_image = null;
_path = null;
});
final pickedFile = await _imagePicker?.pickImage(source: source);
if (pickedFile != null) {
_processFile(pickedFile.path);
}
}
Future _getImageAsset() async {
final manifestContent = await rootBundle.loadString('AssetManifest.json');
final Map<String, dynamic> manifestMap = json.decode(manifestContent);
final assets = manifestMap.keys
.where((String key) => key.contains('images/'))
.where((String key) =>
key.contains('.jpg') ||
key.contains('.jpeg') ||
key.contains('.png') ||
key.contains('.webp'))
.toList();
showDialog(
context: buildContext,
builder: (BuildContext context) {
return Dialog(
shape: RoundedRectangleBorder(
borderRadius: BorderRadius.circular(30.0)),
child: Padding(
padding: const EdgeInsets.all(16.0),
child: Column(
mainAxisAlignment: MainAxisAlignment.spaceBetween,
children: [
Text(
'Select image',
style: TextStyle(fontSize: 20),
),
ConstrainedBox(
constraints: BoxConstraints(
maxHeight: MediaQuery.of(context).size.height * 0.7),
child: SingleChildScrollView(
child: Column(
children: [
for (final path in assets)
GestureDetector(
onTap: () async {
Navigator.of(context).pop();
_processFile(await getAssetPath(path));
},
child: Padding(
padding: const EdgeInsets.all(8.0),
child: Image.asset(path),
),
),
],
),
),
),
ElevatedButton(
onPressed: () => Navigator.of(context).pop(),
child: Text('Cancel')),
],
),
),
);
});
}
Future _processFile(String path) async {
setState(() {
_image = File(path);
});
_path = path;
final inputImage = InputImage.fromFilePath(path);
widget.onImage(inputImage);
}
}
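// Copies a bundled asset into the application support directory (if it is not
// already there) so it can be opened through a plain file path.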
Future<String> getAssetPath(String asset) async {
final path = await getLocalPath(asset);
await Directory(dirname(path)).create(recursive: true);
final file = File(path);
if (!await file.exists()) {
final byteData = await rootBundle.load(asset);
await file.writeAsBytes(byteData.buffer
.asUint8List(byteData.offsetInBytes, byteData.lengthInBytes));
}
return file.path;
}
Future<String> getLocalPath(String path) async {
return '${(await getApplicationSupportDirectory()).path}/$path';
}

View File

@@ -113,6 +113,46 @@ packages:
url: "https://pub.dev"
source: hosted
version: "1.2.0"
camera:
dependency: "direct main"
description:
name: camera
sha256: "26ff41045772153f222ffffecba711a206f670f5834d40ebf5eed3811692f167"
url: "https://pub.dev"
source: hosted
version: "0.11.0+2"
camera_android_camerax:
dependency: transitive
description:
name: camera_android_camerax
sha256: "7cd93578ad201dcc6bb5810451fb00d76a86bab9b68dceb68b8cbd7038ac5846"
url: "https://pub.dev"
source: hosted
version: "0.6.8+3"
camera_avfoundation:
dependency: transitive
description:
name: camera_avfoundation
sha256: "7c28969a975a7eb2349bc2cb2dfe3ad218a33dba9968ecfb181ce08c87486655"
url: "https://pub.dev"
source: hosted
version: "0.9.17+3"
camera_platform_interface:
dependency: transitive
description:
name: camera_platform_interface
sha256: b3ede1f171532e0d83111fe0980b46d17f1aa9788a07a2fbed07366bbdbb9061
url: "https://pub.dev"
source: hosted
version: "2.8.0"
camera_web:
dependency: transitive
description:
name: camera_web
sha256: "595f28c89d1fb62d77c73c633193755b781c6d2e0ebcd8dc25b763b514e6ba8f"
url: "https://pub.dev"
source: hosted
version: "0.3.5"
characters:
dependency: transitive
description:
@@ -701,6 +741,22 @@ packages:
url: "https://pub.dev"
source: hosted
version: "6.2.1"
google_mlkit_commons:
dependency: transitive
description:
name: google_mlkit_commons
sha256: "9990a65f407a3ef6bae646bf10143faa93fec126683771465bc6c0b43fb0e6e9"
url: "https://pub.dev"
source: hosted
version: "0.8.1"
google_mlkit_face_detection:
dependency: "direct main"
description:
name: google_mlkit_face_detection
sha256: "0aeab4f39204f7a235ed4cccedfe7e61401b43f4ef139a868c01fa29fdc225ab"
url: "https://pub.dev"
source: hosted
version: "0.11.1"
html:
dependency: transitive
description:
@@ -1346,6 +1402,14 @@ packages:
url: "https://pub.dev"
source: hosted
version: "2.1.2"
stream_transform:
dependency: transitive
description:
name: stream_transform
sha256: "14a00e794c7c11aa145a170587321aedce29769c08d7f58b1d141da75e3b1c6f"
url: "https://pub.dev"
source: hosted
version: "2.1.0"
string_scanner:
dependency: transitive
description:
@@ -1627,5 +1691,5 @@ packages:
source: hosted
version: "3.1.2"
sdks:
dart: ">=3.4.0 <4.0.0"
flutter: ">=3.22.0"
dart: ">=3.5.0 <4.0.0"
flutter: ">=3.24.0"

View File

@@ -58,11 +58,13 @@ dependencies:
image_picker_platform_interface: 2.10.0
local_auth: ^2.2.0
intl: ^0.19.0
camera: ^0.11.0+2
json_path: 0.7.2
mime_type: 1.0.0
page_transition: 2.1.0
path_provider: 2.1.3
path_provider_android: 2.2.5
google_mlkit_face_detection: ^0.11.1
path_provider_foundation: 2.4.0
path_provider_platform_interface: 2.1.2
percent_indicator: 4.2.2