I have the code for an app that detects faces from the live camera feed and it works perfectly fine. I want to implement a method that crops the face when it is detected and save it on my pc. I have written the code but I cannot seem to find the problem. For testing, if it saves it I have a print line. When I start the app for debugging, that message which is supposed to show in the terminal, is not shown. I do not know where the problem is, when cropping or when saving. This being my first app, I have little knowledge when it comes to this.
**THIS IS MY FACE DETECTION**
import 'dart:typed_data';
import 'dart:ui' as ui;
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'detector_view.dart';
import 'painters/face_detector_painter.dart';
import 'dart:io';
import 'package:path/path.dart' as path;
class FaceDetectorView extends StatefulWidget {
@override
State<FaceDetectorView> createState() => _FaceDetectorViewState();
}
class _FaceDetectorViewState extends State<FaceDetectorView> {
final FaceDetector _faceDetector = FaceDetector(
options: FaceDetectorOptions(
enableContours: true,
enableLandmarks: true,
),
);
bool _canProcess = true;
bool _isBusy = false;
CustomPaint? _customPaint;
String? _text;
var _cameraLensDirection = CameraLensDirection.front;
late final FaceDetectorPainter _painter;
@override
void initState() {
super.initState();
_painter = FaceDetectorPainter(
[],
Size.zero,
InputImageRotation.rotation0deg,
CameraLensDirection.front,
);
}
@override
void dispose() {
_canProcess = false;
_faceDetector.close();
super.dispose();
}
@override
Widget build(BuildContext context) {
return DetectorView(
title: 'Face Detector',
customPaint: _customPaint,
text: _text,
onImage: _processImage,
initialCameraLensDirection: _cameraLensDirection,
onCameraLensDirectionChanged: (value) => _cameraLensDirection = value,
);
}
// Method to crop the detected face
Future<Uint8List?> cropFaceImage(Face face, ui.Image originalImage) async {
final left = face.boundingBox.left.toInt();
final top = face.boundingBox.top.toInt();
final width = face.boundingBox.width.toInt();
final height = face.boundingBox.height.toInt();
final outputWidth = 200; // Adjust as needed
final outputHeight = 200; // Adjust as needed
final recorder = ui.PictureRecorder();
final canvas = Canvas(recorder);
final src = Rect.fromLTWH(left.toDouble(), top.toDouble(), width.toDouble(), height.toDouble());
final dst = Rect.fromLTWH(0, 0, outputWidth.toDouble(), outputHeight.toDouble());
canvas.drawImageRect(originalImage, src, dst, Paint());
final picture = recorder.endRecording();
final img = await picture.toImage(outputWidth, outputHeight);
final byteData = await img.toByteData(format: ui.ImageByteFormat.png);
return byteData?.buffer.asUint8List();
}
Future<void> _processImage(InputImage inputImage) async {
if (!_canProcess) return;
if (_isBusy) return;
_isBusy = true;
setState(() {
_text = '';
});
final faces = await _faceDetector.processImage(inputImage);
if (inputImage.metadata?.size != null &&
inputImage.metadata?.rotation != null) {
final painter = FaceDetectorPainter(
faces,
inputImage.metadata!.size,
inputImage.metadata!.rotation,
_cameraLensDirection,
);
_customPaint = CustomPaint(painter: painter);
} else {
String text = 'Faces found: ${faces.length}\n\n';
for (final face in faces) {
text += 'face: ${face.boundingBox}\n\n';
final croppedFace = await _painter.cropFaceImage(face, inputImage);
_saveImageLocally(croppedFace);
}
_text = text;
// TODO: set _customPaint to draw boundingRect on top of image
_customPaint = null;
}
_isBusy = false;
if (mounted) {
setState(() {});
}
}
Future<void> _saveImageLocally(Uint8List? imageBytes) async {
if (imageBytes == null) {
print('Image bytes are null. Cannot save.');
return;
}
try {
// Get the user's home directory
final String homeDir = path.join(Directory.systemTemp.path, 'CroppedImages');
final Directory userDirectory = Directory(homeDir);
// Create the directory if it doesn't exist
if (!await userDirectory.exists()) {
await userDirectory.create(recursive: true);
}
// Generate a unique file name
final String fileName = '${DateTime.now().millisecondsSinceEpoch}.png';
// Write the image bytes to a file
final File file = File(path.join(userDirectory.path, fileName));
await file.writeAsBytes(imageBytes);
print('Image saved successfully: ${file.path}');
} catch (e) {
print('Error saving image: $e');
}
}
}
**THIS IS MY FACE PAINTER**
// import 'dart:math';
import 'package:camera/camera.dart';
import 'package:flutter/material.dart';
import 'package:google_mlkit_face_detection/google_mlkit_face_detection.dart';
import 'coordinates_translator.dart';
import 'dart:typed_data';
import 'dart:ui' as ui;
class FaceDetectorPainter extends CustomPainter {
FaceDetectorPainter(
this.faces,
this.imageSize,
this.rotation,
this.cameraLensDirection,
);
final List<Face> faces;
final Size imageSize;
final InputImageRotation rotation;
final CameraLensDirection cameraLensDirection;
Future<Uint8List?> cropFaceImage(Face face, InputImage inputImage) async {
// Get the image byte data
final Uint8List? bytes = inputImage.bytes;
if (bytes == null) return null;
// Construct a Codec from the byte data
final ui.Codec codec = await ui.instantiateImageCodec(bytes);
// Get the next frame from the Codec
final ui.FrameInfo frameInfo = await codec.getNextFrame();
// Get the image from the frame
final ui.Image image = frameInfo.image;
// Calculate stride
final int stride = image.width;
// Convert face bounding box to integers
final int x = face.boundingBox.left.toInt();
final int y = face.boundingBox.top.toInt();
final int width = face.boundingBox.width.toInt();
final int height = face.boundingBox.height.toInt();
// Calculate cropped image width and height
final int croppedImageWidth = width ~/ 2;
final int croppedImageHeight = height ~/ 2;
// Create a Uint8List to hold the cropped image data
final Uint8List croppedImageData =
Uint8List(croppedImageWidth * croppedImageHeight * 4);
// Extract pixel data from the image and copy it to the cropped image data
final ByteData? byteData =
await image.toByteData(format: ui.ImageByteFormat.rawRgba);
final Uint8List? imageBytes = byteData?.buffer.asUint8List();
if (imageBytes != null) {
for (int j = 0; j < croppedImageHeight; j++) {
for (int i = 0; i < croppedImageWidth; i++) {
final int srcOffset = ((y + j) * stride + x + i) * 4;
final int destOffset = (j * croppedImageWidth + i) * 4;
// Copy pixel data
croppedImageData[destOffset + 0] = imageBytes[srcOffset + 0]; // Red
croppedImageData[destOffset + 1] = imageBytes[srcOffset + 1]; // Green
croppedImageData[destOffset + 2] = imageBytes[srcOffset + 2]; // Blue
croppedImageData[destOffset + 3] = imageBytes[srcOffset + 3]; // Alpha
}
}
}
return croppedImageData;
}
@override
void paint(Canvas canvas, Size size) {
final Paint paint1 = Paint()
..style = PaintingStyle.stroke
..strokeWidth = 2.0
..color = Colors.black;
for (final Face face in faces) {
final left = translateX(
face.boundingBox.left,
size,
imageSize,
rotation,
cameraLensDirection,
);
final top = translateY(
face.boundingBox.top,
size,
imageSize,
rotation,
cameraLensDirection,
);
final right = translateX(
face.boundingBox.right,
size,
imageSize,
rotation,
cameraLensDirection,
);
final bottom = translateY(
face.boundingBox.bottom,
size,
imageSize,
rotation,
cameraLensDirection,
);
canvas.drawRect(
Rect.fromLTRB(left, top, right, bottom),
paint1,
);
}
}
@override
bool shouldRepaint(FaceDetectorPainter oldDelegate) {
return oldDelegate.imageSize != imageSize || oldDelegate.faces != faces;
}
}