Skip to content

Commit

Permalink
Add getCalibrationData, getFaceIdSensorData & getFaceIdSensorDataStream (#1)
Browse files Browse the repository at this point in the history
  • Loading branch information
JulianHartl authored Jan 11, 2023
1 parent a9a8046 commit 9a4f8ad
Show file tree
Hide file tree
Showing 23 changed files with 1,277 additions and 25 deletions.
18 changes: 13 additions & 5 deletions example/lib/main.dart
Original file line number Diff line number Diff line change
Expand Up @@ -31,9 +31,7 @@ class _MyAppState extends State<MyApp> {

// Subscribes to the native camera image stream and keeps the subscription
// so it can be cancelled later.
// NOTE(review): this span appears to contain BOTH the pre- and post-change
// diff lines — two `stream.listen(...)` assignments for the same
// subscription. Only one of them exists in the actual file; confirm against
// the repository before treating this as runnable code.
void setUpStream() async {
final stream = await _controller.startImageStream();
_cameraImageStream = stream.listen((image) {
// print(image.width);
});
_cameraImageStream = stream.listen((image) {});
}

@override
Expand Down Expand Up @@ -73,8 +71,18 @@ class _MyAppState extends State<MyApp> {
},
),
IconButton(
icon: const Icon(Icons.flip_camera_ios),
onPressed: () async {},
icon: const Icon(Icons.compass_calibration),
onPressed: () async {
final result = await _controller.getCalibrationData();
print(result);
},
),
IconButton(
icon: const Icon(Icons.face),
onPressed: () async {
final result = await _controller.getFaceIdSensorData();
print(result);
},
),
],
),
Expand Down
4 changes: 2 additions & 2 deletions example/pubspec.lock
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ packages:
source: hosted
version: "2.2.0"
image:
dependency: "direct main"
dependency: transitive
description:
name: image
url: "https://pub.dartlang.org"
Expand All @@ -129,7 +129,7 @@ packages:
path: "."
ref: HEAD
resolved-ref: dd66d459fe40e54047e1d20c9c1fc8d10f7a8641
url: "https://github.com/JulianHartl/isolate_handler"
url: "https://github.com/SelectCode/isolate_handler.git"
source: git
version: "1.0.0"
js:
Expand Down
1 change: 0 additions & 1 deletion example/pubspec.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,6 @@ dependencies:
# The following adds the Cupertino Icons font to your application.
# Use with the CupertinoIcons class for iOS style icons.
cupertino_icons: ^1.0.2
image: ^3.2.2

dev_dependencies:
flutter_test:
Expand Down
69 changes: 68 additions & 1 deletion ios/Classes/NativeCameraView.swift
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,6 @@ class FLNativeView: NSObject, FlutterPlatformView {
self.eventChannel.setStreamHandler(self.imageStreamHandler)
self.methodChannel.setMethodCallHandler({
(call, result) in
print("Incoming call from flutter: \(call.method)")
switch (call.method) {
case "startImageStream":
self.startImageStream()
Expand All @@ -92,6 +91,16 @@ class FLNativeView: NSObject, FlutterPlatformView {
data in
result(data)
})
case "get_face_id_sensor_data":
self.faceIdSensorDataSnapshot({
data in
result(data)
})
case "get_calibration_data":
self.calibrationDataSnapshot({
data in
result(data)
})
case "dispose":
self.dispose {
result(nil);
Expand Down Expand Up @@ -142,6 +151,64 @@ class FLNativeView: NSObject, FlutterPlatformView {
]
}

/// Fetches the current camera calibration data from the scanner controller
/// and encodes it as a dictionary for transport over the Flutter method channel.
///
/// - Parameter onData: Invoked once with the encoded calibration payload.
///   Not invoked when no scanner controller is attached.
func calibrationDataSnapshot(_ onData: @escaping ((Dictionary<String, Any?>) -> Void)) {
    scannerController?.getCalibrationData { calibrationData in
        let pixelSize = NSNumber(value: calibrationData.pixelSize)

        // 3 columns of the 3x3 intrinsic matrix, encoded as {x, y, z} maps.
        let iColumns = calibrationData.intrinsicMatrix.columns
        let intrinsicMatrix = [
            self.parseMatrixCol(iColumns.0),
            self.parseMatrixCol(iColumns.1),
            self.parseMatrixCol(iColumns.2),
        ]

        // 4 columns of the extrinsic matrix, encoded the same way.
        let eColumns = calibrationData.extrinsicMatrix.columns
        let extrinsicMatrix: [Dictionary<String, NSNumber>] = [
            self.parseMatrixCol(eColumns.0),
            self.parseMatrixCol(eColumns.1),
            self.parseMatrixCol(eColumns.2),
            self.parseMatrixCol(eColumns.3),
        ]

        let referenceDimensions = calibrationData.intrinsicMatrixReferenceDimensions
        let intrinsicMatrixReferenceDimensions: Dictionary<String, NSNumber> = [
            "width": NSNumber(value: referenceDimensions.width),
            "height": NSNumber(value: referenceDimensions.height),
        ]

        let distortionCenter = calibrationData.lensDistortionCenter
        let lensDistortionCenter: Dictionary<String, NSNumber> = [
            "x": NSNumber(value: distortionCenter.x),
            "y": NSNumber(value: distortionCenter.y),
        ]

        // `lensDistortionLookupTable` is `Data?` and may be nil for lenses
        // without distortion correction. The previous force-unwrap (plus a
        // pointless identity `map`) crashed in that case; fall back to an
        // empty table instead.
        let lookupTableBytes = calibrationData.lensDistortionLookupTable ?? Data()
        let lensDistortionLookupTable = FlutterStandardTypedData(bytes: lookupTableBytes)

        onData([
            "intrinsicMatrix": intrinsicMatrix,
            "intrinsicMatrixReferenceDimensions": intrinsicMatrixReferenceDimensions,
            "extrinsicMatrix": extrinsicMatrix,
            "pixelSize": pixelSize,
            "lensDistortionLookupTable": lensDistortionLookupTable,
            "lensDistortionCenter": lensDistortionCenter
        ]);
    }
}

/// Encodes one column of a simd matrix as an `{x, y, z}` dictionary of
/// `NSNumber`s for transport over the Flutter method channel.
func parseMatrixCol(_ column: simd_float3) -> Dictionary<String, NSNumber> {
    let components: [(String, Float)] = [("x", column.x), ("y", column.y), ("z", column.z)]
    var encoded = Dictionary<String, NSNumber>()
    for (axis, value) in components {
        encoded[axis] = NSNumber(value: value)
    }
    return encoded
}

/// Takes a one-shot snapshot of the Face ID sensor data and delivers it
/// encoded for the Flutter method channel.
///
/// - Parameter onData: Invoked once with the encoded sensor data.
///   Not invoked when no scanner controller is attached.
func faceIdSensorDataSnapshot(_ onData: @escaping (Dictionary<String, Any?>) -> Void) -> Void {
    // Optional chaining instead of the previous `scannerController!`:
    // matches the sibling `calibrationDataSnapshot` and avoids a crash when
    // the controller was never created or has been disposed.
    scannerController?.getFaceIdSensorDataSnapshot { data in
        let encoded = self.encodeFaceIdSensorData(data)
        onData(encoded)
    }
}

func snapshot(_ onData: @escaping (Dictionary<String, Any?>) -> Void) -> Void {
if (disposed) {
return;
Expand Down
30 changes: 25 additions & 5 deletions ios/Classes/ScannerController.swift
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,16 @@ class ScannerController: NSObject, AVCaptureDataOutputSynchronizerDelegate, AVCa

private var previewLayer: AVCaptureVideoPreviewLayer?
private var snapshotCallback: ((FaceIdData, NativeCameraImage) -> Void)?
private var calibrationCallback: ((AVCameraCalibrationData) -> Void)?
private var faceIdSensorDataCallback: ((FaceIdData) -> Void)?

/// Registers a one-shot callback that receives the [AVCameraCalibrationData]
/// of the next processed depth frame; the delegate clears the callback after
/// invoking it, so each registration yields exactly one delivery.
func getCalibrationData(_ callback: @escaping (AVCameraCalibrationData) -> Void) {
    calibrationCallback = callback
}

/// Registers a one-shot callback that receives the [FaceIdData] computed from
/// the next processed frame; the delegate clears the callback after invoking
/// it, so each registration yields exactly one delivery.
func getFaceIdSensorDataSnapshot(_ callback: @escaping (FaceIdData) -> Void) {
    faceIdSensorDataCallback = callback
}

/// Returns [FaceIdData] and a [NativeCameraImage] for the next processed frame.
func getSnapshot(_ callback: @escaping (FaceIdData, NativeCameraImage) -> Void) {
Expand Down Expand Up @@ -200,14 +209,20 @@ class ScannerController: NSObject, AVCaptureDataOutputSynchronizerDelegate, AVCa

if (self.bytesCallback != nil) {
self.bytesCallback!(self.getNativeCameraImage(sampleBuffer: sampleBuffer))
self.bytesCallback = nil;
self.bytesCallback = nil

}

if (self.calibrationCallback != nil) {
self.calibrationCallback!(depthData.cameraCalibrationData!)
self.calibrationCallback = nil
}

if (self.streaming) {
self.streamingCallback!(self.getNativeCameraImage(sampleBuffer: sampleBuffer))
}

if (self.snapshotCallback != nil) {
if (self.snapshotCallback != nil || self.faceIdSensorDataCallback != nil) {
self.pointCloudQueue.async {
var cgImage: CGImage?
VTCreateCGImageFromCVPixelBuffer(videoPixelBuffer, options: nil, imageOut: &cgImage)
Expand Down Expand Up @@ -259,12 +274,17 @@ class ScannerController: NSObject, AVCaptureDataOutputSynchronizerDelegate, AVCa
return
}

if let callback = self.snapshotCallback {
let faceIdData = convertRGBDtoXYZ(colorImage: cgColorImage, depthValues: depthValues, depthWidth: depthWidth, cameraCalibrationData: cameraCalibrationData)
callback(faceIdData, decodeNativeCameraImage(getNativeCameraImage(sampleBuffer: sampleBuffer)))

let faceIdData = convertRGBDtoXYZ(colorImage: cgColorImage, depthValues: depthValues, depthWidth: depthWidth, cameraCalibrationData: cameraCalibrationData)
if (self.faceIdSensorDataCallback != nil) {
self.faceIdSensorDataCallback!(faceIdData)
self.faceIdSensorDataCallback = nil
}
if (self.snapshotCallback != nil) {
self.snapshotCallback!(faceIdData, decodeNativeCameraImage(getNativeCameraImage(sampleBuffer: sampleBuffer)))
self.snapshotCallback = nil
}

}

/// Returns a decoded [NativeCameraImage] from a Map.
Expand Down
12 changes: 12 additions & 0 deletions lib/src/controller/camera_controller.dart
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import 'package:flutter/material.dart';

import '../models/calibration_data/calibration_data.dart';
import '../models/models.dart';

abstract class CameraController {
Expand Down Expand Up @@ -36,4 +37,15 @@ abstract class CameraController {

/// Determines which lens the camera uses.
LensDirection get lensDirection;

/// Gets the calibration data of the current camera.
Future<CvCameraCalibrationData> getCalibrationData();

/// Returns a snapshot of the current [FaceIdSensorData].
Future<FaceIdSensorData> getFaceIdSensorData();

/// Returns a stream of snapshots of the current [FaceIdSensorData].
///
/// `interval` specifies the interval in milliseconds between two snapshots.
Stream<FaceIdSensorData> getFaceIdSensorDataStream(int interval);
}
34 changes: 24 additions & 10 deletions lib/src/controller/camera_controller_impl.dart
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@ import 'dart:io';

import 'package:clock/clock.dart';
import 'package:cv_camera/src/controller/camera_controller.dart';
import 'package:cv_camera/src/models/calibration_data/calibration_data.dart';
import 'package:cv_camera/src/utils/image_builder.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
Expand All @@ -11,16 +12,6 @@ import 'package:path_provider/path_provider.dart';

import '../models/models.dart';

/// Bundles the arguments handed to the take-picture handler: the stream
/// controller that will receive the [TakePictureResult] and the raw payload
/// map it is built from.
class _OnTakePictureArgs {
  const _OnTakePictureArgs({
    required this.controller,
    required this.data,
  });

  /// Controller that will receive the decoded [TakePictureResult].
  final StreamController<TakePictureResult> controller;

  /// Raw key/value payload the result is decoded from.
  final Map<String, dynamic> data;
}

class CameraControllerImpl implements CameraController {
@visibleForTesting
late final MethodChannel methodChannel;
Expand Down Expand Up @@ -165,6 +156,29 @@ class CameraControllerImpl implements CameraController {
await stopImageStream();
methodChannel.invokeMethod('dispose');
}

/// Fetches the calibration data of the current camera from the platform side
/// and decodes it into a [CvCameraCalibrationData].
@override
Future<CvCameraCalibrationData> getCalibrationData() async {
  final dynamic raw = await methodChannel.invokeMethod("get_calibration_data");
  final json = Map<String, dynamic>.from(raw);
  return CvCameraCalibrationData.fromJson(json);
}

/// Fetches a snapshot of the current Face ID sensor data from the platform
/// side and decodes it into a [FaceIdSensorData].
@override
Future<FaceIdSensorData> getFaceIdSensorData() async {
  final dynamic raw = await methodChannel.invokeMethod("get_face_id_sensor_data");
  final json = Map<String, dynamic>.from(raw);
  return FaceIdSensorData.fromJson(json);
}

/// Returns a stream that emits a fresh [FaceIdSensorData] snapshot roughly
/// every [interval] milliseconds.
///
/// The previous implementation created the `async` future inside
/// `Stream.periodic`'s computation, which kicked off a platform-channel call
/// on every tick even while the previous event was still being awaited.
/// Mapping the bare periodic stream through [Stream.asyncMap] defers each
/// call until the pipeline is ready for the next event.
@override
Stream<FaceIdSensorData> getFaceIdSensorDataStream(int interval) {
  return Stream.periodic(Duration(milliseconds: interval))
      .asyncMap((_) => getFaceIdSensorData());
}
}

/// Params that are passed to [CameraControllerImpl._savePictureHandler].
Expand Down
28 changes: 28 additions & 0 deletions lib/src/models/calibration_data/calibration_data.dart
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
import 'dart:typed_data';

import 'package:cv_camera/src/utils/converters/float64_list_converter.dart';
import 'package:freezed_annotation/freezed_annotation.dart';

import 'cg_point.dart';
import 'cg_size.dart';
import 'cg_vector.dart';

part 'calibration_data.freezed.dart';
part 'calibration_data.g.dart';

/// Camera calibration data decoded from the `get_calibration_data`
/// platform-channel payload (see the iOS-side `calibrationDataSnapshot`
/// encoder for the key names).
@freezed
class CvCameraCalibrationData with _$CvCameraCalibrationData {
  /// Decodes a platform-channel payload map into a [CvCameraCalibrationData].
  factory CvCameraCalibrationData.fromJson(Map<String, dynamic> json) =>
      _$CvCameraCalibrationDataFromJson(json);

  // `anyMap: true` — NOTE(review): presumably because the method channel
  // delivers nested values as `Map<dynamic, dynamic>`; confirm against the
  // channel codec before changing.
  // ignore: invalid_annotation_target
  @JsonSerializable(explicitToJson: true, anyMap: true)
  const factory CvCameraCalibrationData({
    required double pixelSize,
    // Three columns of the 3x3 intrinsic matrix (as encoded on the iOS side).
    required List<CGVector> intrinsicMatrix,
    // Four columns of the extrinsic matrix (as encoded on the iOS side).
    required List<CGVector> extrinsicMatrix,
    required CGSize intrinsicMatrixReferenceDimensions,
    // Raw lookup-table bytes, converted to a typed Float64List.
    @Float64ListConverter() required Float64List lensDistortionLookupTable,
    required CGPoint lensDistortionCenter,
  }) = _CvCameraCalibrationData;
}
Loading

0 comments on commit 9a4f8ad

Please sign in to comment.