diff --git a/TheMaskedManual.xcodeproj/project.pbxproj b/TheMaskedManual.xcodeproj/project.pbxproj
index 4064fa4..efb5c6d 100644
--- a/TheMaskedManual.xcodeproj/project.pbxproj
+++ b/TheMaskedManual.xcodeproj/project.pbxproj
@@ -449,7 +449,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 3;
+ CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 5QC8WP9VB5;
INFOPLIST_FILE = TheMaskedManual/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 13.6;
@@ -457,7 +457,7 @@
"$(inherited)",
"@executable_path/Frameworks",
);
- MARKETING_VERSION = 0.1;
+ MARKETING_VERSION = 1.1;
PRODUCT_BUNDLE_IDENTIFIER = oliverkernst.TheMaskedManual;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
@@ -471,7 +471,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
- CURRENT_PROJECT_VERSION = 3;
+ CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = 5QC8WP9VB5;
INFOPLIST_FILE = TheMaskedManual/Info.plist;
IPHONEOS_DEPLOYMENT_TARGET = 13.6;
@@ -479,7 +479,7 @@
"$(inherited)",
"@executable_path/Frameworks",
);
- MARKETING_VERSION = 0.1;
+ MARKETING_VERSION = 1.1;
PRODUCT_BUNDLE_IDENTIFIER = oliverkernst.TheMaskedManual;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_VERSION = 5.0;
diff --git a/TheMaskedManual/Info.plist b/TheMaskedManual/Info.plist
index 9dae5bf..c0b3875 100644
--- a/TheMaskedManual/Info.plist
+++ b/TheMaskedManual/Info.plist
@@ -17,7 +17,7 @@
	<key>CFBundlePackageType</key>
	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
	<key>CFBundleShortVersionString</key>
-	<string>1.0</string>
+	<string>$(MARKETING_VERSION)</string>
	<key>CFBundleVersion</key>
	<string>$(CURRENT_PROJECT_VERSION)</string>
	<key>LSRequiresIPhoneOS</key>
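
Note on the two hunks above: CFBundleShortVersionString and CFBundleVersion are now driven by the MARKETING_VERSION and CURRENT_PROJECT_VERSION build settings. A minimal sketch, assuming only the standard Info.plist keys shown above, of how the substituted values can be read back at runtime; this helper is illustrative and not part of the patch:

```swift
import Foundation

// Illustrative only: reads the values that $(MARKETING_VERSION) and
// $(CURRENT_PROJECT_VERSION) resolve to in the built app's Info.plist.
func appVersionString() -> String {
    let info = Bundle.main.infoDictionary
    let marketing = info?["CFBundleShortVersionString"] as? String ?? "?"  // e.g. "1.1"
    let build = info?["CFBundleVersion"] as? String ?? "?"                 // e.g. "1"
    return "\(marketing) (\(build))"
}
```
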
diff --git a/TheMaskedManual/ViewControllers/CameraViewController.swift b/TheMaskedManual/ViewControllers/CameraViewController.swift
index 3d0a9d2..6d5ad41 100644
--- a/TheMaskedManual/ViewControllers/CameraViewController.swift
+++ b/TheMaskedManual/ViewControllers/CameraViewController.swift
@@ -51,6 +51,8 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
var is_search_in_progress : Bool = false
var session = AVCaptureSession()
+ var deviceInput : AVCaptureInput? = nil
+ var deviceOutput : AVCaptureOutput? = nil
var requests = [VNRequest]()
private var screenshot_mode = false
@@ -73,12 +75,7 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
// Company
let companyNib = UINib.init(nibName: "CompanyTableViewCell", bundle: Bundle.main)
tableView.register(companyNib, forCellReuseIdentifier: "companyTableViewCell")
-
- if !screenshot_mode {
- // Setup video once
- setup_live_video()
- }
-
+
if screenshot_mode {
imageView.image = UIImage(named: "toy_image")
@@ -92,26 +89,43 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
// Background color
tableView.backgroundColor = UIColor(red: 174.0/256.0, green: 174.0/256.0, blue: 178.0/256.0, alpha: 1.0)
+
+ // Setup image once
+ setup_image_once()
}
override func viewDidAppear(_ animated: Bool) {
super.viewDidAppear(animated)
+ start_up()
+ }
+
+ private func start_up() {
if !screenshot_mode {
- startLiveVideo()
- startTextDetection()
+ let success = setup_live_video()
+
+ if success {
+ startLiveVideo()
+ startTextDetection()
+ }
}
}
- override func viewWillDisappear(_ animated: Bool) {
- super.viewWillDisappear(animated)
-
+ private func shut_down() {
if !screenshot_mode {
stopLiveVideo()
stopTextDetection()
+
+ break_down_live_video()
}
}
+ override func viewWillDisappear(_ animated: Bool) {
+ super.viewWillDisappear(animated)
+
+ shut_down()
+ }
+
override func viewDidLayoutSubviews() {
// Fix the fact that view is not finished in viewDidAppear
imageView.layer.sublayers?[0].frame = imageView.bounds
@@ -124,19 +138,19 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
print("No result")
return
}
-
- // Highlight
- DispatchQueue.main.async() {
- self.imageView.layer.sublayers?.removeSubrange(1...)
- for rg in observations {
- self.highlightWord(box: rg)
- }
- }
// Only search if not currently searching
if !is_search_in_progress {
is_search_in_progress = true
-
+
+ // Highlight
+ DispatchQueue.main.async() {
+ self.imageView.layer.sublayers?.removeSubrange(1...)
+ for rg in observations {
+ self.highlightWord(box: rg)
+ }
+ }
+
var raw_observed_texts : [String] = []
for observation in observations {
@@ -238,7 +252,11 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
}
func stopLiveVideo() {
- session.stopRunning()
+ print("Stopping live video...")
+ if session.isRunning {
+ session.stopRunning()
+ }
+ print("Stopped live video.")
}
func startTextDetection() {
@@ -254,25 +272,60 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
self.requests = [textRequest]
}
- func setup_live_video() {
+ func setup_live_video() -> Bool {
+ print("setup live video start...")
+
// Init capture session
session.sessionPreset = AVCaptureSession.Preset.photo
- let captureDevice = AVCaptureDevice.default(for: AVMediaType.video)
-
+ guard let captureDevice = AVCaptureDevice.default(for: AVMediaType.video) else {
+ print("No camera device")
+ return false
+ }
+
// Setup
- let deviceInput = try! AVCaptureDeviceInput(device: captureDevice!)
- let deviceOutput = AVCaptureVideoDataOutput()
- deviceOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
- deviceOutput.setSampleBufferDelegate(self, queue: DispatchQueue.global(qos: DispatchQoS.QoSClass.default))
- session.addInput(deviceInput)
- session.addOutput(deviceOutput)
-
+ do {
+ let deviceInput_ = try AVCaptureDeviceInput(device: captureDevice)
+ session.addInput(deviceInput_)
+ deviceInput = deviceInput_
+ } catch {
+ print("Error: ", error.localizedDescription)
+ return false
+ }
+
+ let deviceOutput_ = AVCaptureVideoDataOutput()
+ deviceOutput_.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
+ deviceOutput_.setSampleBufferDelegate(self, queue: DispatchQueue.global(qos: DispatchQoS.QoSClass.default))
+ session.addOutput(deviceOutput_)
+ deviceOutput = deviceOutput_
+
+ print("setup live video.")
+
+ return true
+ }
+
+ private func setup_image_once() {
// Set image
let imageLayer = AVCaptureVideoPreviewLayer(session: session)
- imageLayer.bounds = imageView.bounds
+ imageLayer.frame = imageView.bounds
imageView.layer.addSublayer(imageLayer)
}
+ func break_down_live_video() {
+ print("Breaking down live video...")
+
+ // Remove IO
+ if let deviceInput = deviceInput {
+ session.removeInput(deviceInput)
+ }
+ if let deviceOutput = deviceOutput {
+ session.removeOutput(deviceOutput)
+ }
+ deviceOutput = nil
+ deviceInput = nil
+
+ print("Broke down live video.")
+ }
+
func startLiveVideo() {
session.startRunning()
}
@@ -315,8 +368,15 @@ class CameraViewController: UIViewController, AVCaptureVideoDataOutputSampleBuff
alert.definesPresentationContext = true
alert.modalPresentationStyle = UIModalPresentationStyle.overFullScreen
alert.modalTransitionStyle = UIModalTransitionStyle.coverVertical
-
+ alert.completion_on_close = {
+ // Start live video and recognition again
+ self.start_up()
+ }
+
DispatchQueue.main.async {
+ // Stop live video and recognition
+ self.shut_down()
+
self.present(alert, animated: true, completion: nil)
}
}
@@ -477,8 +537,7 @@ extension CameraViewController : UITableViewDataSource, UITableViewDelegate {
alert.mask_ui = mask.get_mask_ui()
alert.completion_on_close = {
// Start live video and recognition again
- self.startLiveVideo()
- self.startTextDetection()
+ self.start_up()
}
alert.providesPresentationContextTransitionStyle = true
alert.definesPresentationContext = true
@@ -487,8 +546,7 @@ extension CameraViewController : UITableViewDataSource, UITableViewDelegate {
DispatchQueue.main.async {
// Stop live video and recognition
- self.stopLiveVideo()
- self.stopTextDetection()
+ self.shut_down()
// Show
self.present(alert, animated: true, completion: nil)
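
The CameraViewController changes above replace the force-unwrapped capture setup with a failable setup_live_video() and a matching break_down_live_video(), and route presentation of the alerts through start_up()/shut_down(). Below is a minimal, self-contained sketch of the same add/remove pattern; it is not part of the patch, and it additionally wraps the changes in beginConfiguration()/commitConfiguration() and checks canAddInput/canAddOutput before adding, which AVCaptureSession supports. Type and property names here are illustrative.

```swift
import AVFoundation

// Illustrative sketch only (not from this patch): add/remove the camera
// input and output inside a configuration transaction, with capacity checks.
final class CaptureIO {
    let session = AVCaptureSession()
    private var input: AVCaptureDeviceInput?
    private var output: AVCaptureVideoDataOutput?

    func attach(delegate: AVCaptureVideoDataOutputSampleBufferDelegate) -> Bool {
        guard let device = AVCaptureDevice.default(for: .video) else { return false }
        session.beginConfiguration()
        defer { session.commitConfiguration() }

        do {
            let newInput = try AVCaptureDeviceInput(device: device)
            guard session.canAddInput(newInput) else { return false }
            session.addInput(newInput)
            input = newInput
        } catch {
            return false
        }

        let newOutput = AVCaptureVideoDataOutput()
        newOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32BGRA)]
        newOutput.setSampleBufferDelegate(delegate, queue: DispatchQueue.global(qos: .default))
        guard session.canAddOutput(newOutput) else { return false }
        session.addOutput(newOutput)
        output = newOutput
        return true
    }

    func detach() {
        session.beginConfiguration()
        if let input = input { session.removeInput(input) }
        if let output = output { session.removeOutput(output) }
        input = nil
        output = nil
        session.commitConfiguration()
    }
}
```

Batching the input/output changes between beginConfiguration() and commitConfiguration() applies them as one atomic update, which avoids intermediate session states while an alert is being presented or dismissed.
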
diff --git a/TheMaskedManual/ViewControllers/MaskNotFoundViewController.swift b/TheMaskedManual/ViewControllers/MaskNotFoundViewController.swift
index 716a5ab..c0e1d5b 100644
--- a/TheMaskedManual/ViewControllers/MaskNotFoundViewController.swift
+++ b/TheMaskedManual/ViewControllers/MaskNotFoundViewController.swift
@@ -34,7 +34,8 @@ class MaskNotFoundViewController: UIViewController {
@IBOutlet weak var central_view: UIView!
@IBOutlet weak var first_text_view: UITextView!
@IBOutlet weak var second_text_view: UITextView!
-
+ var completion_on_close : () -> Void = {}
+
override func viewDidLoad() {
super.viewDidLoad()
@@ -64,7 +65,7 @@ class MaskNotFoundViewController: UIViewController {
}
@IBAction func close_button_pressed(_ sender: Any) {
- self.dismiss(animated: true, completion: nil)
+ self.dismiss(animated: true, completion: completion_on_close)
}
/*
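
MaskNotFoundViewController now exposes the same completion_on_close hook the mask detail alert already used: the presenter assigns a closure before presenting, and the close button forwards it as the completion handler of dismiss(animated:completion:), so it runs only after the dismissal finishes. A self-contained sketch of the pattern, with illustrative class names; only the dismiss/present calls and the closure property mirror the patch:

```swift
import UIKit

// Sketch of the dismiss-completion wiring used above (names illustrative).
final class ClosableViewController: UIViewController {
    var completion_on_close: () -> Void = {}

    func close_button_pressed() {
        // Forward the stored closure so it runs once dismissal completes.
        dismiss(animated: true, completion: completion_on_close)
    }
}

final class PresenterViewController: UIViewController {
    func show_alert() {
        let alert = ClosableViewController()
        alert.modalPresentationStyle = .overFullScreen
        alert.completion_on_close = { [weak self] in
            // Resume work that was paused while the alert was on screen,
            // e.g. start_up() in CameraViewController.
            self?.resume_work()
        }
        present(alert, animated: true, completion: nil)
    }

    private func resume_work() {}
}
```
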
diff --git a/TheMaskedManual/ViewControllers/TabBarController.swift b/TheMaskedManual/ViewControllers/TabBarController.swift
index 6c84dac..40be71d 100644
--- a/TheMaskedManual/ViewControllers/TabBarController.swift
+++ b/TheMaskedManual/ViewControllers/TabBarController.swift
@@ -33,7 +33,8 @@ class TabBarController: UITabBarController {
override func viewDidLoad() {
super.viewDidLoad()
-
+ print("Tab bar viewDidLoad starting...")
+
// Load masks
LoadMasks.load_masks_and_companies { (masks, companies) in
@@ -67,6 +68,8 @@ class TabBarController: UITabBarController {
DispatchQueue.main.async {
self.present(alert, animated: false, completion: nil)
}
+
+ print("Tab bar viewDidLoad.")
}
override func viewWillAppear(_ animated: Bool) {