diff --git a/.DS_Store b/.DS_Store
index 7f9c9367f5271643b19920e1adfe909578449f9e..a56fbee1195862794d118c0cdfdfac9cdb71f645 100644
Binary files a/.DS_Store and b/.DS_Store differ
diff --git a/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/project.pbxproj b/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/project.pbxproj
index b4107ae45515641a085bbb1d068c3a3bfcc7dc97..74e7cd694dd4ea39f467d5ecd051d362a17c2d94 100644
--- a/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/project.pbxproj
+++ b/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/project.pbxproj
@@ -64,7 +64,7 @@
 		A0B8A3552630BCC000068B14 /* RegisterViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = RegisterViewController.swift; sourceTree = "<group>"; };
 		A0B8A3582630BCD900068B14 /* WelcomeViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = WelcomeViewController.swift; sourceTree = "<group>"; };
 		A0B8A35B2630C6D100068B14 /* FaceTrackerViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = FaceTrackerViewController.swift; sourceTree = "<group>"; };
-		A0B8A35E2630C70A00068B14 /* SuccessViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SuccessViewController.swift; sourceTree = "<group>"; };
+		A0B8A35E2630C70A00068B14 /* SuccessViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; name = SuccessViewController.swift; path = FaceRecogAttendance/Controller/SuccessViewController.swift; sourceTree = SOURCE_ROOT; };
 		A0CDC2CF2638E6920022BEA2 /* ModuleListViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ModuleListViewController.swift; sourceTree = "<group>"; };
 		A0CDC2D22638E6B20022BEA2 /* SessionListViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SessionListViewController.swift; sourceTree = "<group>"; };
 		A0CDC2D52638E6C20022BEA2 /* AttendanceListViewController.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AttendanceListViewController.swift; sourceTree = "<group>"; };
diff --git a/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist b/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
index a21b3520539564c59b7e163946d4720253e69e81..76c0f0053443265dbe50c8d838816fcd2ce1f187 100644
--- a/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
+++ b/FaceRecogAttendance/FaceRecogAttendance.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
@@ -7,7 +7,7 @@
 		<key>FaceRecogAttendance.xcscheme_^#shared#^_</key>
 		<dict>
 			<key>orderHint</key>
-			<integer>22</integer>
+			<integer>19</integer>
 		</dict>
 	</dict>
 </dict>
diff --git a/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/UserInterfaceState.xcuserstate b/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/UserInterfaceState.xcuserstate
index f8f5d78c159fa4bd7e5435c8fc3dee5ebde66bdd..5726d57594256da237020a94cd58ac49d47c5da2 100644
Binary files a/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/UserInterfaceState.xcuserstate and b/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/UserInterfaceState.xcuserstate differ
diff --git a/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist b/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
new file mode 100644
index 0000000000000000000000000000000000000000..b652b55b53289ca4de22bfcc8a1e1e4d360ce4c9
--- /dev/null
+++ b/FaceRecogAttendance/FaceRecogAttendance.xcworkspace/xcuserdata/Lucas.xcuserdatad/xcdebugger/Breakpoints_v2.xcbkptlist
@@ -0,0 +1,6 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<Bucket
+   uuid = "1BCE61F2-B067-4495-BFD8-D3C4FE08A6A1"
+   type = "0"
+   version = "2.0">
+</Bucket>
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceClassificationViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceClassificationViewController.swift
index 5a44f7c1a02f3533c437ce80ee5b79dcb344f156..4575af38ad639c5f87d7fe01eb61aabf032ddcb4 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceClassificationViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceClassificationViewController.swift
@@ -14,7 +14,6 @@ import RealmSwift
 
 class FaceClassificationViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {
 
-//    let realm = try! Realm()
     var notificationToken: NotificationToken?
     var realm : Realm?
     var attendance : Results<Attendance>?
@@ -24,6 +23,7 @@
         }
     }
     var faceDetected: Bool = false
+    var verification: Bool = false
     let captureSession = AVCaptureSession()
     let cameraManager = CameraManager()
     var capturedFaceCount = 0
@@ -48,27 +48,6 @@ class FaceClassificationViewController: UIViewController, AVCaptureVideoDataOutp
         setupLabel()
     }
 
-    // here is where we start the camera
-//    func setupCamera() {
-//        captureSession.sessionPreset = .high
-//
-//        guard let captureDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: .front) else { preconditionFailure("A Camera is needed to start the AV session") }
-//
-//        //throw error if no camera is found.
-//        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
-//        captureSession.addInput(input)
-//
-//        captureSession.startRunning()
-//
-//        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
-//        view.layer.addSublayer(previewLayer)
-//        previewLayer.frame = view.frame
-//
-//        let dataOutput = AVCaptureVideoDataOutput()
-//        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
-//        captureSession.addOutput(dataOutput)
-//    }
-
     func setupLabel() {
         view.addSubview(label)
         label.bottomAnchor.constraint(equalTo: view.bottomAnchor, constant: -32).isActive = true
@@ -79,84 +58,48 @@ class FaceClassificationViewController: UIViewController, AVCaptureVideoDataOutp
 
     func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
+        verification = false
         faceDetected = false
         guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
+        // load the face recognition model
        guard let model = try? VNCoreMLModel(for: FaceClassifier().model) else { fatalError("Unable to load model") }
         let ciImage = CIImage(cvImageBuffer: pixelBuffer, options: [:])
+        // detect a face in the image
         detectFace(image: ciImage)
-//        let detectFaceRequest = VNDetectFaceRectanglesRequest { (request, error) in
-//            guard let faceResults = request.results as? [VNFaceObservation],
-//                  let _ = faceResults.first
-//            else {
-//                print("no faces")
-//                DispatchQueue.main.async {
-//                    self.label.text = "no faces"
-//                }
-//                return
-//            }
-//            self.faceDetected = true
-//            print("faceDetected is now true")
 
         if faceDetected == true {
             capturedFaceCount += 1
             print("capture face \(capturedFaceCount) times")
             classifyFace(image: pixelBuffer, model: model)
-            if capturedFaceCount > 200 {
+            // once enough frames have been classified, ask the user to verify and create the attendance
+            if capturedFaceCount > 100 {
                 DispatchQueue.main.async {
-                    let newAttendance = Attendance(studentID: "test", studentName: self.label.text!, dateCreated: Date())
-                    //                    newAttendance.studentName = self.label.text!
-                    //                    newAttendance.studentID = "test"
-                    //                    newAttendance.dateCreated = Date()
-                    //                    self.saveAttendance(attendance: newAttendance)
+                    let alert = UIAlertController.init(title: "Verify", message: "\(self.label.text!), please confirm", preferredStyle: .alert)
+                    alert.addAction(UIAlertAction.init(title: "Yes", style: .default, handler: { (action) in
+                        self.verification = true
+                        print("verification is now true")
+                    }))
+                    alert.addAction(UIAlertAction.init(title: "No", style: .cancel, handler: nil))
+                    self.present(alert, animated: true, completion: nil)
 
-                    try! self.realm?.write {
-                        self.selectedSession?.attendances.append(newAttendance)
+                    if self.verification == true {
+                        let newAttendance = Attendance(studentID: "test", studentName: self.label.text!, dateCreated: Date())
+
+                        try! self.realm?.write {
+                            self.selectedSession?.attendances.append(newAttendance)
+                        }
+                        print("attendance created")
+                        self.cameraManager.captureSession.stopRunning()
+                        self.navigationController?.popToRootViewController(animated: true)
+                        return
                     }
-                    print("attendance created")
-                    self.captureSession.stopRunning()
-                    return
                 }
             }
-
-//            guard let model = try? VNCoreMLModel(for: FaceClassifier().model) else {
-//                fatalError("Unable to load model")
-//            }
-
-//            let coreMlRequest = VNCoreMLRequest(model: model) {[weak self] request, error in
-//                guard let results = request.results as? [VNClassificationObservation],
-//                      let topResult = results.first
-//                else {
-//                    fatalError("Unexpected results")
-//                }
-//
-//                print(topResult.identifier, topResult.confidence * 100)
-//
-//                DispatchQueue.main.async {[weak self] in
-//                    self?.label.text = topResult.identifier
-//                }
-//            }
-//
-//            let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
-//            DispatchQueue.global().async {
-//                do {
-//                    try handler.perform([coreMlRequest])
-//                } catch {
-//                    print(error)
-//                }
-//            }
         } else {
             print("face detected is false")
         }
     }
-
-//        let faceDetectionHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
-//        do {
-//            try faceDetectionHandler.perform([detectFaceRequest])
-//        } catch {
-//            print(error.localizedDescription)
-//        }
-//    }
 
     //MARK: - Facial Recognition Method
     func classifyFace(image: CVPixelBuffer, model: VNCoreMLModel) {
@@ -171,7 +114,7 @@ class FaceClassificationViewController: UIViewController, AVCaptureVideoDataOutp
             print(topResult.identifier, topResult.confidence * 100)
 
             DispatchQueue.main.async {[weak self] in
-                self?.label.text = topResult.identifier
+                self?.label.text = "\(topResult.identifier) - \(topResult.confidence)"
             }
         }
 
@@ -211,17 +154,5 @@ class FaceClassificationViewController: UIViewController, AVCaptureVideoDataOutp
     func loadAttendance() {
         attendance = selectedSession?.attendances.sorted(byKeyPath: "studentID", ascending: true)
     }
-
-//    func saveAttendance(attendance: Attendance) {
-//        if let currentSession = selectedSession {
-//            do {
-//                try realm.write {
-//                    currentSession.attendances.append(attendance)
-//                }
-//            } catch {
-//                print(error.localizedDescription)
-//            }
-//        }
-//    }
 }
 
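Review note on the verification hunk above: the "Yes" handler that sets `verification` only runs once the user taps the button, but `if self.verification == true` is evaluated synchronously, immediately after `present(_:animated:completion:)` returns. On the frame that first crosses the 100-capture threshold the Realm write can therefore never happen; it only fires because later frames re-enter this block (each of which also tries to re-present the alert). A sketch that moves the write into the confirmation handler instead — a suggested restructuring using the properties this controller already has, not what the commit itself does:

```swift
// Sketch: stop the session, ask once, and only write the attendance
// after the user actually confirms.
func presentVerificationAlert() {
    cameraManager.captureSession.stopRunning() // also prevents re-presenting every frame
    let alert = UIAlertController(title: "Verify",
                                  message: "\(label.text!), please confirm",
                                  preferredStyle: .alert)
    alert.addAction(UIAlertAction(title: "Yes", style: .default) { _ in
        let newAttendance = Attendance(studentID: "test",
                                       studentName: self.label.text!,
                                       dateCreated: Date())
        try! self.realm?.write {
            self.selectedSession?.attendances.append(newAttendance)
        }
        self.navigationController?.popToRootViewController(animated: true)
    })
    alert.addAction(UIAlertAction(title: "No", style: .cancel) { _ in
        // wrong match: reset the counter and resume capturing
        self.capturedFaceCount = 0
        self.cameraManager.captureSession.startRunning()
    })
    present(alert, animated: true, completion: nil)
}
```

With this shape the `verification` flag is no longer needed at all.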
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceTrackerViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceTrackerViewController.swift
index 5e71600af57068bc123485c44855abedff7afd76..8ee8349c384dd1385344d4ac20e867e7712abde3 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceTrackerViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/FaceTrackerViewController.swift
@@ -18,9 +18,11 @@ class FaceTrackerViewController: UIViewController, AVCaptureVideoDataOutputSampl
     let faceDetectionRequest = VNSequenceRequestHandler()
     var faceClassificationRequest: VNCoreMLRequest!
     var lastObservation : VNFaceObservation?
+    let captureSession = AVCaptureSession()
+    let cameraManager = CameraManager()
 
     private var sampleCounter = 0
-    private let requiredSamples = 5
+    private let requiredSamples = 30
    private var faceImages = [UIImage]()
     private var isIdentifiyingPeople = false
     private var isCapturing: Bool = false
@@ -30,31 +32,32 @@ class FaceTrackerViewController: UIViewController, AVCaptureVideoDataOutputSampl
     override func viewDidLoad() {
         super.viewDidLoad()
         promptCommand()
-        setupCamera()
-    }
-
-    func setupCamera() {
-        let captureSession = AVCaptureSession()
-        captureSession.sessionPreset = .high
-
-        guard let captureDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: .front) else { preconditionFailure("A Camera is needed to start the AV session") }
-
-        //throw error if no camera is found.
-        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
-        captureSession.addInput(input)
-
-        captureSession.startRunning()
-
-        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
-        view.layer.addSublayer(previewLayer)
-        previewLayer.frame = view.frame
-
-        let dataOutput = AVCaptureVideoDataOutput()
-        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
-        captureSession.addOutput(dataOutput)
+        cameraManager.setupCamera(view: view, delegate: self)
+//        setupCamera()
     }
 
+//    func setupCamera() {
+//        let captureSession = AVCaptureSession()
+//        captureSession.sessionPreset = .high
+//
+//        guard let captureDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: .front) else { preconditionFailure("A Camera is needed to start the AV session") }
+//
+//        //throw error if no camera is found.
+//        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
+//        captureSession.addInput(input)
+//
+//        captureSession.startRunning()
+//
+//        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
+//        view.layer.addSublayer(previewLayer)
+//        previewLayer.frame = view.frame
+//
+//        let dataOutput = AVCaptureVideoDataOutput()
+//        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
+//        captureSession.addOutput(dataOutput)
+//    }
+
     // this function captures the output image frame by frame
     func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
         guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer),
               let attachments = CMCopyDictionaryOfAttachments(allocator: kCFAllocatorDefault, target: sampleBuffer, attachmentMode: kCMAttachmentMode_ShouldPropagate)
@@ -74,12 +77,13 @@ class FaceTrackerViewController: UIViewController, AVCaptureVideoDataOutputSampl
             print("no faces")
             return
         }
-
+        // a face was detected: either classify it or collect training samples
         if isIdentifiyingPeople {
+            // run the face classifier on the detected face
             let handler = VNImageRequestHandler(ciImage: image, orientation: .up, options: [:])
             self.lastObservation = faceObservation
             try? handler.perform([self.faceClassificationRequest])
-        } else {
+        } else { // otherwise capture the face image and upload it to Firebase
             let faceImage: UIImage = convert(cmage: image)
             sampleCounter += 1
             if faceImages.count <= requiredSamples {
@@ -102,6 +106,7 @@
         }
     }
 
+    //MARK: - Firebase Method
     // upload image to firebase storage
     fileprivate func uploadImages(image: UIImage, completion: @escaping (_ url: String?) -> Void) {
         // convert UIImage to jpg format
@@ -121,7 +126,7 @@
         DispatchQueue.main.async {
             imageReference.putData(data, metadata: nil) {(metadata, error) in
                 if error != nil {
-                    print("error")
+                    print(error?.localizedDescription)
                     completion(nil)
                 } else {
                     imageReference.downloadURL(completion: { (url, error) in
@@ -133,6 +138,7 @@
         }
     }
 
+    //MARK: - Image Conversion
     // function to convert image to UIImage format
     private func convert(cmage:CIImage) -> UIImage {
         let context:CIContext = CIContext.init(options: nil)
@@ -141,6 +147,7 @@
         return image
     }
 
+    //MARK: - Notification function
     private func promptCommand() {
         let alert = UIAlertController.init(title: "Info", message: "The system needs to capture your face images for training purposes. Please align your face at the centre of the screen and look at the camera.", preferredStyle: .alert)
         alert.addAction(UIAlertAction.init(title: "Ok", style: .default, handler: nil))
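The `setupCamera()` body deleted above (and kept as a comment) is what `cameraManager.setupCamera(view:delegate:)` now encapsulates. `CameraManager` itself is not part of this diff; reassembling the removed code into that shape would give roughly the following — a sketch in which everything beyond `setupCamera(view:delegate:)` and the `captureSession` property (both referenced by other hunks) is an assumption:

```swift
import AVFoundation
import UIKit

class CameraManager {
    let captureSession = AVCaptureSession()

    // Mirrors the removed setupCamera() body: front wide-angle camera in,
    // preview layer over the given view, frames out to the sample-buffer
    // delegate on a background queue.
    func setupCamera(view: UIView, delegate: AVCaptureVideoDataOutputSampleBufferDelegate) {
        captureSession.sessionPreset = .high

        guard let captureDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front) else {
            preconditionFailure("A camera is needed to start the AV session")
        }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer.frame = view.frame
        view.layer.addSublayer(previewLayer)

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(delegate, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)

        captureSession.startRunning()
    }
}
```

If this is what the class looks like, the new `captureSession` property added to FaceTrackerViewController above duplicates the manager's own session and appears to go unused.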
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/ModuleListViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/ModuleListViewController.swift
index 188fb388386b4a267fc754f8c59d25629b9c5f4d..4d4ff11ef91af314ca7a3acbe1b324e7c3b4046c 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/ModuleListViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/ModuleListViewController.swift
@@ -11,9 +11,6 @@ import RealmSwift
 
 class ModuleListViewController: UITableViewController {
 
-    // call the database
-//    let realm = try! Realm()
-
     var notificationToken: NotificationToken?
     var realm : Realm?
     var modules : Results<Module>?
@@ -55,8 +52,10 @@ class ModuleListViewController: UITableViewController {
         let destinationVC = segue.destination as! SessionListViewController
         if isCheckAttendancePressed == true {
             destinationVC.navigationItem.rightBarButtonItem?.isEnabled = true
+            destinationVC.isCheckAttendancePressed = false
         } else {
             destinationVC.navigationItem.rightBarButtonItem?.isEnabled = false
+            destinationVC.isCheckAttendancePressed = true
         }
         if let indexPath = tableView.indexPathForSelectedRow {
             destinationVC.selectedModule = modules?[indexPath.row]
@@ -86,7 +85,7 @@ class ModuleListViewController: UITableViewController {
 //        }
 //    }
 
-    //MARK - Add New Modules
+    //MARK: - Add New Modules
     @IBAction func addButtonPressed(_ sender: UIBarButtonItem) {
         var moduleNameTextField = UITextField()
 
@@ -98,16 +97,11 @@
             // these lines run once the user clicks the Add Module button on our UIAlert
             let newModule = Module(moduleID: moduleIDTextField.text! , moduleName: moduleNameTextField.text!, partition: "user=\(app.currentUser!.id)")
 
+            // save module into realm database
             try! self.realm?.write {
                 self.realm?.add(newModule)
             }
-//            if let moduleName = moduleNameTextField.text {
-//                newModule.moduleName = moduleName
-//            }
-//            if let moduleID = moduleIDTextField.text {
-//                newModule.moduleID = moduleID
-//            }
-//            self.saveModule(module: newModule)
+
             print("Successfully created new module")
 
             self.tableView.reloadData()
@@ -128,6 +122,7 @@ class ModuleListViewController: UITableViewController {
         present(alert, animated: true, completion: nil)
     }
 
+    // check if the user can access the synced realm
     func onLogin() {
         let user = app.currentUser!
         let partitionValue = "user=\(user.id)"
@@ -143,6 +138,8 @@ class ModuleListViewController: UITableViewController {
         }
     }
 
+    //MARK: - Data Manipulation Methods
+    // if realm is accessed, load the database
     func onRealmOpened(_ realm: Realm) {
         modules = realm.objects(Module.self)
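`onLogin()` and `onRealmOpened(_:)` appear in several of these controllers, but every hunk elides the step between them. With the RealmSwift 10 sync API, the bridge is presumably `Realm.asyncOpen` on a partition-scoped configuration — a sketch under that assumption (as it would sit in ModuleListViewController), not the project's verbatim code:

```swift
// Sketch: open the synced realm for the logged-in user's partition,
// then hand it to onRealmOpened(_:) to load objects and observe changes.
func onLogin() {
    let user = app.currentUser!
    let partitionValue = "user=\(user.id)"
    let configuration = user.configuration(partitionValue: partitionValue)
    Realm.asyncOpen(configuration: configuration) { result in
        switch result {
        case .failure(let error):
            print("Failed to open realm: \(error.localizedDescription)")
        case .success(let realm):
            self.onRealmOpened(realm)
        }
    }
}
```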
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/RegisterViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/RegisterViewController.swift
index 15fea9ab0935c2ad345f98bf6ff31e9a9d9080c0..ad4f6e83339b98d570b76739824d9bf4bea97242 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/RegisterViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/RegisterViewController.swift
@@ -10,7 +10,6 @@ import RealmSwift
 
 class RegisterViewController: UIViewController {
 
-//    let realm = try! Realm()
     var notificationToken: NotificationToken?
     var realm : Realm?
 
@@ -24,7 +23,6 @@ class RegisterViewController: UIViewController {
     override func viewDidLoad() {
         super.viewDidLoad()
         self.hideKeyboardWhenTappedAround()
-        signUp()
     }
 
@@ -43,34 +41,17 @@ class RegisterViewController: UIViewController {
             self.present(alert, animated: true, completion: nil)
         }
 
-//        let newStudent = Student()
-//        newStudent.email = emailTextField.text!
-//        newStudent.password = passwordTextField.text!
-//        newStudent.studentName = nameTextField.text!
-//        newStudent.studentID = studentIDTextField.text!
-//        newStudent.isImageUpload = true
-        let newStudent = Student(studentName: nameTextField.text!, studentID: studentIDTextField.text!, email: emailTextField.text!, password: "", isImageUpload: true, isImageTrained: false, partition: "user=\(app.currentUser!.id)")
+        let newStudent = Student(studentName: nameTextField.text!, studentID: studentIDTextField.text!, email: emailTextField.text!, password: " ", isImageUpload: true, isImageTrained: false, partition: "user=\(app.currentUser!.id)")
 
+        // save student into realm database
         try! self.realm?.write {
             self.realm?.add(newStudent)
         }
-//        self.saveStudent(student: newStudent)
 
         print("successfully registered student")
         self.performSegue(withIdentifier: "goToRegistered", sender: self)
     }
-
-//    func saveStudent(student: Student) {
-//        do {
-//            try self.realm.write {
-//                self.realm.add(student)
-//            }
-//        } catch {
-//            print("Error saving category \(error)")
-//        }
-//    }
-
     override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
         if segue.identifier == "GoToRecord" {
             let destinationVC = segue.destination as! FaceTrackerViewController
@@ -83,6 +64,7 @@
         notificationToken?.invalidate()
     }
 
+    // a function that allows the student to sign up to the application
    @objc func signUp() {
         app.login(credentials: Credentials.anonymous) { (result) in
             // Remember to dispatch back to the main thread in completion handlers
@@ -100,6 +82,7 @@
         }
     }
 
+    // check if the user can get access to the synced realm
     @objc func signIn() {
         let user = app.currentUser!
         let partitionValue = "user=\(user.id)"
@@ -117,6 +100,7 @@
         }
     }
 
+    // if the user is able to access the realm
     func onRealmOpened(_ realm: Realm) {
         let students = realm.objects(Student.self)
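`signUp()` is no longer called from `viewDidLoad`, and only the first lines of its body are visible in the hunk above. Based on the anonymous-credentials pattern it opens with (the same one the commented-out `login()` in StudentViewController below uses), the completed flow presumably dispatches back to the main thread and then proceeds to `signIn()` — a sketch, with that chaining being an assumption:

```swift
// Sketch: anonymous login against the Realm app; completion handlers
// arrive on a background thread, so hop back to main before doing more.
@objc func signUp() {
    app.login(credentials: Credentials.anonymous) { result in
        DispatchQueue.main.async {
            switch result {
            case .failure(let error):
                print("Login failed: \(error.localizedDescription)")
            case .success(let user):
                print("Login as \(user.id) succeeded")
                self.signIn() // assumed: continue on to open the synced realm
            }
        }
    }
}
```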
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/SessionListViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/SessionListViewController.swift
index 9a4d46109032a0907a26643f8945834cba7fb2d0..3e8227021d4377394bf8f154b84b3adea23ad917 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/SessionListViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/SessionListViewController.swift
@@ -10,8 +10,8 @@ import UIKit
 import RealmSwift
 
 class SessionListViewController: UITableViewController {
-//    let realm = try! Realm()
 
+    var isCheckAttendancePressed: Bool?
     var notificationToken: NotificationToken?
     var realm : Realm?
     var sessions : Results<Session>?
@@ -36,6 +36,7 @@ class SessionListViewController: UITableViewController {
             let action = UIAlertAction(title: "Add Session", style: .default) { (action) in
                 let newSession = Session(roomNo: sessionRoomNo.text!, sessionDate: sessionDateTextField.text!, sessionTime: sessionTimeTextField.text!)
 
+                // save session to realm database
                 try! self.realm?.write {
                     self.selectedModule?.sessions.append(newSession)
                 }
@@ -90,7 +91,9 @@ class SessionListViewController: UITableViewController {
     }
 
     override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
-        performSegue(withIdentifier: "goToAttendance", sender: self)
+        if isCheckAttendancePressed == true {
+            performSegue(withIdentifier: "goToAttendance", sender: self)
+        }
     }
 
     override func prepare(for segue: UIStoryboardSegue, sender: Any?) {
@@ -101,19 +104,7 @@ class SessionListViewController: UITableViewController {
         }
     }
 
-    //MARK - Data Manipulation Method
-//    func saveSession(session: Session) {
-//        if let currentModule = self.selectedModule {
-//            do {
-//                try realm.write {
-//                    currentModule.sessions.append(session)
-//                }
-//            } catch {
-//                print("Error saving session \(error)")
-//            }
-//        }
-//    }
-
+    //MARK: - Data Manipulation Method
     func loadSessions() {
         sessions = selectedModule?.sessions.sorted(byKeyPath: "roomNo")
         tableView.reloadData()
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentDetailViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentDetailViewController.swift
index 44b38b4ac5c6fadb8adfcd11c7fd7e957f75065b..81d5078c8e861222bda3bcea98950d8ca9e0df88 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentDetailViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentDetailViewController.swift
@@ -40,9 +40,6 @@ class StudentDetailViewController: UIViewController {
             imageTrainedSegment.selectedSegmentIndex = 0
         }
 
-//        studentIDLabel.text = studentID
-//        studentEmailLabel.text = studentEmail
-//        imageUploadLabel.text = imageUpload?.description
     }
 
     @IBAction func imageTrainedPressed(_ sender: UISegmentedControl) {
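A readability note on the flag wired up here: ModuleListViewController hands `isCheckAttendancePressed` over inverted (its own `true` arrives here as `false`), so `didSelectRowAt` segues only when the module screen's flag was `false`. Whether or not that double negative is intended, an explicit mode would state the intent outright — a hypothetical refactor, names invented:

```swift
import UIKit

// Hypothetical: replace the inverted optional Bool with an explicit mode
// that the module screen sets directly.
enum SessionListMode {
    case takeAttendance   // row taps start a new attendance capture
    case checkAttendance  // row taps open the recorded attendance list
}

class SessionListSketch: UITableViewController {
    var mode: SessionListMode = .takeAttendance

    override func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) {
        guard mode == .checkAttendance else { return }
        performSegue(withIdentifier: "goToAttendance", sender: self)
    }
}
```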
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentViewController.swift
index db01a992780db32d5b11550ab4412ac2ad476b59..b8f26d5590e9c6ee9593eec5b9c281469d7647eb 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/StudentViewController.swift
@@ -11,14 +11,11 @@ import RealmSwift
 
 class StudentViewController: UITableViewController {
 
-//    let realm = try! Realm()
-
     var notificationToken: NotificationToken?
     var students: Results<Student>?
 
     override func viewDidLoad() {
         super.viewDidLoad()
-//        loadStudents()
         onLogin()
     }
 
@@ -51,24 +48,21 @@ class StudentViewController: UITableViewController {
         }
     }
 
-//    func loadStudents() {
-//        students = realm?.objects(Student.self)
-//        tableView.reloadData()
+//    // function to login to synced realm
+//    func login() {
+//        app.login(credentials: Credentials.anonymous) { (result) in
+//            DispatchQueue.main.async {
+//                switch result {
+//                case .failure(let error):
+//                    print("Login Failed: \(error)")
+//                case .success(let user):
+//                    print("Login as \(user) succeeded")
+//                }
+//            }
+//        }
 //    }
 
-    func login() {
-        app.login(credentials: Credentials.anonymous) { (result) in
-            DispatchQueue.main.async {
-                switch result {
-                case .failure(let error):
-                    print("Login Failed: \(error)")
-                case .success(let user):
-                    print("Login as \(user) succeeded")
-                }
-            }
-        }
-    }
-
+    // check if the user is allowed to access the synced realm
     func onLogin() {
         let user = app.currentUser!
         let partitionValue = "user=\(user.id)"
diff --git a/FaceRecogAttendance/FaceRecogAttendance/Controller/SuccessViewController.swift b/FaceRecogAttendance/FaceRecogAttendance/Controller/SuccessViewController.swift
index 89ea24452cf54b1329235f8dbcd116d647a03c51..f92316b82c9191058553108e79c93fe1a083f6ea 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/Controller/SuccessViewController.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/Controller/SuccessViewController.swift
@@ -13,9 +13,8 @@ class SuccessViewController: UIViewController {
     override func viewDidLoad() {
         super.viewDidLoad()
     }
-
+    
     @IBAction func doneButtonPressed(_ sender: UIButton) {
-        self.navigationController?.popToRootViewController(animated: true)
+        self.view.window?.rootViewController?.dismiss(animated: true, completion: nil)
     }
-
 }
diff --git a/FaceRecogAttendance/FaceRecogAttendance/SceneDelegate.swift b/FaceRecogAttendance/FaceRecogAttendance/SceneDelegate.swift
index e2398dc81f179028b369d9e414b3dcbefbe9e8e2..2246ccafcf79d22d24f49c533fcf2f80ba7a9381 100644
--- a/FaceRecogAttendance/FaceRecogAttendance/SceneDelegate.swift
+++ b/FaceRecogAttendance/FaceRecogAttendance/SceneDelegate.swift
@@ -8,7 +8,7 @@
 import UIKit
 import RealmSwift
 
-let app = App(id: "facerecognition-gtgth")
+let app = App(id: "facerecogattendance-qjhla")
 
 class SceneDelegate: UIResponder, UIWindowSceneDelegate {
 
firstAttribute="top" secondItem="1U8-wh-go1" secondAttribute="top" id="zo3-uY-Xak"/> </constraints> </view> - <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="dXD-8x-7f8"> - <rect key="frame" x="82" y="697" width="251" height="75"/> + <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="ZXT-z6-lzH"> + <rect key="frame" x="91" y="707" width="232" height="91"/> <color key="backgroundColor" red="0.91764705879999997" green="0.98039215690000003" blue="0.99607843139999996" alpha="1" colorSpace="custom" customColorSpace="displayP3"/> - <constraints> - <constraint firstAttribute="height" constant="75" id="LeV-af-ZZa"/> - </constraints> <fontDescription key="fontDescription" type="system" pointSize="30"/> <state key="normal" title="Done"/> - <connections> - <action selector="doneButtonPressed:" destination="rXJ-vt-WP0" eventType="touchUpInside" id="c4s-q2-4Dg"/> - </connections> </button> </subviews> <viewLayoutGuide key="safeArea" id="GW6-XL-IZK"/> <color key="backgroundColor" white="1" alpha="1" colorSpace="custom" customColorSpace="genericGamma22GrayColorSpace"/> <constraints> + <constraint firstItem="ZXT-z6-lzH" firstAttribute="top" secondItem="1U8-wh-go1" secondAttribute="bottom" constant="89" id="6PY-3I-6Kf"/> <constraint firstItem="1U8-wh-go1" firstAttribute="top" secondItem="GW6-XL-IZK" secondAttribute="top" constant="29" id="7t2-Tg-rPg"/> <constraint firstItem="GW6-XL-IZK" firstAttribute="trailing" secondItem="1U8-wh-go1" secondAttribute="trailing" constant="20" id="88X-c2-rch"/> - <constraint firstItem="GW6-XL-IZK" firstAttribute="trailing" secondItem="dXD-8x-7f8" secondAttribute="trailing" constant="81" id="coT-fI-ySy"/> + <constraint firstItem="ZXT-z6-lzH" firstAttribute="centerX" secondItem="1U8-wh-go1" secondAttribute="centerX" id="GJM-Tp-rm5"/> + <constraint firstItem="GW6-XL-IZK" firstAttribute="bottom" secondItem="ZXT-z6-lzH" secondAttribute="bottom" constant="64" id="Ms5-1s-IwA"/> + <constraint firstItem="ZXT-z6-lzH" firstAttribute="leading" secondItem="GW6-XL-IZK" secondAttribute="leading" constant="91" id="cfY-qL-MgB"/> <constraint firstItem="1U8-wh-go1" firstAttribute="leading" secondItem="GW6-XL-IZK" secondAttribute="leading" constant="20" id="pk6-sg-dh4"/> - <constraint firstItem="dXD-8x-7f8" firstAttribute="leading" secondItem="GW6-XL-IZK" secondAttribute="leading" constant="82" id="xPN-Qu-xqi"/> - <constraint firstItem="dXD-8x-7f8" firstAttribute="top" secondItem="1U8-wh-go1" secondAttribute="bottom" constant="79" id="zSU-HD-eDQ"/> </constraints> </view> <navigationItem key="navigationItem" id="OUM-7J-GgO"/> </viewController> <placeholder placeholderIdentifier="IBFirstResponder" id="CQW-oK-xGG" userLabel="First Responder" customClass="UIResponder" sceneMemberID="firstResponder"/> </objects> - <point key="canvasLocation" x="4052" y="-575"/> + <point key="canvasLocation" x="4031.884057971015" y="-575.22321428571422"/> </scene> <!--Face Tracker View Controller--> <scene sceneID="LE7-oC-Q8R"> @@ -374,7 +369,7 @@ <imageView clipsSubviews="YES" userInteractionEnabled="NO" contentMode="scaleAspectFit" horizontalHuggingPriority="251" verticalHuggingPriority="251" image="textfield" translatesAutoresizingMaskIntoConstraints="NO" id="pgH-W7-l18"> <rect 
key="frame" x="0.0" y="20" width="414" height="137"/> </imageView> - <textField opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" placeholder="username / email" textAlignment="center" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="Ur7-91-6QF"> + <textField opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="left" contentVerticalAlignment="center" placeholder="username" textAlignment="center" minimumFontSize="17" translatesAutoresizingMaskIntoConstraints="NO" id="Ur7-91-6QF"> <rect key="frame" x="48" y="50" width="318" height="45"/> <fontDescription key="fontDescription" type="system" pointSize="25"/> <textInputTraits key="textInputTraits"/> @@ -418,9 +413,8 @@ <constraint firstAttribute="height" constant="177" id="x3g-9O-31e"/> </constraints> </view> - <button opaque="NO" contentMode="scaleToFill" fixedFrame="YES" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="kbz-Av-MZs"> + <button opaque="NO" contentMode="scaleToFill" contentHorizontalAlignment="center" contentVerticalAlignment="center" buttonType="system" lineBreakMode="middleTruncation" translatesAutoresizingMaskIntoConstraints="NO" id="kbz-Av-MZs"> <rect key="frame" x="0.0" y="814" width="414" height="48"/> - <autoresizingMask key="autoresizingMask" flexibleMaxX="YES" flexibleMaxY="YES"/> <color key="backgroundColor" red="0.91764705879999997" green="0.98039215690000003" blue="0.99607843139999996" alpha="1" colorSpace="custom" customColorSpace="displayP3"/> <fontDescription key="fontDescription" type="system" pointSize="30"/> <state key="normal" title="Login"> @@ -435,10 +429,13 @@ <color key="backgroundColor" red="0.2156862745" green="0.77647058820000003" blue="0.99607843139999996" alpha="1" colorSpace="custom" customColorSpace="displayP3"/> <constraints> <constraint firstItem="ZIm-Dj-weX" firstAttribute="trailing" secondItem="MP4-0d-Brf" secondAttribute="trailing" id="1oL-pj-js2"/> + <constraint firstItem="kbz-Av-MZs" firstAttribute="trailing" secondItem="QGg-gC-C0Q" secondAttribute="trailing" id="5wN-Sv-gZA"/> <constraint firstItem="ZIm-Dj-weX" firstAttribute="leading" secondItem="MP4-0d-Brf" secondAttribute="leading" id="90b-UZ-EWD"/> <constraint firstItem="QGg-gC-C0Q" firstAttribute="leading" secondItem="MP4-0d-Brf" secondAttribute="leading" id="GHP-AT-IUF"/> <constraint firstItem="ZIm-Dj-weX" firstAttribute="top" secondItem="MP4-0d-Brf" secondAttribute="top" id="IZd-7Z-8Lh"/> <constraint firstItem="QGg-gC-C0Q" firstAttribute="trailing" secondItem="MP4-0d-Brf" secondAttribute="trailing" id="OYH-sR-jbQ"/> + <constraint firstItem="kbz-Av-MZs" firstAttribute="leading" secondItem="QGg-gC-C0Q" secondAttribute="leading" id="Z71-vP-bQe"/> + <constraint firstItem="kbz-Av-MZs" firstAttribute="bottom" secondItem="MP4-0d-Brf" secondAttribute="bottom" id="jRM-kv-aK8"/> <constraint firstItem="QGg-gC-C0Q" firstAttribute="top" secondItem="ZIm-Dj-weX" secondAttribute="bottom" constant="-100" id="wMv-Sm-m54"/> </constraints> </view> @@ -842,7 +839,7 @@ </viewController> <placeholder placeholderIdentifier="IBFirstResponder" id="kwU-XT-yCf" userLabel="First Responder" customClass="UIResponder" sceneMemberID="firstResponder"/> </objects> - <point key="canvasLocation" x="5148" y="847"/> + <point key="canvasLocation" x="5343" y="587"/> </scene> <!--Sessions--> <scene sceneID="1rz-1N-nEt"> diff --git 
diff --git a/FaceRecogAttendance/Pods/Pods.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist b/FaceRecogAttendance/Pods/Pods.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
index cd39d20111f40a1efcf6362e8b95e36dd9064402..726ff6eca8471699082455e0dda9ae61de5692f2 100644
--- a/FaceRecogAttendance/Pods/Pods.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
+++ b/FaceRecogAttendance/Pods/Pods.xcodeproj/xcuserdata/Lucas.xcuserdatad/xcschemes/xcschememanagement.plist
@@ -65,35 +65,35 @@
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>17</integer>
+			<integer>14</integer>
 		</dict>
 		<key>GoogleDataTransport.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>12</integer>
+			<integer>9</integer>
 		</dict>
 		<key>GoogleUtilities.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>13</integer>
+			<integer>10</integer>
 		</dict>
 		<key>Pods-FaceRecogAttendance.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>20</integer>
+			<integer>17</integer>
 		</dict>
 		<key>PromisesObjC.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>21</integer>
+			<integer>18</integer>
 		</dict>
 		<key>abseil.xcscheme</key>
 		<dict>
@@ -107,35 +107,35 @@
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>15</integer>
+			<integer>12</integer>
 		</dict>
 		<key>gRPC-C++.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>14</integer>
+			<integer>11</integer>
 		</dict>
 		<key>gRPC-Core.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>16</integer>
+			<integer>13</integer>
 		</dict>
 		<key>leveldb-library.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>18</integer>
+			<integer>15</integer>
 		</dict>
 		<key>nanopb.xcscheme</key>
 		<dict>
 			<key>isShown</key>
 			<false/>
 			<key>orderHint</key>
-			<integer>19</integer>
+			<integer>16</integer>
 		</dict>
 	</dict>
 	<key>SuppressBuildableAutocreation</key>