diff --git a/enter/.obsidian/plugins/recent-files-obsidian/data.json b/enter/.obsidian/plugins/recent-files-obsidian/data.json
index dc9f411..ce69942 100644
--- a/enter/.obsidian/plugins/recent-files-obsidian/data.json
+++ b/enter/.obsidian/plugins/recent-files-obsidian/data.json
@@ -1,5 +1,21 @@
{
"recentFiles": [
+ {
+ "basename": "Step 1. Selfie Verification Program (Front End)",
+ "path": "Step 1. Selfie Verification Program (Front End).md"
+ },
+ {
+ "basename": "Selfie Verification Program High Level Description",
+ "path": "Selfie Verification Program High Level Description.md"
+ },
+ {
+ "basename": "Step 2. Program for facial feature Comparison",
+ "path": "Step 2. Program for facial feature Comparison.md"
+ },
+ {
+ "basename": "NYC-HAQ 2024",
+ "path": "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md"
+ },
{
"basename": "Vim",
"path": "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md"
@@ -64,10 +80,6 @@
"basename": "About QRG",
"path": "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/QRG/About QRG.md"
},
- {
- "basename": "NYC-HAQ 2024",
- "path": "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md"
- },
{
"basename": "NYU Haqathon",
"path": "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/Get to hacking!/NYU Haqathon.md"
@@ -187,18 +199,6 @@
{
"basename": "E-Mail",
"path": "Coding Tips (Classical)/Project Vault/Current Projects/Communication Projects/WRITTEN TEXT/Mail/Templates/E-Mail.md"
- },
- {
- "basename": "My Domain Names",
- "path": "Coding Tips (Classical)/Project Vault/Current Projects/Website Projects/My Domain Names.md"
- },
- {
- "basename": "Welcome to Quantum 101",
- "path": "Machine Tips (Quantum)/Resources/Welcome to Quantum 101.md"
- },
- {
- "basename": "QEC",
- "path": "Machine Tips (Quantum)/Resources/Post-Processing/QEC.md"
}
],
"omittedPaths": [],
diff --git a/enter/.obsidian/workspace.json b/enter/.obsidian/workspace.json
index 3790d26..c8abef3 100644
--- a/enter/.obsidian/workspace.json
+++ b/enter/.obsidian/workspace.json
@@ -49,7 +49,43 @@
"state": {
"type": "markdown",
"state": {
- "file": "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md",
+ "file": "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md",
+ "mode": "source",
+ "source": false
+ }
+ }
+ },
+ {
+ "id": "ed763deff73eedc4",
+ "type": "leaf",
+ "state": {
+ "type": "markdown",
+ "state": {
+ "file": "Step 1. Selfie Verification Program (Front End).md",
+ "mode": "source",
+ "source": false
+ }
+ }
+ },
+ {
+ "id": "815ca3d438a6b7ac",
+ "type": "leaf",
+ "state": {
+ "type": "markdown",
+ "state": {
+ "file": "Step 1. Selfie Verification Program (Front End).md",
+ "mode": "source",
+ "source": false
+ }
+ }
+ },
+ {
+ "id": "2d203d0ac92ac228",
+ "type": "leaf",
+ "state": {
+ "type": "markdown",
+ "state": {
+ "file": "Step 1. Selfie Verification Program (Front End).md",
"mode": "source",
"source": false
}
@@ -95,7 +131,7 @@
}
}
],
- "currentTab": 3
+ "currentTab": 6
}
],
"direction": "vertical"
@@ -154,8 +190,7 @@
}
],
"direction": "horizontal",
- "width": 200,
- "collapsed": true
+ "width": 200
},
"right": {
"id": "6b875168a70bd6eb",
@@ -179,7 +214,7 @@
"state": {
"type": "backlink",
"state": {
- "file": "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md",
+ "file": "Step 1. Selfie Verification Program (Front End).md",
"collapseAll": false,
"extraContext": false,
"sortOrder": "alphabetical",
@@ -196,7 +231,7 @@
"state": {
"type": "outgoing-link",
"state": {
- "file": "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md",
+ "file": "Step 1. Selfie Verification Program (Front End).md",
"linksCollapsed": false,
"unlinkedCollapsed": true
}
@@ -219,7 +254,7 @@
"state": {
"type": "outline",
"state": {
- "file": "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md"
+ "file": "Step 1. Selfie Verification Program (Front End).md"
}
}
}
@@ -247,8 +282,13 @@
"obsidian-excalidraw-plugin:Create new drawing": false
}
},
- "active": "941b044aa4c4c938",
+ "active": "2d203d0ac92ac228",
"lastOpenFiles": [
+ "Selfie Verification Program High Level Description.md",
+ "Step 2. Program for facial feature Comparison.md",
+ "Step 1. Selfie Verification Program (Front End).md",
+ "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md",
+ "Coding Tips (Classical)/Terminal Tips/1. Terminals/Text & Editors/Vim/Vim.md",
"About Obsidian/Alt. Obsidian.md",
"Machine Tips (Quantum)/Resources/Technologies, Orgs, & Apps/Applications/Quantum Cryptography/Blind Quantum computing.md",
"Machine Tips (Quantum)/Resources/Technologies, Orgs, & Apps/Applications/A-Z Applications.md",
@@ -266,7 +306,6 @@
"Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/QSL Conversation with Chris.md",
"Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NQI Joint Algorithms Workshop.md",
"Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/QRG/About QRG.md",
- "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md",
"Machine Tips (Quantum)/Project Vault/Quantum Master's Paper/Quantum Learners - The Term Paper Outline.md",
"Machine Tips (Quantum)/Project Vault/Constructions/Undergraduate Quantum Teaching Lab.md",
"Coding Tips (Classical)/Project Vault/Current Projects/Communication Projects/Makerfaire/Maker Faire.md",
@@ -275,10 +314,6 @@
"About Obsidian/imgFiles/Pasted image 20241008191503.png",
"Machine Tips (Quantum)/Resources/QIS/Qyte vs Byte.md",
"jetzy bmeeting.md",
- "Coding Tips (Classical)/Project Vault/Setup Guides/Setting up GDrive for Export.md",
- "Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/List of Quantum Companies.md",
- "Machine Tips (Quantum)/Resources/Technologies, Orgs, & Apps/Applications/Machine Learning/Machine Learning (QML).md",
- "Machine Tips (Quantum)/Resources/Technologies, Orgs, & Apps/Applications/More specifically/Art.md",
"About Obsidian/imgFiles/Pasted image 20240930161853.png",
"Machine Tips (Quantum)/Project Vault/Website Projects",
"Machine Tips (Quantum)/Resources/Technologies, Orgs, & Apps/Applications/Machine Learning",
diff --git a/enter/Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md b/enter/Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md
index 36db25b..595315a 100644
--- a/enter/Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md
+++ b/enter/Machine Tips (Quantum)/Resources/Companies, Orgs, & Events/NYC-HAQ 2024.md
@@ -1,4 +1,4 @@
-
+z
With CUNY, Columbia, & NYU
in collaboration with Qbraid, SandboxAQ, & QWorld.
diff --git a/enter/Selfie Verification Program High Level Description.md b/enter/Selfie Verification Program High Level Description.md
new file mode 100644
index 0000000..7277e91
--- /dev/null
+++ b/enter/Selfie Verification Program High Level Description.md
@@ -0,0 +1,11 @@
+
+
+Creating a selfie verification program in JavaScript involves a combination of image processing, machine learning, and user interface design. Here's a step-by-step guide to building a robust one:
+
+### 1. **Core Requirements:**
+
+- **Webcam Integration**: To capture selfies from the user's camera.
+- **Facial Recognition**: To compare the captured selfie with a pre-existing reference image (e.g., a user profile picture).
+- **Machine Learning Model**: For facial recognition and comparison (often via pre-trained models from libraries like OpenCV or face-api.js, or external services like AWS Rekognition or the Microsoft Azure Face API).
+- **Backend Verification**: To handle the logic, store data securely, and interact with a database (a minimal sketch follows this list).
+- **User Interface**: To allow the user to take a selfie, view results, and handle errors (e.g., no face detected, low quality).
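+
+Since Steps 1 and 2 below focus on the frontend, here is a minimal sketch of the backend verification piece, assuming a Node.js/Express server; the `/verify-selfie` route and port are placeholder choices, not part of the notes above.
+
+```javascript
+// Minimal sketch only: the route name and port are placeholders.
+const express = require('express');
+const app = express();
+
+// Selfies arrive as base64 data URLs, so raise the JSON body-size limit.
+app.use(express.json({ limit: '10mb' }));
+
+app.post('/verify-selfie', async (req, res) => {
+  const { selfie, userId } = req.body; // base64 selfie + the user to verify against
+
+  if (!selfie || !userId) {
+    return res.status(400).json({ error: 'Missing selfie or userId' });
+  }
+
+  // Placeholder: look up the stored reference image for this user and run the
+  // face comparison here (e.g., face-api.js on the server or an external face API).
+  const verified = false; // replace with the real comparison result
+
+  res.json({ verified });
+});
+
+app.listen(3000);
+```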
\ No newline at end of file
diff --git a/enter/Step 1. Selfie Verification Program (Front End).md b/enter/Step 1. Selfie Verification Program (Front End).md
new file mode 100644
index 0000000..7f45801
--- /dev/null
+++ b/enter/Step 1. Selfie Verification Program (Front End).md
@@ -0,0 +1,101 @@
+
+
+#### A. **Frontend: Capture Selfie**
+
+The frontend will handle capturing the selfie from the user's webcam and sending it to the backend for analysis.
+
+
+
+## Step 1: **Set Up Webcam Access**
+
+- Use the `navigator.mediaDevices.getUserMedia()` API to access the webcam. This allows users to capture their selfies in real time.
+
+```javascript
+async function startWebcam() {
+  try {
+    // Ask the browser for camera access and stream it into the <video> element.
+    const stream = await navigator.mediaDevices.getUserMedia({ video: true });
+    const video = document.querySelector('#videoElement');
+    video.srcObject = stream;
+  } catch (err) {
+    console.error('Could not access the webcam:', err);
+  }
+}
+
+startWebcam();
+```
+
+
+Within the app's HTML page (for example, the page the Flutter app embeds in a WebView), add the elements the scripts refer to by id:
+
+```html
+<!-- Video preview for the webcam stream and a hidden canvas for capturing frames -->
+<video id="videoElement" autoplay playsinline></video>
+<canvas id="canvasElement" style="display: none;"></canvas>
+```
+
+
+
+
+## Step 2: **Capture the Selfie**
+
+
+```javascript
+function captureSelfie() {
+ const video = document.querySelector('#videoElement');
+ const canvas = document.querySelector('#canvasElement');
+ const context = canvas.getContext('2d');
+ const width = video.videoWidth;
+ const height = video.videoHeight;
+
+ canvas.width = width;
+ canvas.height = height;
+ context.drawImage(video, 0, 0, width, height);
+
+ return canvas.toDataURL('image/png'); // Returns the selfie as a base64-encoded image
+}
+
+```
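+
+For context, a minimal sketch of how the capture might be wired to the page and sent for verification; the `#captureButton` id and the `/verify-selfie` endpoint are placeholder names (the endpoint matches the backend sketch in the high-level description note), not part of the original steps.
+
+```javascript
+// Sketch only: '#captureButton' and '/verify-selfie' are placeholder names.
+document.querySelector('#captureButton').addEventListener('click', async () => {
+  const selfieDataUrl = captureSelfie();
+
+  // Send the base64 selfie to the backend for verification.
+  const response = await fetch('/verify-selfie', {
+    method: 'POST',
+    headers: { 'Content-Type': 'application/json' },
+    body: JSON.stringify({ selfie: selfieDataUrl, userId: 'example-user' }),
+  });
+
+  const { verified } = await response.json();
+  console.log(verified ? 'Selfie verified' : 'Selfie did not match');
+});
+```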
+
+
+
+## Step 3: Incorporating the ML Models for the Selfie
+
+
+1. Include the `face-api.js` library in the HTML first:
+
+
+```html
+<!-- Load face-api.js; adjust the src to wherever you host face-api.min.js -->
+<script src="face-api.min.js"></script>
+```
+
+
+2. Then load and call the models from your JavaScript program.
+
+
+**This is the part to research further and improve on beyond the existing process.**
+
+Function for Loading the Models:
+```javascript
+// '/models' is the folder on your site that serves the face-api.js weight files.
+async function loadFaceAPI() {
+  await faceapi.nets.ssdMobilenetv1.loadFromUri('/models');    // face detection
+  await faceapi.nets.faceLandmark68Net.loadFromUri('/models');  // 68-point facial landmarks
+  await faceapi.nets.faceRecognitionNet.loadFromUri('/models'); // face descriptors for comparison
+}
+```
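+
+A small usage sketch of the startup order, assuming the `startWebcam()` function from Step 1; the models must finish loading before any detection or comparison runs.
+
+```javascript
+// Sketch: load the models and start the webcam before allowing capture.
+async function init() {
+  await loadFaceAPI();  // detection/comparison will fail until the models are loaded
+  await startWebcam();
+}
+
+init();
+```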
+
+
+Function for detecting a face and extracting its descriptor from an image:
+```javascript
+async function detectFace(image) {
+  // Returns the detection (landmarks + descriptor), or undefined if no face is found.
+  const detections = await faceapi.detectSingleFace(image)
+    .withFaceLandmarks()
+    .withFaceDescriptor();
+  return detections;
+}
+```
+
+
+Remember to place the face-api.js model weights in a separate folder (the `/models` path used in `loadFaceAPI()` above).
+The weight files for the three networks loaded there (`ssdMobilenetv1`, `faceLandmark68Net`, and `faceRecognitionNet`) must be served from your web server or local file system for `face-api.js` to work properly.
\ No newline at end of file
diff --git a/enter/Step 2. Program for facial feature Comparison.md b/enter/Step 2. Program for facial feature Comparison.md
new file mode 100644
index 0000000..908be40
--- /dev/null
+++ b/enter/Step 2. Program for facial feature Comparison.md
@@ -0,0 +1,23 @@
+
+
+If you already have the reference face (like the user's profile picture), extract the descriptors of both the selfie and the reference image, then compare them.
+
+```javascript
+
+// Uses detectFace() from Step 1 to get a descriptor for each image.
+async function compareFaces(image1, image2) {
+ const detections1 = await detectFace(image1);
+ const detections2 = await detectFace(image2);
+
+ if (!detections1 || !detections2) {
+ return false; // No face detected
+ }
+
+ const distance = faceapi.euclideanDistance(
+ detections1.descriptor, detections2.descriptor
+ );
+
+ const MATCH_THRESHOLD = 0.6; // Set an appropriate threshold
+ return distance < MATCH_THRESHOLD; // If distance is below the threshold, the faces match
+}
+
+```
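+
+A short usage sketch tying the pieces together, assuming a reference image already on the page with the placeholder id `#profilePicture` and the `captureSelfie()` function from Step 1.
+
+```javascript
+// Sketch only: '#profilePicture' is a placeholder id for the reference image element.
+async function verifySelfie() {
+  // Turn the captured base64 selfie into an image element face-api.js can read.
+  const selfie = new Image();
+  selfie.src = captureSelfie();
+  await selfie.decode();
+
+  const reference = document.querySelector('#profilePicture');
+  const isMatch = await compareFaces(selfie, reference);
+
+  console.log(isMatch ? 'Faces match' : 'Faces do not match');
+}
+```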
\ No newline at end of file