diff --git a/.env.schema b/.env.schema
index 12893ad..314354c 100644
--- a/.env.schema
+++ b/.env.schema
@@ -1,2 +1,3 @@
 REF_IMAGE_DIR=
-TRAINED_MODEL_DIR=
\ No newline at end of file
+TRAINED_MODEL_DIR=
+OUT_DIR=
\ No newline at end of file
diff --git a/homebridge-face-location.code-workspace b/homebridge-face-location.code-workspace
index 517e0b2..a9da0aa 100644
--- a/homebridge-face-location.code-workspace
+++ b/homebridge-face-location.code-workspace
@@ -1,8 +1,8 @@
 {
   "folders": [
-	{
-		"path": "."
-	}
+    {
+      "path": "."
+    }
   ],
   "settings": {}
 }
\ No newline at end of file
diff --git a/scripts/streamAndDetect.ts b/scripts/streamAndDetect.ts
new file mode 100644
index 0000000..a26b763
--- /dev/null
+++ b/scripts/streamAndDetect.ts
@@ -0,0 +1,66 @@
+import { Rtsp } from "rtsp-stream/lib";
+import { FaceMatcher, nets } from "@vladmandic/face-api";
+import * as faceapi from "@vladmandic/face-api";
+import canvas from "canvas";
+import fs from "fs";
+import * as path from "path";
+import dotenv from "dotenv-extended";
+import { getFaceDetectorOptions, saveFile } from "../src/common";
+require("@tensorflow/tfjs-node");
+
+const { Canvas, Image, ImageData } = canvas;
+//@ts-ignore
+faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
+
+const main = async () => {
+  dotenv.load({
+    silent: false,
+    errorOnMissing: true,
+  });
+
+  const modelDir = process.env.TRAINED_MODEL_DIR as string;
+
+  const rtsp = new Rtsp("rtsp://brandon:asdf1234@192.168.1.229/live", {
+    rate: 10,
+  });
+  const faceDetectionNet = nets.ssdMobilenetv1;
+
+  await faceDetectionNet.loadFromDisk(path.join(__dirname, "../weights"));
+  await nets.faceLandmark68Net.loadFromDisk(path.join(__dirname, "../weights"));
+  await nets.faceRecognitionNet.loadFromDisk(
+    path.join(__dirname, "../weights")
+  );
+
+  const files = fs.readdirSync(modelDir);
+  const matchers: Array<FaceMatcher> = [];
+  for (const file of files) {
+    const raw = fs.readFileSync(path.join(modelDir, file), "utf-8");
+    const content = JSON.parse(raw);
+    matchers.push(FaceMatcher.fromJSON(content));
+  }
+
+  rtsp.on("data", async (data: Buffer) => {
+    const img = new Image();
+    img.src = data.toString("base64");
+    const input = await canvas.loadImage(data, "base64");
+    const resultsQuery = await faceapi
+      .detectAllFaces(input, getFaceDetectorOptions(faceDetectionNet))
+      .withFaceLandmarks()
+      .withFaceDescriptors();
+
+    for (const res of resultsQuery) {
+      for (const matcher of matchers) {
+        const bestMatch = matcher.findBestMatch(res.descriptor);
+        console.log(bestMatch.label);
+      }
+    }
+  });
+
+  rtsp.on("error", (err) => {
+    console.log(err);
+  });
+
+  rtsp.start();
+};
+
+main();
diff --git a/scripts/train.ts b/scripts/train.ts
index 6580348..cda26fd 100644
--- a/scripts/train.ts
+++ b/scripts/train.ts
@@ -2,9 +2,10 @@ import * as faceapi from "@vladmandic/face-api";
 import canvas from "canvas";
 import fs, { lstatSync } from "fs";
 import * as path from "path";
-import { LabeledFaceDescriptors, TNetInput } from "@vladmandic/face-api";
+import { TNetInput } from "@vladmandic/face-api";
 import * as mime from "mime-types";
 import dotenv from "dotenv-extended";
+import { getFaceDetectorOptions } from "../src/common";
 require("@tensorflow/tfjs-node");
 
 const { Canvas, Image, ImageData } = canvas;
@@ -90,19 +91,4 @@ const main = async () => {
   }
 };
 
-// SsdMobilenetv1Options
-const minConfidence = 0.5;
-
-// TinyFaceDetectorOptions
-const inputSize = 408;
-const scoreThreshold = 0.5;
-
-function getFaceDetectorOptions(net: faceapi.NeuralNetwork<any>) {
-  return net === faceapi.nets.ssdMobilenetv1
-    ? new faceapi.SsdMobilenetv1Options({ minConfidence })
-    : new faceapi.TinyFaceDetectorOptions({ inputSize, scoreThreshold });
-}
-
-const baseDir = path.resolve(__dirname, "../out");
-
 main();
diff --git a/src/common.ts b/src/common.ts
new file mode 100644
index 0000000..87332b5
--- /dev/null
+++ b/src/common.ts
@@ -0,0 +1,25 @@
+import * as faceapi from "@vladmandic/face-api";
+import * as path from "path";
+import fs from "fs";
+
+// SsdMobilenetv1Options
+export const minConfidence = 0.5;
+
+// TinyFaceDetectorOptions
+export const inputSize = 408;
+export const scoreThreshold = 0.5;
+
+export const getFaceDetectorOptions = (net: faceapi.NeuralNetwork<any>) => {
+  return net === faceapi.nets.ssdMobilenetv1
+    ? new faceapi.SsdMobilenetv1Options({ minConfidence })
+    : new faceapi.TinyFaceDetectorOptions({ inputSize, scoreThreshold });
+};
+
+export function saveFile(fileName: string, buf: Buffer) {
+  const baseDir = process.env.OUT_DIR as string;
+  if (!fs.existsSync(baseDir)) {
+    fs.mkdirSync(baseDir);
+  }
+
+  fs.writeFileSync(path.resolve(baseDir, fileName), buf);
+}
diff --git a/src/index.ts b/src/index.ts
index 67b460a..accefce 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,20 +1 @@
-import { Rtsp } from "rtsp-stream/lib";
-
-const main = async () => {
-  const rtsp = new Rtsp("rtsp://brandon:asdf1234@192.168.1.229/live", {
-    rate: 10,
-  });
-
-  rtsp.on("data", async (data: Buffer) => {
-    console.log();
-    process.exit(0);
-  });
-
-  rtsp.on("error", (err) => {
-    console.log(err);
-  });
-
-  rtsp.start();
-};
-
-main();
+console.log("Hello World");
diff --git a/tsconfig.face-location.json b/tsconfig.face-location.json
index 68b58f7..28c576d 100644
--- a/tsconfig.face-location.json
+++ b/tsconfig.face-location.json
@@ -59,5 +59,6 @@
     /* Advanced Options */
     "skipLibCheck": true,
     "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
-  }
+  },
+  "include": ["./src"]
 }
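
For reference, a minimal sketch of how the helpers introduced in src/common.ts (getFaceDetectorOptions and saveFile) could be exercised on their own, outside the RTSP pipeline. The file name, its placement under scripts/, and the sample.jpg test image are assumptions for illustration only; the env vars (.env.schema) and the ../weights directory are the ones the diff already relies on.

```ts
// scripts/smokeTest.ts (hypothetical, not part of this diff)
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import dotenv from "dotenv-extended";
import * as path from "path";
import { getFaceDetectorOptions, saveFile } from "../src/common";
require("@tensorflow/tfjs-node");

const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

const main = async () => {
  // Requires REF_IMAGE_DIR, TRAINED_MODEL_DIR and OUT_DIR to be set in .env.
  dotenv.load({ silent: false, errorOnMissing: true });

  // Only the detector network is needed here; landmarks/descriptors are not used.
  await faceapi.nets.ssdMobilenetv1.loadFromDisk(path.join(__dirname, "../weights"));

  // "sample.jpg" is a placeholder test image, not a file from the repo.
  const img = await canvas.loadImage(path.join(__dirname, "sample.jpg"));
  const detections = await faceapi.detectAllFaces(
    img as unknown as faceapi.TNetInput,
    getFaceDetectorOptions(faceapi.nets.ssdMobilenetv1)
  );
  console.log(`detected ${detections.length} face(s)`);

  // saveFile resolves against OUT_DIR and creates the directory if it is missing.
  saveFile("detections.json", Buffer.from(JSON.stringify(detections, null, 2)));
};

main();
```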