Successfully training models based on a collection of images

watsonb8 2020-11-08 20:57:57 -05:00
parent e1902a362e
commit fda68e7144
7 changed files with 176 additions and 113 deletions

.env.schema Normal file (2 lines changed)

@@ -0,0 +1,2 @@
REF_IMAGE_DIR=
TRAINED_MODEL_DIR=
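
A minimal sketch of how these two variables are typically consumed with dotenv-extended, the loader this commit adds; only REF_IMAGE_DIR, TRAINED_MODEL_DIR, and the .env.schema path come from this repository, the rest is illustrative:

import dotenv from "dotenv-extended";

// Load .env and validate it against .env.schema; errorOnMissing makes the
// script fail fast when REF_IMAGE_DIR or TRAINED_MODEL_DIR is unset.
const env = dotenv.load({
  schema: ".env.schema", // dotenv-extended's default schema path, shown for clarity
  errorOnMissing: true,
});

const refImageDir = env.REF_IMAGE_DIR;         // labeled reference images, one folder per person
const trainedModelDir = env.TRAINED_MODEL_DIR; // output directory for the serialized matchers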

.gitignore vendored (1 line changed)

@@ -110,3 +110,4 @@ dist
images/*
bin
out
trainedModels/*

.vscode/tasks.json vendored (26 lines changed)

@@ -1,15 +1,13 @@
{
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"type": "typescript",
"tsconfig": "tsconfig.json",
"problemMatcher": [
"$tsc"
]
}
]
}
// See https://go.microsoft.com/fwlink/?LinkId=733558
// for the documentation about the tasks.json format
"version": "2.0.0",
"tasks": [
{
"label": "build",
"type": "typescript",
"tsconfig": "tsconfig.json",
"problemMatcher": ["$tsc"]
}
]
}

package-lock.json generated (130 lines changed)

@@ -92,6 +92,11 @@
"tar": "^4.4.6"
}
},
"@types/mime-types": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.0.tgz",
"integrity": "sha1-nKUs2jY/aZxpRmwqbM2q2RPqenM="
},
"@types/node": {
"version": "14.14.6",
"resolved": "https://registry.npmjs.org/@types/node/-/node-14.14.6.tgz",
@@ -194,6 +199,14 @@
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k="
},
"auto-parse": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/auto-parse/-/auto-parse-1.8.0.tgz",
"integrity": "sha512-Uri4uC+K5cSi5hjM4snFrqPrjqUpwxeSW5EMTPvN7Ju3PlDzmXXDr5tjdzxPvvwgT3J7bmMDJ3Rm625nbrc72A==",
"requires": {
"typpy": "2.3.11"
}
},
"balanced-match": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.0.tgz",
@@ -390,6 +403,16 @@
"resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz",
"integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac="
},
"cross-spawn": {
"version": "7.0.3",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz",
"integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==",
"requires": {
"path-key": "^3.1.0",
"shebang-command": "^2.0.0",
"which": "^2.0.1"
}
},
"debug": {
"version": "3.2.6",
"resolved": "https://registry.npmjs.org/debug/-/debug-3.2.6.tgz",
@@ -436,6 +459,22 @@
"resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
"integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A=="
},
"dotenv": {
"version": "8.2.0",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-8.2.0.tgz",
"integrity": "sha512-8sJ78ElpbDJBHNeBzUbUVLsqKdccaa/BXF1uPTw3GrvQTBgrQrtObr2mUrE38vzYd8cEv+m/JBfDLioYcfXoaw=="
},
"dotenv-extended": {
"version": "2.9.0",
"resolved": "https://registry.npmjs.org/dotenv-extended/-/dotenv-extended-2.9.0.tgz",
"integrity": "sha512-MKc4WCqZj6Abx4rpDbQ9LsuBJldRLxLgFkY5qE+4JM7hXVYT/v8zyWGgnBeDjSOGzEecWOFPlosNpxfB9YnsCw==",
"requires": {
"auto-parse": "^1.3.0",
"camelcase": "^5.3.1",
"cross-spawn": "^7.0.1",
"dotenv": "^8.2.0"
}
},
"emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
@@ -459,40 +498,6 @@
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="
},
"face-api.js": {
"version": "0.22.2",
"resolved": "https://registry.npmjs.org/face-api.js/-/face-api.js-0.22.2.tgz",
"integrity": "sha512-9Bbv/yaBRTKCXjiDqzryeKhYxmgSjJ7ukvOvEBy6krA0Ah/vNBlsf7iBNfJljWiPA8Tys1/MnB3lyP2Hfmsuyw==",
"requires": {
"@tensorflow/tfjs-core": "1.7.0",
"tslib": "^1.11.1"
},
"dependencies": {
"@tensorflow/tfjs-core": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-1.7.0.tgz",
"integrity": "sha512-uwQdiklNjqBnHPeseOdG0sGxrI3+d6lybaKu2+ou3ajVeKdPEwpWbgqA6iHjq1iylnOGkgkbbnQ6r2lwkiIIHw==",
"requires": {
"@types/offscreencanvas": "~2019.3.0",
"@types/seedrandom": "2.4.27",
"@types/webgl-ext": "0.0.30",
"@types/webgl2": "0.0.4",
"node-fetch": "~2.1.2",
"seedrandom": "2.4.3"
}
},
"@types/webgl2": {
"version": "0.0.4",
"resolved": "https://registry.npmjs.org/@types/webgl2/-/webgl2-0.0.4.tgz",
"integrity": "sha512-PACt1xdErJbMUOUweSrbVM7gSIYm1vTncW2hF6Os/EeWi6TXYAYMPp+8v6rzHmypE5gHrxaxZNXgMkJVIdZpHw=="
},
"node-fetch": {
"version": "2.1.2",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.1.2.tgz",
"integrity": "sha1-q4hOjn5X44qUR1POxwb3iNF2i7U="
}
}
},
"find-up": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
@@ -525,6 +530,14 @@
"resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
"integrity": "sha1-FQStJSMVjKpA20onh8sBQRmU6k8="
},
"function.name": {
"version": "1.0.13",
"resolved": "https://registry.npmjs.org/function.name/-/function.name-1.0.13.tgz",
"integrity": "sha512-mVrqdoy5npWZyoXl4DxCeuVF6delDcQjVS9aPdvLYlBxtMTZDR2B5GVEQEoM1jJyspCqg3C0v4ABkLE7tp9xFA==",
"requires": {
"noop6": "^1.0.1"
}
},
"gauge": {
"version": "2.7.4",
"resolved": "https://registry.npmjs.org/gauge/-/gauge-2.7.4.tgz",
@@ -660,6 +673,11 @@
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
"integrity": "sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE="
},
"isexe": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
"integrity": "sha1-6PvzdNxVb/iUehDcsFctYz8s+hA="
},
"locate-path": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
@@ -803,6 +821,11 @@
}
}
},
"noop6": {
"version": "1.0.9",
"resolved": "https://registry.npmjs.org/noop6/-/noop6-1.0.9.tgz",
"integrity": "sha512-DB3Hwyd89dPr5HqEPg3YHjzvwh/mCqizC1zZ8vyofqc+TQRyPDnT4wgXXbLGF4z9YAzwwTLi8pNLhGqcbSjgkA=="
},
"nopt": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/nopt/-/nopt-4.0.3.tgz",
@@ -914,6 +937,11 @@
"resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
"integrity": "sha1-F0uSaHNVNP+8es5r9TpanhtcX18="
},
"path-key": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
"integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="
},
"process-nextick-args": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz",
@@ -1020,6 +1048,19 @@
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
"integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
},
"shebang-command": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
"integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
"requires": {
"shebang-regex": "^3.0.0"
}
},
"shebang-regex": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
"integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="
},
"signal-exit": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz",
@@ -1133,16 +1174,19 @@
"yn": "3.1.1"
}
},
"tslib": {
"version": "1.14.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz",
"integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg=="
},
"typescript": {
"version": "4.0.5",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.0.5.tgz",
"integrity": "sha512-ywmr/VrTVCmNTJ6iV2LwIrfG1P+lv6luD8sUJs+2eI9NLGigaN+nUQc13iHqisq7bra9lnmUSYqbJvegraBOPQ=="
},
"typpy": {
"version": "2.3.11",
"resolved": "https://registry.npmjs.org/typpy/-/typpy-2.3.11.tgz",
"integrity": "sha512-Jh/fykZSaxeKO0ceMAs6agki9T5TNA9kiIR6fzKbvafKpIw8UlNlHhzuqKyi5lfJJ5VojJOx9tooIbyy7vHV/g==",
"requires": {
"function.name": "^1.0.3"
}
},
"untildify": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/untildify/-/untildify-4.0.0.tgz",
@@ -1153,6 +1197,14 @@
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8="
},
"which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
"integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
"requires": {
"isexe": "^2.0.0"
}
},
"which-module": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",

package.json

@@ -5,8 +5,9 @@
"main": "index.ts",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "tsc --build && npm run copy-files",
"copy-files": "copyfiles -u 1 resources/* bin/resources"
"build": "npm run copy-files && tsc --build",
"copy-files": "copyfiles -u 1 -s weights/* bin/weights",
"train": "npx ts-node ./scripts/train.ts"
},
"repository": {
"type": "git",
@@ -21,9 +22,12 @@
"dependencies": {
"@tensorflow/tfjs": "^2.6.0",
"@tensorflow/tfjs-node": "^2.6.0",
"@types/mime-types": "^2.1.0",
"@vladmandic/face-api": "^0.8.8",
"canvas": "^2.6.1",
"copyfiles": "^2.4.0",
"dotenv-extended": "^2.9.0",
"mime-types": "^2.1.27",
"rtsp-stream": "file:../rtsp-stream",
"ts-node": "^9.0.0",
"typescript": "^4.0.5"

(changed file, name not shown)

@@ -1,20 +1,24 @@
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs from "fs";
import fs, { lstatSync } from "fs";
import * as path from "path";
import { TNetInput } from "@vladmandic/face-api";
import { LabeledFaceDescriptors, TNetInput } from "@vladmandic/face-api";
import * as mime from "mime-types";
import dotenv from "dotenv-extended";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
const REFERENCE_IMAGE =
"/Users/brandonwatson/Documents/Git/Gitea/homebridge-face-location/images/brandon/IMG_1958.jpg";
const QUERY_IMAGE =
"/Users/brandonwatson/Documents/Git/Gitea/homebridge-face-location/images/brandon/IMG_0001.JPG";
const main = async () => {
dotenv.load({
silent: false,
errorOnMissing: true,
});
const inputDir = process.env.REF_IMAGE_DIR as string;
const outDir = process.env.TRAINED_MODEL_DIR as string;
const faceDetectionNet = faceapi.nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(path.join(__dirname, "../weights"));
await faceapi.nets.faceLandmark68Net.loadFromDisk(
@@ -24,41 +28,66 @@ const main = async () => {
path.join(__dirname, "../weights")
);
const referenceImage = (await canvas.loadImage(REFERENCE_IMAGE)) as unknown;
const queryImage = (await canvas.loadImage(QUERY_IMAGE)) as unknown;
const options = getFaceDetectorOptions(faceDetectionNet);
const resultsRef = await faceapi
.detectAllFaces(referenceImage as TNetInput, options)
.withFaceLandmarks()
.withFaceDescriptors();
const dirs = fs.readdirSync(inputDir);
const resultsQuery = await faceapi
.detectAllFaces(queryImage as TNetInput, options)
.withFaceLandmarks()
.withFaceDescriptors();
for (const dir of dirs) {
if (!lstatSync(path.join(inputDir, dir)).isDirectory()) {
continue;
}
const files = fs.readdirSync(path.join(inputDir, dir));
let referenceResults = await Promise.all(
files.map(async (file: string) => {
const mimeType = mime.contentType(
path.extname(path.join(inputDir, dir, file))
);
if (!mimeType || !mimeType.startsWith("image")) {
return;
}
console.log(path.join(inputDir, dir, file));
const faceMatcher = new faceapi.FaceMatcher(resultsRef);
try {
const referenceImage = (await canvas.loadImage(
path.join(inputDir, dir, file)
)) as unknown;
const labels = faceMatcher.labeledDescriptors.map((ld) => ld.label);
const refDrawBoxes = resultsRef
.map((res) => res.detection.box)
.map((box, i) => new faceapi.draw.DrawBox(box, { label: labels[i] }));
const outRef = faceapi.createCanvasFromMedia(referenceImage as ImageData);
refDrawBoxes.forEach((drawBox) => drawBox.draw(outRef));
const descriptor = await faceapi
.detectAllFaces(referenceImage as TNetInput, options)
.withFaceLandmarks()
.withFaceDescriptors();
saveFile("referenceImage.jpg", (outRef as any).toBuffer("image/jpeg"));
return descriptor.length > 0 ? descriptor : undefined;
} catch (err) {
console.log(
"An error occurred loading image at path: " +
path.join(inputDir, dir, file)
);
}
return undefined;
})
);
const queryDrawBoxes = resultsQuery.map((res) => {
const bestMatch = faceMatcher.findBestMatch(res.descriptor);
return new faceapi.draw.DrawBox(res.detection.box, {
label: bestMatch.toString(),
});
});
const outQuery = faceapi.createCanvasFromMedia(queryImage as ImageData);
queryDrawBoxes.forEach((drawBox) => drawBox.draw(outQuery));
saveFile("queryImage.jpg", (outQuery as any).toBuffer("image/jpeg"));
console.log("done, saved results to out/queryImage.jpg");
const items = [];
for (const item of referenceResults) {
if (item) {
items.push(...item);
}
}
const faceMatcher = new faceapi.FaceMatcher(items);
fs.writeFile(
path.join(outDir, dir + ".json"),
JSON.stringify(faceMatcher.toJSON()),
"utf8",
(err) => {
if (err) {
console.log(`An error occurred while writing ${dir} model to file`);
}
console.log(`Successfully wrote ${dir} model to file`);
}
);
}
};
// SsdMobilenetv1Options
@@ -76,12 +105,4 @@ function getFaceDetectorOptions(net: faceapi.NeuralNetwork<any>) {
const baseDir = path.resolve(__dirname, "../out");
function saveFile(fileName: string, buf: Buffer) {
if (!fs.existsSync(baseDir)) {
fs.mkdirSync(baseDir);
}
fs.writeFileSync(path.resolve(baseDir, fileName), buf);
}
main();
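
The loop above writes one serialized FaceMatcher per labeled folder to TRAINED_MODEL_DIR as <label>.json. A minimal sketch of reading such a file back at detection time, assuming the same layout; loadMatcher and its call site are illustrative and not part of this commit:

import * as faceapi from "@vladmandic/face-api";
import fs from "fs";
import * as path from "path";

// Rehydrate a matcher that the training script serialized with faceMatcher.toJSON().
const loadMatcher = (trainedModelDir: string, label: string): faceapi.FaceMatcher => {
  const json = JSON.parse(
    fs.readFileSync(path.join(trainedModelDir, `${label}.json`), "utf8")
  );
  return faceapi.FaceMatcher.fromJSON(json);
};

// Example usage against a descriptor produced by detectAllFaces(...).withFaceDescriptors():
// const matcher = loadMatcher(process.env.TRAINED_MODEL_DIR as string, "brandon");
// const best = matcher.findBestMatch(result.descriptor);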

(changed file, name not shown)

@@ -1,26 +1,11 @@
import { Rtsp } from "rtsp-stream/lib";
// import nodejs bindings to native tensorflow,
// not required, but will speed up things drastically (python required)
import * as faceapi from "face-api.js";
// implements nodejs wrappers for HTMLCanvasElement, HTMLImageElement, ImageData
const canvas = require("canvas");
// patch nodejs environment, we need to provide an implementation of
// HTMLCanvasElement and HTMLImageElement
const { Canvas, Image, ImageData } = canvas;
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
const main = async () => {
const rtsp = new Rtsp("rtsp://brandon:asdf1234@192.168.1.229/live", {
rate: 10,
});
await faceapi.nets.ssdMobilenetv1.loadFromDisk("./resources");
rtsp.on("data", async (data: Buffer) => {
const input = await canvas.loadImage(data);
const detections = await faceapi.detectAllFaces(input);
console.log();
process.exit(0);
});