Compare commits


No commits in common. "master" and "feature/faceApi" have entirely different histories.

50 changed files with 220 additions and 2816 deletions


@@ -1,41 +0,0 @@
kind: pipeline
type: docker
name: default

clone:
  disable: true

steps:
  - name: clone
    image: alpine/git
    commands:
      - git clone https://gitea.watsonlabs.net/watsonb8/homebridge-face-location.git .
      - git checkout $DRONE_COMMIT

  - name: build
    image: node
    commands:
      - npm install
      - npm run build

  - name: publish
    image: plugins/npm:1.0.0
    settings:
      username: admin
      password:
        from_secret: npm_password
      email: brandon@watsonlabs.net
      registry: "http://linuxhost.me:4873/"
    when:
      event:
        - tag

notify:
  image: drillster/drone-email
  host: smtp.watsonlabs.net
  username: srvGitea
  password:
    from_secret: smtp_password
  from: drone@watsonlabs.net
  when:
    status: [failure]


@@ -1,2 +0,0 @@
TRAINED_MODEL_DIR=./trainedModels
OUT_DIR=./out


@@ -1,4 +0,0 @@
REF_IMAGE_DIR=
TRAINED_MODEL_DIR=
OUT_DIR=
CONFIDENCE=

.gitignore

@@ -109,5 +109,3 @@ dist
 images/*
 bin
-out
-trainedModels/*

.vscode/launch.json

@@ -1,30 +1,21 @@
 {
-// Use IntelliSense to learn about possible attributes.
-// Hover to view descriptions of existing attributes.
-// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
-"version": "0.2.0",
-"configurations": [
-{
-"name": "Current TS File",
-"type": "node",
-"request": "launch",
-"args": ["${relativeFile}"],
-"runtimeArgs": ["--nolazy", "-r", "ts-node/register"],
-"sourceMaps": true,
-"cwd": "${workspaceRoot}",
-"protocol": "inspector"
-},
-{
-"type": "node",
-"request": "launch",
-"name": "Launch Program",
-"preLaunchTask": "build",
-"internalConsoleOptions": "openOnSessionStart",
-"program": "/Users/brandonwatson/.nvm/versions/node/v14.15.0/lib/node_modules/homebridge/bin/homebridge",
-"env": {
-"HOMEBRIDGE_OPTS": "/Users/brandonwatson/.homebridge"
-},
-"sourceMaps": true
-}
-]
-}
+// Use IntelliSense to learn about possible attributes.
+// Hover to view descriptions of existing attributes.
+// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
+"version": "0.2.0",
+"configurations": [
+{
+"type": "node",
+"request": "launch",
+"name": "Launch Program",
+"program": "${workspaceFolder}/bin/index.js",
+"preLaunchTask": "build",
+"console": "internalConsole",
+"internalConsoleOptions": "openOnSessionStart",
+"sourceMaps": true,
+"outFiles": [
+"${workspaceFolder}/**/*.js"
+]
+}
+]
+}

.vscode/tasks.json

@@ -1,18 +1,15 @@
 {
-// See https://go.microsoft.com/fwlink/?LinkId=733558
-// for the documentation about the tasks.json format
-"version": "2.0.0",
-"tasks": [
-{
-"type": "npm",
-"script": "build",
-"label": "build",
-"problemMatcher": []
-},
-{
-"type": "shell",
-"label": "build and install",
-"command": "npm run build&&sudo npm install -g --unsafe-perm ."
-}
-]
-}
+// See https://go.microsoft.com/fwlink/?LinkId=733558
+// for the documentation about the tasks.json format
+"version": "2.0.0",
+"tasks": [
+{
+"label": "build",
+"type": "typescript",
+"tsconfig": "tsconfig.json",
+"problemMatcher": [
+"$tsc"
+]
+}
+]
+}


@@ -1,77 +1,2 @@
# homebridge-face-location
Homebridge plugin providing real time location tracking via facial recognition.
## Installation
1. Download FFMPEG [here](https://ffmpeg.org/download.html)
> Note: FFMPEG is required in order to run homebridge-face-location
1. Clone the repository by running `git clone ssh://git@thebword.ddns.net:3122/watsonb8/homebridge-face-location.git`
1. Run `npm install` to install required modules
1. Run `npm run build` to build the module
1. Run `npm link` to link this instance to your global homebridge instance
## Configuration
```
{
  "platform": "HomeLocation",
  "name": "HomeLocation",
  "refImageDirectory": "/path/to/reference/images/",
  "trainedModelDirectory": "/path/to/trainedModel/data.json/",
  "weightDirectory": "/path/to/trained/weights/",
  "outputDirectory": "/path/to/desired/output/image/directory",
  "trainOnStartup": false,
  "rate": 1,
  "detectionTimeout": 90000,
  "watchdogTimeout": 10000,
  "debug": false,
  "writeOutput": true,
  "rooms": [
    {
      "name": "Kitchen",
      "rtspConnectionStrings": [
        "rtsp://username:password@example.com"
      ]
    },
    {
      "name": "LivingRoom",
      "rtspConnectionStrings": [
        "rtsp://username:password@example.com"
      ]
    }
  ]
}
```
#### Platform
- `refImageDirectory`: The location of the images used to train the facial recognition matcher
> NOTE: This directory should contain only subdirectories, each named for a person to be matched and containing that person's reference images (see the example layout below). Any labels found in this directory will be used in face tracking
- `trainedModelDirectory`: The location of the trained `data.json` file if it exists. This is also the location where training data will be placed if `trainOnStartup` is true
- `weightDirectory`: The location of the pre-trained weight files used for face detection
- `outputDirectory`: The directory to be used if debug output images are wanted
- `trainOnStartup`: If true, a trained model will be created and persisted in the `trainedModelDirectory` during startup
- `rate`: The rate, in frames per second, at which images are collected from the rtsp stream
- `detectionTimeout`: The number of milliseconds to wait after a person is detected before returning to the undetected state
- `watchdogTimeout`: The number of milliseconds to wait before restarting the rtsp stream when a new image has not been received
- `debug`: If true, additional debug logging is displayed in the console
- `writeOutput`: If true, an image will be written to disk for every frame received from the rtsp stream
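For example, with two tracked people labeled `Brandon` and `Aline` (illustrative names), `refImageDirectory` might be laid out as:
```
/path/to/reference/images/
├── Brandon/
│   ├── 001.jpg
│   └── 002.jpg
└── Aline/
    ├── 001.jpg
    └── 002.jpg
```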
#### Rooms
- `name`: The name of the room in which to detect
- `rtspConnectionStrings`: A list of camera rtsp connection strings to associate with the room


@@ -1,45 +0,0 @@
{
"pluginAlias": "HomeLocation",
"pluginType": "platform",
"singular": true,
"schema": {
"type": "object",
"properties": {
"name": {
"title": "Name",
"type": "string",
"required": true,
"default": "Example Dynamic Platform"
},
"refImageDirectory": {
"title": "RefImageDirectory",
"type": "string",
"required": true
},
"trainedModelDirectory": {
"title": "TrainedModelDirectory",
"type": "string",
"required": true
},
"rooms": {
"title": "Rooms",
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {
"title": "Name",
"type": "string",
"required": true
},
"rtspCameraConnectionString": {
"title": "RtspCameraConnectionString",
"type": "string",
"required": true
}
}
}
}
}
}
}


@@ -1,23 +0,0 @@
#!/bin/bash
remote_user="bmw"
remote_server="linuxhost.me"
deploy_location="/home/bmw/homebridge-face-location"
#build
tsc --build
#copy files to remote machine
scp -r bin $remote_user@$remote_server:$deploy_location
scp -r out $remote_user@$remote_server:$deploy_location
scp -r weights $remote_user@$remote_server:$deploy_location
scp -r trainedModels $remote_user@$remote_server:$deploy_location
scp package.json $remote_user@$remote_server:$deploy_location
#install package
ssh -t $remote_user@$remote_server "sudo npm install -g --unsafe-perm $deploy_location"
#restart service
ssh -t $remote_user@$remote_server "sudo systemctl restart homebridge.service"
echo done
exit


@@ -1,11 +1,8 @@
 {
 "folders": [
-{
-"path": "."
-}
+{
+"path": "."
+}
 ],
-"settings": {
-"editor.tabSize": 2,
-"debug.javascript.unmapMissingSources": true
-}
+"settings": {}
 }

package-lock.json

File diff suppressed because it is too large.


@@ -2,12 +2,11 @@
 "name": "homebridge-face-location",
 "version": "1.0.0",
 "description": "",
-"main": "bin/index.js",
+"main": "index.ts",
 "scripts": {
 "test": "echo \"Error: no test specified\" && exit 1",
-"build": "npm run copy-files && tsc --build",
-"copy-files": "copyfiles -u 1 -s weights/* bin/weights",
-"train": "npx ts-node ./scripts/train.ts"
+"build": "tsc --build && npm run copy-files",
+"copy-files": "copyfiles -u 1 resources/* bin/resources"
 },
 "repository": {
 "type": "git",
@@ -15,31 +14,15 @@
 },
 "keywords": [
 "homebridge",
-"typescript",
-"homebridge-plugin"
+"typescript"
 ],
-"engines": {
-"homebridge": ">=1.1.6"
-},
 "author": "Brandon Watson",
 "license": "ISC",
-"dependencies": {
-"@tensorflow/tfjs": "^2.6.0",
-"@tensorflow/tfjs-node": "^2.6.0",
-"@types/mime-types": "^2.1.0",
-"@types/sharp": "^0.26.1",
-"@vladmandic/face-api": "^0.8.8",
-"canvas": "^2.6.1",
-"dotenv-extended": "^2.9.0",
-"mime-types": "^2.1.27",
-"sharp": "^0.26.3",
-"tsyringe": "^4.4.0"
-},
 "devDependencies": {
-"@types/webgl2": "0.0.5",
-"typescript": "^4.0.5",
-"ts-node": "^9.0.0",
-"homebridge": "^1.1.6",
-"copyfiles": "^2.4.0"
+"copyfiles": "^2.4.0",
+"face-api.js": "^0.22.2",
+"node-rtsp-stream": "0.0.9",
+"rtsp-stream": "file:../rtsp-stream"
 }
 }


@@ -1,74 +0,0 @@
import { nets } from "@vladmandic/face-api";
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs from "fs";
import * as path from "path";
import dotenv from "dotenv-extended";
import { getFaceDetectorOptions } from "../src/common";
import * as mime from "mime-types";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
const main = async () => {
dotenv.load({
silent: false,
errorOnMissing: true,
});
const modelDir = process.env.TRAINED_MODEL_DIR as string;
const faceDetectionNet = nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(path.join(__dirname, "../weights"));
await nets.faceLandmark68Net.loadFromDisk(path.join(__dirname, "../weights"));
await nets.faceRecognitionNet.loadFromDisk(
path.join(__dirname, "../weights")
);
const raw = fs.readFileSync(path.join(modelDir, "data.json"), "utf-8");
const content = JSON.parse(raw);
const matcher = faceapi.FaceMatcher.fromJSON(content);
let dir = path.join(process.env.OUT_DIR as string);
const files = fs.readdirSync(dir);
await Promise.all(
files.map(async (file: string) => {
const mimeType = mime.contentType(path.extname(path.join(dir, file)));
if (!mimeType || !mimeType.startsWith("image")) {
return;
}
const imgData = fs.readFileSync(path.join(dir, file));
const input = ((await canvas.loadImage(imgData)) as unknown) as ImageData;
const out = faceapi.createCanvasFromMedia(input);
const resultsQuery = await faceapi
.detectAllFaces(out, getFaceDetectorOptions(faceDetectionNet))
.withFaceLandmarks()
.withFaceDescriptors();
if (resultsQuery.length > 0) {
for (const res of resultsQuery) {
const bestMatch = matcher.findBestMatch(res.descriptor);
console.log(
`Face Detected with ${
res.detection.score * 100
}% accuracy and a distance of ${bestMatch.distance}: ${
bestMatch.label
} in file ${path.join(dir, file)}`
);
}
} else {
console.log(
`No faces detected in file ${path.join(dir, file)}`
);
}
})
);
};
main();


@@ -1,58 +0,0 @@
import { Rtsp, IStreamEventArgs } from "../src/rtsp/rtsp";
import { nets } from "@vladmandic/face-api";
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs from "fs";
import * as path from "path";
import dotenv from "dotenv-extended";
import { delay, getFaceDetectorOptions, saveFile } from "../src/common";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
const main = async () => {
dotenv.load({
silent: false,
errorOnMissing: true,
});
const modelDir = process.env.TRAINED_MODEL_DIR as string;
const rtsp = new Rtsp("rtsp://brandon:asdf1234@192.168.1.229/live", {
rate: 0.5,
image: true,
codec: "copy",
});
const faceDetectionNet = nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(path.join(__dirname, "../weights"));
await nets.faceLandmark68Net.loadFromDisk(path.join(__dirname, "../weights"));
await nets.faceRecognitionNet.loadFromDisk(
path.join(__dirname, "../weights")
);
const raw = fs.readFileSync(path.join(modelDir, "data.json"), "utf-8");
const content = JSON.parse(raw);
const matcher = faceapi.FaceMatcher.fromJSON(content);
rtsp.dataEvent.push(async (sender: Rtsp, args: IStreamEventArgs) => {
const input = ((await canvas.loadImage(args.data)) as unknown) as ImageData;
const out = faceapi.createCanvasFromMedia(input);
await saveFile(process.env.OUT_DIR as string, "image.jpg", args.data);
const resultsQuery = await faceapi
.detectAllFaces(out, getFaceDetectorOptions(faceDetectionNet))
.withFaceLandmarks()
.withFaceDescriptors();
for (const res of resultsQuery) {
const bestMatch = matcher.matchDescriptor(res.descriptor);
console.log("Face Detected: " + bestMatch.label);
}
});
rtsp.start();
};
main();


@@ -1,16 +0,0 @@
import dotenv from "dotenv-extended";
import { Trainer } from "../src/trainer";
const main = async () => {
dotenv.load({
silent: false,
errorOnMissing: true,
});
const trainer = new Trainer(
process.env.REF_IMAGE_DIR as string,
process.env.TRAINED_MODEL_DIR as string
);
await trainer.train(true);
};
main();


@@ -1,79 +0,0 @@
import * as faceapi from "@vladmandic/face-api";
import * as path from "path";
import fs from "fs";
// SsdMobilenetv1Options
export const minConfidence = 0.4;
// TinyFaceDetectorOptions
export const inputSize = 416;
export const scoreThreshold = 0.5;
export const getFaceDetectorOptions = (
net: faceapi.NeuralNetwork<any>,
options?: {
confidence?: number;
inputSize?: number;
scoreThreshold?: number;
}
) => {
return net === faceapi.nets.ssdMobilenetv1
? new faceapi.SsdMobilenetv1Options({
minConfidence: options?.confidence ?? minConfidence,
})
: new faceapi.TinyFaceDetectorOptions({
inputSize: options?.inputSize ?? inputSize,
scoreThreshold: options?.scoreThreshold ?? scoreThreshold,
});
};
export const saveFile = async (
basePath: string,
fileName: string,
buf: Buffer
): Promise<void> => {
return new Promise(async (resolve, reject) => {
try {
//Create directory if it does not exist
await makeDirectory(basePath);
} catch (err) {
return reject(err);
}
//Write file to directory
try {
fs.writeFileSync(
path.join(basePath, fileName),
buf,
"base64"
);
} catch (err) {
return reject(err);
}
return resolve();
});
};
export const makeDirectory = (path: string): Promise<void> => {
return new Promise(async (resolve, reject) => {
if (!fs.existsSync(path)) {
fs.mkdir(path, async (err) => {
if (err) {
return reject(err);
}
return resolve();
});
} else {
return resolve();
}
});
};
export const delay = (ms: number): Promise<void> => {
return new Promise((resolve) => {
setTimeout(() => {
resolve();
}, ms);
});
};
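A quick usage sketch for `getFaceDetectorOptions` (not part of the diff; the values shown are placeholders). Passing the exact `faceapi.nets.ssdMobilenetv1` instance selects `SsdMobilenetv1Options`, so only the `confidence` override applies; any other net falls back to `TinyFaceDetectorOptions`:
```
import * as faceapi from "@vladmandic/face-api";
import { getFaceDetectorOptions } from "./common";

// SSD MobileNet -> SsdMobilenetv1Options with minConfidence 0.5
const ssdOptions = getFaceDetectorOptions(faceapi.nets.ssdMobilenetv1, {
  confidence: 0.5,
});

// Any other net -> TinyFaceDetectorOptions with the given overrides
const tinyOptions = getFaceDetectorOptions(faceapi.nets.tinyFaceDetector, {
  inputSize: 512,
  scoreThreshold: 0.6,
});
```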


@@ -1,41 +0,0 @@
import { PlatformConfig } from "homebridge";
export interface IConfig extends PlatformConfig {
refImageDirectory: string;
trainedModelDirectory: string;
weightDirectory: string;
outputDirectory: string;
trainOnStartup: boolean;
rooms: Array<IRoom>;
detectionTimeout?: number;
watchdogTimeout?: number;
debug?: boolean;
writeOutput?: boolean;
rate?: number;
confidence?: number;
}
export interface IRoom {
name: string;
rtspConnectionStrings: Array<string>;
}
export const isRoom = (object: any): object is IRoom => {
return "name" in object && "rtspConnectionStrings" in object;
};
export const isConfig = (object: any): object is IConfig => {
if (!("rooms" in object) || !Array.isArray(object["rooms"])) {
return false;
}
const roomsOkay =
object["rooms"].filter((room: any) => isRoom(room)).length ===
object["rooms"].length;
return (
"refImageDirectory" in object &&
"trainedModelDirectory" in object &&
"weightDirectory" in object &&
"outputDirectory" in object &&
"trainOnStartup" in object &&
"rooms" in object &&
roomsOkay
);
};


@@ -1,12 +0,0 @@
import { EventDelegate } from "./eventDelegate";
export class Event<T, K> extends Array<EventDelegate<T, K>> {
constructor() {
super();
}
public fire = (source: T, args: K) => {
for (const delegate of this) {
delegate(source, args);
}
};
}
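Because `Event` extends `Array`, subscribing is just `push`, and `fire` invokes every delegate in order. A minimal sketch (the sender/args types are illustrative):
```
import { Event } from "./event";

const progress = new Event<string, number>();
progress.push((sender, pct) => console.log(`${sender}: ${pct}%`));
progress.fire("upload", 42); // logs "upload: 42%"
```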


@@ -1 +0,0 @@
export type EventDelegate<T, K> = (sender: T, args: K) => void;


@@ -1,2 +0,0 @@
export * from "./event";
export * from "./eventDelegate";


@@ -1,144 +0,0 @@
import {
API,
DynamicPlatformPlugin,
Logger,
PlatformAccessory,
PlatformConfig,
Service,
Characteristic,
} from "homebridge";
import { IConfig, isConfig } from "./config";
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs from "fs";
import * as path from "path";
import { nets } from "@vladmandic/face-api";
import { FaceMatcher } from "@vladmandic/face-api";
import { Monitor } from "./monitor/monitor";
import { Trainer } from "./trainer";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
import { PLATFORM_NAME, PLUGIN_NAME } from "./settings";
import { LocationAccessory } from "./locationAccessory";
/**
* HomebridgePlatform
* This class is the main constructor for your plugin, this is where you should
* parse the user config and discover/register accessories with Homebridge.
*/
export class HomeLocationPlatform implements DynamicPlatformPlugin {
public readonly Service: typeof Service = this.api.hap.Service;
public readonly Characteristic: typeof Characteristic = this.api.hap
.Characteristic;
// this is used to track restored cached accessories
public readonly accessories: PlatformAccessory[] = [];
public config: IConfig;
constructor(
public readonly log: Logger,
config: PlatformConfig,
public readonly api: API
) {
this.log.debug("Finished initializing platform:", config.name);
if (!isConfig(config)) {
this.log.error("Configuration is incorrect or incomplete");
process.exit(1);
} else {
this.config = config;
}
this.api.on("didFinishLaunching", async () => {
log.debug("Executed didFinishLaunching callback");
// run the method to discover / register your devices as accessories
await this.discoverDevices();
});
}
/**
* This function is invoked when homebridge restores cached accessories from disk at startup.
* It should be used to setup event handlers for characteristics and update respective values.
*/
public configureAccessory(accessory: PlatformAccessory) {
this.log.info("Loading accessory from cache:", accessory.displayName);
// add the restored accessory to the accessories cache so we can track if it has already been registered
this.accessories.push(accessory);
}
/**
* This is an example method showing how to register discovered accessories.
* Accessories must only be registered once, previously created accessories
* must not be registered again to prevent "duplicate UUID" errors.
*/
public async discoverDevices() {
const faceDetectionNet = nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(this.config.weightDirectory);
await nets.faceLandmark68Net.loadFromDisk(this.config.weightDirectory);
await nets.faceRecognitionNet.loadFromDisk(this.config.weightDirectory);
//Train facial recognition model
let faceMatcher: FaceMatcher;
if (this.config.trainOnStartup) {
const trainer = new Trainer(
this.config.refImageDirectory,
this.config.trainedModelDirectory,
this.config.confidence
);
faceMatcher = await trainer.train(true);
} else {
const raw = fs.readFileSync(
path.join(this.config.trainedModelDirectory, "data.json"),
"utf-8"
);
faceMatcher = FaceMatcher.fromJSON(JSON.parse(raw));
}
const locationMonitor = new Monitor(
this.config.rooms,
faceMatcher,
this.log,
this.config
);
locationMonitor.startStreams();
const labels = faceMatcher.labeledDescriptors.map((e) => e.label);
for (const room of this.config.rooms) {
const uuid = this.api.hap.uuid.generate(room.name);
const existingAccessory = this.accessories.find((e) => e.UUID === uuid);
if (existingAccessory) {
this.log.info(
"Restoring existing accessory from cache: ",
existingAccessory.displayName
);
new LocationAccessory(this, existingAccessory, locationMonitor, room);
this.api.updatePlatformAccessories([existingAccessory]);
} else {
this.log.info("Adding new accessory:", `${room.name}`);
// create a new accessory
const accessory = new this.api.platformAccessory(`${room.name}`, uuid);
accessory.context["DeviceName"] = `${room.name}`;
// create the accessory handler for the newly created accessory
// this is imported from `locationAccessory.ts`
new LocationAccessory(this, accessory, locationMonitor, room);
// link the accessory to your platform
this.api.registerPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [
accessory,
]);
}
}
}
}


@@ -1,11 +1,35 @@
-import { API } from "homebridge";
+import { Rtsp } from "rtsp-stream/lib";
+// import nodejs bindings to native tensorflow,
+// not required, but will speed up things drastically (python required)
-import { PLATFORM_NAME } from "./settings";
-import { HomeLocationPlatform } from "./homeLocationPlatform";
+import * as faceapi from "face-api.js";
-/**
- * This method registers the platform with Homebridge
- */
-export = (api: API) => {
-api.registerPlatform(PLATFORM_NAME, HomeLocationPlatform);
+// implements nodejs wrappers for HTMLCanvasElement, HTMLImageElement, ImageData
+const canvas = require("canvas");
+// patch nodejs environment, we need to provide an implementation of
+// HTMLCanvasElement and HTMLImageElement
+const { Canvas, Image, ImageData } = canvas;
+faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
+const main = async () => {
+const rtsp = new Rtsp("rtsp://brandon:asdf1234@192.168.1.229/live", {
+rate: 10,
+});
+await faceapi.nets.ssdMobilenetv1.loadFromDisk("./resources");
+rtsp.on("data", async (data: Buffer) => {
+const input = await canvas.loadImage(data);
+const detections = await faceapi.detectAllFaces(input);
+console.log();
+process.exit(0);
+});
+rtsp.on("error", (err) => {
+console.log(err);
+});
+rtsp.start();
 };
+main();


@@ -1,118 +0,0 @@
import {
Service,
CharacteristicGetCallback,
PlatformAccessory,
} from "homebridge";
import { Monitor, IStateChangeEventArgs } from "./monitor/monitor";
import { HomeLocationPlatform } from "./homeLocationPlatform";
import { IRoom } from "./config";
const defaultDetectionTimeout = 180000;
interface IMotionDetectionService {
service: Service;
detectionTimeout: NodeJS.Timeout | null;
}
/**
* Platform Accessory
* An instance of this class is created for each accessory your platform registers
* Each accessory may expose multiple services of different service types.
*/
export class LocationAccessory {
private _services: Array<IMotionDetectionService>;
constructor(
private readonly _platform: HomeLocationPlatform,
private readonly _accessory: PlatformAccessory,
private _monitor: Monitor,
private _room: IRoom
) {
this._services = [];
// set accessory information
this._accessory
.getService(this._platform.Service.AccessoryInformation)!
.setCharacteristic(
this._platform.Characteristic.Manufacturer,
"Brandon Watson"
)
.setCharacteristic(
this._platform.Characteristic.Model,
"Person Location Sensor"
)
.setCharacteristic(
this._platform.Characteristic.SerialNumber,
"123-456-789"
);
//Init motion services
for (const label of this._monitor.labels) {
const newService =
this._accessory.getService(label) ||
this._accessory.addService(
this._platform.Service.MotionSensor,
label,
this._room.name + label
);
newService
.getCharacteristic(this._platform.Characteristic.MotionDetected)
.on("get", (callback: CharacteristicGetCallback) =>
this.onMotionDetectedGet(label, callback)
);
this._services.push({
service: newService,
detectionTimeout: null,
});
}
//Register monitor state change events
this._monitor.stateChangedEvent.push(this.onMonitorStateChange.bind(this));
}
private onMotionDetectedGet = (
label: string,
callback: CharacteristicGetCallback
) => {
this._platform.log.debug("Triggered GET MotionDetected");
// set this to a valid value for MotionDetected
const currentValue =
this._monitor.getState(label) === this._room.name ? 1 : 0;
callback(null, currentValue);
};
private onMonitorStateChange = (
sender: Monitor,
args: IStateChangeEventArgs
) => {
const motionService = this._services.find(
(motionService) => motionService.service.displayName == args.label
);
if (motionService) {
//Set accessory state
motionService.service.setCharacteristic(
this._platform.Characteristic.MotionDetected,
args.new === this._room.name
);
//Reset detectionTimeout
clearTimeout(motionService.detectionTimeout!);
motionService.detectionTimeout = setTimeout(
() => this.onDetectionTimeout(motionService),
this._platform.config.detectionTimeout ?? defaultDetectionTimeout
);
}
};
private onDetectionTimeout = (motionService: IMotionDetectionService) => {
//Set accessory state
motionService.service.setCharacteristic(
this._platform.Characteristic.MotionDetected,
0
);
this._monitor.resetState(motionService.service.displayName);
};
}


@@ -1,259 +0,0 @@
import { FaceMatcher } from "@vladmandic/face-api";
import { IRoom } from "../config";
import {
Rtsp,
IStreamEventArgs,
ICloseEventArgs,
IErrorEventArgs,
IMessageEventArgs,
} from "../rtsp/rtsp";
import canvas from "canvas";
import * as faceapi from "@vladmandic/face-api";
import { getFaceDetectorOptions, saveFile } from "../common";
import { nets } from "@vladmandic/face-api";
import { Logger } from "homebridge";
import { Event } from "../events";
import { IConfig } from "../config";
import { MonitorState } from "./monitorState";
import { IStream } from "./stream";
import sharp from "sharp";
const { Canvas, Image, ImageData } = canvas;
const defaultWatchDog = 30000;
const defaultRate = 0.7;
export interface IStateChangeEventArgs {
label: string;
old: string | null;
new: string;
}
export class Monitor {
private _state: MonitorState = {};
private _streamsByRoom: { [roomName: string]: Array<IStream> } = {};
private _faceDetectionNet = nets.ssdMobilenetv1;
private _stateChangedEvent: Event<this, IStateChangeEventArgs>;
constructor(
rooms: Array<IRoom>,
private _matcher: FaceMatcher,
private _logger: Logger,
private _config: IConfig
) {
this._stateChangedEvent = new Event();
//Initialize state
for (const room of rooms) {
this._streamsByRoom[room.name] = [
...room.rtspConnectionStrings.map((connectionString) => {
return this.getNewStream(connectionString, room.name);
}),
];
_matcher.labeledDescriptors.forEach((descriptor) => {
this._state[descriptor.label] = null;
});
}
}
/**
* @method getState
*
* @param label The name of the label to retrieve state for
*
* @returns The last known room of the requested label
*/
public getState(label: string): string | null {
return this._state[label];
}
public resetState(label: string): Monitor {
this._state[label] = null;
return this;
}
/**
* @property labels
*
* Gets the list of labels associated with the monitor
*/
public get labels(): Array<string> {
return this._matcher.labeledDescriptors
.map((descriptor) => descriptor.label)
.filter(
(label: string, index: number, array: Array<string>) =>
array.indexOf(label) === index
);
}
public get stateChangedEvent(): Event<this, IStateChangeEventArgs> {
return this._stateChangedEvent;
}
/**
* @method startStreams
*
* Starts monitoring rtsp streams
*/
public startStreams(): Monitor {
for (const key in this._streamsByRoom) {
for (const stream of this._streamsByRoom[key]) {
//Start stream
stream.rtsp.start();
//Start watchdog timer
stream.watchdogTimer = setTimeout(
() => this.onWatchdogTimeout(stream, key),
this._config.watchdogTimeout ?? defaultWatchDog
);
}
}
return this;
}
/**
* @method closeStreams
*
* Stops monitoring rtsp streams
*/
public closeStreams(): Monitor {
for (const key in this._streamsByRoom) {
for (const stream of this._streamsByRoom[key]) {
stream.rtsp.close();
//Stop watchdog timer
if (stream.watchdogTimer) {
clearTimeout(stream.watchdogTimer);
}
}
}
return this;
}
private onData = async (
room: string,
stream: IStream,
args: IStreamEventArgs
) => {
//Reset watchdog timer for the stream
clearTimeout(stream.watchdogTimer!);
stream.watchdogTimer = setTimeout(
() => this.onWatchdogTimeout(stream, room),
this._config.watchdogTimeout ?? defaultWatchDog
);
const regularizedImgData = await sharp(args.data)
.modulate({ brightness: 3 })
.sharpen()
.toBuffer();
//Detect faces in image
const input = ((await canvas.loadImage(
regularizedImgData
)) as unknown) as ImageData;
const out = faceapi.createCanvasFromMedia(input);
const resultsQuery = await faceapi
.detectAllFaces(
out,
getFaceDetectorOptions(this._faceDetectionNet, {
confidence: this._config.confidence,
})
)
.withFaceLandmarks()
.withFaceDescriptors();
//Write to output image
if (this._config.writeOutput) {
await saveFile(
this._config.outputDirectory,
room + ".jpg",
regularizedImgData
);
}
for (const res of resultsQuery) {
const bestMatch = this._matcher.findBestMatch(res.descriptor);
const old = this._state[bestMatch.label];
this._state[bestMatch.label] = room;
this._stateChangedEvent.fire(this, {
old: old,
new: room,
label: bestMatch.label,
});
this._logger.info(
`Face Detected with ${
res.detection.score * 100
}% accuracy and a distance of ${bestMatch.distance}: ${
bestMatch.label
} in room ${room}`
);
}
};
private getNewStream(connectionString: string, roomName: string): IStream {
const stream = {
rtsp: new Rtsp(connectionString, {
rate: this._config.rate ?? defaultRate,
image: true,
}),
watchdogTimer: null,
detectionTimer: null,
connectionString: connectionString,
};
connectionString = this.getRedactedConnectionString(connectionString);
//Subscribe to rtsp events
stream.rtsp.dataEvent.push((sender: Rtsp, args: IStreamEventArgs) =>
this.onData(roomName, stream, args)
);
//Only subscribe to these events if debug
if (this._config.debug) {
stream.rtsp.messageEvent.push((sender: Rtsp, args: IMessageEventArgs) => {
this._logger.info(`[${connectionString}] ${args.message}`);
});
stream.rtsp.errorEvent.push((sender: Rtsp, args: IErrorEventArgs) => {
this._logger.info(`[${connectionString}] ${args.message}`);
});
stream.rtsp.closeEvent.push((sender: Rtsp, args: ICloseEventArgs) => {
this._logger.info(
`[${connectionString}] Stream has exited: ${args.message}`
);
});
}
return stream;
}
private onWatchdogTimeout = async (stream: IStream, roomName: string) => {
this._logger.info(
`[${this.getRedactedConnectionString(
stream.connectionString
)}] Watchdog timeout: restarting stream`
);
//Close and remove old stream
stream.rtsp.close();
this._streamsByRoom[roomName].splice(
this._streamsByRoom[roomName].indexOf(stream),
1
);
//Create, add, and start a new stream in its place
const newStream = this.getNewStream(stream.connectionString, roomName);
this._streamsByRoom[roomName].push(newStream);
newStream.rtsp.start();
//Arm the replacement stream's watchdog (the old timer died with the old stream)
newStream.watchdogTimer = setTimeout(
() => this.onWatchdogTimeout(newStream, roomName),
this._config.watchdogTimeout ?? defaultWatchDog
);
};
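// Strips the password from an rtsp connection string before logging,
// e.g. "rtsp://user:secret@host/live" -> "rtsp://user:@host/live".
// Note: assumes no explicit port, since lastIndexOf(":") must point at
// the password separator.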
private getRedactedConnectionString(connectionString: string) {
const pwSepIdx = connectionString.lastIndexOf(":") + 1;
const pwEndIdx = connectionString.indexOf("@");
return (
connectionString.substring(0, pwSepIdx) +
connectionString.substring(pwEndIdx)
);
}
}


@@ -1 +0,0 @@
export type MonitorState = { [label: string]: string | null };


@@ -1,8 +0,0 @@
import { Rtsp } from "../rtsp/rtsp";
export interface IStream {
rtsp: Rtsp;
connectionString: string;
watchdogTimer: NodeJS.Timeout | null;
detectionTimer: NodeJS.Timeout | null;
}


@@ -1,7 +0,0 @@
export interface IOptions {
rate?: number;
quality?: number;
resolution?: string;
codec?: string;
image?: boolean;
}


@@ -1,181 +0,0 @@
import { ChildProcess, spawn } from "child_process";
import { Writable } from "stream";
import { IOptions } from "./options";
import { Event } from "../events";
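// Hex strings of the JPEG end-of-image (EOI) marker bytes 0xff 0xd9,
// used in onData below to detect the end of a frame on stdout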
const ef1 = "ff";
const ef2 = "d9";
export interface IStreamEventArgs {
data: Buffer;
}
export interface ICloseEventArgs {
message: string;
}
export interface IErrorEventArgs {
message?: string;
err?: Error;
}
export interface IMessageEventArgs {
message: string;
}
export class Rtsp {
private _connectionString: string;
private _childProcess: ChildProcess | undefined;
private _started: boolean;
private _buffer: Buffer;
private _options: IOptions;
private _paused: boolean;
private _dataEvent: Event<this, IStreamEventArgs>;
private _closeEvent: Event<this, ICloseEventArgs>;
private _errorEvent: Event<this, IErrorEventArgs>;
private _messageEvent: Event<this, IMessageEventArgs>;
constructor(connectionString: string, options: IOptions) {
this._started = false;
this._connectionString = connectionString;
this._childProcess = undefined;
this._buffer = Buffer.from("");
this._options = options;
this._paused = false;
this._dataEvent = new Event();
this._closeEvent = new Event();
this._errorEvent = new Event();
this._messageEvent = new Event();
this.onData = this.onData.bind(this);
}
public get isStarted(): boolean {
return this._started;
}
public get isPaused(): boolean {
return this._paused;
}
public get dataEvent(): Event<this, IStreamEventArgs> {
return this._dataEvent;
}
public get messageEvent(): Event<this, IMessageEventArgs> {
return this._messageEvent;
}
public get closeEvent(): Event<this, ICloseEventArgs> {
return this._closeEvent;
}
public get errorEvent(): Event<this, IErrorEventArgs> {
return this._errorEvent;
}
public start(): void {
const argStrings = [
`-rtsp_transport tcp`,
`-i ${this._connectionString}`,
`-qscale:v 1`,
`-r ${this._options.rate ?? 10}`,
`-vf mpdecimate,setpts=N/FRAME_RATE/TB`,
this._options.image
? `-f image2`
: `-codec:v ${this._options.codec ?? "libx264"}`,
`-update 1 -`,
];
const args = argStrings.join(" ");
this._childProcess = spawn("ffmpeg", args.split(/\s+/));
if (!this._childProcess) {
return;
}
this._childProcess.stdout?.on("data", this.onData);
this._childProcess.stdout?.on("error", (error: Error) =>
this._errorEvent.fire(this, { err: error })
);
this._childProcess.stdout?.on("close", () =>
this._closeEvent.fire(this, {
message: "Stream closed",
})
);
this._childProcess.stdout?.on("end", () =>
this._closeEvent.fire(this, {
message: "Stream ended",
})
);
//Only register this event if there are subscribers
if (this._childProcess.stderr && this._messageEvent.length > 0) {
this._childProcess.stderr.on("data", this.onMessage);
}
this._childProcess.on("close", (code: number, signal: NodeJS.Signals) =>
this._closeEvent.fire(this, {
message: "FFmpeg exited with code: " + code + " and signal: " + signal,
})
);
this._childProcess.on("exit", (code: number, signal: NodeJS.Signals) =>
this._closeEvent.fire(this, {
message: "FFmpeg exited with code: " + code + " and signal: " + signal,
})
);
this._childProcess.on("error", (error: Error) =>
this._errorEvent.fire(this, { err: error })
);
}
public close(): void {
this._childProcess && this._childProcess.kill("SIGKILL");
this._closeEvent.fire(this, { message: "Process killed by user" });
}
public pause(): void {
this._paused = true;
}
public resume(): void {
this._paused = false;
}
public getStdin(): Writable | null {
return this._childProcess ? this._childProcess.stdin : null;
}
private onMessage = (data: any): void => {
if (!this._started) {
this._started = true;
}
let msg = "";
data
.toString()
.split(/\n/)
.forEach((line: string) => {
msg += `${line}\n`;
});
this._messageEvent.fire(this, { message: msg });
};
private onData(data: Buffer): void {
if (!this._paused && data.length > 1) {
this._buffer = this._buffer
? Buffer.concat([this._buffer, data])
: Buffer.from(data);
//End of image
if (
data[data.length - 2].toString(16) == ef1 &&
data[data.length - 1].toString(16) == ef2
) {
this._dataEvent.fire(this, { data: this._buffer });
this._buffer = Buffer.from("");
}
}
}
}
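For reference, a minimal consumption sketch of this class (the connection string and rate are placeholders; the deleted scripts/testStream.ts above follows the same pattern):
```
import { Rtsp, IStreamEventArgs } from "./rtsp";

const rtsp = new Rtsp("rtsp://user:password@camera.local/live", {
  rate: 1,
  image: true, // emit one complete JPEG per dataEvent
});
rtsp.dataEvent.push((sender: Rtsp, args: IStreamEventArgs) => {
  console.log(`received frame of ${args.data.length} bytes`);
});
rtsp.start();
```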


@@ -1,9 +0,0 @@
/**
* This is the name of the platform that users will use to register the plugin in the Homebridge config.json
*/
export const PLATFORM_NAME = "HomeLocation";
/**
* This must match the name of your plugin as defined the package.json
*/
export const PLUGIN_NAME = "homebridge-face-location";


@@ -1,117 +0,0 @@
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs, { lstatSync } from "fs";
import * as path from "path";
import { LabeledFaceDescriptors, TNetInput } from "@vladmandic/face-api";
import * as mime from "mime-types";
import { getFaceDetectorOptions } from "./common";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
export class Trainer {
constructor(
private _refImageDir: string,
private _trainedModelDir: string,
private _confidence?: number
) {}
public async train(writeToDisk: boolean): Promise<faceapi.FaceMatcher> {
const faceDetectionNet = faceapi.nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(path.join(__dirname, "../weights"));
await faceapi.nets.faceLandmark68Net.loadFromDisk(
path.join(__dirname, "../weights")
);
await faceapi.nets.faceRecognitionNet.loadFromDisk(
path.join(__dirname, "../weights")
);
const options = getFaceDetectorOptions(faceDetectionNet, {
confidence: this._confidence,
});
const dirs = fs.readdirSync(this._refImageDir);
const refs = [];
for (const dir of dirs) {
const descriptor = new LabeledFaceDescriptors(dir, []);
try {
await this.getLabeledFaceDescriptorFromDir(
path.join(this._refImageDir, dir),
descriptor,
options
);
} catch (err) {
console.log(err);
}
if (descriptor) {
refs.push(descriptor);
}
}
const faceMatcher = new faceapi.FaceMatcher(refs);
if (writeToDisk) {
fs.writeFile(
path.join(this._trainedModelDir, "data.json"),
JSON.stringify(faceMatcher.toJSON()),
"utf8",
(err) => {
if (err) {
console.log(`An error occurred while writing data model to file`);
return;
}
console.log(`Successfully wrote data model to file`);
}
);
}
return faceMatcher;
}
private getLabeledFaceDescriptorFromDir = async (
dir: string,
labeledFaceDescriptors: LabeledFaceDescriptors,
options: faceapi.TinyFaceDetectorOptions | faceapi.SsdMobilenetv1Options
): Promise<void> => {
if (!lstatSync(dir).isDirectory()) {
return;
}
const files = fs.readdirSync(dir);
await Promise.all(
files.map(async (file: string) => {
const mimeType = mime.contentType(path.extname(path.join(dir, file)));
if (!mimeType || !mimeType.startsWith("image")) {
return;
}
console.log(path.join(dir, file));
try {
const referenceImage = (await canvas.loadImage(
path.join(dir, file)
)) as unknown;
const descriptor = await faceapi
.detectSingleFace(referenceImage as TNetInput, options)
.withFaceLandmarks()
.withFaceDescriptor();
if (!descriptor || !descriptor.descriptor) {
throw new Error("No face found");
}
labeledFaceDescriptors.descriptors.push(descriptor.descriptor);
} catch (err) {
console.log(
"An error occurred loading image at " +
path.join(dir, file) +
": " +
err.message
);
}
})
);
};
}


@@ -0,0 +1,63 @@
{
"compilerOptions": {
/* Basic Options */
// "incremental": true, /* Enable incremental compilation */
"target": "es5" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019' or 'ESNEXT'. */,
"module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */,
// "lib": [], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
// "declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
"sourceMap": true /* Generates corresponding '.map' file. */,
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./bin" /* Redirect output structure to the directory. */,
"rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */,
"composite": true /* Enable project compilation */,
// "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
// "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true /* Enable all strict type-checking options. */,
"noImplicitAny": true /* Raise error on expressions and declarations with an implied 'any' type. */,
// "strictNullChecks": true, /* Enable strict null checks. */
"strictFunctionTypes": true /* Enable strict checking of function types. */,
// "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
"noImplicitReturns": true /* Report error when not all code paths in function return a value. */,
"noFallthroughCasesInSwitch": true /* Report errors for fallthrough cases in switch statement. */,
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
"baseUrl": "./" /* Base directory to resolve non-absolute module names. */,
"paths": {
"rtsp/*": ["./node_modules/rtsp-stream/lib/*"]
} /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */,
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */,
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
// "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
// "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
/* Advanced Options */
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
}
}


@@ -1,4 +1,14 @@
 {
+"references": [
+{
+"path": "./node_modules/rtsp-stream/tsconfig.json"
+},
+{
+"path": "./tsconfig.face-location.json"
+}
+],
+"files": [],
+"exclude": ["node_modules/*"],
 "compilerOptions": {
 /* Basic Options */
 // "incremental": true, /* Enable incremental compilation */
@@ -39,8 +49,7 @@
 // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
 "baseUrl": "./" /* Base directory to resolve non-absolute module names. */,
 "paths": {
-"rtsp/*": ["node_modules/rtsp-stream/lib/*"],
-"common/*": ["node_modules/node-common/lib/*"]
+"rtsp/*": ["./node_modules/rtsp-stream/lib/*"]
 } /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */,
 // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
 // "typeRoots": [], /* List of folders to include type definitions from. */
@@ -60,7 +69,5 @@
 /* Advanced Options */
 "skipLibCheck": true,
 "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
-},
-"include": ["./src"],
-"exclude": ["node_modules"]
+}
 }