import {
  API,
  DynamicPlatformPlugin,
  Logger,
  PlatformAccessory,
  PlatformConfig,
  Service,
  Characteristic,
} from "homebridge";
import { IConfig, isConfig } from "./config";
import * as faceapi from "@vladmandic/face-api";
import {
  nets,
  LabeledFaceDescriptors,
  TNetInput,
  FaceMatcher,
} from "@vladmandic/face-api";
import canvas from "canvas";
import fs, { lstatSync } from "fs";
import * as path from "path";
import * as mime from "mime-types";
import { LocationMonitor } from "./locationMonitor";
import { getFaceDetectorOptions } from "./common";
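
// face-api.js is written against browser APIs; in Node the `canvas` package
// supplies the Canvas/Image/ImageData implementations it expects, and
// `@tensorflow/tfjs-node` provides the native TensorFlow backend used for
// detection and recognition.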
require("@tensorflow/tfjs-node");

const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

import { PLATFORM_NAME, PLUGIN_NAME } from "./settings";
import { MonitorAccessory } from "./monitorAccessory";

/**
 * HomeLocationPlatform
 * This class is the main entry point of the plugin: it validates the user
 * config, trains or loads the facial recognition model, and registers the
 * room/person accessories with Homebridge.
 */
export class HomeLocationPlatform implements DynamicPlatformPlugin {
  public readonly Service: typeof Service = this.api.hap.Service;
  public readonly Characteristic: typeof Characteristic = this.api.hap
    .Characteristic;

  // this is used to track restored cached accessories
  public readonly accessories: PlatformAccessory[] = [];
  public config: IConfig;

  constructor(
    public readonly log: Logger,
    config: PlatformConfig,
    public readonly api: API
  ) {
    this.log.debug("Finished initializing platform:", config.name);

    if (!isConfig(config)) {
      this.log.error("Configuration is incorrect or incomplete");
      process.exit(1);
    } else {
      this.config = config;
    }

    // didFinishLaunching fires once Homebridge has restored all cached
    // accessories from disk, so it is safe to register new accessories here
    this.api.on("didFinishLaunching", async () => {
      this.log.debug("Executed didFinishLaunching callback");
      // run the method to discover / register your devices as accessories
      await this.discoverDevices();
    });
  }

  /**
   * This function is invoked when homebridge restores cached accessories from disk at startup.
   * It should be used to set up event handlers for characteristics and update respective values.
   */
  public configureAccessory(accessory: PlatformAccessory) {
    this.log.info("Loading accessory from cache:", accessory.displayName);

    // add the restored accessory to the accessories cache so we can track if it has already been registered
    this.accessories.push(accessory);
  }

  /**
   * Registers a monitor accessory for every room/person combination.
   * Accessories must only be registered once; previously created accessories
   * must not be registered again to prevent "duplicate UUID" errors.
   */
  public async discoverDevices() {
    // Train the facial recognition model on startup, or load a previously
    // trained matcher from disk
    let faceMatcher: FaceMatcher;
    if (this.config.trainOnStartup) {
      faceMatcher = await this.trainModels();
    } else {
      const faceDetectionNet = nets.ssdMobilenetv1;

      await faceDetectionNet.loadFromDisk(this.config.weightDirectory);
      await nets.faceLandmark68Net.loadFromDisk(this.config.weightDirectory);
      await nets.faceRecognitionNet.loadFromDisk(this.config.weightDirectory);

      const raw = fs.readFileSync(
        path.join(this.config.trainedModelDirectory, "data.json"),
        "utf-8"
      );
      faceMatcher = FaceMatcher.fromJSON(JSON.parse(raw));
    }

    const locationMonitor = new LocationMonitor(this.config.rooms, faceMatcher);

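    // One accessory is created per (room, person) pair; the person labels come
    // from the trained face matcher, and the accessory UUID is derived from
    // room name + label so cached accessories can be matched up after restarts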
    const labels = faceMatcher.labeledDescriptors.map((e) => e.label);
    for (const room of this.config.rooms) {
      for (const label of labels) {
        const uuid = this.api.hap.uuid.generate(room.name + label);

        const existingAccessory = this.accessories.find((e) => e.UUID === uuid);
        if (existingAccessory) {
          this.log.info(
            "Restoring existing accessory from cache:",
            existingAccessory.displayName
          );

          new MonitorAccessory(this, existingAccessory, locationMonitor);

          this.api.updatePlatformAccessories([existingAccessory]);
        } else {
          this.log.info("Adding new accessory:", `${room.name} ${label}`);

          // create a new accessory
          const accessory = new this.api.platformAccessory(
            `${room.name} ${label}`,
            uuid
          );

          accessory.context["DeviceName"] = `${room.name} ${label}`;

          // create the accessory handler for the newly created accessory
          // this is imported from `monitorAccessory.ts`
          new MonitorAccessory(this, accessory, locationMonitor);

          // link the accessory to your platform
          this.api.registerPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [
            accessory,
          ]);
        }
      }
    }
  }

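  /**
   * Loads the SSD MobileNet v1, landmark, and recognition networks from
   * `weightDirectory`, then builds one LabeledFaceDescriptors entry per
   * subdirectory of `refImageDirectory` (the directory name is used as the
   * person's label). The resulting FaceMatcher is serialized to `data.json`
   * in `trainedModelDirectory` so later startups can skip retraining.
   */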
  private async trainModels(): Promise<FaceMatcher> {
    const faceDetectionNet = faceapi.nets.ssdMobilenetv1;
    await faceDetectionNet.loadFromDisk(this.config.weightDirectory);
    await faceapi.nets.faceLandmark68Net.loadFromDisk(
      this.config.weightDirectory
    );
    await faceapi.nets.faceRecognitionNet.loadFromDisk(
      this.config.weightDirectory
    );

    const options = getFaceDetectorOptions(faceDetectionNet);

    const dirs = fs.readdirSync(this.config.refImageDirectory);

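    // Each subdirectory of refImageDirectory represents one person: the
    // directory name becomes the label and every image inside contributes
    // one face descriptor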
    const refs: Array<LabeledFaceDescriptors> = [];
    for (const dir of dirs) {
      if (
        !lstatSync(path.join(this.config.refImageDirectory, dir)).isDirectory()
      ) {
        continue;
      }
      const files = fs.readdirSync(
        path.join(this.config.refImageDirectory, dir)
      );
      const referenceResults = await Promise.all(
        files.map(async (file: string) => {
          const imagePath = path.join(
            this.config.refImageDirectory,
            dir,
            file
          );
          const mimeType = mime.contentType(path.extname(imagePath));
          // skip anything that is not an image file
          if (!mimeType || !mimeType.startsWith("image")) {
            return undefined;
          }
          this.log.debug("Training on reference image:", imagePath);

          try {
            const referenceImage = (await canvas.loadImage(
              imagePath
            )) as unknown;

            const descriptor = await faceapi
              .detectSingleFace(referenceImage as TNetInput, options)
              .withFaceLandmarks()
              .withFaceDescriptor();
            if (!descriptor || !descriptor.descriptor) {
              throw new Error("No face found");
            }

            return new faceapi.LabeledFaceDescriptors(dir, [
              descriptor.descriptor,
            ]);
          } catch (err) {
            this.log.error(
              "An error occurred loading image at path: " + imagePath
            );
          }
          return undefined;
        })
      );

      refs.push(
        ...referenceResults.filter(
          (e): e is LabeledFaceDescriptors => e !== undefined
        )
      );
    }

    const faceMatcher = new faceapi.FaceMatcher(refs);

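    // Persist the trained matcher so later startups can set trainOnStartup to
    // false and load it back from data.json instead of retraining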
    fs.writeFile(
      path.join(this.config.trainedModelDirectory, "data.json"),
      JSON.stringify(faceMatcher.toJSON()),
      "utf8",
      (err) => {
        if (err) {
          this.log.error("An error occurred while writing data model to file");
          return;
        }

        this.log.info("Successfully wrote data model to file");
      }
    );

    return faceMatcher;
  }
}