Merge branch 'feature/homebridge'

watsonb8 2020-12-11 00:02:30 -05:00
commit 10b7ecccb7
21 changed files with 1505 additions and 81 deletions

2
.env.defaults Normal file

@@ -0,0 +1,2 @@
TRAINED_MODEL_DIR=./trainedModels
OUT_DIR=./out

23
.vscode/launch.json vendored

@@ -4,17 +4,6 @@
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"program": "${workspaceFolder}/bin/index.js",
"preLaunchTask": "build",
"console": "internalConsole",
"internalConsoleOptions": "openOnSessionStart",
"sourceMaps": true,
"outFiles": ["${workspaceFolder}/**/*.js"]
},
{
"name": "Current TS File",
"type": "node",
@@ -24,6 +13,18 @@
"sourceMaps": true,
"cwd": "${workspaceRoot}",
"protocol": "inspector"
},
{
"type": "node",
"request": "launch",
"name": "Launch Program",
"preLaunchTask": "build",
"internalConsoleOptions": "openOnSessionStart",
"program": "/Users/brandonwatson/.nvm/versions/node/v14.15.0/lib/node_modules/homebridge/bin/homebridge",
"env": {
"HOMEBRIDGE_OPTS": "/Users/brandonwatson/.homebridge"
},
"sourceMaps": true
}
]
}

11
.vscode/tasks.json vendored

@@ -4,10 +4,15 @@
"version": "2.0.0",
"tasks": [
{
"type": "npm",
"script": "build",
"label": "build",
"type": "typescript",
"tsconfig": "tsconfig.json",
"problemMatcher": ["$tsc"]
"problemMatcher": []
},
{
"type": "shell",
"label": "build and install",
"command": "npm run build&&sudo npm install -g --unsafe-perm ."
}
]
}

45
config.schema.json Normal file

@@ -0,0 +1,45 @@
{
"pluginAlias": "HomeLocation",
"pluginType": "platform",
"singular": true,
"schema": {
"type": "object",
"properties": {
"name": {
"title": "Name",
"type": "string",
"required": true,
"default": "Example Dynamic Platform"
},
"refImageDirectory": {
"title": "RefImageDirectory",
"type": "string",
"required": true
},
"trainedModelDirectory": {
"title": "TrainedModelDirectory",
"type": "string",
"required": true
},
"rooms": {
"title": "Rooms",
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {
"title": "Name",
"type": "string",
"required": true
},
"rtspCameraConnectionString": {
"title": "RtspCameraConnectionString",
"type": "string",
"required": true
}
}
}
}
}
}
}


@@ -4,5 +4,8 @@
"path": "."
}
],
"settings": {}
"settings": {
"editor.tabSize": 2,
"debug.javascript.unmapMissingSources": true
}
}

692
package-lock.json generated

File diff suppressed because it is too large

package.json

@@ -2,7 +2,7 @@
"name": "homebridge-face-location",
"version": "1.0.0",
"description": "",
"main": "index.ts",
"main": "bin/index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build": "npm run copy-files && tsc --build",
@@ -15,8 +15,12 @@
},
"keywords": [
"homebridge",
"typescript"
"typescript",
"homebridge-plugin"
],
"engines": {
"homebridge": ">=1.1.6"
},
"author": "Brandon Watson",
"license": "ISC",
"dependencies": {
@@ -25,14 +29,15 @@
"@types/mime-types": "^2.1.0",
"@vladmandic/face-api": "^0.8.8",
"canvas": "^2.6.1",
"copyfiles": "^2.4.0",
"dotenv-extended": "^2.9.0",
"mime-types": "^2.1.27",
"rtsp-stream": "file:../rtsp-stream",
"ts-node": "^9.0.0",
"typescript": "^4.0.5"
"tsyringe": "^4.4.0"
},
"devDependencies": {
"@types/webgl2": "0.0.5"
"@types/webgl2": "0.0.5",
"typescript": "^4.0.5",
"ts-node": "^9.0.0",
"homebridge": "^1.1.6",
"copyfiles": "^2.4.0"
}
}


@@ -1,4 +1,4 @@
import { Rtsp } from "rtsp-stream/lib";
import { Rtsp, IStreamEventArgs } from "../src/rtsp/rtsp";
import { nets } from "@vladmandic/face-api";
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
@@ -37,10 +37,10 @@ const main = async () => {
const content = JSON.parse(raw);
const matcher = faceapi.FaceMatcher.fromJSON(content);
rtsp.on("data", async (data: Buffer) => {
const input = ((await canvas.loadImage(data)) as unknown) as ImageData;
rtsp.dataEvent.push(async (sender: Rtsp, args: IStreamEventArgs) => {
const input = ((await canvas.loadImage(args.data)) as unknown) as ImageData;
const out = faceapi.createCanvasFromMedia(input);
await saveFile("image.jpg", data);
await saveFile(process.env.OUT_DIR as string, "image.jpg", args.data);
const resultsQuery = await faceapi
.detectAllFaces(out, getFaceDetectorOptions(faceDetectionNet))
.withFaceLandmarks()
@@ -52,10 +52,6 @@ const main = async () => {
}
});
rtsp.on("error", (err) => {
// console.log(err);
});
rtsp.start();
};


@@ -15,13 +15,33 @@ export const getFaceDetectorOptions = (net: faceapi.NeuralNetwork<any>) => {
: new faceapi.TinyFaceDetectorOptions({ inputSize, scoreThreshold });
};
export function saveFile(fileName: string, buf: Buffer) {
const baseDir = process.env.OUT_DIR as string;
if (!fs.existsSync(baseDir)) {
fs.mkdirSync(baseDir);
}
fs.writeFileSync(path.resolve(baseDir, fileName), buf, "base64");
export function saveFile(
basePath: string,
fileName: string,
buf: Buffer
): Promise<void> {
const writeFile = (): Promise<void> => {
return new Promise((resolve, reject) => {
fs.writeFile(path.resolve(basePath, fileName), buf, "base64", (err) => {
if (err) {
return reject(err);
}
resolve();
});
});
};
return new Promise(async (resolve, reject) => {
if (!fs.existsSync(basePath)) {
fs.mkdir(basePath, async (err) => {
if (err) {
return reject(err);
}
resolve(await writeFile());
});
} else {
resolve(await writeFile());
}
});
}
export const delay = (ms: number): Promise<void> => {
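
As a usage sketch of the reworked saveFile above (directory and file names here are placeholders, not values from this commit): the output directory is created on first use and the returned promise resolves once the write completes.

import { saveFile } from "./common";

const frame = Buffer.from("placeholder image bytes");

// Creates the directory if needed, writes <dir>/frame.jpg, and resolves when finished
saveFile(process.env.OUT_DIR ?? "./out", "frame.jpg", frame)
  .then(() => console.log("frame written"))
  .catch((err) => console.error("failed to write frame", err));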

39
src/config.ts Normal file

@@ -0,0 +1,39 @@
import { PlatformConfig } from "homebridge";
export interface IConfig extends PlatformConfig {
refImageDirectory: string;
trainedModelDirectory: string;
weightDirectory: string;
outputDirectory: string;
trainOnStartup: boolean;
rooms: Array<IRoom>;
detectionTimeout: number;
debug: boolean;
writeOutput: boolean;
}
export interface IRoom {
name: string;
rtspConnectionStrings: Array<string>;
}
export const isRoom = (object: any): object is IRoom => {
return "name" in object && "rtspConnectionStrings" in object;
};
export const isConfig = (object: any): object is IConfig => {
const roomsOkay =
object["rooms"].filter((room: any) => isRoom(room)).length ===
object["rooms"].length;
return (
"refImageDirectory" in object &&
"trainedModelDirectory" in object &&
"weightDirectory" in object &&
"outputDirectory" in object &&
"trainOnStartup" in object &&
"detectionTimeout" in object &&
"writeOutput" in object &&
"rooms" in object &&
roomsOkay
);
};
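
For illustration, a platform config that satisfies the isConfig guard above could look roughly like this (every path, timeout, and RTSP URL below is a placeholder, not a value from this commit):

import { IConfig, isConfig } from "./config";

const exampleConfig: IConfig = {
  platform: "HomeLocation", // required by the underlying PlatformConfig type
  name: "HomeLocation",
  refImageDirectory: "./refImages",
  trainedModelDirectory: "./trainedModels",
  weightDirectory: "./weights",
  outputDirectory: "./out",
  trainOnStartup: true,
  detectionTimeout: 60000,
  debug: false,
  writeOutput: true,
  rooms: [
    { name: "LivingRoom", rtspConnectionStrings: ["rtsp://user:pass@camera.local/stream1"] },
  ],
};

console.log(isConfig(exampleConfig)); // true: every checked key is present and each room is well formed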

12
src/events/event.ts Normal file

@@ -0,0 +1,12 @@
import { EventDelegate } from "./eventDelegate";
export class Event<T, K> extends Array<EventDelegate<T, K>> {
constructor() {
super();
}
public fire = (source: T, args: K) => {
for (const delegate of this) {
delegate(source, args);
}
};
}
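
A minimal sketch of the subscribe/fire pattern this class enables (Ticker and ITickEventArgs are invented for illustration): handlers are plain functions push()ed onto the event, since Event extends Array, and fire() calls each one in order.

import { Event } from "./events";

interface ITickEventArgs {
  count: number;
}

class Ticker {
  public readonly tickEvent = new Event<Ticker, ITickEventArgs>();
  private _count = 0;

  public tick(): void {
    this._count += 1;
    // Notify every delegate that has been push()ed onto the event
    this.tickEvent.fire(this, { count: this._count });
  }
}

const ticker = new Ticker();
ticker.tickEvent.push((_sender, args) => console.log(`tick #${args.count}`));
ticker.tick(); // logs "tick #1"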


@@ -0,0 +1 @@
export type EventDelegate<T, K> = (sender: T, args: K) => void;

2
src/events/index.ts Normal file

@@ -0,0 +1,2 @@
export * from "./event";
export * from "./eventDelegate";

229
src/homeLocationPlatform.ts Normal file

@@ -0,0 +1,229 @@
import {
API,
DynamicPlatformPlugin,
Logger,
PlatformAccessory,
PlatformConfig,
Service,
Characteristic,
} from "homebridge";
import { IConfig, isConfig } from "./config";
import * as faceapi from "@vladmandic/face-api";
import canvas from "canvas";
import fs, { lstatSync } from "fs";
import * as path from "path";
import { nets } from "@vladmandic/face-api";
import {
LabeledFaceDescriptors,
TNetInput,
FaceMatcher,
} from "@vladmandic/face-api";
import * as mime from "mime-types";
import { Monitor } from "./monitor";
import { getFaceDetectorOptions } from "./common";
require("@tensorflow/tfjs-node");
const { Canvas, Image, ImageData } = canvas;
//@ts-ignore
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });
import { PLATFORM_NAME, PLUGIN_NAME } from "./settings";
import { LocationAccessory } from "./locationAccessory";
/**
* HomebridgePlatform
* This class is the main constructor for your plugin; this is where you should
* parse the user config and discover/register accessories with Homebridge.
*/
export class HomeLocationPlatform implements DynamicPlatformPlugin {
public readonly Service: typeof Service = this.api.hap.Service;
public readonly Characteristic: typeof Characteristic = this.api.hap
.Characteristic;
// this is used to track restored cached accessories
public readonly accessories: PlatformAccessory[] = [];
public config: IConfig;
constructor(
public readonly log: Logger,
config: PlatformConfig,
public readonly api: API
) {
this.log.debug("Finished initializing platform:", config.name);
if (!isConfig(config)) {
this.log.error("Configuration is incorrect or incomplete");
process.exit(1);
} else {
this.config = config;
}
this.api.on("didFinishLaunching", async () => {
log.debug("Executed didFinishLaunching callback");
// run the method to discover / register your devices as accessories
await this.discoverDevices();
});
}
/**
* This function is invoked when homebridge restores cached accessories from disk at startup.
* It should be used to set up event handlers for characteristics and update respective values.
*/
public configureAccessory(accessory: PlatformAccessory) {
this.log.info("Loading accessory from cache:", accessory.displayName);
// add the restored accessory to the accessories cache so we can track if it has already been registered
this.accessories.push(accessory);
}
/**
* This is an example method showing how to register discovered accessories.
* Accessories must only be registered once, previously created accessories
* must not be registered again to prevent "duplicate UUID" errors.
*/
public async discoverDevices() {
//Train facial recognition model
let faceMatcher: FaceMatcher;
if (this.config.trainOnStartup) {
faceMatcher = await this.trainModels();
} else {
const faceDetectionNet = nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(this.config.weightDirectory);
await nets.faceLandmark68Net.loadFromDisk(this.config.weightDirectory);
await nets.faceRecognitionNet.loadFromDisk(this.config.weightDirectory);
const raw = fs.readFileSync(
path.join(this.config.trainedModelDirectory, "data.json"),
"utf-8"
);
faceMatcher = FaceMatcher.fromJSON(JSON.parse(raw));
}
const locationMonitor = new Monitor(
this.config.rooms,
faceMatcher,
this.log,
this.config
);
locationMonitor.startStreams();
const labels = faceMatcher.labeledDescriptors.map((e) => e.label);
for (const room of this.config.rooms) {
const uuid = this.api.hap.uuid.generate(room.name);
const existingAccessory = this.accessories.find((e) => e.UUID === uuid);
if (existingAccessory) {
this.log.info(
"Restoring existing accessory from cache: ",
existingAccessory.displayName
);
new LocationAccessory(this, existingAccessory, locationMonitor, room);
this.api.updatePlatformAccessories([existingAccessory]);
} else {
this.log.info("Adding new accessory:", `${room.name}`);
// create a new accessory
const accessory = new this.api.platformAccessory(`${room.name}`, uuid);
accessory.context["DeviceName"] = `${room.name}`;
// create the accessory handler for the newly created accessory
// this is imported from `platformAccessory.ts`
new LocationAccessory(this, accessory, locationMonitor, room);
// link the accessory to your platform
this.api.registerPlatformAccessories(PLUGIN_NAME, PLATFORM_NAME, [
accessory,
]);
}
}
}
private async trainModels(): Promise<FaceMatcher> {
const faceDetectionNet = faceapi.nets.ssdMobilenetv1;
await faceDetectionNet.loadFromDisk(this.config.weightDirectory);
await faceapi.nets.faceLandmark68Net.loadFromDisk(
this.config.weightDirectory
);
await faceapi.nets.faceRecognitionNet.loadFromDisk(
this.config.weightDirectory
);
const options = getFaceDetectorOptions(faceDetectionNet);
const dirs = fs.readdirSync(this.config.refImageDirectory);
const refs: Array<LabeledFaceDescriptors> = [];
for (const dir of dirs) {
if (
!lstatSync(path.join(this.config.refImageDirectory, dir)).isDirectory()
) {
continue;
}
const files = fs.readdirSync(
path.join(this.config.refImageDirectory, dir)
);
let referenceResults = await Promise.all(
files.map(async (file: string) => {
const mimeType = mime.contentType(
path.extname(path.join(this.config.refImageDirectory, dir, file))
);
if (!mimeType || !mimeType.startsWith("image")) {
return;
}
console.log(path.join(this.config.refImageDirectory, dir, file));
try {
const referenceImage = (await canvas.loadImage(
path.join(this.config.refImageDirectory, dir, file)
)) as unknown;
const descriptor = await faceapi
.detectSingleFace(referenceImage as TNetInput, options)
.withFaceLandmarks()
.withFaceDescriptor();
if (!descriptor || !descriptor.descriptor) {
throw new Error("No face found");
}
const faceDescriptors = [descriptor.descriptor];
return new faceapi.LabeledFaceDescriptors(dir, faceDescriptors);
} catch (err) {
console.log(
"An error occurred loading image at path: " +
path.join(this.config.refImageDirectory, dir, file)
);
}
return undefined;
})
);
if (referenceResults) {
refs.push(
...(referenceResults.filter((e) => e) as LabeledFaceDescriptors[])
);
}
}
const faceMatcher = new faceapi.FaceMatcher(refs);
fs.writeFile(
path.join(this.config.trainedModelDirectory, "data.json"),
JSON.stringify(faceMatcher.toJSON()),
"utf8",
(err) => {
if (err) {
console.log(`An error occurred while writing data model to file`);
}
console.log(`Successfully wrote data model to file`);
}
);
return faceMatcher;
}
}


@@ -1 +1,11 @@
console.log("Hello World");
import { API } from "homebridge";
import { PLATFORM_NAME } from "./settings";
import { HomeLocationPlatform } from "./homeLocationPlatform";
/**
* This method registers the platform with Homebridge
*/
export = (api: API) => {
api.registerPlatform(PLATFORM_NAME, HomeLocationPlatform);
};

91
src/locationAccessory.ts Normal file

@@ -0,0 +1,91 @@
import {
Service,
CharacteristicGetCallback,
PlatformAccessory,
} from "homebridge";
import { Monitor, IStateChangeEventArgs } from "./monitor";
import { HomeLocationPlatform } from "./homeLocationPlatform";
import { IRoom } from "./config";
/**
* Platform Accessory
* An instance of this class is created for each accessory your platform registers
* Each accessory may expose multiple services of different service types.
*/
export class LocationAccessory {
private _services: Array<Service>;
constructor(
private readonly _platform: HomeLocationPlatform,
private readonly _accessory: PlatformAccessory,
private _monitor: Monitor,
private _room: IRoom
) {
this._services = [];
// set accessory information
this._accessory
.getService(this._platform.Service.AccessoryInformation)!
.setCharacteristic(
this._platform.Characteristic.Manufacturer,
"Brandon Watson"
)
.setCharacteristic(
this._platform.Characteristic.Model,
"Person Location Sensor"
)
.setCharacteristic(
this._platform.Characteristic.SerialNumber,
"123-456-789"
);
//Init motion services
for (const label of this._monitor.labels) {
const newService =
this._accessory.getService(label) ||
this._accessory.addService(
this._platform.Service.MotionSensor,
label,
this._room.name + label
);
newService
.getCharacteristic(this._platform.Characteristic.MotionDetected)
.on("get", (callback: CharacteristicGetCallback) =>
this.onMotionDetectedGet(label, callback)
);
this._services.push(newService);
}
//Register monitor state change events
this._monitor.stateChangedEvent.push(this.onMonitorStateChange.bind(this));
}
private onMotionDetectedGet = (
label: string,
callback: CharacteristicGetCallback
) => {
this._platform.log.debug("Triggered GET MotionDetected");
// set this to a valid value for MotionDetected
const currentValue =
this._monitor.getState(label) === this._room.name ? 1 : 0;
callback(null, currentValue);
};
private onMonitorStateChange = (
sender: Monitor,
args: IStateChangeEventArgs
) => {
const service = this._services.find(
(service) => service.displayName == args.label
);
if (service) {
service.setCharacteristic(
this._platform.Characteristic.MotionDetected,
args.new === this._room.name
);
}
};
}

161
src/monitor.ts Normal file

@@ -0,0 +1,161 @@
import { FaceMatcher } from "@vladmandic/face-api";
import { IRoom } from "./config";
import {
Rtsp,
IStreamEventArgs,
ICloseEventArgs,
IErrorEventArgs,
IMessageEventArgs,
} from "./rtsp/rtsp";
import canvas from "canvas";
import * as faceapi from "@vladmandic/face-api";
import { getFaceDetectorOptions, saveFile } from "./common";
import { nets } from "@vladmandic/face-api";
import { Logger } from "homebridge";
import { Event } from "./events";
import { IConfig } from "./config";
const { Canvas, Image, ImageData } = canvas;
export type MonitorState = { [label: string]: string | null };
export interface IStateChangeEventArgs {
label: string;
old: string | null;
new: string;
}
export class Monitor {
private _state: MonitorState = {};
private _streamsByRoom: { [roomName: string]: Array<Rtsp> } = {};
private _faceDetectionNet = nets.ssdMobilenetv1;
private _stateChangedEvent: Event<this, IStateChangeEventArgs>;
constructor(
private _rooms: Array<IRoom>,
private _matcher: FaceMatcher,
private _logger: Logger,
private _config: IConfig
) {
this._stateChangedEvent = new Event();
//Initialize state
for (const room of this._rooms) {
this._streamsByRoom[room.name] = [
...room.rtspConnectionStrings.map((connectionString) => {
const rtsp = new Rtsp(connectionString, {
rate: 0.7,
image: true,
});
rtsp.dataEvent.push((sender: Rtsp, args: IStreamEventArgs) =>
this.onData(room.name, args)
);
rtsp.closeEvent.push((sender: Rtsp, args: ICloseEventArgs) =>
this.onExit(connectionString, args)
);
rtsp.errorEvent.push((sender: Rtsp, args: IErrorEventArgs) =>
this.onError(args, connectionString)
);
if (this._config.debug) {
rtsp.messageEvent.push((sender: Rtsp, args: IMessageEventArgs) => {
this._logger.info(`[${connectionString}] ${args.message}`);
});
}
return rtsp;
}),
];
_matcher.labeledDescriptors.forEach((descriptor) => {
this._state[descriptor.label] = null;
});
}
}
/**
* @method getState
*
* @param label The name of the label to retrieve state for
*
* The last known room of the requested label
*/
public getState(label: string): string | null {
return this._state[label];
}
/**
* @property labels
*
* Gets the list of labels associated with the monitor
*/
public get labels(): Array<string> {
return this._matcher.labeledDescriptors
.map((descriptor) => descriptor.label)
.filter(
(label: string, index: number, array: Array<string>) =>
array.indexOf(label) === index
);
}
public get stateChangedEvent(): Event<this, IStateChangeEventArgs> {
return this._stateChangedEvent;
}
/**
* @method startStreams
*
* Starts monitoring rtsp streams
*/
public startStreams() {
for (const key in this._streamsByRoom) {
for (const stream of this._streamsByRoom[key]) {
stream.start();
}
}
}
/**
* @method closeStreams
*
* Stops monitoring rtsp streams
*/
public closeStreams() {
for (const key in this._streamsByRoom) {
for (const stream of this._streamsByRoom[key]) {
stream.close();
}
}
}
private onData = async (room: string, args: IStreamEventArgs) => {
const input = ((await canvas.loadImage(args.data)) as unknown) as ImageData;
const out = faceapi.createCanvasFromMedia(input);
const resultsQuery = await faceapi
.detectAllFaces(out, getFaceDetectorOptions(this._faceDetectionNet))
.withFaceLandmarks()
.withFaceDescriptors();
//Write to output image
if (this._config.writeOutput) {
await saveFile(this._config.outputDirectory, room + ".jpg", args.data);
}
for (const res of resultsQuery) {
const bestMatch = this._matcher.matchDescriptor(res.descriptor);
const old = this._state[bestMatch.label];
this._state[bestMatch.label] = room;
this._stateChangedEvent.fire(this, {
old: old,
new: room,
label: bestMatch.label,
});
this._logger.info(`Face Detected: ${bestMatch.label} in room ${room}`);
}
};
private onError = (args: IErrorEventArgs, streamName: string) => {
this._logger.info(`[${streamName}] ${args.message}`);
};
private onExit = (streamName: string, args: ICloseEventArgs) => {
this._logger.info(`[${streamName}] Stream has exited: ${args.message}`);
};
}
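
Roughly how the platform consumes this class (compare discoverDevices in homeLocationPlatform.ts); this standalone sketch assumes a trained FaceMatcher, a Homebridge Logger, and a validated IConfig are already available.

import { FaceMatcher } from "@vladmandic/face-api";
import { Logger } from "homebridge";
import { IConfig } from "./config";
import { Monitor } from "./monitor";

export const watchRooms = (config: IConfig, matcher: FaceMatcher, log: Logger): Monitor => {
  const monitor = new Monitor(config.rooms, matcher, log, config);

  // React whenever a labeled face is spotted in a new room
  monitor.stateChangedEvent.push((_sender, args) =>
    log.info(`${args.label}: ${args.old ?? "unknown"} -> ${args.new}`)
  );

  monitor.startStreams(); // spawns one ffmpeg process per RTSP connection string
  return monitor;
};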

7
src/rtsp/options.ts Normal file

@@ -0,0 +1,7 @@
export interface IOptions {
rate?: number;
quality?: number;
resolution?: string;
codec?: string;
image?: boolean;
}

170
src/rtsp/rtsp.ts Normal file

@@ -0,0 +1,170 @@
import { ChildProcess, spawn } from "child_process";
import { Writable } from "stream";
import { IOptions } from "./options";
import { Event } from "../events";
const ef1 = "ff";
const ef2 = "d9";
export interface IStreamEventArgs {
data: Buffer;
}
export interface ICloseEventArgs {
message: string;
}
export interface IErrorEventArgs {
message?: string;
err?: Error;
}
export interface IMessageEventArgs {
message: string;
}
export class Rtsp {
private _connectionString: string;
private _childProcess: ChildProcess | undefined;
private _started: boolean;
private _buffer: Buffer;
private _options: IOptions;
private _paused: boolean;
private _dataEvent: Event<this, IStreamEventArgs>;
private _closeEvent: Event<this, ICloseEventArgs>;
private _errorEvent: Event<this, IErrorEventArgs>;
private _messageEvent: Event<this, IMessageEventArgs>;
constructor(connectionString: string, options: IOptions) {
this._started = false;
this._connectionString = connectionString;
this._childProcess = undefined;
this._buffer = Buffer.from("");
this._options = options;
this._paused = false;
this._dataEvent = new Event();
this._closeEvent = new Event();
this._errorEvent = new Event();
this._messageEvent = new Event();
this.onData = this.onData.bind(this);
}
public get isStarted(): boolean {
return this._started;
}
public get isPaused(): boolean {
return this._paused;
}
public get dataEvent(): Event<this, IStreamEventArgs> {
return this._dataEvent;
}
public get messageEvent(): Event<this, IMessageEventArgs> {
return this._messageEvent;
}
public get closeEvent(): Event<this, ICloseEventArgs> {
return this._closeEvent;
}
public get errorEvent(): Event<this, IErrorEventArgs> {
return this._errorEvent;
}
public start(): void {
const argStrings = [
`-i ${this._connectionString}`,
`-r ${this._options.rate ?? 10}`,
`-vf mpdecimate,setpts=N/FRAME_RATE/TB`,
this._options.image
? `-f image2`
: `-codec:v ${this._options.codec ?? "libx264"}`,
`-update 1 -`,
];
const args = argStrings.join(" ");
this._childProcess = spawn("ffmpeg", args.split(/\s+/));
if (!this._childProcess) {
return;
}
this._childProcess.stdout?.on("data", this.onData);
this._childProcess.stdout?.on("error", (err) =>
console.log("And error occurred" + err)
);
this._childProcess.stdout?.on("close", () => console.log("Stream closed"));
this._childProcess.stdout?.on("end", () => console.log("Stream ended"));
//Only register this event if there are subscribers
if (this._childProcess.stderr && this._messageEvent.length > 0) {
this._childProcess.stderr.on("data", this.onMessage);
}
this._childProcess.on("close", (code: number, signal: NodeJS.Signals) =>
this._closeEvent.fire(this, {
message: "FFmpeg exited with code: " + code + " and signal: " + signal,
})
);
this._childProcess.on("exit", (code: number, signal: NodeJS.Signals) =>
this._closeEvent.fire(this, {
message: "FFmpeg exited with code: " + code + " and signal: " + signal,
})
);
this._childProcess.on("error", (error: Error) =>
this._errorEvent.fire(this, { err: error })
);
}
public close(): void {
this._childProcess && this._childProcess.kill("SIGKILL");
this._closeEvent.fire(this, { message: "Process killed by user" });
}
public pause(): void {
this._paused = true;
}
public resume(): void {
this._paused = false;
}
public getStdin(): Writable | null {
return this._childProcess ? this._childProcess.stdin : null;
}
private onMessage = (data: any): void => {
if (!this._started) {
this._started = true;
}
let msg = "";
data
.toString()
.split(/\n/)
.forEach((line: string) => {
msg += `${line}\n`;
});
this._messageEvent.fire(this, { message: msg });
};
private onData(data: Buffer): void {
if (!this._paused && data.length > 1) {
this._buffer = this._buffer
? Buffer.concat([this._buffer, data])
: Buffer.from(data);
//End of image
if (
data[data.length - 2].toString(16) == ef1 &&
data[data.length - 1].toString(16) == ef2
) {
this._dataEvent.fire(this, { data: this._buffer });
this._buffer = Buffer.from("");
}
}
}
}
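
A small usage sketch of the wrapper (the RTSP URL is a placeholder and ffmpeg must be available on the PATH): with image: true, dataEvent fires once per complete JPEG frame.

import { Rtsp, IStreamEventArgs, IErrorEventArgs } from "./rtsp";

const rtsp = new Rtsp("rtsp://user:pass@camera.local/stream1", { rate: 1, image: true });

rtsp.dataEvent.push((_sender: Rtsp, args: IStreamEventArgs) => {
  // args.data is one complete JPEG (buffered until the ff d9 end-of-image marker)
  console.log(`received frame: ${args.data.length} bytes`);
});

rtsp.errorEvent.push((_sender: Rtsp, args: IErrorEventArgs) =>
  console.error(args.err ?? args.message)
);

rtsp.start();
// rtsp.close() kills the ffmpeg child process when the stream is no longer needed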

9
src/settings.ts Normal file

@@ -0,0 +1,9 @@
/**
* This is the name of the platform that users will use to register the plugin in the Homebridge config.json
*/
export const PLATFORM_NAME = "HomeLocation";
/**
* This must match the name of your plugin as defined in the package.json
*/
export const PLUGIN_NAME = "homebridge-face-location";

tsconfig.json

@@ -39,7 +39,8 @@
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
"baseUrl": "./" /* Base directory to resolve non-absolute module names. */,
"paths": {
"rtsp/*": ["./node_modules/rtsp-stream/lib/*"]
"rtsp/*": ["node_modules/rtsp-stream/lib/*"],
"common/*": ["node_modules/node-common/lib/*"]
} /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */,
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
@@ -61,10 +62,5 @@
"forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */
},
"include": ["./src"],
"exclude": ["node_modules"],
"references": [
{
"path": "./node_modules/rtsp-stream/tsconfig.json"
}
]
"exclude": ["node_modules"]
}