import * as faceapi from 'face-api.js';
import { FaceMatcher } from 'face-api.js';
import { Doc, DocListCast } from '../../../fields/Doc';
import { DocData } from '../../../fields/DocSymbols';
import { List } from '../../../fields/List';
import { ComputedField } from '../../../fields/ScriptField';
import { DocCast, ImageCast, ImageCastToNameType, NumCast, StrCast } from '../../../fields/Types';
import { ImageField } from '../../../fields/URLField';
import { DocumentType } from '../../documents/DocumentTypes';
import { Docs } from '../../documents/Documents';
import { DocumentManager } from '../../util/DocumentManager';
import { DocumentView } from '../nodes/DocumentView';

/**
 * A singleton class that handles face recognition and manages face Doc collections for each face found.
 * Displaying an image Doc anywhere triggers this class to test whether the image contains any faces.
 * If it does, each recognized face is compared to a stored, global set of faces (each face is represented
 * as a face collection Doc). If the face matches a face collection Doc, it is added to that
 * collection along with the numerical representation of the face, its face descriptor.
 *
 * Image Docs that are added to one or more face collection Docs are given an annotation rectangle that
 * highlights where the face is, and the annotation has these fields:
 *   faceDescriptor - the numerical face representation found in the image.
 *   face           - the unique face Doc corresponding to the recognized face in the image.
 *   annotationOn   - the image where the face was found.
 *
 * Unique face Docs are created for each person identified and are stored in the Dashboard's myUniqueFaces field.
 *
 * Each unique face Doc represents a unique face and collects all matching face images for that person. It has these fields:
 *   face       - a string label for the person that was recognized (TODO: currently it's just a 'face#')
 *   face_annos - a list of face annotations (see above), each carrying a faceDescriptor and a reference
 *                to the image it was found on.
 */
export class FaceRecognitionHandler {
    static _instance: FaceRecognitionHandler;
    private _apiModelReady = false;
    private _pendingAPIModelReadyDocs: DocumentView[] = [];

    public static get Instance() {
        return FaceRecognitionHandler._instance ??
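    // A minimal usage sketch of the data model described above (assumes a dashboard is active and
    // at least one image has been classified); it reads the recognized people and their face images
    // through the static helpers defined below:
    //
    //   FaceRecognitionHandler.UniqueFaces().forEach(faceDoc => {
    //       const label = FaceRecognitionHandler.UniqueFaceLabel(faceDoc);   // e.g. 'Face1'
    //       const images = FaceRecognitionHandler.UniqueFaceImages(faceDoc); // image Docs containing this person
    //       console.log(`${label}: ${images.length} image(s)`);
    //   });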
        new FaceRecognitionHandler();
    }

    /**
     * Loads the image data for an ImageField.
     */
    private static loadImage = (imgUrl: ImageField): Promise<HTMLImageElement> => {
        const [name, type] = ImageCastToNameType(imgUrl);
        const imageURL = `${name}_o.${type}`;
        return new Promise((resolve, reject) => {
            const img = new Image();
            img.crossOrigin = 'anonymous';
            img.onload = () => resolve(img);
            img.onerror = err => reject(err);
            img.src = imageURL;
        });
    };

    /**
     * Returns an array of face annotation Docs, one for each face recognized in the image.
     * @param imgDoc image with faces
     * @returns face annotation Doc array
     */
    public static ImageDocFaceAnnos = (imgDoc: Doc) => DocListCast(imgDoc[`${Doc.LayoutDataKey(imgDoc)}_annotations`]).filter(doc => doc.face);

    /**
     * Returns a list of all unique face Docs on the current dashboard.
     * @returns unique face Doc list
     */
    public static UniqueFaces = () => DocListCast(Doc.ActiveDashboard?.$myUniqueFaces);

    /**
     * Finds a unique face from its name.
     * @param name name of unique face
     * @returns unique face Doc or undefined
     */
    public static FindUniqueFaceByName = (name: string) => FaceRecognitionHandler.UniqueFaces().find(faceDoc => faceDoc.title === name);

    /**
     * Removes a unique face from the set of recognized unique faces.
     * @param faceDoc unique face Doc
     */
    public static DeleteUniqueFace = (faceDoc: Doc) => Doc.ActiveDashboard && Doc.RemoveDocFromList(Doc.ActiveDashboard[DocData], 'myUniqueFaces', faceDoc);

    /**
     * Returns the label associated with a unique face Doc.
     * @param faceDoc unique face Doc
     * @returns label string
     */
    public static UniqueFaceLabel = (faceDoc: Doc) => StrCast(faceDoc.$face);
    public static SetUniqueFaceLabel = (faceDoc: Doc, value: string) => (faceDoc.$face = value);

    /**
     * Returns all the face descriptors associated with a unique face Doc.
     * @param faceDoc unique face Doc
     * @returns face descriptors
     */
    public static UniqueFaceDescriptors = (faceDoc: Doc) => DocListCast(faceDoc.$face_annos).map(face => face.faceDescriptor as List<number>);

    /**
     * Returns a list of all face image Docs associated with a unique face Doc.
     * @param faceDoc unique face Doc
     * @returns image Docs
     */
    public static UniqueFaceImages = (faceDoc: Doc) => DocListCast(faceDoc.$face_annos).map(face => DocCast(face.annotationOn, face));

    /**
     * Adds a face annotation to a unique face Doc, which updates the unique face's set of face image descriptors.
     * @param faceAnno - a face annotation found on an image
     * @param faceDoc  - the unique face Doc to add it to
     */
    public static UniqueFaceAddFaceImage = (faceAnno: Doc, faceDoc: Doc) => {
        Doc.AddDocToList(faceDoc, 'face_annos', faceAnno);
    };

    /**
     * Removes a Doc's face annotations from a unique face Doc, updating the unique face's set of face image descriptors.
     * @param faceAnno - the Doc whose face annotations should be removed
     * @param faceDoc  - unique face Doc
     */
    public static UniqueFaceRemoveFaceImage = (faceAnno: Doc, faceDoc: Doc) => {
        FaceRecognitionHandler.ImageDocFaceAnnos(faceAnno).forEach(face => Doc.RemoveDocFromList(faceDoc[DocData], 'face_annos', face) && (face.face = undefined));
    };

    constructor() {
        FaceRecognitionHandler._instance = this;
        this.loadAPIModels().then(() => this._pendingAPIModelReadyDocs.forEach(this.classifyFacesInImage));
        DocumentManager.Instance.AddAnyViewRenderedCB(dv => FaceRecognitionHandler.Instance.classifyFacesInImage(dv));
    }
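    // A hedged example of relabeling a recognized person with the helpers above
    // ('Face1' and 'Alice' are placeholder values, not names from the source):
    //
    //   const face = FaceRecognitionHandler.FindUniqueFaceByName('Face1');
    //   face && FaceRecognitionHandler.SetUniqueFaceLabel(face, 'Alice');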
    /**
     * Loads the face detection, landmark, and recognition models.
     */
    private loadAPIModels = async () => {
        const MODEL_URL = `/models`;
        await faceapi.loadFaceDetectionModel(MODEL_URL);
        await faceapi.loadFaceLandmarkModel(MODEL_URL);
        await faceapi.loadFaceRecognitionModel(MODEL_URL);
        this._apiModelReady = true;
    };

    /**
     * Creates a new, empty unique face Doc.
     * @returns a unique face Doc
     */
    private createUniqueFaceDoc = (dashboard: Doc) => {
        const faceDocNum = NumCast(dashboard.$myUniqueFaces_count) + 1;
        dashboard.$myUniqueFaces_count = faceDocNum;
        // TODO: improve to a better name
        const uniqueFaceDoc = Docs.Create.UniqeFaceDocument({
            title: ComputedField.MakeFunction('this.face', undefined, undefined, 'this.face = value') as unknown as string,
            _layout_reflowHorizontal: true,
            _layout_reflowVertical: true,
            _layout_nativeDimEditable: true,
            _layout_borderRounding: '20px',
            _layout_fitWidth: true,
            _layout_autoHeight: true,
            _face_showImages: true,
            _width: 400,
            _height: 100,
        });
        uniqueFaceDoc.$face = `Face${faceDocNum}`;
        uniqueFaceDoc.$face_annos = new List<Doc>();
        Doc.MyFaceCollection && Doc.SetContainer(uniqueFaceDoc, Doc.MyFaceCollection);
        Doc.ActiveDashboard && Doc.AddDocToList(Doc.ActiveDashboard[DocData], 'myUniqueFaces', uniqueFaceDoc);
        return uniqueFaceDoc;
    };

    /**
     * Finds the unique face Doc that most closely matches a face descriptor.
     * @param faceDescriptor face descriptor number list
     * @returns the matching unique face Doc, or undefined if no stored face is close enough
     */
    private findMatchingFaceDoc = (faceDescriptor: Float32Array) => {
        if (!Doc.ActiveDashboard || FaceRecognitionHandler.UniqueFaces().length < 1) {
            return undefined;
        }
        const faceDescriptors = FaceRecognitionHandler.UniqueFaces().map(faceDoc => {
            const float32Array = FaceRecognitionHandler.UniqueFaceDescriptors(faceDoc).map(fd => new Float32Array(Array.from(fd)));
            return new faceapi.LabeledFaceDescriptors(FaceRecognitionHandler.UniqueFaceLabel(faceDoc), float32Array);
        });
        const faceMatcher = new FaceMatcher(faceDescriptors, 0.6);
        const match = faceMatcher.findBestMatch(faceDescriptor);
        if (match.label !== 'unknown') {
            for (const faceDoc of FaceRecognitionHandler.UniqueFaces()) {
                if (FaceRecognitionHandler.UniqueFaceLabel(faceDoc) === match.label) {
                    return faceDoc;
                }
            }
        }
        return undefined;
    };
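    // Matching note: the FaceMatcher above treats a descriptor as a match when its Euclidean
    // distance to a stored descriptor falls below the 0.6 threshold passed to its constructor.
    // An equivalent pairwise check, sketched with face-api.js's distance utility
    // (descriptorA/descriptorB are hypothetical Float32Array descriptors):
    //
    //   const isSamePerson = faceapi.euclideanDistance(descriptorA, descriptorB) < 0.6;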
    /**
     * When a document view is rendered, this finds faces in its image and tries to
     * match them to existing unique faces; otherwise new unique face(s) are created.
     * @param imgDocView The document view being analyzed.
     */
    private classifyFacesInImage = async (imgDocView: DocumentView) => {
        const imgDoc = imgDocView.Document;
        if (!Doc.UserDoc().recognizeFaceImages) return;
        const activeDashboard = Doc.ActiveDashboard;
        if (!this._apiModelReady || !activeDashboard) {
            this._pendingAPIModelReadyDocs.push(imgDocView);
        } else if (imgDoc.type === DocumentType.LOADING && !imgDoc.loadingError) {
            setTimeout(() => this.classifyFacesInImage(imgDocView), 1000);
        } else {
            const imgUrl = ImageCast(imgDoc[Doc.LayoutDataKey(imgDoc)]);
            // only examine Docs that have an image and that haven't already been examined.
            if (imgUrl && !DocListCast(Doc.MyFaceCollection?.examinedFaceDocs).includes(imgDoc[DocData])) {
                imgDocView.ComponentView?.autoTag?.();
                Doc.MyFaceCollection && Doc.AddDocToList(Doc.MyFaceCollection, 'examinedFaceDocs', imgDoc[DocData]);
                // load the image and analyze its faces
                FaceRecognitionHandler.loadImage(imgUrl).then(img =>
                    faceapi
                        .detectAllFaces(img)
                        .withFaceLandmarks()
                        .withFaceDescriptors()
                        .then(imgDocFaceDescriptions => {
                            // For each face detected, find a matching unique face or create a new one.
                            const annos = [] as Doc[];
                            const scale = NumCast(imgDoc.data_nativeWidth) / img.width;
                            const showTags = imgDocFaceDescriptions.length > 1;
                            imgDocFaceDescriptions.forEach(fd => {
                                const faceDescriptor = new List<number>(Array.from(fd.descriptor));
                                const matchedUniqueFace = this.findMatchingFaceDoc(fd.descriptor) ?? this.createUniqueFaceDoc(activeDashboard);
                                const faceAnno = Docs.Create.FreeformDocument([], {
                                    title: ComputedField.MakeFunction(`this.face.face`, undefined, undefined, 'this.face.face = value') as unknown as string,
                                    // annotationOn: imgDoc,
                                    face: matchedUniqueFace[DocData],
                                    faceDescriptor: faceDescriptor,
                                    backgroundColor: 'transparent',
                                    x: fd.alignedRect.box.left * scale,
                                    y: fd.alignedRect.box.top * scale,
                                    _width: fd.alignedRect.box.width * scale,
                                    _height: fd.alignedRect.box.height * scale,
                                    _layout_showTags: showTags,
                                });
                                // add the face annotation (with its descriptor) to the matched unique face
                                FaceRecognitionHandler.UniqueFaceAddFaceImage(faceAnno, matchedUniqueFace);
                                annos.push(faceAnno);
                            });
                            imgDoc.$data_annotations = new List<Doc>(annos);
                            imgDoc._layout_showTags = annos.length > 0;
                            return imgDocFaceDescriptions;
                        })
                ); // prettier-ignore
            }
        }
    };
}
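// End-to-end sketch of the classification flow (hypothetical driver code; in the real app this is
// triggered automatically by the view-rendered callback registered in the constructor, and
// someImageDoc is a placeholder for a displayed image Doc):
//
//   FaceRecognitionHandler.Instance; // constructing the singleton loads the models and registers the callback
//   // ... the user displays an image; its DocumentView render fires classifyFacesInImage ...
//   // afterwards, each detected face appears as an annotation on the image:
//   const annos = FaceRecognitionHandler.ImageDocFaceAnnos(someImageDoc);
//   annos.forEach(a => console.log(FaceRecognitionHandler.UniqueFaceLabel(DocCast(a.face, a))));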