"Fossies" - the Fresh Open Source Software Archive

Member "jitsi-meet-7315/react/features/face-landmarks/FaceLandmarksHelper.ts" (2 Jun 2023, 8191 Bytes) of package /linux/misc/jitsi-meet-7315.tar.gz:


As a special service "Fossies" has tried to format the requested source page into HTML format using (guessed) TypeScript source code syntax highlighting (style: standard) with prefixed line numbers and code folding option. Alternatively you can here view or download the uninterpreted source code file.

import { setWasmPaths } from '@tensorflow/tfjs-backend-wasm';
import { Config, FaceResult, Human } from '@vladmandic/human';

import { DETECTION_TYPES, FACE_DETECTION_SCORE_THRESHOLD, FACE_EXPRESSIONS_NAMING_MAPPING } from './constants';
import { DetectInput, DetectOutput, FaceBox, FaceExpression, InitInput } from './types';

export interface IFaceLandmarksHelper {
    detect: ({ image, threshold }: DetectInput) => Promise<DetectOutput>;
    getDetectionInProgress: () => boolean;
    getDetections: (image: ImageBitmap | ImageData) => Promise<Array<FaceResult>>;
    getFaceBox: (detections: Array<FaceResult>, threshold: number) => FaceBox | undefined;
    getFaceCount: (detections: Array<FaceResult>) => number;
    getFaceExpression: (detections: Array<FaceResult>) => FaceExpression | undefined;
    init: () => Promise<void>;
}

/**
 * Helper class for human library.
 */
export class HumanHelper implements IFaceLandmarksHelper {
    protected human: Human | undefined;
    protected faceDetectionTypes: string[];
    protected baseUrl: string;
    private detectionInProgress = false;
    private lastValidFaceBox: FaceBox | undefined;

    /**
     * Configuration for human.
     */
    private config: Partial<Config> = {
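        // Everything is disabled by default; init() enables only the detectors needed for the requested detection types.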
        backend: 'humangl',
        async: true,
        warmup: 'none',
        cacheModels: true,
        cacheSensitivity: 0,
        debug: false,
        deallocate: true,
        filter: { enabled: false },
        face: {
            enabled: false,
            detector: {
                enabled: false,
                rotation: false,
                modelPath: 'blazeface-front.json',
                maxDetected: 20
            },
            mesh: { enabled: false },
            iris: { enabled: false },
            emotion: {
                enabled: false,
                modelPath: 'emotion.json'
            },
            description: { enabled: false }
        },
        hand: { enabled: false },
        gesture: { enabled: false },
        body: { enabled: false },
        segmentation: { enabled: false }
    };

    /**
     * Constructor function for the helper which initializes the helper.
     *
     * @param {InitInput} input - The input for the helper.
     * @returns {void}
     */
    constructor({ baseUrl, detectionTypes }: InitInput) {
        this.faceDetectionTypes = detectionTypes;
        this.baseUrl = baseUrl;
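        // init() is async; its promise is not awaited here, so the models load in the background.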
        this.init();
    }

    /**
     * Initializes the human helper with the available tfjs backend for the given detection types.
     *
     * @returns {Promise<void>}
     */
    async init(): Promise<void> {

        if (!this.human) {
            this.config.modelBasePath = this.baseUrl;
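            // Without OffscreenCanvas support the default 'humangl' (WebGL) backend is not used;
            // fall back to the WASM backend and point tfjs at the wasm binaries under the base URL.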
            if (!self.OffscreenCanvas) {
                this.config.backend = 'wasm';
                this.config.wasmPath = this.baseUrl;
                setWasmPaths(this.baseUrl);
            }

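            // Enable only the parts of the face pipeline needed for the requested detection types.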
            if (this.faceDetectionTypes.length > 0 && this.config.face) {
                this.config.face.enabled = true;
            }

            if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX) && this.config.face?.detector) {
                this.config.face.detector.enabled = true;
            }

            if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS) && this.config.face?.emotion) {
                this.config.face.emotion.enabled = true;
            }

            const initialHuman = new Human(this.config);

            try {
                await initialHuman.load();
            } catch (err) {
                console.error(err);
            }

            this.human = initialHuman;
        }
    }

    /**
     * Gets the face box from the detections. If there are no valid detections it will return undefined.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @param {number} threshold - Face box position change threshold.
     * @returns {FaceBox | undefined}
     */
    getFaceBox(detections: Array<FaceResult>, threshold: number): FaceBox | undefined {
        if (this.getFaceCount(detections) !== 1) {
            return;
        }

        const faceBox: FaceBox = {
            // normalize to percentage based values
            left: Math.round(detections[0].boxRaw[0] * 100),
            right: Math.round((detections[0].boxRaw[0] + detections[0].boxRaw[2]) * 100)
        };

        faceBox.width = Math.round(faceBox.right - faceBox.left);

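        // Ignore a box whose horizontal position changed by less than the threshold since the last
        // valid box, so that small movements do not trigger an update.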
        if (this.lastValidFaceBox && threshold && Math.abs(this.lastValidFaceBox.left - faceBox.left) < threshold) {
            return;
        }

        this.lastValidFaceBox = faceBox;

        return faceBox;
    }

    /**
     * Gets the face expression from the detections. If there are no valid detections it will return undefined.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @returns {FaceExpression | undefined}
     */
    getFaceExpression(detections: Array<FaceResult>): FaceExpression | undefined {
        if (this.getFaceCount(detections) !== 1) {
            return;
        }

        const detection = detections[0];

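        // Map the first emotion reported by human to the expression naming used by the app.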
        if (detection.emotion) {
            return {
                expression: FACE_EXPRESSIONS_NAMING_MAPPING[detection.emotion[0].emotion],
                score: detection.emotion[0].score
            };
        }
    }

    /**
     * Gets the face count from the detections, which is the number of detections.
     *
     * @param {Array<FaceResult>} detections - The array with the detections.
     * @returns {number}
     */
    getFaceCount(detections: Array<FaceResult> | undefined): number {
        if (detections) {
            return detections.length;
        }

        return 0;
    }

    /**
     * Gets the detections from the image captured from the track.
     *
     * @param {ImageBitmap | ImageData} image - The image captured from the track;
     * if OffscreenCanvas is available it will be an ImageBitmap, otherwise it will be ImageData.
     * @returns {Promise<Array<FaceResult>>}
     */
    async getDetections(image: ImageBitmap | ImageData): Promise<Array<FaceResult>> {
        if (!this.human || !this.faceDetectionTypes.length) {
            return [];
        }

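        // Run detection inside a tf engine scope so intermediate tensors are disposed when the scope ends.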
        this.human.tf.engine().startScope();

        const imageTensor = this.human.tf.browser.fromPixels(image);
        const { face: detections } = await this.human.detect(imageTensor, this.config);

        this.human.tf.engine().endScope();

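        // Discard detections whose confidence score is at or below the configured threshold.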
        return detections.filter(detection => detection.score > FACE_DETECTION_SCORE_THRESHOLD);
    }

    /**
     * Gathers together all the data from the detections; this is the function that is called from the worker.
     *
     * @param {DetectInput} input - The input for the detections.
     * @returns {Promise<DetectOutput>}
     */
    public async detect({ image, threshold }: DetectInput): Promise<DetectOutput> {
        let faceExpression;
        let faceBox;

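        // Mark the detection as in progress; the state is exposed through getDetectionInProgress().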
        this.detectionInProgress = true;

        const detections = await this.getDetections(image);

        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_EXPRESSIONS)) {
            faceExpression = this.getFaceExpression(detections);
        }

        if (this.faceDetectionTypes.includes(DETECTION_TYPES.FACE_BOX)) {
            // If more than one face is detected, face centering is disabled from this point on.
            if (this.getFaceCount(detections) > 1) {
                this.faceDetectionTypes.splice(this.faceDetectionTypes.indexOf(DETECTION_TYPES.FACE_BOX), 1);

                // Full-width face box that resets the re-centering.
                faceBox = {
                    left: 0,
                    right: 100,
                    width: 100
                };
            } else {
                faceBox = this.getFaceBox(detections, threshold);
            }

        }

        this.detectionInProgress = false;

        return {
            faceExpression,
            faceBox,
            faceCount: this.getFaceCount(detections)
        };
    }

    /**
     * Returns the detection state.
     *
     * @returns {boolean}
     */
    public getDetectionInProgress(): boolean {
        return this.detectionInProgress;
    }
}
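
/*
 * Illustrative usage sketch (not part of the original file). In jitsi-meet this helper is driven from the
 * face-landmarks worker; the base URL, detection types and threshold below are assumed values for the example.
 *
 *     const helper: IFaceLandmarksHelper = new HumanHelper({
 *         baseUrl: '/libs/', // hypothetical base URL for the human models
 *         detectionTypes: [ DETECTION_TYPES.FACE_BOX, DETECTION_TYPES.FACE_EXPRESSIONS ]
 *     });
 *
 *     if (!helper.getDetectionInProgress()) {
 *         const { faceBox, faceExpression, faceCount } = await helper.detect({ image, threshold: 10 });
 *     }
 */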