(()=>{ let dermObj = dml_fmc; dermObj.capture = {}; if (!dermObj.debug){ dermObj.debug = ()=>{} } let captureObj = dermObj.capture; captureObj.videoConstrains = { video: { facingMode: { ideal: "user" },width: { min: 720, ideal: 1920, max: 1920 }, height: {min: 720, ideal: 1080, max: 1920 }}, audio: false}; captureObj.missingCaptureHints = []; document.querySelectorAll('script').forEach((scriptEl) => { if (scriptEl.src.indexOf('derm_capture.js') > -1){ captureObj.scriptUrl = scriptEl.src; } }) captureObj.htmlCopy={ uploadInfo: "We could not detect your camera or were not given permission to use it. Please enable camera permissions or upload a photo using the button below.", uploadButton: "upload a photo", orientationHint: "If necessary, please rotate your selfie so it is upright.", orientationRedo: "redo", orientationRotate: "rotate", orientationDone: "done" } captureObj.captureDict={ noFace : "Sorry, we could not detect your face", tooManyFaces : "More than one face detected", tiltedLeft : "Head tilted too much to the left!", tiltedRight : "Head tilted too much to the right!", tiltedUpwards : "Your face is tilted upwards", tiltedDownwards : "Your face is tilted downwards", turnedLeft : "Your face is turned too much to the left!", turnedRight : "Your face is turned too much to the right!", tooFar : "Your face is too far away", tooClose : "Your face is too close", tooFarRight : "Your face is too far to the right", tooFarLeft : "Your face is too far to the left", tooHigh : "Your face is too high in the image", tooLow : "Your face is too low in the image", unevenLight : "Face is not evenly lit", perfect : "\u2705 Perfect - hold still please" } captureObj.imageUpdateEvent = new Event('derm_capture_image_update'); captureObj.manualCaptureEvent = new Event('derm_manual_capture'); captureObj.manualRotatedImageDoneEvent = new Event('manual_rotated_image_done'); captureObj.manualRotatedImageRedoEvent = new Event('manual_rotated_image_redo'); captureObj.base64imageToImageObject = async (base64imageInput) => { return new Promise(async (resolve, reject)=>{ const imgObj = new Image(); imgObj.onload = ()=>{ resolve(imgObj); } imgObj.src = base64imageInput; }); } captureObj.base64ToArrayBuffer= async(base64image) => { const base64 = base64image.replace(/^data\:([^\;]+)\;base64,/gmi, ''); const binaryString = window.atob(base64); const bytes = new Uint8Array(binaryString.length); for (var i = 0; i < binaryString.length; i++) { bytes[i] = binaryString.charCodeAt(i); } return bytes.buffer; } captureObj.alignImage = async (base64imageInput) => { return new Promise(async(resolve, reject)=>{ if (captureObj.faceApiIsLoaded){ const exif = EXIF.readFromBinaryFile(await captureObj.base64ToArrayBuffer(base64imageInput)); const ctx = captureObj.canvas.getContext("2d"); let imageOrientation; if (exif.Orientation == undefined){ imageOrientation = 1 }else{ imageOrientation = exif.Orientation; } const imgObj = await captureObj.base64imageToImageObject(base64imageInput); captureObj.canvas.width = imgObj.naturalWidth; captureObj.canvas.height = imgObj.naturalHeight; captureObj.canvas.style.width = imgObj.naturalWidth + 'px'; captureObj.canvas.style.height = imgObj.naturalHeight + 'px'; ctx.drawImage(imgObj,0,0,imgObj.naturalWidth,imgObj.naturalHeight); let faces; if (dermObj.config.useAgGeFaceApiFeature === true){ faces = await faceapi.detectAllFaces(captureObj.canvas, captureObj.modelOptions).withFaceLandmarks().withAgeAndGender(); }else{ faces = await faceapi.detectAllFaces(captureObj.canvas,
captureObj.modelOptions).withFaceLandmarks(); } dermObj.debug("raw image faces - ",faces); if (faces.length > 1){ captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); reject({status: 'failure', error: 'multi_face', message:'more than one face detected'}) return; } if (faces.length == 1){ if (dermObj.config.useAgGeFaceApiFeature === true){ try{ dermObj.capture.ed6d4 = Math.round(faces[0].age); dermObj.capture.b83d2 = faces[0].gender == "male" ? 0 : 1; }catch(err){ console.error(err) } } dermObj.capture.faceBox = faces[0].detection._box; const finalImage = captureObj.canvas.toDataURL("image/jpeg") captureObj.currentImage = finalImage resolve(finalImage); return; } let rotDirection = 'toLeft'; if (imageOrientation == 8 || imageOrientation == 1){ rotDirection = 'toLeft'; } if (imageOrientation == 6){ rotDirection = 'toRight'; } if (imageOrientation == 3){ rotDirection = 'upsideDown'; } const rotatedImage = await captureObj.rotateFace(base64imageInput, rotDirection); let rotatedFaces; if (dermObj.config.useAgGeFaceApiFeature === true){ rotatedFaces = await faceapi.detectAllFaces(captureObj.canvas, captureObj.modelOptions).withFaceLandmarks().withAgeAndGender(); }else{ rotatedFaces = await faceapi.detectAllFaces(captureObj.canvas, captureObj.modelOptions).withFaceLandmarks(); } dermObj.debug("rotated image faces - ",rotatedFaces); if (rotatedFaces.length > 1){ captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); reject({status: 'failure', error: 'multi_face', message:'more than one face detected'}); return; } if (rotatedFaces.length == 1){ if (dermObj.config.useAgGeFaceApiFeature === true){ try{ dermObj.capture.ed6d4 = Math.round(rotatedFaces[0].age); dermObj.capture.b83d2 = rotatedFaces[0].gender == "male" ? 0 : 1; }catch(err){ console.error(err) } } dermObj.capture.faceBox = rotatedFaces[0].detection._box; captureObj.currentImage = rotatedImage; resolve(rotatedImage); return; } captureObj.captureState = 'manual_rotate'; await captureObj.renderCaptureState(); const manualRotatedImageDone = async ()=>{ captureObj.captureContainer.removeEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.removeEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) try{ dermObj.capture.ed6d4 = -1; dermObj.capture.b83d2 = -1; }catch(err){ console.error(err) } resolve(captureObj.currentImage); } const manualRotatedImageRedo = async ()=>{ captureObj.captureContainer.removeEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.removeEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) reject({status: 'failure', message: 'redo image capture clicked'}); } captureObj.captureContainer.addEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.addEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) }else{ captureObj.captureState = 'manual_rotate'; await captureObj.renderCaptureState(); const manualRotatedImageDone = async ()=>{ captureObj.captureContainer.removeEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.removeEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) resolve(captureObj.currentImage); } const manualRotatedImageRedo = async ()=>{ captureObj.captureContainer.removeEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.removeEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) reject({status:
'failure', message: 'redo image capture clicked'}); } captureObj.captureContainer.addEventListener('manual_rotated_image_done' , manualRotatedImageDone) captureObj.captureContainer.addEventListener('manual_rotated_image_redo' , manualRotatedImageRedo) } }) } captureObj.rotateFace = async (base64image, direction = 'toLeft') =>{ return new Promise(async (resolve,reject)=>{ const ctx = captureObj.canvas.getContext('2d'); const imgObj = await captureObj.base64imageToImageObject(base64image); let degrees = 90; let xShift = 0; let yShift = 0; if (direction == 'toLeft'){ degrees = -90; xShift = -imgObj.width; } if (direction == 'toRight'){ degrees = 90; yShift = -imgObj.height; } if (direction == 'upsideDown'){ degrees = 180; xShift = -imgObj.width; yShift = -imgObj.height; } const radDegrees = Math.PI / 180 * degrees; const newWidth = Math.round(Math.cos(radDegrees) * Math.cos(radDegrees) * imgObj.width + Math.sin(radDegrees) * Math.sin(radDegrees) * imgObj.height) const newHeight = Math.round(Math.cos(radDegrees) * Math.cos(radDegrees) * imgObj.height + Math.sin(radDegrees) * Math.sin(radDegrees) * imgObj.width) captureObj.canvas.width = newWidth; captureObj.canvas.height = newHeight; captureObj.canvas.style.width = newWidth + 'px'; captureObj.canvas.style.height = newHeight + 'px'; ctx.rotate(radDegrees); ctx.drawImage(imgObj, xShift , yShift) const rotatedImage = captureObj.canvas.toDataURL("image/jpeg"); ctx.resetTransform(); resolve(rotatedImage) }) } captureObj.stopStream = async ()=>{ return new Promise( async (resolve,reject)=>{ try{ await dermObj.capture.stream.getTracks()[0].stop(); }catch(err){} resolve() }) } captureObj.startStream = async () => { return new Promise(async (resolve, reject)=>{ captureObj.manualCaptureClicked = false; if(window.location.search.indexOf('only_upload=1') > -1){ reject({status: "failed", message:"upload URL parameter found"}); captureObj.captureState = 'upload'; captureObj.renderCaptureState(); return; } if (!document.querySelector(`#${captureObj.captureContainerId}`) || !document.querySelector(`#${captureObj.captureContainerId} .derm_video_input`)) { reject({status: "failed", message:"camera container and/or video input not found"}); return; } try{ /* try to access the user's webcam and stream the images to the video element */ let deviceList = await navigator.mediaDevices.enumerateDevices(); const videoDevices = deviceList.filter(device => {return device.kind=="videoinput" && !(/Virtual/g.test(device.label)) && !(/CamTwist/g.test(device.label))}); dermObj.devices = videoDevices; let videoDeviceId; if (videoDevices.length == 0){ reject({status: "failed", message:"no camera devices found"}); captureObj.captureState = 'upload'; captureObj.renderCaptureState(); return; }else{ videoDeviceId = videoDevices[0].deviceId; } if ('mediaDevices' in navigator && navigator.mediaDevices.getUserMedia){ let updatedVideoConstrains = {...
captureObj.videoConstrains}; if (videoDeviceId){ /* deviceId is a track constraint, so it belongs inside the video constraints */ updatedVideoConstrains.video = { ...captureObj.videoConstrains.video, deviceId: { exact: videoDeviceId } }; } let camStream; try{ camStream = await navigator.mediaDevices.getUserMedia(updatedVideoConstrains); }catch(err){ console.warn("error getting camera stream - trying again") try{ camStream = await navigator.mediaDevices.getUserMedia(updatedVideoConstrains); }catch(err){ console.error("error getting camera stream - ",err) captureObj.captureState = 'upload'; captureObj.renderCaptureState(); reject({status: "failed", message:"could not start stream"}) return; } } const videoEl = document.querySelector(`#${captureObj.captureContainerId} .derm_video_input`); videoEl.srcObject = camStream; captureObj.stream = camStream; try{ await videoEl.play(); resolve({status: "success", message:"video stream started"}); }catch(err){ reject({status: "failed", message:"play() was not allowed on video element"}); } }else{ reject({status: "failed", message:"mediaDevices or getUserMedia not supported"}); } }catch(err){ reject({status: "failed", message:"something went wrong when trying to access the camera " + err,}); } }) } captureObj.getImage = async () => { return new Promise(async (resolve, reject) => { try{ dermObj.capture.ed6d4 = -1; dermObj.capture.b83d2 = -1; }catch(err){ console.error(err) } if (captureObj.stream){ captureObj.captureState = 'camera'; await captureObj.renderCaptureState(); if (captureObj.faceApiIsLoaded){ try{ const autoImage = await captureObj.autoCaptureFace(); captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); resolve(autoImage); return; }catch(err){ console.warn('promise reject handler auto capture feature - ', err); } }else{ captureObj.manualButton.style.display = 'block'; const returnManualImage = async ()=>{ captureObj.captureContainer.removeEventListener('derm_manual_capture', returnManualImage); const manualImage = captureObj.currentImage; captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); resolve(manualImage); return; } captureObj.captureContainer.addEventListener('derm_manual_capture', returnManualImage); } }else{ captureObj.captureState = 'upload'; await captureObj.renderCaptureState(); const returnUploadImage = async ()=>{ captureObj.captureContainer.removeEventListener('derm_capture_image_update', returnUploadImage); const uploadedImage = captureObj.currentImage; try{ captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); const alignedImage = await captureObj.alignImage(uploadedImage); resolve(alignedImage); }catch(rejectRes){ if (rejectRes.message == 'redo image capture clicked'){ captureObj.captureState = 'upload'; await captureObj.renderCaptureState(); captureObj.captureContainer.addEventListener('derm_capture_image_update', returnUploadImage); return; }else { captureObj.captureState = 'loading'; await captureObj.renderCaptureState(); reject(rejectRes); return; } } return; } captureObj.captureContainer.addEventListener('derm_capture_image_update', returnUploadImage); } }) } captureObj.manualCapture = async()=>{ if (captureObj.manualCaptureClicked){ return; } captureObj.manualCaptureClicked = true; setTimeout(()=>{ captureObj.manualCaptureClicked = false; },1000) captureObj.captureContainer.querySelector('.derm_manual_button').style.display = 'none'; captureObj.currentImage = await captureObj.getImageFromStream(); /* keep the frame so the 'derm_manual_capture' listener can read it */ captureObj.captureContainer.dispatchEvent(captureObj.manualCaptureEvent); } captureObj.clickFileInput = async () =>{ if (!captureObj.fileInputPending){ captureObj.fileInputPending = true;
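// Reset the file input so picking the same file again still fires the change event, then open the picker; fileInputPending debounces repeat clicks for one second.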
captureObj.uploadInput.value = ""; captureObj.uploadInput.click(); setTimeout(()=>{ captureObj.fileInputPending = false; },1000); } } captureObj.getImageFromFile = async () =>{ if (captureObj.uploadInput.files.length > 0){ const srcImgData = captureObj.uploadInput.files[0]; const imgReader = new FileReader(); imgReader.onloadend = async () => { captureObj.currentImage = imgReader.result; captureObj.captureContainer.dispatchEvent(captureObj.imageUpdateEvent); }; imgReader.readAsDataURL(srcImgData); } } captureObj.checkFaceApi = async () => { if (captureObj.pendingFaceApiCheck) return; captureObj.pendingFaceApiCheck = true; return new Promise((resolve,reject)=>{ const libArrayToCheck = ['face-api.js'] const timeStep = 50; const timeOut = 5000; let timeElapsed = 0; const checkInterval = setInterval(()=>{ let faceApiLoadedCheck = true; libArrayToCheck.forEach((libToCheck)=>{ faceApiLoadedCheck = faceApiLoadedCheck && dermObj.loadedLibs.includes(libToCheck); }) if (faceApiLoadedCheck == true && captureObj.landmarkModelLoaded == true){ clearInterval(checkInterval); captureObj.pendingFaceApiCheck = false; captureObj.faceApiIsLoaded = true; resolve(true); } timeElapsed += timeStep; if (timeElapsed > timeOut){ clearInterval(checkInterval); captureObj.pendingFaceApiCheck = false; resolve(false); } },timeStep); }) } captureObj.initFaceLandmarkModel = async(loadWithAgMod = true) =>{ dermObj.debug('init_landmark_model with agMod = ',loadWithAgMod) captureObj.landmarkModelLoaded = false; try{ const loadStartTimeTinyFD = Date.now(); await faceapi.nets.tinyFaceDetector.load('https://facemapping.me/vendor/face/weights/') dermObj.sendGA4Time('face_api_tfd_model' , Date.now() - loadStartTimeTinyFD); if (dermObj.config.useAgGeFaceApiFeature === true && loadWithAgMod === true){ const loadStartTimeAGModel = Date.now(); await faceapi.nets.ageGenderNet.load('https://facemapping.me/vendor/face/weights/'); dermObj.sendGA4Time('face_api_ag_model' , Date.now() - loadStartTimeAGModel); } const loadStartTimeLMModel = Date.now(); await faceapi.loadFaceLandmarkModel('https://facemapping.me/vendor/face/weights/') dermObj.sendGA4Time('face_api_lm_model' , Date.now() - loadStartTimeLMModel); dermObj.sendGA4Time('face_api_all_models' , Date.now() - loadStartTimeTinyFD); captureObj.modelOptions = await new faceapi.TinyFaceDetectorOptions({ inputSize: 256, scoreThreshold: 0.5 }); captureObj.landmarkModelLoaded = true; return {status: "success", message: "landmark model was loaded"} ; }catch(err){ console.error(err) return {status: "failed", message: err} } } captureObj.autoCaptureFace = async () =>{ return new Promise((resolve, reject) =>{ let holdStillTimer = -1; const scanInterval = 100; const startTime = Date.now(); const manualButtonDelay = 5000; document.querySelector(`#${captureObj.captureContainerId} .derm_video_hint`).style.opacity = 1; const scanStreamForFaces = async()=>{ try{ const streamImage = await captureObj.getImageFromStream(); const imageObj = await captureObj.base64imageToImageObject(streamImage); let faces; if (dermObj.config.useAgGeFaceApiFeature === true){ faces = await faceapi.detectAllFaces(imageObj, captureObj.modelOptions).withFaceLandmarks().withAgeAndGender(); }else{ faces = await faceapi.detectAllFaces(imageObj, captureObj.modelOptions).withFaceLandmarks(); } clearInterval(firstScanInterval); if (Date.now()-startTime > manualButtonDelay && !captureObj.manualCaptureClicked ){ captureObj.manualButton.style.display = 'block'; } if ( captureObj.manualCaptureClicked){
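// Manual capture was requested, so resolve with the current frame as-is and skip the face checks below.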
captureObj.currentImage = streamImage; resolve(streamImage) return; } if (faces.length > 1){ holdStillTimer = -1; captureObj.updateCaptureHint('tooManyFaces') setTimeout(()=>{ scanStreamForFaces(); },scanInterval) }else if (faces.length == 1 || captureObj.manualCaptureClicked){ if (dermObj.config.useAgGeFaceApiFeature === true){ try{ dermObj.capture.ed6d4 = Math.round(faces[0].age); dermObj.capture.b83d2 = faces[0].gender == "male" ? 0 : 1; }catch(err){ console.error(err) } } dermObj.capture.faceBox = faces[0].detection._box; const faceHint = await captureObj.determineFaceHint(imageObj, faces[0]); captureObj.updateCaptureHint(faceHint) if (faceHint == "perfect"){ if (holdStillTimer == -1){ holdStillTimer = Date.now(); }else{ if (Date.now() - holdStillTimer > 3000 ){ captureObj.currentImage = streamImage; resolve(streamImage) return; } } }else{ holdStillTimer = -1; } setTimeout(()=>{ scanStreamForFaces(); },scanInterval) }else{ holdStillTimer = -1; captureObj.updateCaptureHint('noFace') setTimeout(()=>{ scanStreamForFaces(); },scanInterval) } }catch(errorDetectFace){ dermObj.debug("error in detect face: ",errorDetectFace); } } scanStreamForFaces(); const firstScanInterval = setInterval(()=>{ scanStreamForFaces(); },500) }) } captureObj.updateCaptureHint = async (hintKey) => { if (captureObj.missingCaptureHints.includes(hintKey)){ document.querySelector(`#${captureObj.captureContainerId} .derm_video_hint`).innerHTML = ""; return; } if (!Object.keys(captureObj.captureDict).includes(hintKey)){ captureObj.missingCaptureHints.push(hintKey); console.error(`Hint key "${hintKey}" not in capture dictionary`); document.querySelector(`#${captureObj.captureContainerId} .derm_video_hint`).innerHTML = ""; return; } try{ document.querySelector(`#${captureObj.captureContainerId} .derm_video_hint`).innerHTML = captureObj.captureDict[hintKey]; }catch(err){ console.error("error in updating capture hint - ",err) } } captureObj.determineFaceHint = async (imageObj , faceObj) =>{ return new Promise(async (resolve,reject)=>{ const landmarks = faceObj.landmarks; let cameraHint, widthRatioLimitLower, widthRatioLimitUpper, xLimitLower, xLimitUpper, yLimitUpper, yLimitLower, lightParameters; try{ lightParameters = await captureObj.getFaceLightProperties(imageObj , landmarks); }catch(err){ console.log('error in getting face light properties - ', err); lightParameters = { relativeLightDiff : 0, averageLight: 500 } } const leftEyePoint = landmarks.getLeftEye()[0]; const rightEyePoint = landmarks.getRightEye()[3]; const noseStartPoint = landmarks.getNose()[0]; const noseTipPoint = landmarks.getNose()[3]; const faceBox = faceObj.alignedRect._box; const imageDims = faceObj.alignedRect._imageDims; if (imageDims._width >= imageDims._height){ widthRatioLimitLower=0.1; widthRatioLimitUpper=0.9; xLimitLower = 0.1; xLimitUpper = 0.9; yLimitLower = 0.1; yLimitUpper = 0.9; }else{ widthRatioLimitLower=0.1; widthRatioLimitUpper=0.9; xLimitLower = 0.1; xLimitUpper = 0.9; yLimitLower = 0.1; yLimitUpper = 0.9; } const relFaceBoxPos={ x: faceBox._x/imageDims._width, y: faceBox._y/imageDims._height } const widthRatio = faceBox._width/imageDims._width; const eyesTilt = (leftEyePoint._y-rightEyePoint._y)/(leftEyePoint._x-rightEyePoint._x); const noseTurn = (noseTipPoint._x-noseStartPoint._x)/(noseTipPoint._y-noseStartPoint._y); const noseTilt = (noseTipPoint._y-faceBox._y)/faceBox._height; let allowHint = true; if (eyesTilt > 0.1){ cameraHint = 'tiltedLeft'; allowHint = false; }else if (eyesTilt < -0.1){ cameraHint = 'tiltedRight';
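// The remaining checks below only run while allowHint is still true, so the first hint that matches wins.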
allowHint = false; } if (noseTilt < 0.45 && allowHint){ cameraHint = 'tiltedUpwards'; allowHint = false; }else if (noseTilt > 0.55 && allowHint){ cameraHint = 'tiltedDownwards'; allowHint = false; } if (noseTurn > 0.1 && allowHint){ cameraHint = 'turnedLeft'; allowHint = false; }else if (noseTurn < -0.1 && allowHint){ cameraHint = 'turnedRight'; allowHint = false; } if (widthRatio < widthRatioLimitLower && allowHint){ cameraHint = 'tooFar'; allowHint = false; }else if (widthRatio > widthRatioLimitUpper && allowHint){ cameraHint = 'tooClose'; allowHint = false; } if (relFaceBoxPos.x < xLimitLower && allowHint){ cameraHint = 'tooFarRight'; allowHint = false; }else if(relFaceBoxPos.x > xLimitUpper && allowHint){ cameraHint = 'tooFarLeft'; allowHint = false; } if (relFaceBoxPos.y < yLimitLower && allowHint){ cameraHint = 'tooHigh'; allowHint = false; }else if(relFaceBoxPos.y > yLimitUpper && allowHint){ cameraHint = 'tooLow'; allowHint = false; } if (lightParameters.relativeLightDiff > 0.4 && allowHint){ cameraHint = 'unevenLight'; allowHint = false; } if (allowHint){ cameraHint = 'perfect'; } resolve(cameraHint); }) } captureObj.getFaceLightProperties = async (image, landmarks) =>{ return new Promise(async (resolve,reject) => { const ctx = captureObj.canvas.getContext('2d'); const leftEyeBrow = landmarks.getLeftEyeBrow(); const nose = landmarks.getNose(); const jaw = landmarks.getJawOutline(); const topLeft = { x : jaw[5]._x, y : leftEyeBrow[2]._y } const bottomLeft = { x : jaw[5]._x, y : jaw[5]._y } const topRight = { x : jaw[11]._x, y : leftEyeBrow[2]._y } const bottomRight = { x : jaw[11]._x, y : jaw[5]._y } const centerX = Math.round(0.5*(jaw[11]._x + jaw[5]._x)); const leftSideImgData = ctx.getImageData(topLeft.x, topLeft.y,centerX-topLeft.x , bottomLeft.y-topLeft.y); const rightSideImgData = ctx.getImageData(centerX, topRight.y,topRight.x - centerX , bottomRight.y - topRight.y); let leftLight = 0; let rightLight = 0; for(let ii = 0; ii < leftSideImgData.data.length/4; ii++){ leftLight += 0.2126 * leftSideImgData.data[4*ii] + 0.7152 * leftSideImgData.data[4*ii+1] + 0.0722 * leftSideImgData.data[4*ii+2]; } for(let ii = 0; ii < rightSideImgData.data.length/4; ii++){ rightLight += 0.2126 * rightSideImgData.data[4*ii] + 0.7152 * rightSideImgData.data[4*ii+1] + 0.0722 * rightSideImgData.data[4*ii+2]; if (isNaN(rightLight)){ console.log([4*ii] ,leftSideImgData.data.length, rightSideImgData.data.length) } } const lightData = { relativeLightDiff : Math.abs(leftLight-rightLight)/Math.max(leftLight,rightLight), averageLight: 0.5*(leftLight + rightLight)/(leftSideImgData.data.length/4) } resolve(lightData); }) } captureObj.getImageFromStream = async ()=>{ return new Promise(async (resolve, reject)=> { captureObj.canvas.width = captureObj.videoElement.videoWidth; captureObj.canvas.height = captureObj.videoElement.videoHeight; const ctx = captureObj.canvas.getContext("2d"); await ctx.drawImage(captureObj.videoElement,0,0,captureObj.canvas.width,captureObj.canvas.height); const base64imgData = await captureObj.canvas.toDataURL("image/jpeg"); resolve(base64imgData) }); } captureObj.renderCaptureState = async()=>{ const captContainer = document.getElementById(captureObj.captureContainerId); captContainer.classList.remove('derm_capture_state_camera', 'derm_capture_state_upload','derm_capture_state_manual_rotate','derm_capture_state_loading'); const captState = captureObj.captureState || 'camera'; captContainer.classList.add(`derm_capture_state_${captState}`) if (captState == 'manual_rotate'){
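// In the manual_rotate state, show the captured photo as the background of the rotate screen so the user can turn it upright before confirming.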
captureObj.captureContainer.querySelector('.derm_manual_rotate_screen').style.backgroundImage = `url(${captureObj.currentImage})`; } } captureObj.retakeImage = async()=>{ if (captureObj.retakeImagePending) return; captureObj.retakeImagePending = true; setTimeout(()=>{ captureObj.retakeImagePending = false; },500) captureObj.captureContainer.dispatchEvent(captureObj.manualRotatedImageRedoEvent); } captureObj.rotateManualScreen = async()=>{ if (captureObj.rotateManualScreenPending) return; captureObj.rotateManualScreenPending = true; setTimeout(()=>{ captureObj.rotateManualScreenPending = false; },500) const ctx = captureObj.canvas.getContext('2d'); const imgObj = await captureObj.base64imageToImageObject(captureObj.currentImage); captureObj.canvas.width = imgObj.height; captureObj.canvas.height = imgObj.width; ctx.rotate(- Math.PI / 2); ctx.drawImage(imgObj, -imgObj.width , 0 ); const rotBase64Image = captureObj.canvas.toDataURL('image/jpeg'); ctx.resetTransform(); captureObj.captureContainer.querySelector('.derm_manual_rotate_screen').style.backgroundImage = `url(${rotBase64Image})` captureObj.currentImage = rotBase64Image; } captureObj.doneManualRotation = async()=>{ if (captureObj.doneManualRotationPending) return; captureObj.doneManualRotationPending = true; setTimeout(()=>{ captureObj.doneManualRotationPending = false; },500) captureObj.captureContainer.dispatchEvent(captureObj.manualRotatedImageDoneEvent); } captureObj.getCaptureCopy = async(langCode) =>{ return new Promise(async (resolve, reject)=>{ let xhr = new XMLHttpRequest(); xhr.open('GET',`https://facemapping.me/vendor/capture/lang/${langCode}.json`); xhr.setRequestHeader('Content-Type', 'text/javascript'); xhr.onload = (successEvent)=>{ if (xhr.status === 200){ console.log("success capture copy call"); const copyData = JSON.parse(xhr.response); Object.keys(copyData.htmlCopy).forEach((htmlTerm)=>{ captureObj.htmlCopy[htmlTerm] = copyData.htmlCopy[htmlTerm]; }) Object.keys(copyData.captureDict).forEach((captureTerm)=>{ captureObj.captureDict[captureTerm] = copyData.captureDict[captureTerm]; }) console.log(copyData) resolve(); }else{ reject(); } } xhr.onerror = (errorEvent)=>{ console.error("error capture copy call", errorEvent); reject(); } xhr.onabort = (abortEvent)=>{ console.error("abort capture copy call",abortEvent); reject(); } xhr.ontimeout = (timeoutEvent)=>{ console.error("timeout capture copy call",timeoutEvent); reject(); } xhr.send(null); }) } captureObj.init = async (configurationObj)=>{ return new Promise(async (resolve, reject)=>{ if (captureObj.initDone){ resolve(); return; } captureObj.initDone = true; dermObj.lang_code = dermObj.lang_code || navigator.language; try{ await captureObj.getCaptureCopy(dermObj.lang_code); console.log(`capture copy DONE - ${dermObj.lang_code}`) }catch(err){ console.error(`ERROR capture copy call - ${dermObj.lang_code} - trying reduced lang code`); try{ await captureObj.getCaptureCopy(dermObj.lang_code.split('-')[0]); console.log(`capture copy DONE - ${dermObj.lang_code.split('-')[0]}`) }catch(err2){ console.error("ERROR capture copy call - using default copy",err) } } Object.keys(configurationObj).forEach((key) =>{ captureObj[key] = configurationObj[key]; }) captureObj.captureState = 'loading'; captureObj.renderCaptureState(); if (!document.getElementById(captureObj.captureContainerId)){ reject({status:"failed", message:"video container not found"}); return; } const captureContainer = document.getElementById(captureObj.captureContainerId);
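// init() builds the capture UI inside the host container: camera view, upload fallback, manual-rotate screen, loading spinner, a working canvas and the stylesheet link.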
captureContainer.classList.add('derm_capture_container'); captureObj.captureContainer = captureContainer; const cameraContainer = document.createElement("div"); cameraContainer.classList.add('derm_camera_container'); captureContainer.appendChild(cameraContainer); const screeningEl = document.createElement("div"); screeningEl.classList.add('derm_video_border'); cameraContainer.appendChild(screeningEl); const vidEl = document.createElement("video"); vidEl.classList.add('derm_video_input'); vidEl.setAttribute('playsinline',''); vidEl.setAttribute('autoplay',''); vidEl.setAttribute('muted',''); cameraContainer.appendChild(vidEl); captureObj.videoElement = vidEl; const hintEl = document.createElement("div"); hintEl.classList.add('derm_video_hint'); cameraContainer.appendChild(hintEl); const manualButton = document.createElement("div"); manualButton.classList.add('derm_manual_button'); manualButton.onclick = captureObj.manualCapture; manualButton.innerHTML = '';
cameraContainer.appendChild(manualButton); captureObj.manualButton = manualButton; const uploadContainer = document.createElement("div"); uploadContainer.classList.add('derm_upload_container'); captureContainer.appendChild(uploadContainer); const uploadInput = document.createElement("input"); uploadInput.classList.add('derm_upload_input'); uploadInput.setAttribute('type','file'); uploadInput.setAttribute('accept','image/*'); uploadInput.onchange = captureObj.getImageFromFile; uploadContainer.appendChild(uploadInput); captureObj.uploadInput = uploadInput; const uploadInfo = document.createElement("p"); uploadInfo.classList.add('derm_upload_info'); uploadInfo.innerHTML = captureObj.htmlCopy.uploadInfo; uploadContainer.appendChild(uploadInfo); const uploadButton = document.createElement("div"); uploadButton.classList.add('derm_upload_button'); uploadButton.innerHTML = captureObj.htmlCopy.uploadButton; uploadButton.onclick = captureObj.clickFileInput; uploadContainer.appendChild(uploadButton); const userOrientationContainer = document.createElement("div"); userOrientationContainer.classList.add('derm_manual_rotate_container'); captureContainer.appendChild(userOrientationContainer); const orientationScreen = document.createElement("div"); orientationScreen.classList.add('derm_manual_rotate_screen'); orientationScreen.innerHTML = '';
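// Manual-rotate UI: the screen shows the photo and the toolbar offers redo / rotate / done, wired to retakeImage, rotateManualScreen and doneManualRotation.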
userOrientationContainer.appendChild(orientationScreen); const orientationHint = document.createElement("p"); orientationHint.classList.add('derm_manual_rotate_hint'); orientationHint.innerHTML = captureObj.htmlCopy.orientationHint; userOrientationContainer.appendChild(orientationHint); const orientationToolbar = document.createElement("div"); orientationToolbar.classList.add('derm_manual_rotate_toolbar'); userOrientationContainer.appendChild(orientationToolbar); const redoEl = document.createElement("p"); redoEl.classList.add('derm_manual_rotate_redo', 'derm_manual_rotate_button'); redoEl.innerHTML = captureObj.htmlCopy.orientationRedo; redoEl.onclick = captureObj.retakeImage; orientationToolbar.appendChild(redoEl); const rotateEl = document.createElement("p"); rotateEl.classList.add('derm_manual_rotate_rotate', 'derm_manual_rotate_button'); rotateEl.innerHTML = captureObj.htmlCopy.orientationRotate; rotateEl.onclick = captureObj.rotateManualScreen; orientationToolbar.appendChild(rotateEl); const doneEl = document.createElement("p"); doneEl.classList.add('derm_manual_rotate_done', 'derm_manual_rotate_button'); doneEl.innerHTML = captureObj.htmlCopy.orientationDone; doneEl.onclick = captureObj.doneManualRotation; orientationToolbar.appendChild(doneEl); const loadingContainer = document.createElement('div'); loadingContainer.classList.add('derm_capture_loading_container'); captureContainer.appendChild(loadingContainer); const loadingSpinner = document.createElement('div'); loadingSpinner.classList.add('derm_capture_loading_spinner'); loadingSpinner.innerHTML = '';
loadingContainer.appendChild(loadingSpinner); const canvasEl = document.createElement("canvas"); canvasEl.classList.add('derm_video_canvas'); captureContainer.appendChild(canvasEl); captureObj.canvas = canvasEl; const sessionTimeString = (new Date()).getTime(); const cssId = 'DermCaptureStyleSheet'; const pagelink = document.createElement('link'); pagelink.id = cssId; pagelink.rel = 'stylesheet'; pagelink.type = 'text/css'; pagelink.href = `https://facemapping.me/vendor/capture/derm_capture.min.css?${sessionTimeString}`; pagelink.media = 'all'; captureContainer.appendChild(pagelink); resolve({status:"success", message:"capture elements were built"}) }) } captureObj.displayCurrentImage = (parentElementId) => { if (!parentElementId) return; const parentElement = document.getElementById(parentElementId); if (!parentElement) return; const newImg = new Image(); newImg.onload = () => { newImg.style.position = "absolute"; newImg.style.top = "0"; newImg.style.left = "0"; parentElement.appendChild(newImg); } newImg.src = captureObj.currentImage; } })();
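/*
  Example usage (a minimal sketch, not part of the library). It assumes the host
  page defines the dml_fmc object (config, loadedLibs, sendGA4Time, lang_code),
  loads face-api.js and exif-js beforehand, and provides a container element;
  the ids 'derm_capture_container' and 'derm_result_container' are hypothetical.

  (async () => {
    const capture = dml_fmc.capture;
    await capture.init({ captureContainerId: 'derm_capture_container' }); // builds the capture UI
    capture.initFaceLandmarkModel();   // starts loading the face-api.js models in the background
    await capture.checkFaceApi();      // waits up to 5s for the models; sets capture.faceApiIsLoaded
    try{
      await capture.startStream();     // may reject; getImage() then falls back to the upload UI
    }catch(err){
      console.warn('camera unavailable - ', err);
    }
    try{
      const base64Image = await capture.getImage();            // resolves with a base64 JPEG
      capture.displayCurrentImage('derm_result_container');    // optional: show the result
    }catch(err){
      console.error('capture failed - ', err);
    }finally{
      capture.stopStream();
    }
  })();
*/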