
init commit

master
Cailean Finn 6 months ago
commit 69c45a9304
  1. face.js (46)
  2. index.html (17)
  3. jsconfig.json (10)
  4. models/face_expression_model-shard1 (BIN)
  5. models/face_expression_model-weights_manifest.json (1)
  6. models/face_landmark_68_model-shard1 (BIN)
  7. models/face_landmark_68_model-weights_manifest.json (1)
  8. models/face_landmark_68_tiny_model-shard1 (BIN)
  9. models/face_landmark_68_tiny_model-weights_manifest.json (1)
  10. models/face_recognition_model-shard1 (BIN)
  11. models/face_recognition_model-shard2 (6)
  12. models/face_recognition_model-weights_manifest.json (1)
  13. models/mtcnn_model-shard1 (BIN)
  14. models/mtcnn_model-weights_manifest.json (1)
  15. models/ssd_mobilenetv1_model-shard1 (BIN)
  16. models/ssd_mobilenetv1_model-shard2 (137)
  17. models/ssd_mobilenetv1_model-weights_manifest.json (1)
  18. models/tiny_face_detector_model-shard1 (BIN)
  19. models/tiny_face_detector_model-weights_manifest.json (1)
  20. readme.md (1)
  21. sketch.js (47)
  22. sketch2.js (215)
  23. style.css (8)

face.js (46)

@@ -0,0 +1,46 @@
class Face {
  constructor(image, emotions) {
    this.image = image;
    this.emotions = emotions;
    this.position = createVector(width / 2, height / 2);
    this.destination = createVector(0, 0);
    this.isFinished = false;
    this.positionRandom();
  }
  // Returns true when the happy score is above 0.5
  emotionCheck() {
    let { neutral, happy, angry, sad, disgusted, surprised, fearful } = this.emotions;
    if (happy > 0.5) return true;
    return false;
  }
  // Sets a random destination
  positionRandom() {
    this.destination.set(random(0, windowWidth), random(0, windowHeight));
  }
  // Draws the face to the canvas, easing it towards its destination
  drawImage() {
    if (!this.isFinished) {
      this.checkDistance();
      this.position.set(p5.Vector.slerp(this.position, this.destination, 0.1));
      image(this.image, this.position.x, this.position.y);
    }
  }
  // Distance between two vectors
  checkDistance() {
    if (p5.Vector.dist(this.position, this.destination) < 0.1) this.isFinished = true;
  }
  // Adds face to the main image
  addFaceToImage() {
    // Returns
    return false;
  }
}
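A minimal usage sketch for the Face class above (not part of this commit), assuming p5 in global mode, a preloaded p5.Image named img, and a hypothetical face-api expressions object expr such as { happy: 0.9, neutral: 0.05 }; sketch2.js below builds both from live detections instead:

let faces = [];

function setup() {
  createCanvas(windowWidth, windowHeight);
  // img and expr are placeholders here, normally taken from a detection result
  let face = new Face(img, expr);
  if (face.emotionCheck()) faces.push(face);
}

function draw() {
  background(0);
  // Each face eases towards its randomly chosen destination, then stops drawing itself
  for (let f of faces) f.drawImage();
}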

index.html (17)

@@ -0,0 +1,17 @@
<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8" />
    <title>face-api</title>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.0/p5.js"></script>
    <script src="https://cdnjs.cloudflare.com/ajax/libs/p5.js/1.9.0/addons/p5.sound.js"></script>
    <script src="https://cdn.jsdelivr.net/gh/ml5js/Intro-ML-Arts-IMA@ml5-build-10-7-19/ml5_build/ml5.min.js"></script>
    <link rel="stylesheet" type="text/css" href="style.css" />
  </head>
  <body>
    <script src="sketch.js"></script>
    <script src="face.js"></script>
  </body>
</html>

jsconfig.json (10)

@@ -0,0 +1,10 @@
{
  "compilerOptions": {
    "target": "es6"
  },
  "include": [
    "*.js",
    "**/*.js",
    "c:\\Users\\caile\\.vscode\\extensions\\samplavigne.p5-vscode-1.2.15\\p5types\\global.d.ts"
  ]
}

models/face_expression_model-shard1 (BIN)

Binary file not shown.

models/face_expression_model-weights_manifest.json (1)

File diff suppressed because one or more lines are too long

models/face_landmark_68_model-shard1 (BIN)

Binary file not shown.

models/face_landmark_68_model-weights_manifest.json (1)

File diff suppressed because one or more lines are too long

models/face_landmark_68_tiny_model-shard1 (BIN)

Binary file not shown.

models/face_landmark_68_tiny_model-weights_manifest.json (1)

@@ -0,0 +1 @@
[{"weights":[{"name":"dense0/conv0/filters","shape":[3,3,3,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008194216092427571,"min":-0.9423348506291708}},{"name":"dense0/conv0/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006839508168837603,"min":-0.8412595047670252}},{"name":"dense0/conv1/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009194007106855804,"min":-1.2779669878529567}},{"name":"dense0/conv1/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0036026100317637128,"min":-0.3170296827952067}},{"name":"dense0/conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.000740380117706224,"min":-0.06367269012273527}},{"name":"dense0/conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/pointwise_filter","shape":[1,1,32,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":1,"min":0}},{"name":"dense0/conv2/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0037702228508743585,"min":-0.6220867703942692}},{"name":"dense1/conv0/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0033707996209462483,"min":-0.421349952618281}},{"name":"dense1/conv0/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014611541991140328,"min":-1.8556658328748217}},{"name":"dense1/conv0/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002832523046755323,"min":-0.30307996600281956}},{"name":"dense1/conv1/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006593170586754294,"min":-0.6329443763284123}},{"name":"dense1/conv1/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.012215249211180444,"min":-1.6001976466646382}},{"name":"dense1/conv1/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002384825547536214,"min":-0.3028728445370992}},{"name":"dense1/conv2/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005859645441466687,"min":-0.7617539073906693}},{"name":"dense1/conv2/pointwise_filter","shape":[1,1,64,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013121426806730382,"min":-1.7845140457153321}},{"name":"dense1/conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032247188044529336,"min":-0.46435950784122243}},{"name":"dense2/conv0/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002659512618008782,"min":-0.32977956463308894}},{"name":"dense2/conv0/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015499923743453681,"min":-1.9839902391620712}},{"name":"dense2/conv0/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0032450980999890497,"min":-0.522460794098237}},{"name":"dense2/conv1/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005911862382701799,"min":-0.792189559282041}},{"name":"dense2/conv1/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021025861478319356,"min":-2.2077154552235325}},{"name":"dense2/conv1/bias","shape":[128],"dtype":"float32","quantization":{"dtyp
e":"uint8","scale":0.00349616945958605,"min":-0.46149436866535865}},{"name":"dense2/conv2/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008104994250278847,"min":-1.013124281284856}},{"name":"dense2/conv2/pointwise_filter","shape":[1,1,128,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.029337059282789044,"min":-3.5791212325002633}},{"name":"dense2/conv2/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0038808938334969913,"min":-0.4230174278511721}},{"name":"fc/weights","shape":[128,136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.014016061670639936,"min":-1.8921683255363912}},{"name":"fc/bias","shape":[136],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0029505149698724935,"min":0.088760145008564}}],"paths":["face_landmark_68_tiny_model-shard1"]}]

models/face_recognition_model-shard1 (BIN)

Binary file not shown.

models/face_recognition_model-shard2 (6)

File diff suppressed because one or more lines are too long

models/face_recognition_model-weights_manifest.json (1)

File diff suppressed because one or more lines are too long

models/mtcnn_model-shard1 (BIN)

Binary file not shown.

models/mtcnn_model-weights_manifest.json (1)

@@ -0,0 +1 @@
[{"paths":["mtcnn_model-shard1"],"weights":[{"dtype":"float32","name":"pnet/conv1/weights","shape":[3,3,3,10]},{"dtype":"float32","name":"pnet/conv1/bias","shape":[10]},{"dtype":"float32","name":"pnet/prelu1_alpha","shape":[10]},{"dtype":"float32","name":"pnet/conv2/weights","shape":[3,3,10,16]},{"dtype":"float32","name":"pnet/conv2/bias","shape":[16]},{"dtype":"float32","name":"pnet/prelu2_alpha","shape":[16]},{"dtype":"float32","name":"pnet/conv3/weights","shape":[3,3,16,32]},{"dtype":"float32","name":"pnet/conv3/bias","shape":[32]},{"dtype":"float32","name":"pnet/prelu3_alpha","shape":[32]},{"dtype":"float32","name":"pnet/conv4_1/weights","shape":[1,1,32,2]},{"dtype":"float32","name":"pnet/conv4_1/bias","shape":[2]},{"dtype":"float32","name":"pnet/conv4_2/weights","shape":[1,1,32,4]},{"dtype":"float32","name":"pnet/conv4_2/bias","shape":[4]},{"dtype":"float32","name":"rnet/conv1/weights","shape":[3,3,3,28]},{"dtype":"float32","name":"rnet/conv1/bias","shape":[28]},{"dtype":"float32","name":"rnet/prelu1_alpha","shape":[28]},{"dtype":"float32","name":"rnet/conv2/weights","shape":[3,3,28,48]},{"dtype":"float32","name":"rnet/conv2/bias","shape":[48]},{"dtype":"float32","name":"rnet/prelu2_alpha","shape":[48]},{"dtype":"float32","name":"rnet/conv3/weights","shape":[2,2,48,64]},{"dtype":"float32","name":"rnet/conv3/bias","shape":[64]},{"dtype":"float32","name":"rnet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"rnet/fc1/weights","shape":[576,128]},{"dtype":"float32","name":"rnet/fc1/bias","shape":[128]},{"dtype":"float32","name":"rnet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"rnet/fc2_1/weights","shape":[128,2]},{"dtype":"float32","name":"rnet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"rnet/fc2_2/weights","shape":[128,4]},{"dtype":"float32","name":"rnet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/conv1/weights","shape":[3,3,3,32]},{"dtype":"float32","name":"onet/conv1/bias","shape":[32]},{"dtype":"float32","name":"onet/prelu1_alpha","shape":[32]},{"dtype":"float32","name":"onet/conv2/weights","shape":[3,3,32,64]},{"dtype":"float32","name":"onet/conv2/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu2_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv3/weights","shape":[3,3,64,64]},{"dtype":"float32","name":"onet/conv3/bias","shape":[64]},{"dtype":"float32","name":"onet/prelu3_alpha","shape":[64]},{"dtype":"float32","name":"onet/conv4/weights","shape":[2,2,64,128]},{"dtype":"float32","name":"onet/conv4/bias","shape":[128]},{"dtype":"float32","name":"onet/prelu4_alpha","shape":[128]},{"dtype":"float32","name":"onet/fc1/weights","shape":[1152,256]},{"dtype":"float32","name":"onet/fc1/bias","shape":[256]},{"dtype":"float32","name":"onet/prelu5_alpha","shape":[256]},{"dtype":"float32","name":"onet/fc2_1/weights","shape":[256,2]},{"dtype":"float32","name":"onet/fc2_1/bias","shape":[2]},{"dtype":"float32","name":"onet/fc2_2/weights","shape":[256,4]},{"dtype":"float32","name":"onet/fc2_2/bias","shape":[4]},{"dtype":"float32","name":"onet/fc2_3/weights","shape":[256,10]},{"dtype":"float32","name":"onet/fc2_3/bias","shape":[10]}]}]

models/ssd_mobilenetv1_model-shard1 (BIN)

Binary file not shown.

models/ssd_mobilenetv1_model-shard2 (137)

File diff suppressed because one or more lines are too long

models/ssd_mobilenetv1_model-weights_manifest.json (1)

File diff suppressed because one or more lines are too long

models/tiny_face_detector_model-shard1 (BIN)

Binary file not shown.

models/tiny_face_detector_model-weights_manifest.json (1)

@@ -0,0 +1 @@
[{"weights":[{"name":"conv0/filters","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.009007044399485869,"min":-1.2069439495311063}},{"name":"conv0/bias","shape":[16],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.005263455241334205,"min":-0.9211046672334858}},{"name":"conv1/depthwise_filter","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.004001977630690033,"min":-0.5042491814669441}},{"name":"conv1/pointwise_filter","shape":[1,1,16,32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.013836609615999109,"min":-1.411334180831909}},{"name":"conv1/bias","shape":[32],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0015159862590771096,"min":-0.30926119685173037}},{"name":"conv2/depthwise_filter","shape":[3,3,32,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002666276225856706,"min":-0.317286870876948}},{"name":"conv2/pointwise_filter","shape":[1,1,32,64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.015265831292844286,"min":-1.6792414422128714}},{"name":"conv2/bias","shape":[64],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0020280554598453,"min":-0.37113414915168985}},{"name":"conv3/depthwise_filter","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006100742489683862,"min":-0.8907084034938438}},{"name":"conv3/pointwise_filter","shape":[1,1,64,128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.016276211832083907,"min":-2.0508026908425725}},{"name":"conv3/bias","shape":[128],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.003394414279975143,"min":-0.7637432129944072}},{"name":"conv4/depthwise_filter","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.006716050119961009,"min":-0.8059260143953211}},{"name":"conv4/pointwise_filter","shape":[1,1,128,256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.021875603993733724,"min":-2.8875797271728514}},{"name":"conv4/bias","shape":[256],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.0041141652009066415,"min":-0.8187188749804216}},{"name":"conv5/depthwise_filter","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008423839597141042,"min":-0.9013508368940915}},{"name":"conv5/pointwise_filter","shape":[1,1,256,512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.030007277283014035,"min":-3.8709387695088107}},{"name":"conv5/bias","shape":[512],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.008402082966823203,"min":-1.4871686851277068}},{"name":"conv8/filters","shape":[1,1,512,25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.028336129469030042,"min":-4.675461362389957}},{"name":"conv8/bias","shape":[25],"dtype":"float32","quantization":{"dtype":"uint8","scale":0.002268134028303857,"min":-0.41053225912299807}}],"paths":["tiny_face_detector_model-shard1"]}]

readme.md (1)

@@ -0,0 +1 @@
p5 sketch with BodyPix (:

sketch.js (47)

@@ -0,0 +1,47 @@
let bodypix;
let video;
let segmentation;
let img;
let ready = false;

const options = {
  outputStride: 16,
  segmentationThreshold: 0.3,
};

function preload() {
  video = createCapture(VIDEO);
  bodypix = ml5.bodyPix(video, options, modelReady);
}

function setup() {
  createCanvas(480, 360);
  video.size(480, 360);
}

function modelReady() {
  console.log("model ready!");
  ready = true;
  bodypix.segment(gotResults);
}

function gotResults(err, result) {
  if (err) {
    console.log(err);
    return;
  }
  segmentation = result;
  // Keep segmenting so the mask tracks the live video
  bodypix.segment(gotResults);
}

function draw() {
  background(0);
  image(video, 0, 0, 480, 360);
  if (ready) {
    if (segmentation) {
      image(segmentation.maskBackground, 0, 0, 480, 360);
      console.log(segmentation);
    }
  }
}

sketch2.js (215)

@@ -0,0 +1,215 @@
let detections = [];
let fps;
let video;
let boxDimensions;
let boxScale;
let canvas;
let face;
let newImg;
let lm;
let staticImage;
let faceapi;
let faces = [];
// ROI
let roi;
// let lmBuffer;
const detectionOptions = {
  withLandmarks: true,
  withExpressions: true,
  withDescriptors: true,
  minConfidence: 0.5,
  Mobilenetv1Model: "models",
  FaceLandmarkModel: "models",
  FaceRecognitionModel: "models",
  FaceExpressionModel: "models",
};
function setup() {
  frameRate(60);
  pixelDensity(1);
  canvas = createCanvas(windowWidth, windowHeight);
  video = createCapture(VIDEO);
  video.size(480, 360);
  video.hide();
  newImg = createImage(480, 360);
  staticImage = createGraphics(width, height);
  staticImage.background(0, 0, 0);
  faceapi = ml5.faceApi(video, detectionOptions, faceReady);
}

function draw() {
  image(staticImage, 0, 0);
  background(0, 0, 0, 0);
  drawFaces();
  removeFinishedImages();
  faceapi.detect(gotFaces);
}
function faceReady() {
  faceapi.detect(gotFaces);
}

function gotFaces(error, result) {
  if (error) {
    console.log(error);
    return;
  }
  detections = result;
  if (detections) {
    if (detections.length > 0) {
      drawLandmarkMask(detections);
    }
  }
}

function drawLandmarkMask(detections) {
  if (detections.length > 0) {
    for (let f = 0; f < detections.length; f++) {
      let { _x, _y, _width, _height } = detections[f].alignedRect._box;
      let points = detections[f].landmarks.positions;
      updateGraphicsObject(points);
      updateNewImage(_x, _y, _width, _height);
      face = new Face(roi, detections[f].expressions);
      if (face.emotionCheck()) {
        faces.push(face);
      }
    }
  }
}
function updateNewImage(_x, _y, _width, _height) {
  // Cross-checks against the mask: wherever the mask pixel is opaque,
  // the corresponding video pixel is copied into the final image
  lm.loadPixels();
  video.loadPixels();
  newImg.loadPixels();
  for (let i = 0; i < newImg.pixels.length; i += 4) {
    if (lm.pixels[i + 3] == 255) {
      newImg.pixels[i + 0] = video.pixels[i + 0];
      newImg.pixels[i + 1] = video.pixels[i + 1];
      newImg.pixels[i + 2] = video.pixels[i + 2];
      newImg.pixels[i + 3] = 180;
    } else {
      newImg.pixels[i + 0] = 0;
      newImg.pixels[i + 1] = 0;
      newImg.pixels[i + 2] = 0;
      newImg.pixels[i + 3] = 0;
    }
  }
  lm.remove();
  newImg.updatePixels();
  roi = newImg.get(_x, _y, _width, _height);
}

function updateGraphicsObject(points) {
  // Creates a graphics mask from the face-mesh outline
  lm = createGraphics(480, 360);
  lm.background(0, 0, 0, 0);
  lm.fill(0);
  lm.beginShape();
  lm.vertex(points[19]._x, points[19]._y);
  for (let i = 0; i < 17; i++) {
    lm.vertex(points[i]._x, points[i]._y);
  }
  lm.vertex(points[24]._x, points[24]._y);
  lm.endShape(CLOSE);
}
function drawFaces() {
  for (let i = 0; i < faces.length; i++) {
    faces[i].drawImage();
  }
}

function removeFinishedImages() {
  for (let i = faces.length - 1; i >= 0; i--) {
    if (faces[i].isFinished) {
      staticImage.image(faces[i].image, faces[i].position.x, faces[i].position.y);
      faces.splice(i, 1);
    }
  }
}

function drawBoxs(detections) {
  if (detections.length > 0) {
    // If at least one face is detected:
    for (let f = 0; f < detections.length; f++) {
      let { _x, _y, _width, _height } = detections[f].alignedRect._box;
      stroke(44, 169, 225);
      strokeWeight(1);
      noFill();
      rect(_x, _y, _width, _height);
    }
  }
}
function drawLandmarks(detections) {
  if (detections.length > 0) {
    // If at least one face is detected:
    for (let f = 0; f < detections.length; f++) {
      let points = detections[f].landmarks.positions;
      for (let i = 0; i < points.length; i++) {
        stroke(44, 169, 225);
        strokeWeight(3);
        point(points[i]._x, points[i]._y);
      }
    }
  }
}

function drawExpressions(detections, x, y, textYSpace) {
  if (detections.length > 0) {
    // If at least one face is detected:
    let { neutral, happy, angry, sad, disgusted, surprised, fearful } =
      detections[0].expressions;
    textFont("Helvetica Neue");
    textSize(14);
    noStroke();
    fill(44, 169, 225);
    text("neutral: " + nf(neutral * 100, 2, 2) + "%", x, y);
    text("happiness: " + nf(happy * 100, 2, 2) + "%", x, y + textYSpace);
    text("anger: " + nf(angry * 100, 2, 2) + "%", x, y + textYSpace * 2);
    text("sad: " + nf(sad * 100, 2, 2) + "%", x, y + textYSpace * 3);
    text("disgusted: " + nf(disgusted * 100, 2, 2) + "%", x, y + textYSpace * 4);
    text("surprised: " + nf(surprised * 100, 2, 2) + "%", x, y + textYSpace * 5);
    text("fear: " + nf(fearful * 100, 2, 2) + "%", x, y + textYSpace * 6);
  } else {
    // If no face is detected:
    text("neutral: ", x, y);
    text("happiness: ", x, y + textYSpace);
    text("anger: ", x, y + textYSpace * 2);
    text("sad: ", x, y + textYSpace * 3);
    text("disgusted: ", x, y + textYSpace * 4);
    text("surprised: ", x, y + textYSpace * 5);
    text("fear: ", x, y + textYSpace * 6);
  }
}

style.css (8)

@@ -0,0 +1,8 @@
html, body {
  margin: 0;
  padding: 0;
}

canvas {
  display: block;
}