@@ -13,17 +13,17 @@ void ofApp::setup(){
     //player.Setup();
     //player.SetVideo("videos/demo.mp4", fbo);
 
-    //emoteImage.allocate(260, 260);
-    //tempImage.allocate(emoteImage.getWidth(), emoteImage.getHeight(), OF_IMAGE_COLOR);
+    emoteImage.allocate(260, 260);
+    tempImage.allocate(emoteImage.getWidth(), emoteImage.getHeight(), OF_IMAGE_COLOR);
 
     ORTCHAR_T* modelPath = "/home/cailean/Desktop/openframeworks/of_v0.12.0_linux64gcc6_release/apps/myApps/onnx-test/bin/data/depth_anything_v2_vitb.onnx";
     ORTCHAR_T* modelPath2 = "/home/cailean/Desktop/openframeworks/of_v0.12.0_linux64gcc6_release/apps/myApps/onnx-test/bin/data/yolov5s-face.onnx";
     ORTCHAR_T* modelPath3 = "/home/cailean/Desktop/openframeworks/of_v0.12.0_linux64gcc6_release/apps/myApps/onnx-test/bin/data/rgb_emotion.onnx";
 
     /* Setup Models (modelPath, log, useCuda) */
-    //yolo.Setup(modelPath2, false, true);
+    yolo.Setup(modelPath2, false, true);
     depth.Setup(modelPath, false, true);
-    //emotion.Setup(modelPath3, false, true);
+    emotion.Setup(modelPath3, false, true);
 
     /* Load shader, allocated rampedFbo */
     depthToColourShader.load("data/shader/rampShader.vert", "data/shader/rampShader.frag");
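For context, `Setup(modelPath, log, useCuda)` belongs to a custom ONNX wrapper class whose internals aren't part of this diff. A minimal sketch of what such a setup plausibly does with the ONNX Runtime C++ API, assuming the `useCuda` flag maps onto the CUDA execution provider (the class name `Onnx` and its members here are illustrative, only the Ort calls are real API):

    #include <onnxruntime_cxx_api.h>

    // Hypothetical sketch of the wrapper's Setup(); names are illustrative.
    class Onnx {
    public:
        void Setup(const ORTCHAR_T* modelPath, bool log, bool useCuda) {
            env = Ort::Env(log ? ORT_LOGGING_LEVEL_VERBOSE
                               : ORT_LOGGING_LEVEL_WARNING, "onnx-test");
            Ort::SessionOptions opts;
            if (useCuda) {
                OrtCUDAProviderOptions cuda{};      // device_id 0 by default
                opts.AppendExecutionProvider_CUDA(cuda);
            }
            session = Ort::Session(env, modelPath, opts);
        }
    private:
        Ort::Env env{nullptr};
        Ort::Session session{nullptr};
    };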
@@ -70,17 +70,17 @@ void ofApp::update(){
 
     depth.DataToFbo(output_ptr, 518, 518, fbo);
 
-    // auto output_tensors_face = yolo.Run(map.fboImage);
+    auto output_tensors_face = yolo.Run(map.fboImage);
 
-    // auto output_faces = output_tensors_face.front().GetTensorTypeAndShapeInfo().GetShape();
+    auto output_faces = output_tensors_face.front().GetTensorTypeAndShapeInfo().GetShape();
 
-    // unsigned int num_anchors = output_faces[1]; // Number of anchors
+    unsigned int num_anchors = output_faces[1]; // Number of anchors
 
-    // float* output_face_ptr = output_tensors_face.front().GetTensorMutableData<float>();
+    float* output_face_ptr = output_tensors_face.front().GetTensorMutableData<float>();
 
-    // faceDetector.ParseOutput(output_face_ptr, detected_faces, num_anchors);
+    faceDetector.ParseOutput(output_face_ptr, detected_faces, num_anchors);
 
-    // faceDetector.ConvertBoxCoordsToOriginalSize(detected_faces, fbo.getWidth(), fbo.getHeight());
+    faceDetector.ConvertBoxCoordsToOriginalSize(detected_faces, fbo.getWidth(), fbo.getHeight());
 
     /* As no input is generated for the emotion recognition model, run a dummy vector through the model
     So it can load */
@@ -122,15 +122,14 @@ void ofApp::update(){
 
 //--------------------------------------------------------------
 void ofApp::draw(){
-    auto start = std::chrono::high_resolution_clock::now();
     map.Draw();
 
     renderDepthMap();
 
-    // if(!firstRun){
-    //     faceDetector.DrawBox(detected_faces);
-    //     faceDetector.DrawCenter(detected_faces);
-    // }
+    if(!firstRun && detected_faces.size() != 0){
+        faceDetector.DrawBox(detected_faces);
+        faceDetector.DrawCenter(detected_faces);
+    }
 
     ofPushMatrix();
     ofSetColor(255);
@@ -142,10 +141,6 @@ void ofApp::draw(){
     // ofDrawBitmapString(std::to_string(face.box.emotional_state.emotions[0]), 700, 300);
     // }
 
-    auto end = std::chrono::high_resolution_clock::now();
-    std::chrono::duration<float> duration = end - start;
-    std::cout << "Time taken for Draw: " << duration.count() << " seconds" << std::endl;
-
 }
 
 //--------------------------------------------------------------