Imagine waving your hand in the air and having your app cursor follow your finger and activate by pinching. We have seen this before in movies and in some experimental demonstrations. Now, it is fully integrated in ZIM.
This means that any of the fifty ZIM components like sliders, buttons, tabs, etc. work by waving and pinching. ML5 tracks the hand and feeds the information to ZIM, which adds the movements to its core interactions. These bubble up, so events that work with mouse or touch now also work with a levitated finger and a pinch.
Above we demonstrate moving a slider by pinching on its button and dragging our hand in the air. See Hand Tracking with ZIM and ML5.
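As a rough illustration, here is a minimal sketch of that slider demo. It assumes hand tracking is already running (set up exactly as in the full example below); the Slider and its change event are standard ZIM, so no hand-specific code is needed:

<script type="module">
import zim from "https://zimjs.org/cdn/018/zim_cam";
new Frame(FIT, 1024, 768, black, licorice, ready);
function ready() {
    // a standard ZIM slider - no hand-specific code needed
    const slider = new Slider({min:0, max:100, step:1}).center();
    slider.on("change", ()=>{
        // fires when dragged with mouse, touch, or an in-air pinch
        zog(slider.currentValue); // zog() is ZIM's console.log
    });
}
</script>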
We also created nine ZIM / ML5 examples in an afternoon.
Here is a video of ZIM's integrated hand tracking with ML5 in action:
Here is the ZIM code:
<script src="https://unpkg.com/ml5@1/dist/ml5.min.js"></script>
<script type="module">

import zim from "https://zimjs.org/cdn/018/zim_cam";

// See Docs under Frame for FIT, FILL, FULL, and TAG
new Frame(FIT, 1024, 768, black, licorice, ready);
function ready() {
    // given F (Frame), S (Stage), W (width), H (height)

    // ask with a ZIM dialog before the browser prompts for the cam
    const ask = new CamAsk().show(yes => {
        if (yes) {
            const cam = new Cam();
            cam.on("ready", async () => {
                cam.scaleTo().center().alp(.2);

                // start ML5 hand detection on the cam's canvas
                const handPose = await ml5.handPose();
                handPose.detectStart(cam.getCanvas(), result);
                function result(results) {
                    // pass the ML5 results to ZIM's hand tracking
                    cam.handTrack(results);
                }

                // a draggable circle - pinch in the air to drag it
                new Circle(50, red).center().drag();
                new Label("pinch to drag circle", 30, null, tin)
                    .pos(0, 100, CENTER, BOTTOM);

                // a button that works with mouse, touch, or pinch
                const message = new Pane("Button Selected!", green);
                const button = new Button()
                    .pos(0, 100, CENTER)
                    .tap(() => { message.show(); });
            });
            // if the user does not accept the browser asking about the cam
            cam.on("error", () => {
                new Pane("CAM not accepted", yellow).show();
            });
        } else { // answered no to the CamAsk dialog
            new Pane("CAM not accepted", yellow).show();
        }
    });
}

</script>
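The key line is cam.handTrack(results). As described above, ZIM takes the ML5 hand keypoints and adds the finger position and pinch to its core interactions, so the same events fire as for mouse or touch, which is why the plain drag() and tap() calls above work unchanged.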