
Face Following Fan

Using the iPhone's camera, it detects your face and rotates a fan to follow you.

How it works

It uses the HTML5 getUserMedia API to access the iPhone's camera.
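For reference, here is a minimal sketch of that camera access (assuming a browser with the modern MediaDevices API and a video element with id "videoInput" on the page); the full example further below uses the same call with a compatibility fallback:

// Minimal camera-access sketch (assumes <video id="videoInput" autoplay playsinline>).
navigator.mediaDevices.getUserMedia({ audio: false, video: { facingMode: "user" } })
  .then((stream) => {
    // attach the camera stream to the video element
    document.getElementById("videoInput").srcObject = stream;
  })
  .catch((err) => console.error("getUserMedia error:", err));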

Materials

  • Smartphone (we used an iPhone, but Android works too)
  • obniz
  • Servo motor
  • Small USB fan
  • USB-to-pin-header adapter
  • Pin header
  • Cable
  • USB cable
  • USB adapter

Make Hardware

There are only three steps: connect the servo motor to obniz, wire the USB fan to obniz through the USB-to-pin-header adapter, and mount the fan on the servo. (The exact pin assignments are in the code below.)

Make Software

  1. Capture camera frames with the HTML5 getUserMedia API.
  2. Run face detection on those frames with OpenCV.js.
  3. Take the x position of the detected face and rotate the servo motor toward it.

The full HTML page below implements all three steps.
<!-- HTML Example -->

<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>Video Capture Example</title>
  <meta name="viewport" content="width=device-width,initial-scale=1">
  <script src="https://docs.opencv.org/3.4/opencv.js"></script>
  <script src="https://obniz.io/js/jquery-3.2.1.min.js"></script>
  <script src="https://unpkg.com/obniz@1.4.1/obniz.js"></script>
  <style>
    .refrect-lr {
      -webkit-transform: scaleX(-1);
      -o-transform: scaleX(-1);
      -moz-transform: scaleX(-1);
      transform: scaleX(-1);
      filter: FlipH;
      -ms-filter: "FlipH";
    }
  </style>
</head>
<body>

<div id="obniz-debug"></div>

<div>
  <div class="control">
    <button id="startAndStop" disabled>Start</button>
  </div>
</div>
<p class="err" id="errorMessage"></p>
<div>
  <table cellpadding="0" cellspacing="0" width="0" border="0">
    <tr>
      <td>
        <video id="videoInput" autoplay playsinline width=320 height=240 class="refrect-lr"></video>
      </td>
      <td>
        <canvas id="canvasOutput" width=320 height=240 style="-webkit-font-smoothing:none" class="refrect-lr"></canvas>
      </td>
      <td></td>
      <td></td>
    </tr>
    <tr>
      <td>
        <div class="caption">videoInput</div>
      </td>
      <td>
        <div class="caption">canvasOutput</div>
      </td>
      <td></td>
      <td></td>
    </tr>
  </table>
</div>

<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js" type="text/javascript"></script>
<script src="https://docs.opencv.org/3.4/utils.js" type="text/javascript"></script>
<script type="text/javascript">

  let servo;

  // Connect to your obniz (replace OBNIZ_ID_HERE with your obniz ID).
  let obniz = new Obniz("OBNIZ_ID_HERE");

  obniz.onconnect = async () => {
    obniz.display.print("ready");

    // power the USB fan via obniz pins 8 (vcc) and 11 (gnd)
    let usb = obniz.wired("USB", {gnd: 11, vcc: 8});
    usb.on();

    // servo motor on pins 0 (signal), 1 (vcc), 2 (gnd)
    servo = obniz.wired("ServoMotor", {signal: 0, vcc: 1, gnd: 2});
  }

  let utils = new Utils('errorMessage');

  // Download the Haar cascade into OpenCV.js's in-memory file system,
  // then enable the Start button once it is available.
  let faceCascadeFile = 'haarcascade_frontalface_default.xml';
  utils.createFileFromUrl(faceCascadeFile, 'https://raw.githubusercontent.com/opencv/opencv/master/data/haarcascades/haarcascade_frontalface_default.xml', () => {
    startAndStop.removeAttribute('disabled');
  });


  let streaming = false;
  let videoInput = document.getElementById('videoInput');
  let startAndStop = document.getElementById('startAndStop');
  let canvasOutput = document.getElementById('canvasOutput');
  let canvasContext = canvasOutput.getContext('2d');


  startAndStop.addEventListener('click', () => {

    if (!streaming) {
      utils.clearError();


      // Fallback for older browsers that only expose prefixed getUserMedia.
      navigator.mediaDevices = navigator.mediaDevices || ((navigator.mozGetUserMedia || navigator.webkitGetUserMedia) ? {
        getUserMedia: function (c) {
          return new Promise(function (y, n) {
            (navigator.mozGetUserMedia ||
                navigator.webkitGetUserMedia).call(navigator, c, y, n);
          });
        }
      } : null);

      if (!navigator.mediaDevices) {
        console.log("getUserMedia() not supported.");
        return;
      }

      const medias = {
        audio: false,
        video: {
          facingMode: "user"
        }
      };

      navigator.mediaDevices.getUserMedia(medias)
          .then(function (stream) {
            streaming = true;
            var video = document.getElementById("videoInput");
            // srcObject is the modern replacement for the deprecated createObjectURL(stream)
            video.srcObject = stream;
            video.onloadedmetadata = function (e) {
              video.play();
              onVideoStarted();
            };
          })
          .catch(function (err) {
            console.error('mediaDevice.getUserMedia() error: ' + (err.message || err));
          });


    } else {
      utils.stopCamera();
      onVideoStopped();
    }

  });

  function onVideoStarted() {
    startAndStop.innerText = 'Stop';
    start();
  }
  function onVideoStopped() {
    streaming = false;
    canvasContext.clearRect(0, 0, canvasOutput.width, canvasOutput.height);
    startAndStop.innerText = 'Start';
  }

  async function start() {
    let video = document.getElementById('videoInput');
    let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    let gray = new cv.Mat();
    let cap = new cv.VideoCapture(video);
    let faces = new cv.RectVector();
    let classifier = new cv.CascadeClassifier();
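    // src/dst are RGBA frame buffers matching the <video> size; gray is the
    // grayscale image the Haar cascade actually scans, and faces receives
    // the detected face rectangles.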


    // load the cascade file written earlier by createFileFromUrl
    let result = classifier.load("haarcascade_frontalface_default.xml");

    const FPS = 30;

    function processVideo() {
      try {
        if (!streaming) {
          // clean and stop.
          src.delete();
          dst.delete();
          gray.delete();
          faces.delete();
          classifier.delete();
          return;
        }
        let begin = Date.now();
        // start processing.
        cap.read(src);
        src.copyTo(dst);
        cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
        // detect faces.
        classifier.detectMultiScale(gray, faces, 1.1, 3, 0);
        // draw faces.
        for (let i = 0; i < faces.size(); ++i) {
          let face = faces.get(i);
          let point1 = new cv.Point(face.x, face.y);
          let point2 = new cv.Point(face.x + face.width, face.y + face.height);
          cv.rectangle(dst, point1, point2, [255, 0, 0, 255]);
        }
        cv.imshow('canvasOutput', dst);
        // point the fan at the first detected face: map the face center x
        // (0..320 px) linearly to a servo angle (180..0 deg); the range is
        // inverted because the preview is mirrored.
        if (servo && faces.size() > 0) {
          let face = faces.get(0);
          servo.angle((320 - (face.x + face.width / 2)) * 180 / 320);
        }

        // schedule the next one.
        let delay = 1000 / FPS - (Date.now() - begin);
        setTimeout(processVideo, delay);
      } catch (err) {
        console.error(err);
      }
  }

    // schedule the first one.
    setTimeout(processVideo, 0);

  }
</script>
</body>
</html>
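The one line doing the actual fan control is the servo mapping near the end of processVideo. As an illustration (faceXToAngle is a hypothetical helper, not part of the page above), it maps the face's horizontal center in the 320 px frame linearly onto the servo's 0-180 degree range, inverted to match the mirrored preview:

// Hypothetical helper restating the mapping used above:
// face center x in [0, 320] px -> servo angle in [180, 0] deg.
function faceXToAngle(faceX, faceWidth, frameWidth = 320) {
  const centerX = faceX + faceWidth / 2;
  return (frameWidth - centerX) * 180 / frameWidth;
}

console.log(faceXToAngle(140, 40)); // 90 - face centered, fan points straight ahead
console.log(faceXToAngle(300, 20)); // 5.625 - face near the right edge, fan swings that way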
