自動運転カー

このエントリーをはてなブックマークに追加

線を認識して走る自動運転カーを作ってみます。
スマホのカメラで撮影したものを解析し、ロボットを動かします。

必要なもの

  • obniz とバッテリー
  • ラジコンカー(5V程度で動くもの)

作り方

ラジコンの前輪モーターと後輪モーターをobnizにつなぎます。あとはバッテリーとobnizを積めばロボットは完成です。

プログラム

スマホのカメラを使って画像解析します。
今回はOpenCV.jsを使っています。

Program

<!DOCTYPE html>
<html>
<head>
  <meta charset="utf-8">
  <title>Video Capture Example</title>
  <meta name="viewport" content="width=device-width,initial-scale=1">
  <!-- OpenCV.js — official docs.opencv.org build is commented out in favor
       of a personal mirror; swap back if the mirror disappears. -->
  <!--<script src="https://docs.opencv.org/3.4/opencv.js"></script> -->
  <script src="https://sample-4dd7b.firebaseapp.com/opencv.js"></script>
  <script src="https://obniz.io/js/jquery-3.2.1.min.js"></script>
  <script src="https://unpkg.com/obniz@latest/obniz.js"></script>
  <style>
    /* Mirror the video/canvas horizontally so the front-camera preview
       behaves like a mirror (vendor-prefixed for old browsers + IE). */
    .refrect-lr {
      -webkit-transform: scaleX(-1);
      -o-transform: scaleX(-1);
      -moz-transform: scaleX(-1);
      transform: scaleX(-1);
      filter: FlipH;
      -ms-filter: "FlipH";
    }
  </style>
</head>
<body>

<!-- obniz injects its connection/debug UI into this element. -->
<div id="obniz-debug"></div>

<div>
  <div class="control">
    <!-- Toggles the camera + processing loop; label flips Start/Stop. -->
    <button id="startAndStop">Start</button>
  </div>
</div>
<!-- utils.js (Utils('errorMessage')) prints OpenCV errors here. -->
<p class="err" id="errorMessage"></p>
<div>
  <table cellpadding="0" cellspacing="0" width="0" border="0">
    <tr>
      <td>
        <!-- Raw camera feed; source of frames for cv.VideoCapture. -->
        <video id="videoInput" autoplay playsinline width=320 height=240 class="refrect-lr"></video>
      </td>
      <td>
        <!-- Processed output (binarized frame with detection markers). -->
        <canvas id="canvasOutput" width=320 height=240 style="-webkit-font-smoothing:none"

                class="refrect-lr"></canvas>
      </td>
      <td></td>
      <td></td>
    </tr>
    <tr>
      <td>
        <div class="caption">videoInput</div>
      </td>
      <td>
        <div class="caption">canvasOutput</div>
      </td>
      <td></td>
      <td></td>
    </tr>
  </table>
</div>

<!-- adapter.js normalizes getUserMedia across browsers;
     utils.js provides the Utils helper used below. -->
<script src="https://webrtc.github.io/adapter/adapter-5.0.4.js" type="text/javascript"></script>
<script src="https://docs.opencv.org/3.4/utils.js" type="text/javascript"></script>
<script type="text/javascript">

  // obniz board connection. Replace OBNIZ_ID_HERE with your board's id.
  // FIX: the original assigned obniz/motorRear/motorSteer as implicit
  // globals; declare them explicitly. They stay script-scoped so the
  // processing loop in start() below can still read the motors.
  const obniz = new Obniz("OBNIZ_ID_HERE");

  let motorRear;   // drive motor (rear wheels)
  let motorSteer;  // steering motor (front wheels)

  obniz.onconnect = async () => {

    // Rear drive motor on io0/io1, modest power for a ~5V RC car.
    motorRear = obniz.wired("DCMotor", {forward: 0, back: 1});
    motorRear.power(20);
    // Steering motor on io2/io3; higher power for a crisp turn.
    motorSteer = obniz.wired("DCMotor", {forward: 2, back: 3});
    motorSteer.power(50);
    // io10 high / io11 low — used as a power/ground pair for wiring.
    obniz.io10.pull("3v");
    obniz.io10.output(true);
    obniz.io11.output(false);
  }

  // UI state and DOM handles, looked up once. Declared before the async
  // cascade download so its callback never touches an uninitialized binding.
  let streaming = false;
  let videoInput = document.getElementById('videoInput');
  let startAndStop = document.getElementById('startAndStop');
  let canvasOutput = document.getElementById('canvasOutput');
  let canvasContext = canvasOutput.getContext('2d');

  // OpenCV.js helper; routes errors to the #errorMessage element.
  let utils = new Utils('errorMessage');

  // Fetch the Haar cascade into the emscripten virtual FS, then enable
  // the Start button once it is available.
  let faceCascadeFile = 'haarcascade_frontalface_default.xml';
  utils.createFileFromUrl(faceCascadeFile, 'https://raw.githubusercontent.com/opencv/opencv/master/data/haarcascades/haarcascade_frontalface_default.xml', () => {
    startAndStop.removeAttribute('disabled');
  });


  // Start button: acquire the camera and begin processing; Stop button:
  // release the camera and reset the UI.
  startAndStop.addEventListener('click', () => {

    if (!streaming) {
      utils.clearError();

      // Shim mediaDevices.getUserMedia on top of the legacy prefixed
      // callback APIs for older browsers.
      navigator.mediaDevices = navigator.mediaDevices || ((navigator.mozGetUserMedia || navigator.webkitGetUserMedia) ? {
        getUserMedia: function (c) {
          return new Promise(function (y, n) {
            (navigator.mozGetUserMedia ||
                navigator.webkitGetUserMedia).call(navigator, c, y, n);
          });
        }
      } : null);

      if (!navigator.mediaDevices) {
        console.log("getUserMedia() not supported.");
        return;
      }

      // Front ("user"-facing) camera, no audio.
      const medias = {
        audio: false,
        video: {
          facingMode: "user"
        }
      };

      navigator.mediaDevices.getUserMedia(medias)
          .then(function (stream) {
            streaming = true;
            var video = document.getElementById("videoInput");
            // FIX: attach the stream via srcObject — createObjectURL on a
            // MediaStream was removed from modern browsers and throws.
            // Keep the old path as a fallback for ancient ones.
            if ('srcObject' in video) {
              video.srcObject = stream;
            } else {
              video.src = window.URL.createObjectURL(stream);
            }
            video.onloadedmetadata = function (e) {
              video.play();
              onVideoStarted();
            };
          })
          .catch(function (err) {
            // FIX: the original referenced an undefined `error` here, so a
            // getUserMedia failure raised a ReferenceError instead of logging.
            console.error('mediaDevice.getUserMedia() error:' + (err.message || err));
          });


    } else {
      utils.stopCamera();
      onVideoStopped();
    }

  });

  // Camera is live: flip the button to "Stop" and start the frame loop.
  function onVideoStarted() {
    startAndStop.innerText = 'Stop';
    start();
  }
  // Camera stopped: clearing `streaming` makes processVideo exit and
  // release its Mats; wipe the canvas and restore the button label.
  function onVideoStopped() {
    streaming = false;
    const { width, height } = canvasOutput;
    canvasContext.clearRect(0, 0, width, height);
    startAndStop.innerText = 'Start';
  }

  // Line-following loop: read frames from the <video>, binarize them,
  // find the line's edges on a scanline 70% down the frame, and steer
  // the obniz motors toward the line's center.
  async function start() {
    let video = document.getElementById('videoInput');
    let src = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    let dst = new cv.Mat(video.height, video.width, cv.CV_8UC4);
    let gray = new cv.Mat();
    let binary = new cv.Mat();
    let cap = new cv.VideoCapture(video);
    // NOTE: the original also allocated faces/classifier/gaussian/hsv/mask
    // and loaded the Haar cascade — all unused leftovers from a
    // face-detection example; removed.

    // Drive forward for the whole session; steering adjusts per frame.
    motorRear.forward();

    const FPS = 30;
    // Row used for line detection, 70% down the frame.
    // FIX: floor it — ucharPtr expects an integer row index.
    const scanRow = Math.floor(video.height * 0.7);

    function processVideo() {
      try {
        if (!streaming) {
          // clean and stop.
          src.delete();
          dst.delete();
          gray.delete();
          binary.delete();
          motorRear.stop();
          motorSteer.stop();
          return;
        }
        let begin = Date.now();
        // Grab a frame and binarize it (Otsu picks the threshold).
        cap.read(src);
        src.copyTo(dst);
        cv.cvtColor(dst, gray, cv.COLOR_RGBA2GRAY, 0);
        let ksize = new cv.Size(5, 5);
        cv.GaussianBlur(gray, binary, ksize, 0, 0, cv.BORDER_DEFAULT); // denoise before Otsu
        cv.threshold(binary, binary, 0, 255, cv.THRESH_BINARY + cv.THRESH_OTSU);

        // Pixels of the scanline row.
        let pixelArray = binary.ucharPtr(scanRow);

        // Indices where the row flips black<->white: candidate line edges.
        // FIX: iterate to length-1 — the original compared the last pixel
        // against the undefined element one past the end, always appending
        // a spurious boundary at the final index. (Also dropped the unused
        // `lastBoundary` local.)
        var boundary = [];
        for (let i = 0; i < pixelArray.length - 1; i++) {
          if (pixelArray[i] != pixelArray[i + 1]) {
            boundary.push(i);
          }
        }
        //console.log(Date.now()); //30ms

        // With at least 4 edges, boundary[1]..boundary[2] bracket the line.
        let centerX = 0;
        if (boundary.length >= 4) {
          centerX = ((boundary[2] - boundary[1]) / 2) + boundary[1];
        }

        // Visualize: detected center (red), line edges (blue), car center (white).
        let centerPoint = new cv.Point(centerX, scanRow);
        cv.circle(dst, centerPoint, 5, [255, 0, 0, 255], 2); // center point
        // FIX: only draw edge markers when they exist — the original passed
        // undefined coordinates to cv.Point when fewer than 4 edges were found.
        if (boundary.length >= 4) {
          let leftPoint = new cv.Point(boundary[1], scanRow);
          let rightPoint = new cv.Point(boundary[2], scanRow);
          cv.circle(dst, leftPoint, 4, [0, 0, 255, 255], 2);
          cv.circle(dst, rightPoint, 4, [0, 0, 255, 255], 2);
        }
        let carCenter = new cv.Point(video.width / 2, scanRow);
        cv.circle(dst, carCenter, 5, [255, 255, 255, 255], 2);
        cv.imshow('canvasOutput', dst);

        // Steer toward the line center with a +/-10 px dead zone.
        let offset = video.width / 2 - centerX;
        if (offset < -10) {
          motorSteer.reverse();
        } else if (offset > 10) {
          motorSteer.forward();
        } else {
          motorSteer.stop();
        }

        // schedule the next frame, compensating for processing time.
        let delay = 1000 / FPS - (Date.now() - begin);
        setTimeout(processVideo, delay);
      } catch (err) {
        // An error here silently stops the loop; motors keep their last
        // command until Stop is pressed.
        console.error(err);
      }
    };

    // schedule the first one.
    setTimeout(processVideo, 0);

  }
</script>
</body>
</html>