GMU:Sensor Hacklab/Rachel Smith/Link to processing code

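A Processing sketch for webcam-based eye tracking: OpenCV face detection limits the search area, the eye is located by averaging the positions of pixels that match three user-selected reference colours, and a trigger code is sent over serial to an Arduino whenever the drawn eye marker covers one of three coloured on-screen targets.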

import gab.opencv.*;
import processing.video.*;
import java.awt.*;
import processing.serial.*;

Serial myPort; // Create object from Serial class

Capture video;
OpenCV opencv;
color one;   // three reference colours sampled from the eye
color two;
color three;
color yes;
int trigger;                 // code sent to the Arduino (10, 20 or 30)
boolean selectMode = true;   // true while the user is picking reference colours
boolean recording = false;   // true while frames are being saved

void setup() {

 size(640, 360);
 String portName = Serial.list()[1]; //serial port for the Arduino; the index in Serial.list() may differ per machine
 myPort = new Serial(this, portName, 9600);
 video = new Capture(this, 640, 360);
 opencv = new OpenCV(this, 640, 360);
 opencv.loadCascade(OpenCV.CASCADE_FRONTALFACE);
 frameRate(30);
 video.start();

}

void draw() {

 //read the colour at the centre of each target

 color right = get(width-100, height/2);
 color left = get(150, height/2);
 color down = get(width/2, height - 100);

 // println(red(right));
 // println(green(left));
 // println(blue(down));


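 //a target counts as hit when the grey eye marker covers it and the sampled colour drops below 245;
 //each target sends its own code (10, 20 or 30) to the Arduino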
 if (red(right) <= 245 && green(left) > 245 && blue(down) > 245) {
   trigger = 10;
   myPort.write(trigger);
 }
 else if (green(left) <= 245 && red(right) > 245 && blue(down) > 245) {
   trigger = 20;
   myPort.write(trigger);
 }
 else if (blue(down) <= 245 && green(left) > 245 && red(right) > 245) {
   trigger = 30;
   myPort.write(trigger);
 }

println(trigger);

 //draw the 'targets'
 
 //right
 fill(255, 0, 0);
 rect(width-100, height/2, 20, 20);
 
 //left
 fill(0, 255, 0);
 rect(150, height/2, 20, 20);
 
 //down
 fill(0, 0, 255);
 rect(width/2, height -100, 20, 20);
 
 
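 //select mode: show the mirrored camera image with on-screen instructions for
 //sampling three reference colours from the eye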
 if (selectMode) {
   //create mirror image and scale
   pushMatrix();
   scale(-1.5, 1.5);
   translate(40, -80);
   image(video, -width, 0, width, height);
   popMatrix();
   fill(#FAFC6B);
   rect(0, 25, 640, 20);
   fill(#212E9D);
   text("Select three points on eye - hover mouse over and select using up, left and right. When finished, press down.", 7, 40);
 }
 //extract rgb vals from selected colours
 float r1 = red(one);
 float r2 = red(two);
 float r3 = red(three);
 float g1 = green(one);
 float g2 = green(two);
 float g3 = green(three);
 float b1 = blue(one);
 float b2 = blue(two);
 float b3 = blue(three);
 //find max and min rgb vals
 float maxr = max(r1, r2, r3);
 float minr = min(r1, r2, r3);
 float maxg = max(g1, g2, g3);
 float ming = min(g1, g2, g3);
 float maxb = max(b1, b2, b3);
 float minb = min(b1, b2, b3);
 noStroke();
 fill(#FAFC6B, 10);
 rect(0, 0, 640, 360);
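 //detect faces in the current frame to restrict the colour search to the face area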
 opencv.loadImage(video);
 Rectangle[] faces = opencv.detect();
 loadPixels();
 video.loadPixels();
 if (faces.length !=0) {
   for ( int i =0; i<faces.length; i++) {
     //get colour information from cam
     color [] pxls = video.pixels;
     int count = 0;
     int xsum = 0;
     int ysum = 0;
     //only use data within upper half of face
     for (int y = faces[i].y +int(0.15*(faces[i].y+faces[i].height)); y < faces[i].y+int(0.3*(faces[i].y+faces[i].height)); y++) {
       for (float x =faces[i].x+20; x< faces[i].x+faces[i].width-20; x++) {
         int loc = int(x + y*width);
         if (loc < pxls.length) {
           float r = red(pxls[loc]);
           float g = green(pxls[loc]);
           float b = blue(pxls[loc]);
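            //keep only pixels whose colour falls within the range spanned by the three reference colours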
           if ( r>minr && r<maxr && g >ming && g<maxg && b>minb && b<maxb) {
             // sum the values for x and y
             count = count + 1;
             xsum = xsum + int(x);
             ysum = ysum + y;
             //fill(r, g, b);
           }
         }
       }
     }
     //draw ellipse at average point of eyes 
     if (count !=0) {
       int xmean = xsum/count;
       int ymean = ysum/count;
       translate(60, 0);
       fill(50, 50, 50);
       ellipse(width-xmean, ymean, 17, 17);
      // println(xmean, ymean);
     }
   }
 }
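 //while recording is on, save each frame as a numbered PNG in the sketch folder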
 if (recording) {
   saveFrame("frames-#####.png");
 }

}


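//reads each new frame from the camera as it arrives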
void captureEvent(Capture c) {

 c.read();

}

//samples the colour at the mouse position when an arrow key is pressed
void keyPressed() {

 if (key == CODED) {
   if (keyCode == UP) {
     one = get(mouseX, mouseY);
     //creates visual feedback
     fill(#FAFC6B);
     ellipse(mouseX, mouseY, 10, 10);
   } 
   else if (keyCode == LEFT) {
     two = get(mouseX, mouseY);
     fill(#FAFC6B);
     ellipse(mouseX, mouseY, 10, 10);
   } 
   else if (keyCode == RIGHT) {
     three = get(mouseX, mouseY);
     fill(#FAFC6B);
     ellipse(mouseX, mouseY, 10, 10);
   }
   else if (keyCode == DOWN) {
     selectMode = !selectMode;
   }
 }

}

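//a mouse click toggles frame recording on and off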
void mousePressed() {

 recording = !recording;

}