User Tools

Site Tools


cs276:project_2_cjk

Project 2

This is a screenshot from my final project, a motion-detected twinkling star show. When running, it appears as though many small stars fill the frame, their colors and locations constantly changing. The larger star follows any detected motion.

Here is the Pure Data sketch I used to make the sound for my large star. The frequency produced is relative to the star's location in the frame.

This is the code:

/*
 * Motion Detected Twinkling Star Show
 *
 * Small colorful stars randomly appear all over the video. A larger
 * star tracks any detected motion.
 * Using Pure Data, notes produced by sine waves play with relation
 * to the detected motion's location on the screen as well.
 *
 * Created 08 Dec 2014 by Corinne Jean Konoza
 * CS 276 Final Project
 */

import processing.video.*;
import oscP5.*;
import netP5.*;

Global variables for connecting Pure Data OscP5 oscP5; NetAddress myRemoteLocation; Global variables for creating video Capture video;

Global variables for detecting motion PImage prevFrame; float threshold = 150; int Mx = 0; int My = 0; int ave = 0; int motionX = width/2; int motionY = height/2; int rsp = 25; void setup() { Initialize screen size

size(640,480);

// Connect to port 12000 to connect with Pure Data sketch
oscP5 = new OscP5(this,12000);
myRemoteLocation = new NetAddress("127.0.0.1",12000);

// Set up the camera
video = new Capture(this, width, height, 24);

// Create image to compare video with
prevFrame = createImage(video.width,video.height,RGB);

// Start the video
video.start();

}

void draw() {

  // Grab a new frame when one is ready. The current frame is copied into
  // prevFrame BEFORE video.read(), so prevFrame always holds the previous
  // frame for the motion comparison below. The new frame is grayscaled so
  // the color-distance diff reacts to brightness change, not hue.
  if (video.available()) {
    prevFrame.copy(video,0,0,video.width,video.height,0,0,video.width,video.height);
    prevFrame.updatePixels();
    video.read();
    grayscale(video);
  }

  loadPixels();
  video.loadPixels();
  prevFrame.loadPixels();

  // Reset the accumulators for this frame's motion centroid
  Mx = 0;
  My = 0;
  ave = 0;

  // Compare every pixel of the current frame against the previous one
  for (int x = 0; x < video.width; x ++ ) {
    for (int y = 0; y < video.height; y ++ ) {

      // Index of the current pixel in the 1-D pixels arrays
      int loc = x + y*video.width;
      color current = video.pixels[loc];
      color previous = prevFrame.pixels[loc];

      // Euclidean distance between the two pixels' RGB values
      float r1 = red(current);
      float g1 = green(current);
      float b1 = blue(current);
      float r2 = red(previous);
      float g2 = green(previous);
      float b2 = blue(previous);
      float diff = dist(r1,g1,b1,r2,g2,b2);

      // A difference larger than the threshold means motion here;
      // accumulate the location so we can average it afterwards
      if (diff > threshold) {
        Mx += x;
        My += y;
        ave++;
      }

      // Display the current frame either way. (The original if/else
      // assigned the same value in both branches, so the assignment is
      // hoisted out here with behavior unchanged.)
      pixels[loc] = video.pixels[loc];
    }
  }

  // Average of where motion was detected to make star steadier
  if (ave != 0) {
    Mx = Mx/ave;
    My = My/ave;
  }

  // Step the star toward the motion centroid in rsp-sized increments, with
  // a rsp/2 dead zone to avoid jitter. The `> 50` guard ignores frames
  // whose centroid collapses near the origin (e.g. no real motion found).
  if (Mx > motionX + rsp/2 && Mx > 50) {
    motionX += rsp;
  } else if (Mx < motionX - rsp/2 && Mx > 50) {
    motionX -= rsp;
  }

  if (My > motionY + rsp/2 && My > 50) {
    motionY += rsp;
  } else if (My < motionY - rsp/2 && My > 50) {
    motionY -= rsp;
  }

  updatePixels();
  noStroke();

  // Draw large star at location of motion
  star(motionX, motionY, 60, 5);

  // Draw other smaller stars to add variety to the video
  star(random(0,width), random(0,height), 15, 5);
  star(random(0,width), random(0,height), 15, 5);

  // Use the Pythagorean theorem to turn the star's position into a single
  // location value (its distance from the top-left corner)
  float val = sqrt(pow(motionX,2) + pow(motionY,2));

  // Send the value to the Pure Data sketch over OSC so it can set the pitch
  OscMessage myMessage = new OscMessage("/test");
  myMessage.add(val);
  oscP5.send(myMessage, myRemoteLocation);

}

Method used to transfer messages between Processing and PureData void oscEvent(OscMessage theOscMessage) { print(“## recieved an osc message.”); print(“ addrpattern: ”+theOscMessage.addrPattern()); println(“ typetag: ”+theOscMessage.typetag()); } /* * Grayscale * * Converts video to grayscale. * * Whenever R=G=B, you get gray; to get the intensity of the gray, * average the rgb values. Output the old and new RGB values and * display a grayscaled image * * Implements the Processing function filter(GRAY); * * Created May 2007 by rtomlinson * Modified 24 Sep 2007 by spc * Modified 08 Dec 2014 by Corinne Jean Konoza * Now uses Capture instead of PImage * */ void grayscale(Capture vid) { for each pixel in pixels array

 vid.loadPixels();
 loadPixels();
 for (int col = 0; col < vid.width; col++) {
   for (int row = 0; row < vid.height; row ++) {
     // grab the color
     color c = vid.get(col, row);
     // grayscale intensity is average of RGB values
     float gray = red(c) + green(c) + blue(c);
     gray /= 3;
     c = color(gray, gray, gray);
     vid.set(col, row, c);
   }
 }
 updatePixels();

}

/*
 * Star
 *
 * Uses lines to create a star shape.
 *
 * Source code: https://processing.org/examples/star.html
 *
 * Floats x and y describe the star's position. Radius is the outer
 * radius. It is used to generate an inner radius 1/3 the size of the
 * given outer radius. The number of points can be changed by
 * manipulating npoints.
 *
 * Modified 07 Dec 2014 by Corinne Jean Konoza
 */

void star(float x, float y, float radius, int npoints) {

  // Give every star a random fill color
  fill(random(0,255), random(0,255), random(0,255));

  float step = TWO_PI / npoints;   // angle between consecutive outer points
  float halfStep = step/2.0;       // inner points sit halfway between them

  beginShape();
  for (float theta = 0; theta < TWO_PI; theta += step) {
    // Outer vertex on the full radius
    vertex(x + cos(theta) * radius,
           y + sin(theta) * radius);
    // Inner vertex at one third of the radius, offset by half a step
    vertex(x + cos(theta+halfStep) * (radius*(1.0/3.0)),
           y + sin(theta+halfStep) * (radius*(1.0/3.0)));
  }
  endShape(CLOSE);

}

cs276/project_2_cjk.txt · Last modified: 2014/12/09 02:56 by ckonoza