Difference between revisions of "Happy Days - Gregory Parsons"
Line 17: | Line 17: | ||
http://acsweb.ucsd.edu/~gparsons/smog.png | http://acsweb.ucsd.edu/~gparsons/smog.png | ||
− | == '''Final Documentation''' === | + | == '''Final Documentation''' == |
+ | |||
+ | I feel the project was successful and it turned out well polished. I feel that the user interaction was smooth, and over a long period of running it the project reacted well. | ||
+ | |||
+ | == '''Final Video Documentation''' == | ||
+ | |||
+ | [http://www.youtube.com/watch?v=TVhZmXEU4P0 link Video Documentation] | ||
== '''Final Code''' == | == '''Final Code''' == |
Latest revision as of 16:15, 3 June 2010
Contents
[hide]Motivation
We have a growing shortage of fresh water in the world, and as populations rise and sources become more contaminated the problem accelerates. We often do not take into consideration our day-to-day effect on the planet, and often will ignore obvious means to reduce our footprint. I want to provide a means for a viewer to understand this relationship and begin to think about what they could do to counter their effect.
Interaction
I want the viewer of my project to question their existence within the imagery that is displayed on the screen. The longer that they 'interact' with the project the more of an effect they will have on it. Using face tracking as the main stimulus for change, the project will react based on the length of time that the user's face is tracked by the camera.
Function
The effect on the project will change with the amount of time the viewer is in front of the camera; for instance, the sun that represents the viewer will drop water bottles onto the landscape, and the longer they are in front of the camera the more "smog" will be visible on the screen.
Visualization
Final Documentation
I feel the project was successful and it turned out well polished. I feel that the user interaction was smooth, and over a long period of running it the project reacted well.
Final Video Documentation
Final Code
/**
* Final Project; Greg Parsons
* VIS145B
*
* Face Tracking from OpenCV with actions modified for my needs, rest original programming
*
 * The project is designed as a means to promote the viewer's thinking about their impact on the environment.
*
**/
import hypermedia.video.*;
import ddf.minim.*;
import oscP5.*;
import netP5.*;
//declare a new object of opencv — provides the camera capture and face detection
OpenCV opencv;
//wiimote orientation/acceleration state, written by oscEvent() below
float wiimote1Pitch;
float wiimote1Roll;
float wiimote1Yaw;
float wiimote1Accel;
int wiimote1ButtonA; // 1 after an A-button press arrives over OSC; consumed (reset to 0) in draw()
//osc and wiimote networking handles
OscP5 oscP5;
NetAddress myRemoteLocation;
//image values — sprites loaded in setup()
PImage bg;          // landscape background
PImage cloud1;      // drifting cloud sprite #1
PImage cloud2;      // drifting cloud sprite #2
PImage trash;       // NOTE(review): declared but never loaded or drawn in this file
PImage waterbottle; // sprite used by the WaterBottle class
PImage star;        // "sun" avatar: happy state
PImage starsad;     // avatar: sad state (moderate smog)
PImage starquesy;   // avatar: queasy state (heavy smog)
PImage stardead;    // avatar: dead state (smog maxed out)
//smog — offscreen buffer tinted over the scene each frame
PGraphics smog;
//audio — splash sound played while bottles fall
AudioPlayer player;
Minim minim;
//organic values — cloud x positions (c1/c2) and heights (h1/h2)
int c1 = -120;
int c2 = 550;
int h1 = 50;
int h2 = 100;
// module-level face position; draw() declares locals of the same name,
// so these stay at 0 (kept for compatibility)
float posX = 0;
float posY = 0;
int numWb = 0;              // count of live bottles in the waterBottle pool
int counter = 0;            // frames since start; bottles render once counter > 1
int splashCounter = 0;      // frames since the splash sound last (re)started
int wbDissapearCounter = 0; // throttles bottle removal when no face is present
int wbDelayCounter = 0;     // NOTE(review): declared but never used in this file
int smogOpacity = 0;        // smog alpha; rises with a viewer present, falls without
int wiiACounter = 0;        // NOTE(review): declared but never used in this file
int creationCounter = 0;    // NOTE(review): declared but never used in this file
boolean faceDetected = false; // true while a face is tracked this frame
int wbcount=0;              // frame throttle: a bottle spawns roughly every 4th frame
//creating waterbottle objects — fixed-size pool of 200, index wraps in draw()
WaterBottle[] waterBottle = new WaterBottle[200];
// One-time sketch initialization: window, sprites, OSC, audio, and the
// OpenCV face-tracking pipeline.
void setup()
{
  size(1024, 768, P2D);
  frameRate(30);
  noCursor();
  //declaring images (loaded from the sketch's data folder)
  bg = loadImage("background2.png");
  cloud1 = loadImage("cloud1.png");
  cloud2 = loadImage("cloud2.png");
  waterbottle = loadImage("waterbottle.png");
  star = loadImage("star.png");
  starsad = loadImage("starsad.png");
  starquesy = loadImage("starquesy.png");
  stardead = loadImage("stardead.png");
  //wiimote: listen for OSC messages on port 12000 (see oscEvent below)
  oscP5 = new OscP5(this, 12000);
  myRemoteLocation = new NetAddress("localhost", 12000);
  //audio
  minim = new Minim(this);
  // load a file, give the AudioPlayer buffers that are 2048 samples long
  player = minim.loadFile("Cartoon Accent 28.mp3", 2048);
  //opencv logic: capture at half resolution (draw() scales coords back up by 2)
  opencv = new OpenCV( this );
  opencv.capture( width/2, height/2 ); // open video stream
  opencv.cascade( OpenCV.CASCADE_FRONTALFACE_ALT ); // load detection description, here-> front face detection : "haarcascade_frontalface_alt.xml"
  //creating the smog graphic element (offscreen layer composited each frame)
  smog = createGraphics(width, height, P3D);
}
// Per-frame loop: track the viewer's face, spawn falling water bottles at the
// face position, drift the clouds, composite the smog layer (which thickens
// while a face is present and clears while it is not), draw the sun/star
// avatar in a state matching the smog level, and react to wiimote input.
void draw()
{
  wbcount++;
  image(bg, 0, 0);

  // Grab a new camera frame, convert to grayscale, and mirror it so the
  // viewer's movement maps naturally onto the screen.
  opencv.read();
  opencv.convert( GRAY );
  opencv.flip( OpenCV.FLIP_HORIZONTAL );

  // Haar-cascade face detection on the half-resolution capture.
  java.awt.Rectangle[] faces = opencv.detect( 1.2, 2, OpenCV.HAAR_DO_CANNY_PRUNING, 40, 40 );

  // Face position for this frame; (0,0) means "no face seen".
  // (These locals intentionally shadow the unused module-level posX/posY.)
  float posX = 0;
  float posY = 0;
  for( int i=0; i<faces.length; i++ )
  {
    // Detection ran at half size, so scale coordinates back up.
    posX = faces[i].x*2;
    posY = faces[i].y*2;
    // Wrap the fixed-size bottle pool instead of overflowing it.
    if (numWb > 199)
    {
      numWb = 0;
    }
    // Throttle: only spawn a bottle roughly every 4th frame.
    if (wbcount>3) {
      waterBottle[numWb] = new WaterBottle(posX, posY);
      numWb++;
      wbcount=0;
    }
  }
  // Debug output disabled — the original printed numWb every frame.
  //System.out.println(numWb);
  //System.out.println("posX = " + posX + " posY = " + posY);

  // Draw and advance every live bottle (skipped on the first frames).
  // Each bottle is drawn at its pre-update position, exactly as when the
  // original ran these as two separate identical loops.
  if (counter > 1)
  {
    for (int i = 0; i < numWb; i++)
    {
      waterBottle[i].displayWaterBottle();
      waterBottle[i].update();
    }
  }

  // Cloud movement: drift right; after a full cycle, respawn off the left
  // edge at a new random height.
  if (c1 < 1300) {
    c1++;
  }
  else {
    c1 = -400;
    h1 = round(random(400));
  }
  if (c2 < 1300) {
    c2++;
  }
  else {
    c2 = -400;
    h2 = round(random(400));
  }
  //drawing the clouds on the screen with moving variables
  image(cloud1, c1, h1);
  image(cloud2, c2, h2);

  // A face counts as detected iff we got a non-origin position this frame.
  faceDetected = (posX != 0) && (posY != 0);

  // Smog layer: gray haze that turns red once fully saturated.
  // BUG FIX: the original used `< 252` / `> 252`, so at exactly 252 the
  // buffer was not repainted for that frame.
  smog.beginDraw();
  if (smogOpacity < 252)
  {
    smog.background(139, 131, 134, smogOpacity);
  }
  else
  {
    smog.background(255, 36, 0, smogOpacity);
  }
  smog.endDraw();
  image(smog, 0, 0);

  //actions for face detected or not detected
  if (faceDetected)
  {
    // The avatar degrades as the smog thickens.
    // BUG FIX: the original bounds (`>120 && <200`, `>200 && <252`, `>255`)
    // left gaps at exactly 120, 200, and 252..255 where nothing was drawn;
    // smogOpacity steps by 2, so 252/254 were reachable.
    if (smogOpacity < 120)
    {
      image(star, posX, posY, star.width/3, star.height/3);
    }
    else if (smogOpacity < 200)
    {
      image(starsad, posX, posY, star.width/3, star.height/3);
    }
    else if (smogOpacity < 252)
    {
      image(starquesy, posX, posY, star.width/3, star.height/3);
    }
    else
    {
      // Fully smogged: the star "dies", pinned at a fixed screen position.
      image(stardead, 500, 500, star.width/3, star.height/3);
    }
    // Smog builds twice as fast as it clears, capped around 350.
    if (smogOpacity < 350)
    {
      smogOpacity = smogOpacity+2;
    }
    // While not maxed out, loop the splash sound on a 10-frame cycle.
    if (smogOpacity < 255)
    {
      if (splashCounter == 1)
      {
        player.play();
      }
      if (splashCounter == 10)
      {
        splashCounter = 0;
        player.rewind();
      }
      splashCounter++;
    }
  }
  else
  {
    // No viewer: smog dissipates (down to -150, giving the next viewer a
    // grace period) and bottles vanish one per 4 frames.
    if (smogOpacity > -150)
    {
      smogOpacity--;
    }
    if ((numWb > 1) && (wbDissapearCounter == 1))
    {
      numWb--;
    }
    if (wbDissapearCounter > 3)
    {
      wbDissapearCounter = 0;
    }
    wbDissapearCounter++;
  }
  counter++;

  // Wiimote A press: "clean up" — halve the bottle count and knock the smog
  // back, then consume the press.
  if (wiimote1ButtonA == 1)
  {
    numWb = numWb/2;
    smogOpacity = smogOpacity-6;
    wiimote1ButtonA = 0;
  }
}
//waterbottle class
class WaterBottle {
float wbPosX, wbPosY;
WaterBottle(float posX, float posY) {
wbPosX = posX;
wbPosY = posY;
}
//updates the position of the Y value to cause the bottles to fall
void update() {
if (wbPosY < 468)
{
wbPosY = wbPosY + 10;
}
}
//creates the bottle images
void displayWaterBottle() {
image(waterbottle, wbPosX, wbPosY);
}
}
//wiimote
void oscEvent(OscMessage theOscMessage) {
if(theOscMessage.checkAddrPattern("/wii/1/accel/pry")==true){
wiimote1Pitch = theOscMessage.get(0).floatValue();
wiimote1Roll = theOscMessage.get(1).floatValue();
wiimote1Yaw = theOscMessage.get(2).floatValue();
wiimote1Accel = theOscMessage.get(3).floatValue();
}
if(theOscMessage.checkAddrPattern("/wii/1/button/A")==true){
wiimote1ButtonA = theOscMessage.get(0).intValue();
}
}
//opencv actions at the end of the runtime
//opencv actions at the end of the runtime
// Processing calls stop() on sketch shutdown: release the camera, the audio
// player, and the Minim engine before deferring to the parent applet's stop().
public void stop() {
  opencv.stop();
  player.close();
  minim.stop();
  super.stop();
}