/* --------------------------------------------------------------------------
 * SimpleOpenNI User3d Test
 * --------------------------------------------------------------------------
 * Processing Wrapper for the OpenNI/Kinect library
 * http://code.google.com/p/simple-openni
 * --------------------------------------------------------------------------
 * prog: Max Rheiner / Interaction Design / zhdk / http://iad.zhdk.ch/
 * date: 02/16/2011 (m/d/y)
 * ----------------------------------------------------------------------------
 * this demo currently supports only one user; multi-user support will be
 * implemented later
 * ----------------------------------------------------------------------------
 */

import SimpleOpenNI.*;

SimpleOpenNI context;
PaperDoll doll;

float zoomF = 0.5f;
float rotX = radians(180);  // by default rotate the whole scene 180 degrees around the x-axis,
                            // since the data from OpenNI comes upside down
float rotY = radians(0);
boolean autoCalib = true;

PVector bodyCenter = new PVector();
PVector bodyDir = new PVector();

//PVector[] sc = new PVector[14];
float[] sx = new float[14];  // per-joint screen x coordinates
float[] sy = new float[14];  // per-joint screen y coordinates
ArrayList frames;            // buffered skeleton frames from the recording
PFont font;
int[] jointTypes = new int[14];
boolean recordFlag = false;

void setup()
{
  size(800, 600, P3D);  // strange, I get a drawing error in the cameraFrustum if I use P3D; in OPENGL there is no problem
  context = new SimpleOpenNI(this);

  font = loadFont("Courier-48.vlw");
  textFont(font);

  println(SimpleOpenNI.SKEL_HEAD);

  // disable mirror
  context.setMirror(false);

  if (recordFlag == false)
  {
    // playing back a recording; this works without the camera
    if (context.openFileRecording("/Users/rtwomey/Movies/puppet/kinect_recordings/20120719_184947.oni") == false)
    //if (context.openFileRecording("/Users/rtwomey/Movies/puppet/kinect_recordings/robert.oni") == false)
    {
      println("can't find recording !!!!");
      exit();
    }

    // it's possible to run the sceneAnalyzer over the recorded data stream
    if (context.enableScene() == false)
    {
      println("can't setup scene!!!!");
      exit();
      return;
    }

    println("This file has " + context.framesPlayer() + " frames.");
  }
  else
  {
    println(SimpleOpenNI.SKEL_HEAD);

    // disable mirror
    context.setMirror(false);

    // enable depthMap generation
    if (context.enableDepth() == false)
    {
      println("Can't open the depthMap, maybe the camera is not connected!");
      exit();
      return;
    }
  }

  // enable skeleton generation for all joints
  context.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);

  context.start();

  // setup drawing and perspective
  stroke(255, 255, 255);
  smooth();
  perspective(radians(45), float(width)/float(height), 10, 150000);
  noCursor();
  frameRate(30);

  // preload animation and skeleton tracking data
  frames = new ArrayList();
  preloadSkeletonPoints();

  // load paper doll graphics and data
  doll = new PaperDoll();
  doll.loadBodyTextures();
  assignJointTypes();
}

void draw()
{
  // update the cam
  context.update();
  //delay(100);

  int framenum = frameCount % (frames.size());

  background(0, 0, 0);
  fill(255);
  text(framenum, 10, 10);
  print(framenum);

  Frame currframe = (Frame) frames.get(framenum);

  // draw the skeleton if it's available
  if (currframe.isTracked())
  {
    println("*");
    //drawSkeleton(userList[i]);

    getFrameCoords(currframe);
    doll.constrainCoords(sx, sy);

    strokeWeight(5);
    stroke(255, 0, 0);
    doll.drawDoll();
  }
  else
  {
    println();
  }

  if ((context.nodes() & SimpleOpenNI.NODE_DEPTH) != 0)
  {
    if ((context.nodes() & SimpleOpenNI.NODE_IMAGE) != 0)
    {
      image(context.depthImage(), 0, 0);
      image(context.rgbImage(), context.depthWidth() + 10, 0);
    }
    else
      image(context.depthImage(), 0, 0);
  }
}

void drawTimeline()
{
  pushStyle();
  stroke(255, 255, 0);
  line(10, height - 20, width - 10, height - 20);

  stroke(0);
  rectMode(CENTER);
  fill(255, 255, 0);
  int pos = (int)((width - 2 * 10) * (float)context.curFramePlayer() / (float)context.framesPlayer());
  rect(pos, height - 20, 7, 17);
  popStyle();
}

////////  kinect stuff ////////

void assignJointTypes()
{
  jointTypes[PaperDoll.HEAD]           = SimpleOpenNI.SKEL_HEAD;
  jointTypes[PaperDoll.NECK]           = SimpleOpenNI.SKEL_NECK;
  jointTypes[PaperDoll.LEFT_SHOULDER]  = SimpleOpenNI.SKEL_LEFT_SHOULDER;
  jointTypes[PaperDoll.LEFT_ELBOW]     = SimpleOpenNI.SKEL_LEFT_ELBOW;
  jointTypes[PaperDoll.LEFT_HAND]      = SimpleOpenNI.SKEL_LEFT_HAND;
  jointTypes[PaperDoll.RIGHT_SHOULDER] = SimpleOpenNI.SKEL_RIGHT_SHOULDER;
  jointTypes[PaperDoll.RIGHT_ELBOW]    = SimpleOpenNI.SKEL_RIGHT_ELBOW;
  jointTypes[PaperDoll.RIGHT_HAND]     = SimpleOpenNI.SKEL_RIGHT_HAND;
  jointTypes[PaperDoll.LEFT_HIP]       = SimpleOpenNI.SKEL_LEFT_HIP;
  jointTypes[PaperDoll.LEFT_KNEE]      = SimpleOpenNI.SKEL_LEFT_KNEE;
  jointTypes[PaperDoll.LEFT_FOOT]      = SimpleOpenNI.SKEL_LEFT_FOOT;
  jointTypes[PaperDoll.RIGHT_HIP]      = SimpleOpenNI.SKEL_RIGHT_HIP;
  jointTypes[PaperDoll.RIGHT_KNEE]     = SimpleOpenNI.SKEL_RIGHT_KNEE;
  jointTypes[PaperDoll.RIGHT_FOOT]     = SimpleOpenNI.SKEL_RIGHT_FOOT;
}

////////  frame buffering ////////

void preloadSkeletonPoints()
{
  print("Loading kinect tracking data... ");
  int tracked = 0;

  //context.setPlaybackSpeedPlayer(0.0f);

  pushMatrix();
  translate(width/2, height/2, 0);
  rotateX(rotX);
  rotateY(rotY);
  scale(zoomF);
  translate(0, 0, -1000);  // set the rotation center of the scene 1000 in front of the camera

  context.seekPlayer(0, SimpleOpenNI.PLAYER_SEEK_SET);

  for (int f = 0; f < context.framesPlayer(); f++)
  {
    context.seekPlayer(f, SimpleOpenNI.PLAYER_SEEK_SET);
    context.update();

    int[] depthMap = context.depthMap();

    frames.add(new Frame());

    // draw the skeleton if it's available
    int[] userList = context.getUsers();
    for (int i=0;i