How to make a rectangle perpendicular to my hand?

lee

Nov 23, 2011, 7:58:08 AM
to simple-openni-discuss
Hi :)

I'm using Processing, SimpleOpenNI and toxiclibs to make some
rectangles and do texture mapping.

I've made the rectangles follow one another side by side, just like a ribbon.

But I don't know how to make the rectangles perpendicular to my hand's
movement.

I'm quite a newbie :( I've struggled with this for about two weeks, but I
couldn't figure it out.
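
I think the idea should be to offset the two edge points of each rectangle
along a vector perpendicular to the direction the hand has moved since the
last frame, instead of always along the z-axis, but I can't get it to work.
Something like this rough sketch, maybe (untested, and ribbonEdges is just a
name I made up):

import toxi.geom.Vec3D;

// Sketch only: given the current and previous hand positions, return the two
// edge points of the ribbon, offset perpendicular to the hand's movement.
Vec3D[] ribbonEdges(Vec3D pos, Vec3D prevPos, float weight) {
  Vec3D dir = pos.sub(prevPos);  // hand movement since the last frame
  if (dir.magnitude() < 0.001) {
    // hand barely moved: fall back to the plain z-axis offset
    return new Vec3D[] { pos.add(0, 0, weight), pos.add(0, 0, -weight) };
  }
  // cross the movement direction with the view axis (z) to get a vector
  // perpendicular to the stroke, then scale it to the ribbon's half-width
  Vec3D perp = dir.cross(new Vec3D(0, 0, 1)).normalizeTo(weight);
  return new Vec3D[] { pos.add(perp), pos.sub(perp) };
}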

I appreciate your help!!

------------------------- This is the code I'm using, for the 'ribbon-type rectangles' -------------------------

import processing.opengl.*;
import SimpleOpenNI.*;
import toxi.geom.*;
import toxi.geom.mesh.*;

SimpleOpenNI context;

// texture image and the mesh that holds the ribbon faces
PImage t;
TriangleMesh mesh = new TriangleMesh("doodle");

// previous hand position and the two edge points from the last frame
Vec3D prev = new Vec3D();
Vec3D p = new Vec3D();
Vec3D q = new Vec3D();

Vec2D rotation = new Vec2D();

// half-width of the ribbon, derived from the hand's depth
float weight = 0;


void setup()
{
  context = new SimpleOpenNI(this);
  context.setMirror(true);

  t = loadImage("2.png");

  // depthMap, RGB generation
  context.enableDepth();
  context.enableRGB();

  // enable skeleton generation for all joints
  context.enableUser(SimpleOpenNI.SKEL_PROFILE_ALL);

  background(0, 0, 0);
  smooth();
  size(context.depthWidth(), context.depthHeight(), OPENGL);
}

void draw()
{
  // update the cam
  context.update();

  // draw the RGB image
  image(context.rgbImage(), 0, 0);
  textureMode(NORMALIZED);

  if (context.isTrackingSkeleton(1))
  {
    PVector jointPos1 = new PVector();
    PVector jointPos2 = new PVector();
    PVector jointPos3 = new PVector();
    PVector jointPos  = new PVector();

    // tracking right hand, left hand, torso
    context.getJointPositionSkeleton(1, SimpleOpenNI.SKEL_RIGHT_HAND, jointPos1);
    context.getJointPositionSkeleton(1, SimpleOpenNI.SKEL_LEFT_HAND, jointPos2);
    context.getJointPositionSkeleton(1, SimpleOpenNI.SKEL_TORSO, jointPos3);

    // project the right hand position into screen coordinates
    context.convertRealWorldToProjective(jointPos1, jointPos);

    Vec3D pos = new Vec3D(jointPos.x, jointPos.y, 0);

    // map the hand's depth to a stroke weight (the ribbon's half-width)
    float d = map(jointPos.z, 0, 2047, 0, 100);
    weight = pow(100 / d, 4);

    // the two edge points of the rectangle, offset along the z-axis
    Vec3D a = pos.add(0, 0, weight);
    Vec3D b = pos.add(0, 0, -weight);

    // add the new face to the ribbon mesh
    mesh.addFace(p, q, b, a);

    // store current points for next iteration
    prev = pos;
    p = a;
    q = b;

    // noStroke();

    pushMatrix();
    beginShape(QUAD_STRIP);

    texture(t);
    // iterate over all faces/triangles of the mesh
    for (Iterator i = mesh.faces.iterator(); i.hasNext();) {
      Face f = (Face) i.next();

      // create vertices for each corner point
      vertex(f.a.x, f.a.y, f.a.z + 50, 0, 0);
      vertex(f.b.x, f.b.y, f.b.z + 50, 1, 1);
      vertex(f.a.x, f.a.y, f.a.z + 50, 1, 0);
      vertex(f.b.x, f.b.y, f.b.z + 50, 0, 1);
    }
    endShape();
    popMatrix();
  }
}


// -----------------------------------------------------------------
// SimpleOpenNI events

void onNewUser(int userId)
{
  println("onNewUser - userId: " + userId);
  println("  start pose detection");
  context.startPoseDetection("Psi", userId);
}

void onLostUser(int userId)
{
  println("onLostUser - userId: " + userId);
}

void onStartCalibration(int userId)
{
  println("onStartCalibration - userId: " + userId);
}

void onEndCalibration(int userId, boolean successful)
{
  println("onEndCalibration - userId: " + userId + ", successful: " + successful);
  if (successful)
  {
    println("  User calibrated !!!");
    context.startTrackingSkeleton(userId);
  }
  else
  {
    println("  Failed to calibrate user !!!");
    println("  Start pose detection");
    context.startPoseDetection("Psi", userId);
  }
}

void onStartPose(String pose, int userId)
{
  println("onStartPose - userId: " + userId + ", pose: " + pose);
  println("  stop pose detection");

  context.stopPoseDetection(userId);
  context.requestCalibrationSkeleton(userId, true);
}

void onEndPose(String pose, int userId)
{
  println("onEndPose - userId: " + userId + ", pose: " + pose);
}
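
If the idea in my sketch above makes sense, I guess the two lines in draw()
where a and b are offset along the z-axis would change into something like
this (again just a guess, reusing the ribbonEdges sketch from earlier):

// in draw(), instead of the fixed z-axis offset:
Vec3D[] edges = ribbonEdges(pos, prev, weight);
Vec3D a = edges[0];
Vec3D b = edges[1];

Does that sound like the right direction, or am I missing something?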
