Show Posts

This section allows you to view all posts made by this member. Note that you can only see posts made in areas you currently have access to.


Topics - lbb19930915

Pages: [1]
1
I use jpct-ae for AR 3D rendering based on LBS. There are two questions that confuse me. Could someone help me?
Now I need to check if an AnimatedGroup is showing on the Android screen. If it is not showing, can I still get its position even when it is outside the camera's FOV (instead of getting null)? I need this because I want to draw an indicator pointing at the position of the 3D object.
I already know how to transform a point from world space to screen space; I use the following code.
 SimpleVector bottomP =  Interact2D.project3D2D(cam,fb,ninja.getRoot().getCenter());//how to avoid bottomP being null, even when it is outside the camera FOV?
The question is: how can I get something like a Rect, a box, or any other useful set of vertices to represent the 3D object's position?

2
Support / Ninja AnimatedGroup combined with Vuforia renders upside down.
« on: November 22, 2016, 10:03:47 am »
I am a new user of jpct-ae. When I combine the ninja demo with the Vuforia demo, I run into a small problem: the ninja AnimatedGroup is rendered upside down. Only the orientation is wrong — tracking works fine. Can someone help me? Here is my renderer file. (I changed very little from https://github.com/TheMaggieSimpson/Vuforia559_jPCT-AE)


Code: [Select]
public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
    private static final String LOGTAG = "ImageTargetRenderer";
    private SampleApplicationSession vuforiaAppSession;
    private ImageTargets mActivity;
   
    private Renderer mRenderer;
    boolean mIsActive = false;
       
    private World world;
    private Light sun;
//    private Object3D cylinder;
    private AnimatedGroup animatedGroup;
    private Camera cam;
    private FrameBuffer fb;
    private float[] modelViewMat;
    private float fov;
private float fovy;
float  animationIndex=0;

    public ImageTargetRenderer(ImageTargets activity, SampleApplicationSession session) {
        mActivity = activity;
        vuforiaAppSession = session;
       
        world = new World();
    world.setAmbientLight(20, 20, 20);
    // set the following value according to your need, so the object won't be disappeared.
    world.setClippingPlanes(2.0f, 3000.0f);

    sun = new Light(world);
    sun.setIntensity(250, 250, 250);

    // Create a texture out of the icon...:-)
    if ( !TextureManager.getInstance().containsTexture("texture") ) {
    Texture texture = new Texture(BitmapHelper.rescale(BitmapHelper.convert(
    mActivity.getResources().getDrawable(R.drawable.icon)), 64, 64));
        TextureManager.getInstance().addTexture("texture", texture);
    }
        try {
            animatedGroup = BonesIO.loadGroup(mActivity.getResources().openRawResource(R.raw.ninja_group_bones));
        } catch (IOException e) {
            e.printStackTrace();
        }
        TextureManager.getInstance().flush();
        // Create a texture out of the icon...:-)
        if ( !TextureManager.getInstance().containsTexture("ninja") ) {
            Resources res = mActivity.getResources();
//                Texture texture = new Texture(res.openRawResource(R.raw.lenaa));
            Texture texture = new Texture(res.openRawResource(R.raw.ninja_texture));
            texture.keepPixelData(true);
            TextureManager.getInstance().addTexture("ninja", texture);
        }
        for (Animated3D anim : animatedGroup){
//                anim.scale(1f);
            anim.setTexture("ninja");
        }
        animatedGroup.addToWorld(world);
        cam = world.getCamera();


    // for older Android versions, which had massive problems with garbage collection
    MemoryHelper.compact();
       
    }
       
   
    // Called to draw the current frame.
    @Override
    public void onDrawFrame(GL10 gl) {
        if (!mIsActive)
            return;
       
        // Call our function to render content
        renderFrame();
        updateCamera();


            animationIndex += 0.0008f;
            while (animationIndex > 1)
                animationIndex -= 1;

            animatedGroup.animateSkin(animationIndex, 0);
//        }
        world.renderScene(fb);
        world.draw(fb);
        fb.display();
       
    }
   
   
    // Called when the surface is created or recreated.
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");
       
        initRendering(); // NOTE: Cocokin sama cpp - DONE
       
        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        vuforiaAppSession.onSurfaceCreated();
    }
   
   
    // Called when the surface changed size.
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");
                 
        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(width, height);
        Config.viewportOffsetAffectsRenderTarget = true;
       
        updateRendering(width, height);       
       
        // Call Vuforia function to handle render surface size changes:
        vuforiaAppSession.onSurfaceChanged(width, height);
    }
   
   
    // Function for initializing the renderer.   
    private void initRendering() {
        mRenderer = Renderer.getInstance();
       
    // Define clear color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);       
       
        // Hide the Loading Dialog
        mActivity.loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
    }
   
    private void updateRendering(int width, int height) {
   
    // Update screen dimensions
    vuforiaAppSession.setmScreenWidth(width);
    vuforiaAppSession.setmScreenHeight(height);
   
    // Reconfigure the video background
    vuforiaAppSession.configureVideoBackground();
   
    CameraCalibration camCalibration = com.vuforia.CameraDevice.getInstance().getCameraCalibration();
    Vec2F size = camCalibration.getSize();
    Vec2F focalLength = camCalibration.getFocalLength();
    float fovyRadians = (float) (2 * Math.atan(0.5f * size.getData()[1] / focalLength.getData()[1]));
    float fovRadians = (float) (2 * Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]));
   
    if (vuforiaAppSession.mIsPortrait) {
    setFovy(fovRadians);
    setFov(fovyRadians);
    } else {
    setFov(fovRadians);
    setFovy(fovyRadians);
    }
   
    }
   
    // The render function.
    private void renderFrame() {
        // clear color and depth buffer
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // get the state, and mark the beginning of a rendering section
        State state = mRenderer.begin();
        // explicitly render the video background
        mRenderer.drawVideoBackground();       
       
        float[] modelviewArray = new float[16];
        // did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
        // get the trackable
            TrackableResult result = state.getTrackableResult(tIdx);
            Trackable trackable = result.getTrackable();
            printUserData(trackable);
           
            Matrix44F modelViewMatrix = Tool.convertPose2GLMatrix(result.getPose());                     
            Matrix44F inverseMV = SampleMath.Matrix44FInverse(modelViewMatrix);           
            Matrix44F invTranspMV = SampleMath.Matrix44FTranspose(inverseMV);
           
            modelviewArray = invTranspMV.getData();
            updateModelviewMatrix(modelviewArray);
                                       
        }                     
        // hide the objects when the targets are not detected
    if (state.getNumTrackableResults() == 0) {
    float m [] = {
    1,0,0,0,
    0,1,0,0,
    0,0,1,0,
    0,0,-10000,1
    };
    modelviewArray = m;
    updateModelviewMatrix(modelviewArray);
    }   
       
        mRenderer.end();
    }
   
   
    private void printUserData(Trackable trackable) {
        String userData = (String) trackable.getUserData();
        Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
    }
           
    private void updateModelviewMatrix(float mat[]) {
        modelViewMat = mat;
    }
       
    private void updateCamera() {
    if (modelViewMat != null) {   
    float[] m = modelViewMat;

final SimpleVector camUp;
if (vuforiaAppSession.mIsPortrait) {
camUp = new SimpleVector(-m[0], -m[1], -m[2]);
} else {
camUp = new SimpleVector(-m[4], -m[5], -m[6]);
}

final SimpleVector camDirection = new SimpleVector(m[8], m[9], m[10]);
final SimpleVector camPosition = new SimpleVector(m[12], m[13], m[14]);

cam.setOrientation(camDirection, camUp);
cam.setPosition(camPosition);

cam.setFOV(fov);
cam.setYFOV(fovy);
    }
    }
       
    private void setFov(float fov) {
this.fov = fov;
}
   
private void setFovy(float fovy) {
this.fovy = fovy;
}
   
}

Pages: [1]