Author Topic: Ninja AnimatedGroup combine with vuforia upside down.  (Read 460 times)

Offline lbb19930915

  • byte
  • *
  • Posts: 7
    • View Profile
Ninja AnimatedGroup combine with vuforia upside down.
« on: November 22, 2016, 10:03:47 am »
I am a new user of jpct-ae. When I combine the ninja demo with the Vuforia demo I run into a small problem: the ninja AnimatedGroup renders upside down. Everything else — including tracking — works fine. Can someone help me? Here is my renderer file. (I changed very little from https://github.com/TheMaggieSimpson/Vuforia559_jPCT-AE)


Code: [Select]
public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
    private static final String LOGTAG = "ImageTargetRenderer";
    private SampleApplicationSession vuforiaAppSession;
    private ImageTargets mActivity;
   
    private Renderer mRenderer;
    boolean mIsActive = false;
       
    private World world;
    private Light sun;
//    private Object3D cylinder;
    private AnimatedGroup animatedGroup;
    private Camera cam;
    private FrameBuffer fb;
    private float[] modelViewMat;
    private float fov;
private float fovy;
float  animationIndex=0;

    public ImageTargetRenderer(ImageTargets activity, SampleApplicationSession session) {
        mActivity = activity;
        vuforiaAppSession = session;
       
        world = new World();
    world.setAmbientLight(20, 20, 20);
    // set the following value according to your need, so the object won't be disappeared.
    world.setClippingPlanes(2.0f, 3000.0f);

    sun = new Light(world);
    sun.setIntensity(250, 250, 250);

    // Create a texture out of the icon...:-)
    if ( !TextureManager.getInstance().containsTexture("texture") ) {
    Texture texture = new Texture(BitmapHelper.rescale(BitmapHelper.convert(
    mActivity.getResources().getDrawable(R.drawable.icon)), 64, 64));
        TextureManager.getInstance().addTexture("texture", texture);
    }
        try {
            animatedGroup = BonesIO.loadGroup(mActivity.getResources().openRawResource(R.raw.ninja_group_bones));
        } catch (IOException e) {
            e.printStackTrace();
        }
        TextureManager.getInstance().flush();
        // Create a texture out of the icon...:-)
        if ( !TextureManager.getInstance().containsTexture("ninja") ) {
            Resources res = mActivity.getResources();
//                Texture texture = new Texture(res.openRawResource(R.raw.lenaa));
            Texture texture = new Texture(res.openRawResource(R.raw.ninja_texture));
            texture.keepPixelData(true);
            TextureManager.getInstance().addTexture("ninja", texture);
        }
        for (Animated3D anim : animatedGroup){
//                anim.scale(1f);
            anim.setTexture("ninja");
        }
        animatedGroup.addToWorld(world);
        cam = world.getCamera();


    // for older Android versions, which had massive problems with garbage collection
    MemoryHelper.compact();
       
    }
       
   
    // Called to draw the current frame.
    @Override
    public void onDrawFrame(GL10 gl) {
        if (!mIsActive)
            return;
       
        // Call our function to render content
        renderFrame();
        updateCamera();


            animationIndex += 0.0008f;
            while (animationIndex > 1)
                animationIndex -= 1;

            animatedGroup.animateSkin(animationIndex, 0);
//        }
        world.renderScene(fb);
        world.draw(fb);
        fb.display();
       
    }
   
   
    // Called when the surface is created or recreated.
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");
       
        initRendering(); // NOTE: Cocokin sama cpp - DONE
       
        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        vuforiaAppSession.onSurfaceCreated();
    }
   
   
    // Called when the surface changed size.
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");
                 
        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(width, height);
        Config.viewportOffsetAffectsRenderTarget = true;
       
        updateRendering(width, height);       
       
        // Call Vuforia function to handle render surface size changes:
        vuforiaAppSession.onSurfaceChanged(width, height);
    }
   
   
    // Function for initializing the renderer.   
    private void initRendering() {
        mRenderer = Renderer.getInstance();
       
    // Define clear color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);       
       
        // Hide the Loading Dialog
        mActivity.loadingDialogHandler
            .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
    }
   
    private void updateRendering(int width, int height) {
   
    // Update screen dimensions
    vuforiaAppSession.setmScreenWidth(width);
    vuforiaAppSession.setmScreenHeight(height);
   
    // Reconfigure the video background
    vuforiaAppSession.configureVideoBackground();
   
    CameraCalibration camCalibration = com.vuforia.CameraDevice.getInstance().getCameraCalibration();
    Vec2F size = camCalibration.getSize();
    Vec2F focalLength = camCalibration.getFocalLength();
    float fovyRadians = (float) (2 * Math.atan(0.5f * size.getData()[1] / focalLength.getData()[1]));
    float fovRadians = (float) (2 * Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]));
   
    if (vuforiaAppSession.mIsPortrait) {
    setFovy(fovRadians);
    setFov(fovyRadians);
    } else {
    setFov(fovRadians);
    setFovy(fovyRadians);
    }
   
    }
   
    // The render function.
    private void renderFrame() {
        // clear color and depth buffer
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // get the state, and mark the beginning of a rendering section
        State state = mRenderer.begin();
        // explicitly render the video background
        mRenderer.drawVideoBackground();       
       
        float[] modelviewArray = new float[16];
        // did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
        // get the trackable
            TrackableResult result = state.getTrackableResult(tIdx);
            Trackable trackable = result.getTrackable();
            printUserData(trackable);
           
            Matrix44F modelViewMatrix = Tool.convertPose2GLMatrix(result.getPose());                     
            Matrix44F inverseMV = SampleMath.Matrix44FInverse(modelViewMatrix);           
            Matrix44F invTranspMV = SampleMath.Matrix44FTranspose(inverseMV);
           
            modelviewArray = invTranspMV.getData();
            updateModelviewMatrix(modelviewArray);
                                       
        }                     
        // hide the objects when the targets are not detected
    if (state.getNumTrackableResults() == 0) {
    float m [] = {
    1,0,0,0,
    0,1,0,0,
    0,0,1,0,
    0,0,-10000,1
    };
    modelviewArray = m;
    updateModelviewMatrix(modelviewArray);
    }   
       
        mRenderer.end();
    }
   
   
    private void printUserData(Trackable trackable) {
        String userData = (String) trackable.getUserData();
        Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
    }
           
    private void updateModelviewMatrix(float mat[]) {
        modelViewMat = mat;
    }
       
    private void updateCamera() {
    if (modelViewMat != null) {   
    float[] m = modelViewMat;

final SimpleVector camUp;
if (vuforiaAppSession.mIsPortrait) {
camUp = new SimpleVector(-m[0], -m[1], -m[2]);
} else {
camUp = new SimpleVector(-m[4], -m[5], -m[6]);
}

final SimpleVector camDirection = new SimpleVector(m[8], m[9], m[10]);
final SimpleVector camPosition = new SimpleVector(m[12], m[13], m[14]);

cam.setOrientation(camDirection, camUp);
cam.setPosition(camPosition);

cam.setFOV(fov);
cam.setYFOV(fovy);
    }
    }
       
    private void setFov(float fov) {
this.fov = fov;
}
   
private void setFovy(float fovy) {
this.fovy = fovy;
}
   
}

Offline EgonOlsen

  • Administrator
  • quad
  • *****
  • Posts: 11776
    • View Profile
    • http://www.jpct.net
Re: Ninja AnimatedGroup combine with vuforia upside down.
« Reply #1 on: November 22, 2016, 12:04:16 pm »
A simple ninja.rotateAxis(ninja.getXAxis(), <PI>); doesn't do the trick?

Offline lbb19930915

  • byte
  • *
  • Posts: 7
    • View Profile
Re: Ninja AnimatedGroup combine with vuforia upside down.
« Reply #2 on: November 22, 2016, 03:32:22 pm »
I am very lucky that you could reply so quickly ~ 8) 8)
I got it! Thank you, sir — you saved me a lot of time. Here is my solution; I hope it can help other new users who run into the same question.


Code: [Select]
// The renderer class for the ImageTargets sample.
public class ImageTargetRenderer implements GLSurfaceView.Renderer
{
    private static final String LOGTAG = "ImageTargetRenderer";
    private SampleApplicationSession vuforiaAppSession;
    private ImageTargets mActivity;

    private Renderer mRenderer;
    boolean mIsActive = false;

    private World world;
    private Light sun;
    //    private Object3D cylinder;
    private AnimatedGroup animatedGroup;
    private Camera cam;
    private FrameBuffer fb;
    private float[] modelViewMat;
    private float fov;
    private float fovy;
    float  animationIndex=0;

    public ImageTargetRenderer(ImageTargets activity, SampleApplicationSession session) {
        mActivity = activity;
        vuforiaAppSession = session;

        world = new World();
        world.setAmbientLight(200, 200, 200);
        // set the following value according to your need, so the object won't be disappeared.
        world.setClippingPlanes(2.0f, 3000.0f);

        sun = new Light(world);
        sun.setIntensity(250, 250, 250);

        // Create a texture out of the icon...:-)
        if ( !TextureManager.getInstance().containsTexture("texture") ) {
            Texture texture = new Texture(BitmapHelper.rescale(BitmapHelper.convert(
                    mActivity.getResources().getDrawable(R.drawable.icon)), 64, 64));
            TextureManager.getInstance().addTexture("texture", texture);
        }
        try {
            animatedGroup = BonesIO.loadGroup(mActivity.getResources().openRawResource(R.raw.ninja_group_bones));
        } catch (IOException e) {
            e.printStackTrace();
        }
        TextureManager.getInstance().flush();
        // Create a texture out of the icon...:-)
        if ( !TextureManager.getInstance().containsTexture("ninja") ) {
            Resources res = mActivity.getResources();
//                Texture texture = new Texture(res.openRawResource(R.raw.lenaa));
            Texture texture = new Texture(res.openRawResource(R.raw.ninja_texture));
            texture.keepPixelData(true);
            TextureManager.getInstance().addTexture("ninja", texture);
        }
        for (Animated3D anim : animatedGroup){
//                anim.scale(1f);
            anim.setTexture("ninja");
            //this line not work well ,will change the positon of Object3d
//            anim.rotateX((float) Math.PI);

        }
        //I add follow two lines
        animatedGroup.getRoot().rotateX(-(float) Math.PI);
        animatedGroup.getRoot().rotateY((float) Math.PI);
        animatedGroup.addToWorld(world);
        cam = world.getCamera();


        // for older Android versions, which had massive problems with garbage collection
        MemoryHelper.compact();

    }


    // Called to draw the current frame.
    @Override
    public void onDrawFrame(GL10 gl) {
        if (!mIsActive)
            return;

        // Call our function to render content
        renderFrame();
        updateCamera();


        animationIndex += 0.0008f;
        while (animationIndex > 1)
            animationIndex -= 1;

        animatedGroup.animateSkin(animationIndex, 0);
//        }
        world.renderScene(fb);
        world.draw(fb);
        fb.display();

    }


    // Called when the surface is created or recreated.
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceCreated");

        initRendering(); // NOTE: Cocokin sama cpp - DONE

        // Call Vuforia function to (re)initialize rendering after first use
        // or after OpenGL ES context was lost (e.g. after onPause/onResume):
        vuforiaAppSession.onSurfaceCreated();
    }


    // Called when the surface changed size.
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.d(LOGTAG, "GLRenderer.onSurfaceChanged");

        if (fb != null) {
            fb.dispose();
        }
        fb = new FrameBuffer(width, height);
        Config.viewportOffsetAffectsRenderTarget = true;

        updateRendering(width, height);

        // Call Vuforia function to handle render surface size changes:
        vuforiaAppSession.onSurfaceChanged(width, height);
    }


    // Function for initializing the renderer.   
    private void initRendering() {
        mRenderer = Renderer.getInstance();

        // Define clear color
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, Vuforia.requiresAlpha() ? 0.0f : 1.0f);

        // Hide the Loading Dialog
        mActivity.loadingDialogHandler
                .sendEmptyMessage(LoadingDialogHandler.HIDE_LOADING_DIALOG);
    }

    private void updateRendering(int width, int height) {

        // Update screen dimensions
        vuforiaAppSession.setmScreenWidth(width);
        vuforiaAppSession.setmScreenHeight(height);

        // Reconfigure the video background
        vuforiaAppSession.configureVideoBackground();

        CameraCalibration camCalibration = com.vuforia.CameraDevice.getInstance().getCameraCalibration();
        Vec2F size = camCalibration.getSize();
        Vec2F focalLength = camCalibration.getFocalLength();
        float fovyRadians = (float) (2 * Math.atan(0.5f * size.getData()[1] / focalLength.getData()[1]));
        float fovRadians = (float) (2 * Math.atan(0.5f * size.getData()[0] / focalLength.getData()[0]));

        if (vuforiaAppSession.mIsPortrait) {
            setFovy(fovRadians);
            setFov(fovyRadians);
        } else {
            setFov(fovRadians);
            setFovy(fovyRadians);
        }

    }

    // The render function.
    private void renderFrame() {
        // clear color and depth buffer
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // get the state, and mark the beginning of a rendering section
        State state = mRenderer.begin();
        // explicitly render the video background
        mRenderer.drawVideoBackground();

        float[] modelviewArray = new float[16];
        // did we find any trackables this frame?
        for (int tIdx = 0; tIdx < state.getNumTrackableResults(); tIdx++) {
            // get the trackable
            TrackableResult result = state.getTrackableResult(tIdx);
            Trackable trackable = result.getTrackable();
            printUserData(trackable);

            Matrix44F modelViewMatrix = Tool.convertPose2GLMatrix(result.getPose());
            Matrix44F inverseMV = SampleMath.Matrix44FInverse(modelViewMatrix);
            Matrix44F invTranspMV = SampleMath.Matrix44FTranspose(inverseMV);

            modelviewArray = invTranspMV.getData();
            updateModelviewMatrix(modelviewArray);

        }
        // hide the objects when the targets are not detected
        if (state.getNumTrackableResults() == 0) {
            float m [] = {
                    1,0,0,0,
                    0,1,0,0,
                    0,0,1,0,
                    0,0,-10000,1
            };
            modelviewArray = m;
            updateModelviewMatrix(modelviewArray);
        }

        mRenderer.end();
    }


    private void printUserData(Trackable trackable) {
        String userData = (String) trackable.getUserData();
        Log.d(LOGTAG, "UserData:Retreived User Data \"" + userData + "\"");
    }

    private void updateModelviewMatrix(float mat[]) {
        modelViewMat = mat;
    }

    private void updateCamera() {
        if (modelViewMat != null) {
            float[] m = modelViewMat;

            final SimpleVector camUp;
            if (vuforiaAppSession.mIsPortrait) {
                camUp = new SimpleVector(-m[0], -m[1], -m[2]);
            } else {
                camUp = new SimpleVector(-m[4], -m[5], -m[6]);
            }

            final SimpleVector camDirection = new SimpleVector(m[8], m[9], m[10]);
            final SimpleVector camPosition = new SimpleVector(m[12], m[13], m[14]);

            cam.setOrientation(camDirection, camUp);
            camPosition.y +=100;
            camPosition.z +=100;
            cam.setPosition(camPosition);

            cam.setFOV(fov);
            cam.setYFOV(fovy);
        }
    }

    private void setFov(float fov) {
        this.fov = fov;
    }

    private void setFovy(float fovy) {
        this.fovy = fovy;
    }

}