// Scratch buffers for ARCore augmented-face data, filled in onDrawFrame().
// NOTE(review): in the visible code these lists are only ever add()ed to —
// confirm they are cleared once per frame somewhere, otherwise they grow
// without bound (one entry per tracked face per rendered frame).
ArrayList<FloatBuffer> verticesList = new ArrayList<>();
ArrayList<float []> poseMatrixList = new ArrayList<>();
// Column-major 4x4 matrices, overwritten from the ARCore camera every frame
// (see getProjectionMatrix/getViewMatrix calls in onDrawFrame).
float[] mProjectionMatrix = new float[16];
float[] mViewMatrix = new float[16];
/**
 * Per-frame GL render callback. Pulls the latest ARCore frame, collects
 * augmented-face mesh vertices and pose/camera matrices, hands them to the
 * ARGear session, and draws the composited result to the screen.
 *
 * NOTE(review): this chunk of the file is truncated — the ARGCameraConfig
 * constructor call below is cut off mid-argument-list and the closing braces
 * of the loop/if/method are outside this view; the true nesting of the final
 * draw calls cannot be confirmed from here.
 */
public void onDrawFrame(GL10 gl) {
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
// Tell ARCore which GL texture to render the camera image into.
arcoreSession.setCameraTextureName(backgroundRenderer.getTextureId());
Frame frame = arcoreSession.update();
// Skip rendering until ARCore produces a usable frame; a zero timestamp is
// treated here as "no valid camera image yet".
if (frame == null || frame.getCamera() == null) return;
if (frame.getTimestamp() == 0) return;
if (arcoreSession.isAugmentedFaceMode()) {
// Iterate every face ARCore knows about; only currently-tracked faces
// contribute mesh/pose data this frame.
for (AugmentedFace face : arcoreSession.getSession().getAllTrackables(AugmentedFace.class)) {
if (face.getTrackingState() == TrackingState.TRACKING) {
FloatBuffer faceVertices = face.getMeshVertices();
// NOTE(review): appended without clearing in the visible code — confirm
// verticesList/poseMatrixList are reset each frame (see field declarations).
verticesList.add(faceVertices);
// center and region poses
float[] facePoseMatrix = new float[16];
Pose facePose = face.getCenterPose();
facePose.toMatrix(facePoseMatrix, 0);
poseMatrixList.add(facePoseMatrix);
// arcore camera projection matrix (near clip 0.1, far clip 100.0)
frame.getCamera().getProjectionMatrix(mProjectionMatrix, 0, 0.1f, 100.0f);
// arcore camera view matrix
frame.getCamera().getViewMatrix(mViewMatrix, 0);
// Re-push the camera config to ARGear whenever ARCore's camera texture
// size changes (first frame, or a camera configuration switch).
Size textureSize = arcoreSession.getSession().getCameraConfig().getTextureSize();
if (mTextureSize == null || !mTextureSize.equals(textureSize)) {
mTextureSize = textureSize;
mARGSession.setCameraConfig(new ARGCameraConfig(textureSize.getWidth(),
// NOTE(review): the statement above is cut off in this chunk — the remaining
// ARGCameraConfig arguments (and intervening closing braces) are not visible.
// feed mesh vertices, pose matrix, projection matrix, view matrix
mARGSession.applyAdditionalFaceInfo(verticesList, poseMatrixList, mProjectionMatrix, mViewMatrix);
mARGSession.feedTexture(arcoreSession.getTextureId(), mTextureSize);
// Let ARGear compose its effects over the camera frame, then blit to screen.
ARGFrame argFrame = mARGSession.drawFrame(gl, mScreenWidth, mScreenHeight);
mScreenRenderer.draw(argFrame, mScreenWidth, mScreenHeight, mTextureSize.getWidth(), mTextureSize.getHeight());