Camera

To use ARGear, you must provide camera configuration information and video frames to ARGear. Based on this information, ARGear applies AR functions such as face tracking and segmentation on top of the received frames, as configured in ARGInferenceConfig.Feature.

The sample code below describes an example of camera configuration and how to feed video frames to ARGear.

ReferenceCamera.CameraListener cameraListener = new ReferenceCamera.CameraListener() {

    /**
     * Forwards the camera's configuration to ARGear so it can correctly
     * interpret the frames that will be fed to it.
     */
    @Override
    public void setConfig(int previewWidth,
                          int previewHeight,
                          float verticalFov,
                          float horizontalFov,
                          int orientation,
                          boolean isFrontFacing,
                          float fps) {
        ARGCameraConfig config = new ARGCameraConfig(previewWidth,
                                                     previewHeight,
                                                     verticalFov,
                                                     horizontalFov,
                                                     orientation,
                                                     isFrontFacing,
                                                     fps);
        argsession.setCameraConfig(config);
    }

    // --- Camera API 1 callbacks ---

    @Override
    public void updateFaceRects(Camera.Face[] detectedFaces) {
        // Hand the camera-detected face regions over to ARGear.
        argsession.updateFaceRects(detectedFaces);
    }

    @Override
    public void feedRawData(byte[] frameBytes) {
        // Push the raw preview frame bytes into ARGear.
        argsession.feedRawData(frameBytes);
    }

    // --- Camera API 2 callbacks ---

    @Override
    public void updateFaceRects(int faceCount, int[][] faceBounds) {
        // Hand the face count and bounding boxes over to ARGear.
        argsession.updateFaceRects(faceCount, faceBounds);
    }

    @Override
    public void feedRawData(Image frameImage) {
        // Push the preview frame Image into ARGear.
        argsession.feedRawData(frameImage);
    }
};

Sample code for a camera class that calls the updateFaceRects and feedRawData functions is shown below.

Camera API 1 Sample Code

<Camera API 1 Sample Code>
private class FaceDetecionCallBack implements FaceDetectionListener {
    /** Relays faces detected by Camera API 1 to the registered listener. */
    @Override
    public void onFaceDetection(Face[] detectedFaces, Camera sourceCamera) {
        listener.updateFaceRects(detectedFaces);
    }
}
private class CameraPreviewCallback implements Camera.PreviewCallback {
    /**
     * Receives each preview frame from Camera API 1 and forwards the raw
     * bytes to the listener, which feeds them into ARGear.
     *
     * Fix: {@code data} is already a {@code byte[]}; the original sample
     * called {@code data.array()}, which is a {@code ByteBuffer} method that
     * does not exist on arrays and would not compile.
     */
    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {
        listener.feedRawData(data);
    }
}

Camera API 2 Sample Code

<Camera API 2 Sample Code>
private CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {

    /**
     * Extracts face-detection results from a capture result, scales each
     * bounding box from sensor coordinates to preview coordinates, and
     * forwards them to the listener.
     */
    private void process(CaptureResult result) {
        Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if (faces != null && mode != null && faces.length > 0) {
            // Fix: clamp to MAXFACE. The original loop indexed bbox[i] with
            // i up to faces.length - 1, overflowing the MAXFACE-sized array
            // (ArrayIndexOutOfBoundsException) when the camera reported more
            // than MAXFACE faces.
            int count = Math.min(faces.length, MAXFACE);
            int[][] bbox = new int[MAXFACE][4];
            for (int i = 0; i < count; ++i) {
                Rect rect = faces[i].getBounds();
                // Scale sensor-space coordinates into preview-space.
                bbox[i][0] = rect.left * mPreviewSize[0] / mCameraSensorResolution.getWidth();
                bbox[i][1] = rect.top * mPreviewSize[1] / mCameraSensorResolution.getHeight();
                bbox[i][2] = rect.right * mPreviewSize[0] / mCameraSensorResolution.getWidth();
                bbox[i][3] = rect.bottom * mPreviewSize[1] / mCameraSensorResolution.getHeight();
            }
            listener.updateFaceRects(count, bbox);
        }
    }

    @Override
    public void onCaptureProgressed(CameraCaptureSession session,
                                    CaptureRequest request,
                                    CaptureResult partialResult) {
        process(partialResult);
    }

    @Override
    public void onCaptureCompleted(@NonNull CameraCaptureSession session,
                                   @NonNull CaptureRequest request,
                                   @NonNull TotalCaptureResult result) {
        process(result);
    }
};
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
        = new ImageReader.OnImageAvailableListener() {
    /**
     * Called when a new preview frame is ready on the ImageReader; the frame
     * is handed off to the listener on mHandler's thread.
     */
    @Override
    public void onImageAvailable(final ImageReader reader) {
        mHandler.post(new Runnable() {
            @Override
            public void run() {
                final Image image = reader.acquireLatestImage();
                if (image == null) {
                    return; // No frame available (already acquired or dropped).
                }
                try {
                    listener.feedRawData(image);
                } finally {
                    // Fix: always release the Image. The original skipped
                    // close() when feedRawData threw, leaking the buffer and
                    // eventually stalling the ImageReader.
                    image.close();
                }
            }
        });
    }
};