3 minute read

OpenGL ES 3.0

/**
 * GL-thread per-frame callback: advances the ARCore session, then renders the
 * camera background, the tracked point cloud, detected planes, and the virtual
 * objects anchored by user taps. No-op until the AR session exists.
 *
 * Draw order matters here: camera background first, then non-occluded
 * visualizations (points, planes), then virtual objects into the offscreen
 * {@code virtualSceneFramebuffer}, which is finally composed over the
 * background by {@code drawVirtualScene} (occlusion-aware when depth is on).
 */
public void onDrawFrame(SampleRender render) {
  if (session == null) {
    return;
  }

  // Texture names should only be set once on a GL thread unless they change. This is done during
  // onDrawFrame rather than onSurfaceCreated since the session is not guaranteed to have been
  // initialized during the execution of onSurfaceCreated.
  if (!hasSetTextureNames) {
    session.setCameraTextureNames(
        new int[] {backgroundRenderer.getCameraColorTexture().getTextureId()});
    hasSetTextureNames = true;
  }

  // -- Update per-frame state

  // Notify ARCore session that the view size changed so that the perspective matrix and
  // the video background can be properly adjusted.
  displayRotationHelper.updateSessionIfNeeded(session);

  // Obtain the current frame from ARSession. When the configuration is set to
  // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
  // camera framerate.
  Frame frame;
  try {
    frame = session.update();
  } catch (CameraNotAvailableException e) {
    Log.e(TAG, "Camera not available during onDrawFrame", e);
    messageSnackbarHelper.showError(this, "Camera not available. Try restarting the app.");
    return;
  }
  Camera camera = frame.getCamera();

  // Update BackgroundRenderer state to match the depth settings.
  // These calls may (re)load shader assets, hence the IOException handling.
  try {
    backgroundRenderer.setUseDepthVisualization(
        render, depthSettings.depthColorVisualizationEnabled());
    backgroundRenderer.setUseOcclusion(render, depthSettings.useDepthForOcclusion());
  } catch (IOException e) {
    Log.e(TAG, "Failed to read a required asset file", e);
    messageSnackbarHelper.showError(this, "Failed to read a required asset file: " + e);
    return;
  }

  // BackgroundRenderer.updateDisplayGeometry must be called every frame to update the coordinates
  // used to draw the background camera image.
  backgroundRenderer.updateDisplayGeometry(frame);

  // Upload the latest depth image only when tracking and a depth-based feature is enabled.
  if (camera.getTrackingState() == TrackingState.TRACKING
      && (depthSettings.useDepthForOcclusion()
          || depthSettings.depthColorVisualizationEnabled())) {
    try (Image depthImage = frame.acquireDepthImage16Bits()) {
      backgroundRenderer.updateCameraDepthTexture(depthImage);
    } catch (NotYetAvailableException e) {
      // This normally means that depth data is not available yet. This is normal so we will not
      // spam the logcat with this.
    }
  }

  // Handle one tap per frame.
  handleTap(frame, camera);

  // Keep the screen unlocked while tracking, but allow it to lock when tracking stops.
  trackingStateHelper.updateKeepScreenOnFlag(camera.getTrackingState());

  // Show a message based on whether tracking has failed, if planes are detected, and if the user
  // has placed any objects.
  String message = null;
  if (camera.getTrackingState() == TrackingState.PAUSED) {
    if (camera.getTrackingFailureReason() == TrackingFailureReason.NONE) {
      message = SEARCHING_PLANE_MESSAGE;
    } else {
      message = TrackingStateHelper.getTrackingFailureReasonString(camera);
    }
  } else if (hasTrackingPlane()) {
    if (wrappedAnchors.isEmpty()) {
      message = WAITING_FOR_TAP_MESSAGE;
    }
  } else {
    message = SEARCHING_PLANE_MESSAGE;
  }
  if (message == null) {
    messageSnackbarHelper.hide(this);
  } else {
    messageSnackbarHelper.showMessage(this, message);
  }

  // -- Draw background

  if (frame.getTimestamp() != 0) {
    // Suppress rendering if the camera did not produce the first frame yet. This is to avoid
    // drawing possible leftover data from previous sessions if the texture is reused.
    backgroundRenderer.drawBackground(render);
  }

  // If not tracking, don't draw 3D objects.
  if (camera.getTrackingState() == TrackingState.PAUSED) {
    return;
  }

  // -- Draw non-occluded virtual objects (planes, point cloud)

  // Get projection matrix.
  camera.getProjectionMatrix(projectionMatrix, 0, Z_NEAR, Z_FAR);

  // Get camera matrix and draw.
  camera.getViewMatrix(viewMatrix, 0);

  // Visualize tracked points.
  // Use try-with-resources to automatically release the point cloud.
  try (PointCloud pointCloud = frame.acquirePointCloud()) {
    // Only re-upload vertices when ARCore produced a newer point cloud.
    if (pointCloud.getTimestamp() > lastPointCloudTimestamp) {
      pointCloudVertexBuffer.set(pointCloud.getPoints());
      lastPointCloudTimestamp = pointCloud.getTimestamp();
    }
    Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, viewMatrix, 0);
    pointCloudShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);
    render.draw(pointCloudMesh, pointCloudShader);
  }

  // Visualize planes.
  planeRenderer.drawPlanes(
      render,
      session.getAllTrackables(Plane.class),
      camera.getDisplayOrientedPose(),
      projectionMatrix);

  // -- Draw occluded virtual objects

  // Update lighting parameters in the shader
  updateLightEstimation(frame.getLightEstimate(), viewMatrix);

  // Visualize anchors created by touch.
  render.clear(virtualSceneFramebuffer, 0f, 0f, 0f, 0f);
  for (WrappedAnchor wrappedAnchor : wrappedAnchors) {
    Anchor anchor = wrappedAnchor.getAnchor();
    Trackable trackable = wrappedAnchor.getTrackable();
    if (anchor.getTrackingState() != TrackingState.TRACKING) {
      continue;
    }

    // Get the current pose of an Anchor in world space. The Anchor pose is updated
    // during calls to session.update() as ARCore refines its estimate of the world.
    anchor.getPose().toMatrix(modelMatrix, 0);

    // Calculate model/view/projection matrices
    Matrix.multiplyMM(modelViewMatrix, 0, viewMatrix, 0, modelMatrix, 0);
    Matrix.multiplyMM(modelViewProjectionMatrix, 0, projectionMatrix, 0, modelViewMatrix, 0);

    // Update shader properties and draw
    virtualObjectShader.setMat4("u_ModelView", modelViewMatrix);
    virtualObjectShader.setMat4("u_ModelViewProjection", modelViewProjectionMatrix);

    // Instant Placement points still tracked by screen-space approximation
    // are drawn with a distinct albedo texture.
    if (trackable instanceof InstantPlacementPoint
        && ((InstantPlacementPoint) trackable).getTrackingMethod()
            == InstantPlacementPoint.TrackingMethod.SCREENSPACE_WITH_APPROXIMATE_DISTANCE) {
      virtualObjectShader.setTexture(
          "u_AlbedoTexture", virtualObjectAlbedoInstantPlacementTexture);
    } else {
      virtualObjectShader.setTexture("u_AlbedoTexture", virtualObjectAlbedoTexture);
    }

    render.draw(virtualObjectMesh, virtualObjectShader, virtualSceneFramebuffer);
  }

  // Compose the virtual scene with the background.
  backgroundRenderer.drawVirtualScene(render, virtualSceneFramebuffer, Z_NEAR, Z_FAR);
}

Categories:

Updated: