Advance predictive timing on tracked items used *next* frame
BastiaanOlij committed Jan 30, 2022
1 parent 0d2ea91 commit c9b4266
Showing 6 changed files with 48 additions and 4 deletions.
10 changes: 9 additions & 1 deletion demo/Main.tscn
@@ -1,4 +1,4 @@
[gd_scene load_steps=21 format=2]
[gd_scene load_steps=22 format=2]

[ext_resource path="res://Main.gd" type="Script" id=1]
[ext_resource path="res://Ground.tscn" type="PackedScene" id=2]
@@ -25,6 +25,9 @@ mid_height = 0.1
radius = 0.03
mid_height = 0.04

[sub_resource type="CubeMesh" id=5]
size = Vector3( 0.1, 0.1, 0.1 )

[sub_resource type="CylinderMesh" id=3]
top_radius = 0.01
bottom_radius = 0.01
@@ -64,6 +67,11 @@ layers = 524288
mesh = SubResource( 2 )
material/0 = null

[node name="TestInView" type="MeshInstance" parent="FPSController/ARVRCamera" index="2"]
transform = Transform( 0.981744, -0.0364435, -0.186685, 0, 0.981474, -0.191597, 0.190209, 0.188099, 0.963556, 0.129696, 0.100312, -0.347681 )
mesh = SubResource( 5 )
material/0 = null

[node name="LeftHandController" parent="FPSController" index="2"]
visible = true

1 change: 1 addition & 0 deletions demo/addons/godot-openxr/CHANGES.md
@@ -7,6 +7,7 @@ Changes to the Godot OpenXR asset
- Removed deprecated `com.samsung.android.vr.application.mode` meta-data tag.
- Updated repo `README`.
- Added controller tracking confidence
- Use correct predictive timing for controllers.

1.1.1
-------------------
34 changes: 33 additions & 1 deletion src/openxr/OpenXRApi.cpp
Expand Up @@ -2451,6 +2451,23 @@ bool OpenXRApi::check_graphics_requirements_gl(XrSystemId system_id) {
return true;
}

XrTime OpenXRApi::get_next_frame_time() const {
if (!initialised || !running) {
return 0;
}

// xrWaitFrame not run yet
if (frameState.predictedDisplayTime == 0) {
return 0;
}

// We retrieve our tracking information right before we render.
// We use the current frame's predicted display time while rendering.
// However, when positioning nodes in our scene, we apply this data while processing the next frame.
// We thus need to advance our frame timing by one frame when retrieving this data.
return frameState.predictedDisplayTime + frameState.predictedDisplayPeriod;
}

XrResult OpenXRApi::acquire_image(int eye) {
XrResult result;
XrSwapchainImageAcquireInfo swapchainImageAcquireInfo = {
@@ -2885,6 +2902,7 @@ bool OpenXRApi::get_view_transform(int eye, float world_scale, godot_transform *
return false;
}

// Note that our views[eye].pose uses the current frame's timing, which is correct as this is what we use for rendering.
Transform *t = (Transform *)transform_for_eye;
*t = transform_from_pose(views[eye].pose, world_scale);

@@ -2906,7 +2924,21 @@ bool OpenXRApi::get_head_center(float world_scale, godot_transform *transform) {
.type = XR_TYPE_SPACE_LOCATION,
.next = NULL
};
result = xrLocateSpace(view_space, play_space, frameState.predictedDisplayTime, &location);

XrTime time;
if (form_factor == XR_FORM_FACTOR_HANDHELD_DISPLAY) {
// For handheld displays we render MONO and this method is called both for our rendering position
// and to position our camera node for the next frame.
// It is more important to get the rendering right here.
// Note that there currently are no platforms where OpenXR is used with Godot that support this mode,
// so it's probably a non-issue for the time being. This is already resolved in Godot 4.
time = frameState.predictedDisplayTime;
} else {
// We retrieve our tracking information right before we render and use the current frame's predicted display time while rendering.
// Our head center, however, is retrieved to place our camera node in the scene after rendering, for the next frame.
time = get_next_frame_time();
}
result = xrLocateSpace(view_space, play_space, time, &location);
if (!xr_result(result, "Failed to locate view space in play space!")) {
return false;
}
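
For context, here is a minimal sketch (not code from this commit) of how the two timestamps produced by xrWaitFrame are meant to be used after this change: the current frame's predicted display time for rendering, and the one-period-advanced time returned by get_next_frame_time() for poses that will only reach the screen on the next frame. The handle and space names (session, aim_space, play_space) are placeholders, not identifiers from this repository, and error handling is omitted for brevity.

#include <openxr/openxr.h>

void locate_for_next_frame(XrSession session, XrSpace aim_space, XrSpace play_space) {
	XrFrameWaitInfo wait_info = { .type = XR_TYPE_FRAME_WAIT_INFO, .next = nullptr };
	XrFrameState frame_state = { .type = XR_TYPE_FRAME_STATE, .next = nullptr };
	xrWaitFrame(session, &wait_info, &frame_state);

	// Rendering this frame uses the time xrWaitFrame predicted for this frame.
	XrTime render_time = frame_state.predictedDisplayTime;

	// Nodes positioned now are only shown on the next frame, so the query time
	// is advanced by one display period (what get_next_frame_time() returns).
	XrTime scene_time = frame_state.predictedDisplayTime + frame_state.predictedDisplayPeriod;

	XrSpaceLocation location = { .type = XR_TYPE_SPACE_LOCATION, .next = nullptr };
	xrLocateSpace(aim_space, play_space, scene_time, &location);

	(void)render_time; // fed to xrLocateViews and XrFrameEndInfo::displayTime in real code
}

Advancing by exactly one predictedDisplayPeriod keeps the runtime's pose prediction aligned with when the node is actually displayed, instead of lagging one refresh behind.
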
1 change: 1 addition & 0 deletions src/openxr/OpenXRApi.h
@@ -311,6 +311,7 @@ class OpenXRApi {
XrFrameState get_frame_state() { return frameState; }
String get_system_name() const { return system_name; }
uint32_t get_vendor_id() const { return vendor_id; }
XrTime get_next_frame_time() const;

float get_render_target_size_multiplier() { return render_target_size_multiplier; }
bool set_render_target_size_multiplier(float multiplier);
3 changes: 2 additions & 1 deletion src/openxr/actions/action.cpp
@@ -250,7 +250,8 @@ TrackingConfidence Action::get_as_pose(XrPath p_path, float p_world_scale, Trans
location.type = XR_TYPE_SPACE_LOCATION;
location.next = NULL;

XrResult result = xrLocateSpace(toplevel_paths[index].space, xr_api->play_space, xr_api->frameState.predictedDisplayTime, &location);
XrTime time = xr_api->get_next_frame_time(); // This data will be used for the next frame we render
XrResult result = xrLocateSpace(toplevel_paths[index].space, xr_api->play_space, time, &location);
if (!xr_api->xr_result(result, "failed to locate space!")) {
return TRACKING_CONFIDENCE_NONE;
}
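
As a rough worked example (the numbers are illustrative, not from this repository): at a 90 Hz refresh rate one predictedDisplayPeriod is about 11.1 ms, so the controller pose is now predicted roughly one refresh further ahead than the frame currently being rendered.

// Illustrative only: the size of the one-frame advance in the nanosecond
// units OpenXR uses for XrTime / XrDuration. Values are made up.
#include <cstdint>
#include <cstdio>

int main() {
	const int64_t ns_per_second = 1000000000LL;
	const int64_t period_90hz = ns_per_second / 90; // ~11111111 ns, i.e. ~11.1 ms
	const int64_t predicted_display_time = 123456789000LL; // placeholder runtime value
	const int64_t next_frame_time = predicted_display_time + period_90hz;
	std::printf("pose query advanced by %lld ns\n",
			(long long)(next_frame_time - predicted_display_time));
	return 0;
}
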
@@ -194,7 +194,8 @@ void XRExtHandTrackingExtensionWrapper::update_handtracking() {
return;
}

const XrTime time = openxr_api->get_frame_state().predictedDisplayTime;
const XrTime time = openxr_api->get_next_frame_time(); // This data will be used for the next frame we render

XrResult result;

for (int i = 0; i < 2; i++) {
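
For the hand tracking path, a hedged sketch of what a joint query with the advanced timestamp can look like at the OpenXR level, assuming XR_EXT_hand_tracking is enabled and a hand tracker handle already exists. This is not the wrapper's actual code; the function and parameter names are placeholders. Extension entry points such as xrLocateHandJointsEXT must be resolved through xrGetInstanceProcAddr.

#include <openxr/openxr.h>

XrResult locate_joints_for_next_frame(XrInstance instance, XrHandTrackerEXT hand_tracker,
		XrSpace play_space, XrTime next_frame_time,
		XrHandJointLocationEXT joints[XR_HAND_JOINT_COUNT_EXT]) {
	// Extension functions are not exported by the loader; fetch the entry point first.
	PFN_xrLocateHandJointsEXT xrLocateHandJointsEXT = nullptr;
	XrResult res = xrGetInstanceProcAddr(instance, "xrLocateHandJointsEXT",
			(PFN_xrVoidFunction *)&xrLocateHandJointsEXT);
	if (XR_FAILED(res)) {
		return res;
	}

	XrHandJointsLocateInfoEXT locate_info = {
		.type = XR_TYPE_HAND_JOINTS_LOCATE_INFO_EXT,
		.next = nullptr,
		.baseSpace = play_space,
		.time = next_frame_time, // the same one-frame-ahead time used for the controllers
	};
	XrHandJointLocationsEXT locations = {
		.type = XR_TYPE_HAND_JOINT_LOCATIONS_EXT,
		.next = nullptr,
		.jointCount = XR_HAND_JOINT_COUNT_EXT,
		.jointLocations = joints,
	};
	return xrLocateHandJointsEXT(hand_tracker, &locate_info, &locations);
}
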
