Made a few tweaks to the interface

This commit is contained in:
BastiaanOlij 2017-09-29 21:36:27 +10:00
parent ba2c49531d
commit ce74efacbb
17 changed files with 543 additions and 118 deletions

View file

@ -34,6 +34,13 @@
Returns true if the anchor is being tracked and false if no anchor with this id is currently known.
</description>
</method>
<method name="get_plane" qualifiers="const">
<return type="Plane">
</return>
<description>
Returns a plane aligned with our anchor, handy for intersection testing
</description>
</method>
<method name="get_size" qualifiers="const">
<return type="Vector3">
</return>

View file

@ -12,6 +12,20 @@
<demos>
</demos>
<methods>
<method name="get_anchor_detection_is_enabled" qualifiers="const">
<return type="bool">
</return>
<description>
Returns true if anchor detection is enabled (AR only).
</description>
</method>
<method name="get_capabilities" qualifiers="const">
<return type="int">
</return>
<description>
Returns a combination of flags providing information about the capabilities of this interface.
</description>
</method>
<method name="get_name" qualifiers="const">
<return type="String">
</return>
@ -26,11 +40,11 @@
Returns the resolution at which we should render our intermediate results before things like lens distortion are applied by the VR platform.
</description>
</method>
<method name="hmd_is_present">
<return type="bool">
<method name="get_tracking_status" qualifiers="const">
<return type="int" enum="ARVRInterface.Tracking_status">
</return>
<description>
Returns true if an HMD is available for this interface.
If supported, returns the status of our tracking. This will allow you to provide feedback to the user whether there are issues with positional tracking.
</description>
</method>
<method name="initialize">
@ -51,13 +65,6 @@
Returns true if this interface is active.
</description>
</method>
<method name="is_installed">
<return type="bool">
</return>
<description>
Returns true if this interface has been installed. Say your game is designed to work with OpenVR so you are using the OpenVR interface but the user hasn't installed SteamVR, this would return false.
</description>
</method>
<method name="is_primary">
<return type="bool">
</return>
@ -65,6 +72,31 @@
Returns true if this interface is currently the primary interface (the interface responsible for showing the output).
</description>
</method>
<method name="is_stereo">
<return type="bool">
</return>
<description>
Returns true if the current output of this interface is in stereo.
</description>
</method>
<method name="set_anchor_detection_is_enabled">
<return type="void">
</return>
<argument index="0" name="enable" type="bool">
</argument>
<description>
Enables anchor detection, this is used on AR interfaces and enables the extra logic that will detect planes, features, objects, etc. and adds/modifies anchor points.
</description>
</method>
<method name="set_is_initialized">
<return type="void">
</return>
<argument index="0" name="initialized" type="bool">
</argument>
<description>
Initialize/uninitialize this interface (same effect as calling initialize/uninitialize).
</description>
</method>
<method name="set_is_primary">
<return type="void">
</return>
@ -74,13 +106,6 @@
Set this interface to the primary interface (unset the old one).
</description>
</method>
<method name="supports_hmd">
<return type="bool">
</return>
<description>
Returns true if this interface supports HMDs and by extension uses stereoscopic rendering.
</description>
</method>
<method name="uninitialize">
<return type="void">
</return>
@ -90,10 +115,32 @@
</method>
</methods>
<members>
<member name="primary" type="bool" setter="set_is_primary" getter="is_primary">
<member name="ar_is_anchor_detection_enabled" type="bool" setter="set_anchor_detection_is_enabled" getter="get_anchor_detection_is_enabled">
On an AR interface, is our anchor detection enabled?
</member>
<member name="interface_is_initialized" type="bool" setter="set_is_initialized" getter="is_initialized">
Has this interface been initialized?
</member>
<member name="interface_is_primary" type="bool" setter="set_is_primary" getter="is_primary">
Is this our primary interface?
</member>
</members>
<constants>
<constant name="ARVR_NONE" value="0">
No ARVR capabilities.
</constant>
<constant name="ARVR_MONO" value="1">
This interface can work with normal rendering output (non-HMD based AR).
</constant>
<constant name="ARVR_STEREO" value="2">
This interface supports stereoscopic rendering.
</constant>
<constant name="ARVR_AR" value="4">
This interface supports AR (video background and real world tracking).
</constant>
<constant name="ARVR_EXTERNAL" value="8">
This interface outputs to an external device, if the main viewport is used the on screen output is an unmodified buffer of either the left or right eye (stretched if the viewport size is not changed to the same aspect ratio of get_recommended_render_targetsize. Using a separate viewport node frees up the main viewport for other purposes.
</constant>
<constant name="EYE_MONO" value="0">
Mono output, this is mostly used internally when retrieving positioning information for our camera node or when stereoscopic rendering is not supported.
</constant>
@ -103,5 +150,20 @@
<constant name="EYE_RIGHT" value="2">
Right eye output, this is mostly used internally when rendering the image for the right eye and obtaining positioning and projection information.
</constant>
<constant name="ARVR_NORMAL_TRACKING" value="0">
Tracking is behaving as expected.
</constant>
<constant name="ARVR_EXCESSIVE_MOTION" value="1">
Tracking is hindered by excessive motion, player is moving faster than tracking can keep up.
</constant>
<constant name="ARVR_INSUFFICIENT_FEATURES" value="2">
Tracking is hindered by insufficient features, it's too dark (for camera based tracking), player is blocked, etc.
</constant>
<constant name="ARVR_UNKNOWN_TRACKING" value="3">
We don't know the status of the tracking or this interface does not provide feedback.
</constant>
<constant name="ARVR_NOT_TRACKING" value="4">
Tracking is not functional (camera not plugged in or obscured, lighthouses turned off, etc.)
</constant>
</constants>
</class>

View file

@ -29,6 +29,20 @@
Outputs a finished render buffer to the AR/VR device for the given eye.
</description>
</method>
<method name="get_anchor_detection_is_enabled" qualifiers="virtual">
<return type="bool">
</return>
<description>
Returns true if anchor detection is enabled (AR only).
</description>
</method>
<method name="get_capabilities" qualifiers="virtual">
<return type="int">
</return>
<description>
Returns a combination of flags providing information about the capabilities of this interface.
</description>
</method>
<method name="get_recommended_render_targetsize" qualifiers="virtual">
<return type="Vector2">
</return>
@ -36,6 +50,13 @@
Returns the size at which we should render our scene to get optimal quality on the output device.
</description>
</method>
<method name="get_tracking_status" qualifiers="virtual">
<return type="int">
</return>
<description>
If supported, returns the status of our tracking. This will allow you to provide feedback to the user whether there are issues with positional tracking.
</description>
</method>
<method name="get_transform_for_eye" qualifiers="virtual">
<return type="Transform">
</return>
@ -47,13 +68,6 @@
Get the location and orientation transform used when rendering a specific eye.
</description>
</method>
<method name="hmd_is_present" qualifiers="virtual">
<return type="bool">
</return>
<description>
Returns true if an HMD is available.
</description>
</method>
<method name="initialize" qualifiers="virtual">
<return type="bool">
</return>
@ -68,13 +82,6 @@
Returns true if this interface has been initialized and is active.
</description>
</method>
<method name="is_installed" qualifiers="virtual">
<return type="bool">
</return>
<description>
Returns true if the required middleware is installed.
</description>
</method>
<method name="is_stereo" qualifiers="virtual">
<return type="bool">
</return>
@ -89,11 +96,13 @@
Gets called before rendering each frame so tracking data gets updated in time.
</description>
</method>
<method name="supports_hmd" qualifiers="virtual">
<return type="bool">
<method name="set_anchor_detection_is_enabled" qualifiers="virtual">
<return type="void">
</return>
<argument index="0" name="enabled" type="bool">
</argument>
<description>
Returns true if this interface supports HMDs.
Enables anchor detection, this is used on AR interfaces and enables the extra logic that will detect planes, features, objects, etc. and adds/modifies anchor points.
</description>
</method>
<method name="uninitialize" qualifiers="virtual">

View file

@ -20,6 +20,22 @@
Mostly exposed for GDNative based interfaces, this is called to register an available interface with the AR/VR server.
</description>
</method>
<method name="center_on_hmd">
<return type="void">
</return>
<argument index="0" name="ignore_tilt" type="bool">
</argument>
<argument index="1" name="keep_height" type="bool">
</argument>
<description>
This is a really important function to understand correctly. AR and VR platforms all handle positioning slightly differently.
For platforms that do not offer spatial tracking our origin point (0,0,0) is the location of our HMD but you have little control over the direction the player is facing in the real world.
For platforms that do offer spatial tracking our origin point depends very much on the system. For OpenVR our origin point is usually the center of the tracking space, on the ground. For other platforms it's often the location of the tracking camera.
This method allows you to center our tracker on the location of the HMD, it will take the current location of the HMD and use that to adjust all our tracking data in essence realigning the real world to your players current position in your game world.
For this method to produce usable results tracking information should be available and this often takes a few frames after starting your game.
You should call this method after a few seconds have passed, when the user requests a realignment of the display holding a designated button on a controller for a short period of time, and when implementing a teleport mechanism.
</description>
</method>
<method name="find_interface" qualifiers="const">
<return type="ARVRInterface">
</return>
@ -84,22 +100,6 @@
Removes a registered interface, again exposed mostly for GDNative based interfaces.
</description>
</method>
<method name="request_reference_frame">
<return type="void">
</return>
<argument index="0" name="ignore_tilt" type="bool">
</argument>
<argument index="1" name="keep_height" type="bool">
</argument>
<description>
This is a really important function to understand correctly. AR and VR platforms all handle positioning slightly differently.
For platforms that do not offer spatial tracking our origin point (0,0,0) is the location of our HMD but you have little control over the direction the player is facing in the real world.
For platforms that do offer spatial tracking our origin point depends very much on the system. For OpenVR our origin point is usually the center of the tracking space, on the ground. For other platforms it's often the location of the tracking camera.
This method allows you to create a reference frame, it will take the current location of the HMD and use that to adjust all our tracking data in essence realigning the real world to your players current position in your game world.
For this method to produce usable results tracking information should be available and this often takes a few frames after starting your game.
You should call this method after a few seconds have passed, when the user requests a realignment of the display holding a designated button on a controller for a short period of time, and when implementing a teleport mechanism.
</description>
</method>
<method name="set_primary_interface">
<return type="void">
</return>

View file

@ -3,4 +3,10 @@ def can_build(platform):
return True
def configure(env):
pass
pass
def get_doc_classes():
return ["MobileVRInterface"]
def get_doc_path():
return "doc_classes"

View file

@ -0,0 +1,134 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="MobileVRInterface" inherits="ARVRInterface" category="Core" version="3.0.alpha.custom_build">
<brief_description>
Generic mobile VR implementation
</brief_description>
<description>
This is a generic mobile VR implementation where you need to provide details about the phone and HMD used. It does not rely on any existing framework. This is the most basic interface we have. For the best effect you do need a mobile phone with a gyroscope and accelerometer.
Note that even though there is no positional tracking the camera will assume the headset is at a height of 1.85 meters.
</description>
<tutorials>
</tutorials>
<demos>
</demos>
<methods>
<method name="get_display_to_lens" qualifiers="const">
<return type="float">
</return>
<description>
Returns the distance between the display and the lens.
</description>
</method>
<method name="get_display_width" qualifiers="const">
<return type="float">
</return>
<description>
Return the width of the LCD screen of the device.
</description>
</method>
<method name="get_iod" qualifiers="const">
<return type="float">
</return>
<description>
Returns the interocular distance.
</description>
</method>
<method name="get_k1" qualifiers="const">
<return type="float">
</return>
<description>
Returns the k1 lens constant.
</description>
</method>
<method name="get_k2" qualifiers="const">
<return type="float">
</return>
<description>
Returns the k2 lens constant.
</description>
</method>
<method name="get_oversample" qualifiers="const">
<return type="float">
</return>
<description>
Returns the oversampling setting.
</description>
</method>
<method name="set_display_to_lens">
<return type="void">
</return>
<argument index="0" name="display_to_lens" type="float">
</argument>
<description>
Sets the distance between display and the lens.
</description>
</method>
<method name="set_display_width">
<return type="void">
</return>
<argument index="0" name="display_width" type="float">
</argument>
<description>
Sets the width of the LCD screen of the device.
</description>
</method>
<method name="set_iod">
<return type="void">
</return>
<argument index="0" name="iod" type="float">
</argument>
<description>
Sets the interocular distance.
</description>
</method>
<method name="set_k1">
<return type="void">
</return>
<argument index="0" name="k" type="float">
</argument>
<description>
Sets the k1 lens constant.
</description>
</method>
<method name="set_k2">
<return type="void">
</return>
<argument index="0" name="k" type="float">
</argument>
<description>
Sets the k2 lens constant.
</description>
</method>
<method name="set_oversample">
<return type="void">
</return>
<argument index="0" name="oversample" type="float">
</argument>
<description>
Sets the oversampling setting.
</description>
</method>
</methods>
<members>
<member name="display_to_lens" type="float" setter="set_display_to_lens" getter="get_display_to_lens">
The distance between the display and the lenses inside of the device in centimeters.
</member>
<member name="display_width" type="float" setter="set_display_width" getter="get_display_width">
The width of the display in centimeters.
</member>
<member name="iod" type="float" setter="set_iod" getter="get_iod">
The interocular distance, also known as the interpupillary distance. The distance between the pupils of the left and right eye.
</member>
<member name="k1" type="float" setter="set_k1" getter="get_k1">
The k1 lens factor is one of the two constants that define the strength of the lens used and directly influences the lens distortion effect.
</member>
<member name="k2" type="float" setter="set_k2" getter="get_k2">
The k2 lens factor, see k1.
</member>
<member name="oversample" type="float" setter="set_oversample" getter="get_oversample">
The oversample setting. Because of the lens distortion we have to render our buffers at a higher resolution than the screen can natively handle. A value between 1.5 and 2.0 often provides good results but at the cost of performance.
</member>
</members>
<constants>
</constants>
</class>

View file

@ -37,6 +37,10 @@ StringName MobileVRInterface::get_name() const {
return "Native mobile";
};
int MobileVRInterface::get_capabilities() const {
return ARVRInterface::ARVR_STEREO;
};
Vector3 MobileVRInterface::scale_magneto(const Vector3 &p_magnetometer) {
// Our magnetometer doesn't give us nice clean data.
// Well it may on Mac OS X because we're getting a calibrated value in the current implementation but Android we're getting raw data.
@ -166,6 +170,8 @@ void MobileVRInterface::set_position_from_sensors() {
rotate.rotate(orientation.get_axis(1), gyro.y * delta_time);
rotate.rotate(orientation.get_axis(2), gyro.z * delta_time);
orientation = rotate * orientation;
tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING;
};
///@TODO improve this, the magnetometer is very fidgety, sometimes flipping the axis for no apparent reason (probably a bug on my part)
@ -176,6 +182,8 @@ void MobileVRInterface::set_position_from_sensors() {
Quat acc_mag_quat(combine_acc_mag(grav, magneto));
transform_quat = transform_quat.slerp(acc_mag_quat, 0.1);
orientation = Basis(transform_quat);
tracking_state = ARVRInterface::ARVR_NORMAL_TRACKING;
} else if (has_grav) {
// use gravity vector to make sure down is down...
// transform gravity into our world space
@ -273,21 +281,6 @@ real_t MobileVRInterface::get_k2() const {
return k2;
};
bool MobileVRInterface::is_installed() {
// we don't have any middle ware here, if we have our interface, we can use it
return true;
};
bool MobileVRInterface::hmd_is_present() {
// our device is our HMD
return true;
};
bool MobileVRInterface::supports_hmd() {
// our device is our HMD
return true;
};
bool MobileVRInterface::is_stereo() {
// needs stereo...
return true;
@ -461,11 +454,11 @@ MobileVRInterface::MobileVRInterface() {
// Just set some defaults for these. At some point we need to look at adding a lookup table for common device + headset combos and/or support reading cardboard QR codes
eye_height = 1.85;
intraocular_dist = 6.0;
display_width = 13.0;
display_width = 14.5;
display_to_lens = 4.0;
oversample = 1.5;
k1 = 0.22;
k2 = 0.23;
k1 = 0.215;
k2 = 0.215;
last_ticks = 0;
// create our shader stuff

View file

@ -131,10 +131,7 @@ public:
real_t get_k2() const;
virtual StringName get_name() const;
virtual bool is_installed();
virtual bool hmd_is_present();
virtual bool supports_hmd();
virtual int get_capabilities() const;
virtual bool is_initialized();
virtual bool initialize();

View file

@ -67,6 +67,105 @@ String ARVRCamera::get_configuration_warning() const {
return String();
};
Vector3 ARVRCamera::project_local_ray_normal(const Point2 &p_pos) const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, Vector3());
Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface();
ERR_FAIL_COND_V(arvr_interface.is_null(), Vector3());
if (!is_inside_tree()) {
ERR_EXPLAIN("Camera is not inside scene.");
ERR_FAIL_COND_V(!is_inside_tree(), Vector3());
};
Size2 viewport_size = get_viewport()->get_camera_rect_size();
Vector2 cpos = get_viewport()->get_camera_coords(p_pos);
Vector3 ray;
CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
float screen_w, screen_h;
cm.get_viewport_size(screen_w, screen_h);
ray = Vector3(((cpos.x / viewport_size.width) * 2.0 - 1.0) * screen_w, ((1.0 - (cpos.y / viewport_size.height)) * 2.0 - 1.0) * screen_h, -get_znear()).normalized();
return ray;
};
Point2 ARVRCamera::unproject_position(const Vector3 &p_pos) const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, Vector2());
Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface();
ERR_FAIL_COND_V(arvr_interface.is_null(), Vector2());
if (!is_inside_tree()) {
ERR_EXPLAIN("Camera is not inside scene.");
ERR_FAIL_COND_V(!is_inside_tree(), Vector2());
};
Size2 viewport_size = get_viewport()->get_visible_rect().size;
CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
Plane p(get_camera_transform().xform_inv(p_pos), 1.0);
p = cm.xform4(p);
p.normal /= p.d;
Point2 res;
res.x = (p.normal.x * 0.5 + 0.5) * viewport_size.x;
res.y = (-p.normal.y * 0.5 + 0.5) * viewport_size.y;
return res;
};
Vector3 ARVRCamera::project_position(const Point2 &p_point) const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, Vector3());
Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface();
ERR_FAIL_COND_V(arvr_interface.is_null(), Vector3());
if (!is_inside_tree()) {
ERR_EXPLAIN("Camera is not inside scene.");
ERR_FAIL_COND_V(!is_inside_tree(), Vector3());
};
Size2 viewport_size = get_viewport()->get_visible_rect().size;
CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
Size2 vp_size;
cm.get_viewport_size(vp_size.x, vp_size.y);
Vector2 point;
point.x = (p_point.x / viewport_size.x) * 2.0 - 1.0;
point.y = (1.0 - (p_point.y / viewport_size.y)) * 2.0 - 1.0;
point *= vp_size;
Vector3 p(point.x, point.y, -get_znear());
return get_camera_transform().xform(p);
};
Vector<Plane> ARVRCamera::get_frustum() const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, Vector<Plane>());
Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface();
ERR_FAIL_COND_V(arvr_interface.is_null(), Vector<Plane>());
ERR_FAIL_COND_V(!is_inside_world(), Vector<Plane>());
Size2 viewport_size = get_viewport()->get_visible_rect().size;
CameraMatrix cm = arvr_interface->get_projection_for_eye(ARVRInterface::EYE_MONO, viewport_size.aspect(), get_znear(), get_zfar());
return cm.get_projection_planes(get_camera_transform());
};
ARVRCamera::ARVRCamera(){
// nothing to do here yet for now..
};
@ -297,6 +396,8 @@ void ARVRAnchor::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_is_active"), &ARVRAnchor::get_is_active);
ClassDB::bind_method(D_METHOD("get_size"), &ARVRAnchor::get_size);
ClassDB::bind_method(D_METHOD("get_plane"), &ARVRAnchor::get_plane);
};
void ARVRAnchor::set_anchor_id(int p_anchor_id) {
@ -346,6 +447,15 @@ String ARVRAnchor::get_configuration_warning() const {
return String();
};
Plane ARVRAnchor::get_plane() const {
Vector3 location = get_translation();
Basis orientation = get_transform().basis;
Plane plane(location, orientation.get_axis(1).normalized());
return plane;
};
ARVRAnchor::ARVRAnchor() {
anchor_id = 0;
is_active = true;

View file

@ -52,6 +52,11 @@ protected:
public:
String get_configuration_warning() const;
virtual Vector3 project_local_ray_normal(const Point2 &p_pos) const;
virtual Point2 unproject_position(const Vector3 &p_pos) const;
virtual Vector3 project_position(const Point2 &p_point) const;
virtual Vector<Plane> get_frustum() const;
ARVRCamera();
~ARVRCamera();
};
@ -118,6 +123,8 @@ public:
bool get_is_active() const;
Vector3 get_size() const;
Plane get_plane() const;
String get_configuration_warning() const;
ARVRAnchor();

View file

@ -127,16 +127,16 @@ public:
virtual Transform get_camera_transform() const;
Vector3 project_ray_normal(const Point2 &p_pos) const;
Vector3 project_ray_origin(const Point2 &p_pos) const;
virtual Vector3 project_ray_origin(const Point2 &p_pos) const;
Vector3 project_local_ray_normal(const Point2 &p_pos) const;
Point2 unproject_position(const Vector3 &p_pos) const;
virtual Point2 unproject_position(const Vector3 &p_pos) const;
bool is_position_behind(const Vector3 &p_pos) const;
Vector3 project_position(const Point2 &p_point) const;
virtual Vector3 project_position(const Point2 &p_point) const;
void set_cull_mask(uint32_t p_layers);
uint32_t get_cull_mask() const;
Vector<Plane> get_frustum() const;
virtual Vector<Plane> get_frustum() const;
void set_environment(const Ref<Environment> &p_environment);
Ref<Environment> get_environment() const;

View file

@ -31,29 +31,49 @@
void ARVRInterface::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_name"), &ARVRInterface::get_name);
ClassDB::bind_method(D_METHOD("get_capabilities"), &ARVRInterface::get_capabilities);
ClassDB::bind_method(D_METHOD("is_primary"), &ARVRInterface::is_primary);
ClassDB::bind_method(D_METHOD("set_is_primary", "enable"), &ARVRInterface::set_is_primary);
ClassDB::bind_method(D_METHOD("is_installed"), &ARVRInterface::is_installed);
ClassDB::bind_method(D_METHOD("hmd_is_present"), &ARVRInterface::hmd_is_present);
ClassDB::bind_method(D_METHOD("supports_hmd"), &ARVRInterface::supports_hmd);
ClassDB::bind_method(D_METHOD("is_initialized"), &ARVRInterface::is_initialized);
ClassDB::bind_method(D_METHOD("set_is_initialized", "initialized"), &ARVRInterface::set_is_initialized);
ClassDB::bind_method(D_METHOD("initialize"), &ARVRInterface::initialize);
ClassDB::bind_method(D_METHOD("uninitialize"), &ARVRInterface::uninitialize);
ClassDB::bind_method(D_METHOD("get_tracking_status"), &ARVRInterface::get_tracking_status);
ClassDB::bind_method(D_METHOD("get_recommended_render_targetsize"), &ARVRInterface::get_recommended_render_targetsize);
ClassDB::bind_method(D_METHOD("is_stereo"), &ARVRInterface::is_stereo);
// These are now purely used internally, we may expose them again if we expose CameraMatrix through Variant but reduz is not a fan for good reasons :)
// ClassDB::bind_method(D_METHOD("get_transform_for_eye", "eye", "cam_transform"), &ARVRInterface::get_transform_for_eye);
// ClassDB::bind_method(D_METHOD("get_projection_for_eye", "eye"), &ARVRInterface::get_projection_for_eye);
// ClassDB::bind_method(D_METHOD("commit_for_eye", "node:viewport"), &ARVRInterface::commit_for_eye);
ADD_GROUP("Interface", "interface_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "interface_is_primary"), "set_is_primary", "is_primary");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "interface_is_initialized"), "set_is_initialized", "is_initialized");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "primary"), "set_is_primary", "is_primary");
// we don't have any properties specific to VR yet....
// but we do have properties specific to AR....
ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &ARVRInterface::get_anchor_detection_is_enabled);
ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &ARVRInterface::set_anchor_detection_is_enabled);
ADD_GROUP("AR", "ar_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "ar_is_anchor_detection_enabled"), "set_anchor_detection_is_enabled", "get_anchor_detection_is_enabled");
BIND_ENUM_CONSTANT(ARVR_NONE);
BIND_ENUM_CONSTANT(ARVR_MONO);
BIND_ENUM_CONSTANT(ARVR_STEREO);
BIND_ENUM_CONSTANT(ARVR_AR);
BIND_ENUM_CONSTANT(ARVR_EXTERNAL);
BIND_ENUM_CONSTANT(EYE_MONO);
BIND_ENUM_CONSTANT(EYE_LEFT);
BIND_ENUM_CONSTANT(EYE_RIGHT);
BIND_ENUM_CONSTANT(ARVR_NORMAL_TRACKING);
BIND_ENUM_CONSTANT(ARVR_EXCESSIVE_MOTION);
BIND_ENUM_CONSTANT(ARVR_INSUFFICIENT_FEATURES);
BIND_ENUM_CONSTANT(ARVR_UNKNOWN_TRACKING);
BIND_ENUM_CONSTANT(ARVR_NOT_TRACKING);
};
StringName ARVRInterface::get_name() const {
@ -73,10 +93,40 @@ void ARVRInterface::set_is_primary(bool p_is_primary) {
if (p_is_primary) {
ERR_FAIL_COND(!is_initialized());
ERR_FAIL_COND(!supports_hmd());
arvr_server->set_primary_interface(this);
} else {
arvr_server->clear_primary_interface_if(this);
};
};
void ARVRInterface::set_is_initialized(bool p_initialized) {
if (p_initialized) {
if (!is_initialized()) {
initialize();
};
} else {
if (is_initialized()) {
uninitialize();
};
};
};
ARVRInterface::Tracking_status ARVRInterface::get_tracking_status() const {
return tracking_state;
};
ARVRInterface::ARVRInterface() {
tracking_state = ARVR_UNKNOWN_TRACKING;
};
ARVRInterface::~ARVRInterface(){};
/** these will only be implemented on AR interfaces, so we want dummies for VR **/
bool ARVRInterface::get_anchor_detection_is_enabled() const {
return false;
};
void ARVRInterface::set_anchor_detection_is_enabled(bool p_enable){
// don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc.
};

View file

@ -50,31 +50,59 @@
class ARVRInterface : public Reference {
GDCLASS(ARVRInterface, Reference);
protected:
_THREAD_SAFE_CLASS_
static void _bind_methods();
public:
enum Capabilities { /* purely meta data, provides some info about what this interface supports */
ARVR_NONE = 0, /* no capabilities */
ARVR_MONO = 1, /* can be used with mono output */
ARVR_STEREO = 2, /* can be used with stereo output */
ARVR_AR = 4, /* offers a camera feed for AR */
ARVR_EXTERNAL = 8 /* renders to external device */
};
enum Eyes {
EYE_MONO, /* my son says we should call this EYE_CYCLOPS */
EYE_LEFT,
EYE_RIGHT
};
enum Tracking_status { /* tracking status currently based on AR but we can start doing more with this for VR as well */
ARVR_NORMAL_TRACKING,
ARVR_EXCESSIVE_MOTION,
ARVR_INSUFFICIENT_FEATURES,
ARVR_UNKNOWN_TRACKING,
ARVR_NOT_TRACKING
};
protected:
_THREAD_SAFE_CLASS_
Tracking_status tracking_state;
static void _bind_methods();
public:
/** general interface information **/
virtual StringName get_name() const;
virtual int get_capabilities() const = 0;
bool is_primary();
void set_is_primary(bool p_is_primary);
virtual bool is_installed() = 0; /* returns true if the middle ware related to this interface has been installed */
virtual bool hmd_is_present() = 0; /* returns true if our HMD is connected */
virtual bool supports_hmd() = 0; /* returns true is this interface handles output to an HMD or only handles positioning */
virtual bool is_initialized() = 0; /* returns true if we've initialized this interface */
void set_is_initialized(bool p_initialized); /* helper function, will call initialize or uninitialize */
virtual bool initialize() = 0; /* initialize this interface, if this has an HMD it becomes the primary interface */
virtual void uninitialize() = 0; /* deinitialize this interface */
Tracking_status get_tracking_status() const; /* get the status of our current tracking */
/** specific to VR **/
// nothing yet
/** specific to AR **/
virtual bool get_anchor_detection_is_enabled() const;
virtual void set_anchor_detection_is_enabled(bool p_enable);
/** rendering and internal **/
virtual Size2 get_recommended_render_targetsize() = 0; /* returns the recommended render target size per eye for this device */
virtual bool is_stereo() = 0; /* returns true if this interface requires stereo rendering (for VR HMDs) or mono rendering (for mobile AR) */
virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eyes camera transform, also implement EYE_MONO */
@ -82,8 +110,13 @@ public:
virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */
virtual void process() = 0;
ARVRInterface();
~ARVRInterface();
};
VARIANT_ENUM_CAST(ARVRInterface::Capabilities);
VARIANT_ENUM_CAST(ARVRInterface::Eyes);
VARIANT_ENUM_CAST(ARVRInterface::Tracking_status);
#endif

View file

@ -23,20 +23,26 @@ StringName ARVRScriptInterface::get_name() const {
}
}
bool ARVRScriptInterface::is_installed() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("is_installed")), false);
return get_script_instance()->call("is_installed");
int ARVRScriptInterface::get_capabilities() const {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("get_capabilities")), ARVRInterface::ARVR_NONE);
return get_script_instance()->call("get_capabilities");
};
ARVRInterface::Tracking_status ARVRScriptInterface::get_tracking_status() const {
	// Query the script for its current tracking state; fall back to
	// ARVR_NOT_TRACKING when the script does not provide the method.
	ERR_FAIL_COND_V(get_script_instance() == NULL, ARVRInterface::ARVR_NOT_TRACKING);
	ERR_FAIL_COND_V(!get_script_instance()->has_method("get_tracking_status"), ARVRInterface::ARVR_NOT_TRACKING);

	// The script returns a plain integer; convert it to our enum.
	int tracking = get_script_instance()->call("get_tracking_status");
	return (ARVRInterface::Tracking_status)tracking;
}
bool ARVRScriptInterface::hmd_is_present() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("hmd_is_present")), false);
return get_script_instance()->call("hmd_is_present");
}
bool ARVRScriptInterface::get_anchor_detection_is_enabled() const {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("get_anchor_detection_is_enabled")), false);
return get_script_instance()->call("get_anchor_detection_is_enabled");
};
bool ARVRScriptInterface::supports_hmd() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("supports_hmd")), false);
return get_script_instance()->call("supports_hmd");
}
void ARVRScriptInterface::set_anchor_detection_is_enabled(bool p_enable) {
	// Forward the anchor detection switch to the script implementation (AR only).
	ERR_FAIL_COND(!(get_script_instance() && get_script_instance()->has_method("set_anchor_detection_is_enabled")));

	// Fix: the p_enable argument was previously dropped, so the script method
	// (bound with an "enabled" parameter) was always invoked without its value.
	get_script_instance()->call("set_anchor_detection_is_enabled", p_enable);
};
bool ARVRScriptInterface::is_stereo() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("is_stereo")), false);
@ -110,14 +116,17 @@ void ARVRScriptInterface::process() {
}
void ARVRScriptInterface::_bind_methods() {
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_installed"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "hmd_is_present"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "supports_hmd"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::INT, "get_capabilities"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_initialized"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "initialize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("uninitialize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::INT, "get_tracking_status"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "get_anchor_detection_is_enabled"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("set_anchor_detection_is_enabled", PropertyInfo(Variant::BOOL, "enabled")));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_stereo"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::VECTOR2, "get_recommended_render_targetsize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::TRANSFORM, "get_transform_for_eye", PropertyInfo(Variant::INT, "eye"), PropertyInfo(Variant::TRANSFORM, "cam_transform")));

View file

@ -16,19 +16,24 @@ protected:
static void _bind_methods();
public:
/** general interface information **/
ARVRScriptInterface();
~ARVRScriptInterface();
virtual StringName get_name() const;
virtual bool is_installed();
virtual bool hmd_is_present();
virtual bool supports_hmd();
virtual int get_capabilities() const;
virtual bool is_initialized();
virtual bool initialize();
virtual void uninitialize();
ARVRInterface::Tracking_status get_tracking_status() const; /* get the status of our current tracking */
/** specific to AR **/
virtual bool get_anchor_detection_is_enabled() const;
virtual void set_anchor_detection_is_enabled(bool p_enable);
/** rendering and internal **/
virtual Size2 get_recommended_render_targetsize();
virtual bool is_stereo();
virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform);

View file

@ -43,7 +43,7 @@ void ARVRServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVRServer::get_world_scale);
ClassDB::bind_method(D_METHOD("set_world_scale"), &ARVRServer::set_world_scale);
ClassDB::bind_method(D_METHOD("get_reference_frame"), &ARVRServer::get_reference_frame);
ClassDB::bind_method(D_METHOD("request_reference_frame", "ignore_tilt", "keep_height"), &ARVRServer::request_reference_frame);
ClassDB::bind_method(D_METHOD("center_on_hmd", "ignore_tilt", "keep_height"), &ARVRServer::center_on_hmd);
ADD_PROPERTY(PropertyInfo(Variant::REAL, "world_scale"), "set_world_scale", "get_world_scale");
@ -98,7 +98,7 @@ Transform ARVRServer::get_reference_frame() const {
return reference_frame;
};
void ARVRServer::request_reference_frame(bool p_ignore_tilt, bool p_keep_height) {
void ARVRServer::center_on_hmd(bool p_ignore_tilt, bool p_keep_height) {
if (primary_interface != NULL) {
// clear our current reference frame or we'll end up double adjusting it
reference_frame = Transform();

View file

@ -117,14 +117,17 @@ public:
void set_world_origin(const Transform p_world_origin);
/*
Requesting a reference frame results in a matrix being calculated that ensures the HMD is positioned to 0,0,0 facing 0,0,-1 (need to verify this direction)
center_on_hmd calculates a new reference frame. This ensures the HMD is positioned to 0,0,0 facing 0,0,-1 (need to verify this direction)
in the virtual world.
You can ignore the tilt of the device ensuring you're looking straight forward even if the player is looking down or sideways.
You can choose to keep the height that the tracking provides, which is important for room-scale-capable tracking.
Note: this should not be used in AR and should be ignored by an AR-based interface, as it would throw
what you're looking at in the real world and in the virtual world out of sync
*/
Transform get_reference_frame() const;
void request_reference_frame(bool p_ignore_tilt, bool p_keep_height);
void center_on_hmd(bool p_ignore_tilt, bool p_keep_height);
/*
Interfaces are objects that 'glue' Godot to an AR or VR SDK such as the Oculus SDK, OpenVR, OpenHMD, etc.