Mirror of https://github.com/godotengine/godot.git
Adding a new Camera Server implementation to Godot.
This is a new singleton where camera sources such as webcams or the cameras on a mobile phone can register themselves with the server. Other parts of Godot can interact with it to obtain images from the camera as textures. This work includes additions to the VisualServer that use this functionality to present the camera image in the background. This is specifically targeted at AR applications.
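As a rough illustration of how other parts of the engine (or a custom module) might talk to the new singleton, here is a minimal hedged C++ sketch based on the CameraServer and CameraFeed API documented below; the helper function itself is hypothetical and not part of this commit:

	#include "core/print_string.h"
	#include "servers/camera/camera_feed.h"
	#include "servers/camera_server.h"

	// Print every camera feed currently registered with the CameraServer.
	static void list_camera_feeds() {
		CameraServer *server = CameraServer::get_singleton();
		for (int i = 0; i < server->get_feed_count(); i++) {
			Ref<CameraFeed> feed = server->get_feed(i);
			print_line("Feed " + itos(feed->get_id()) + ": " + feed->get_name());
		}
	}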
commit 02ea99129e
parent 0a3c21d999
@@ -18,6 +18,9 @@
<member name="AudioServer" type="AudioServer" setter="" getter="">
[AudioServer] singleton
</member>
<member name="CameraServer" type="CameraServer" setter="" getter="">
[CameraServer] singleton
</member>
<member name="ClassDB" type="ClassDB" setter="" getter="">
[ClassDB] singleton
</member>
@@ -10,6 +10,13 @@
<tutorials>
</tutorials>
<methods>
<method name="get_camera_feed_id">
<return type="int">
</return>
<description>
If this is an AR interface that requires displaying a camera feed as the background, this method returns the feed id in the [CameraServer] for this interface.
</description>
</method>
<method name="get_capabilities" qualifiers="const">
<return type="int">
</return>
64  doc/classes/CameraFeed.xml  Normal file
@@ -0,0 +1,64 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CameraFeed" inherits="Reference" category="Core" version="3.2">
<brief_description>
A camera feed gives you access to a single physical camera attached to your device.
</brief_description>
<description>
A camera feed gives you access to a single physical camera attached to your device.
When enabled, Godot will start capturing frames from the camera so they can be used. Note that many cameras return YCbCr images, which are split into two textures and need to be combined in a shader. Godot does this automatically for you if you set the environment to show the camera image in the background.
</description>
<tutorials>
</tutorials>
<methods>
<method name="get_id" qualifiers="const">
<return type="int">
</return>
<description>
Returns the unique ID for this feed.
</description>
</method>
<method name="get_name" qualifiers="const">
<return type="String">
</return>
<description>
Returns the name of the camera.
</description>
</method>
<method name="get_position" qualifiers="const">
<return type="int" enum="CameraFeed.FeedPosition">
</return>
<description>
Returns the position of the camera on the device.
</description>
</method>
</methods>
<members>
<member name="feed_is_active" type="bool" setter="set_active" getter="is_active">
If [code]true[/code], the feed is active.
</member>
<member name="feed_transform" type="Transform2D" setter="set_transform" getter="get_transform">
The transform applied to the camera's image.
</member>
</members>
<constants>
<constant name="FEED_NOIMAGE" value="0" enum="FeedDataType">
No image set for the feed.
</constant>
<constant name="FEED_RGB" value="1" enum="FeedDataType">
Feed supplies RGB images.
</constant>
<constant name="FEED_YCbCr" value="2" enum="FeedDataType">
Feed supplies YCbCr images that need to be converted to RGB.
</constant>
<constant name="FEED_YCbCr_Sep" value="3" enum="FeedDataType">
Feed supplies separate Y and CbCr images that need to be combined and converted to RGB.
</constant>
<constant name="FEED_UNSPECIFIED" value="0" enum="FeedPosition">
Unspecified position.
</constant>
<constant name="FEED_FRONT" value="1" enum="FeedPosition">
Camera is mounted at the front of the device.
</constant>
<constant name="FEED_BACK" value="2" enum="FeedPosition">
Camera is mounted at the back of the device.
</constant>
</constants>
</class>
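To make the feed lifecycle concrete, here is a hedged C++ sketch of what a platform-specific feed subclass roughly looks like, mirroring the CameraFeedIOS pattern added further down in this commit (activate_feed, deactivate_feed and set_YCbCr_imgs are names taken from that code; the class itself is a placeholder, not part of the commit):

	#include "servers/camera/camera_feed.h"

	class CameraFeedExample : public CameraFeed {
	public:
		virtual bool activate_feed() {
			// Start the platform capture session here; once frames arrive,
			// push them into the feed, e.g. via set_YCbCr_imgs(y_img, cbcr_img).
			return true;
		}

		virtual void deactivate_feed() {
			// Stop the platform capture session here.
		}
	};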
83  doc/classes/CameraServer.xml  Normal file
@@ -0,0 +1,83 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CameraServer" inherits="Object" category="Core" version="3.2">
<brief_description>
The camera server keeps track of the different cameras accessible in Godot, such as external webcams or the cameras on your phone.
</brief_description>
<description>
</description>
<tutorials>
</tutorials>
<methods>
<method name="add_feed">
<return type="void">
</return>
<argument index="0" name="feed" type="CameraFeed">
</argument>
<description>
Adds a camera feed to the camera server.
</description>
</method>
<method name="feeds">
<return type="Array">
</return>
<description>
Returns an array of [CameraFeed]s.
</description>
</method>
<method name="get_feed">
<return type="CameraFeed">
</return>
<argument index="0" name="index" type="int">
</argument>
<description>
Returns the [CameraFeed] at the given index.
</description>
</method>
<method name="get_feed_count">
<return type="int">
</return>
<description>
Returns the number of [CameraFeed]s registered.
</description>
</method>
<method name="remove_feed">
<return type="void">
</return>
<argument index="0" name="feed" type="CameraFeed">
</argument>
<description>
Removes the specified [CameraFeed].
</description>
</method>
</methods>
<signals>
<signal name="camera_feed_added">
<argument index="0" name="id" type="int">
</argument>
<description>
Emitted when a [CameraFeed] is added (e.g. a webcam is plugged in).
</description>
</signal>
<signal name="camera_feed_removed">
<argument index="0" name="id" type="int">
</argument>
<description>
Emitted when a [CameraFeed] is removed (e.g. a webcam is unplugged).
</description>
</signal>
</signals>
<constants>
<constant name="FEED_RGBA_IMAGE" value="0" enum="FeedImage">
The RGBA camera image.
</constant>
<constant name="FEED_YCbCr_IMAGE" value="0" enum="FeedImage">
The YCbCr camera image.
</constant>
<constant name="FEED_Y_IMAGE" value="0" enum="FeedImage">
The Y component camera image.
</constant>
<constant name="FEED_CbCr_IMAGE" value="1" enum="FeedImage">
The CbCr component camera image.
</constant>
</constants>
</class>
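A hedged sketch of reacting to camera hot-plugging from engine or module code, using the camera_feed_added / camera_feed_removed signals documented above; the CameraWatcher class and its bound method are hypothetical illustrations, not part of this commit:

	#include "core/print_string.h"
	#include "scene/main/node.h"
	#include "servers/camera_server.h"

	class CameraWatcher : public Node {
		GDCLASS(CameraWatcher, Node);

	protected:
		static void _bind_methods() {
			ClassDB::bind_method(D_METHOD("_on_feed_added", "id"), &CameraWatcher::_on_feed_added);
		}

	public:
		void _on_feed_added(int p_id) {
			print_line("New camera feed registered: " + itos(p_id));
		}

		CameraWatcher() {
			CameraServer::get_singleton()->connect("camera_feed_added", this, "_on_feed_added");
		}
	};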
25  doc/classes/CameraTexture.xml  Normal file
@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8" ?>
<class name="CameraTexture" inherits="Texture" category="Core" version="3.2">
<brief_description>
This texture gives access to the camera texture provided by a [CameraFeed]. Note that many cameras supply YCbCr images, which need to be converted in a shader.
</brief_description>
<description>
</description>
<tutorials>
</tutorials>
<methods>
</methods>
<members>
<member name="camera_feed_id" type="int" setter="set_camera_feed_id" getter="get_camera_feed_id">
The ID of the [CameraFeed] for which we want to display the image.
</member>
<member name="camera_is_active" type="bool" setter="set_camera_active" getter="get_camera_active">
Convenience property that gives access to the active property of the [CameraFeed].
</member>
<member name="which_feed" type="int" setter="set_which_feed" getter="get_which_feed" enum="CameraServer.FeedImage">
Which image within the [CameraFeed] we want access to; important if the camera image is split into separate Y and CbCr components.
</member>
</members>
<constants>
</constants>
</class>
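For completeness, a minimal hedged C++ sketch of wiring a CameraTexture to a feed using the properties listed above (set_camera_feed_id, set_which_feed, set_camera_active); the assumption that CameraTexture is declared in scene/resources/texture.h is mine:

	#include "scene/resources/texture.h"
	#include "servers/camera_server.h"

	// Build a texture that displays the RGBA image of the given feed.
	Ref<CameraTexture> make_camera_texture(int p_feed_id) {
		Ref<CameraTexture> tex;
		tex.instance();
		tex->set_camera_feed_id(p_feed_id);
		tex->set_which_feed(CameraServer::FEED_RGBA_IMAGE); // or FEED_Y_IMAGE / FEED_CbCr_IMAGE for split feeds
		tex->set_camera_active(true); // convenience: activates the underlying feed
		return tex;
	}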
@@ -57,6 +57,9 @@
<member name="auto_exposure_speed" type="float" setter="set_tonemap_auto_exposure_speed" getter="get_tonemap_auto_exposure_speed">
Speed of the auto exposure effect. Affects the time needed for the camera to perform auto exposure.
</member>
<member name="background_camera_feed_id" type="int" setter="set_camera_feed_id" getter="get_camera_feed_id">
The id of the camera feed to show in the background.
</member>
<member name="background_canvas_max_layer" type="int" setter="set_canvas_max_layer" getter="get_canvas_max_layer">
Maximum layer id (if using Layer background mode).
</member>
@@ -266,7 +269,10 @@
<constant name="BG_CANVAS" value="4" enum="BGMode">
Display a [CanvasLayer] in the background.
</constant>
<constant name="BG_MAX" value="6" enum="BGMode">
<constant name="BG_CAMERA_FEED" value="6" enum="BGMode">
Display a camera feed in the background.
</constant>
<constant name="BG_MAX" value="7" enum="BGMode">
Helper constant keeping track of the enum's size, has no direct usage in API calls.
</constant>
<constant name="GLOW_BLEND_MODE_ADDITIVE" value="0" enum="GlowBlendMode">
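A hedged sketch of enabling the new background mode from C++, based on the members and constants above; set_background is the existing Environment setter, and set_camera_feed_id is the setter named for background_camera_feed_id in this commit:

	#include "scene/resources/environment.h"

	// Show the given camera feed behind the 3D scene.
	void show_camera_background(Ref<Environment> p_env, int p_feed_id) {
		p_env->set_background(Environment::BG_CAMERA_FEED);
		p_env->set_camera_feed_id(p_feed_id);
	}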
|
@@ -3359,6 +3359,17 @@
<description>
</description>
</method>
<method name="texture_bind">
<return type="void">
</return>
<argument index="0" name="texture" type="RID">
</argument>
<argument index="1" name="number" type="int">
</argument>
<description>
Binds the texture to a texture slot.
</description>
</method>
<method name="texture_create">
<return type="RID">
</return>
@@ -4427,7 +4438,7 @@
</constant>
<constant name="ENV_BG_KEEP" value="5" enum="EnvironmentBG">
</constant>
<constant name="ENV_BG_MAX" value="6" enum="EnvironmentBG">
<constant name="ENV_BG_MAX" value="7" enum="EnvironmentBG">
</constant>
<constant name="ENV_DOF_BLUR_QUALITY_LOW" value="0" enum="EnvironmentDOFBlurQuality">
</constant>
@ -61,6 +61,7 @@ public:
|
||||
void environment_set_bg_energy(RID p_env, float p_energy) {}
|
||||
void environment_set_canvas_max_layer(RID p_env, int p_max_layer) {}
|
||||
void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) {}
|
||||
void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id){};
|
||||
|
||||
void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) {}
|
||||
void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) {}
|
||||
@ -216,6 +217,7 @@ public:
|
||||
uint32_t texture_get_height(RID p_texture) const { return 0; }
|
||||
uint32_t texture_get_depth(RID p_texture) const { return 0; }
|
||||
void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) {}
|
||||
void texture_bind(RID p_texture, uint32_t p_texture_no) {}
|
||||
|
||||
void texture_set_path(RID p_texture, const String &p_path) {
|
||||
DummyTexture *t = texture_owner.getornull(p_texture);
|
||||
|
@ -36,6 +36,7 @@
|
||||
#include "core/project_settings.h"
|
||||
#include "core/vmap.h"
|
||||
#include "rasterizer_canvas_gles2.h"
|
||||
#include "servers/camera/camera_feed.h"
|
||||
#include "servers/visual/visual_server_raster.h"
|
||||
|
||||
#ifndef GLES_OVER_GL
|
||||
@ -769,6 +770,13 @@ void RasterizerSceneGLES2::environment_set_ambient_light(RID p_env, const Color
|
||||
env->ambient_sky_contribution = p_sky_contribution;
|
||||
}
|
||||
|
||||
void RasterizerSceneGLES2::environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) {
|
||||
Environment *env = environment_owner.getornull(p_env);
|
||||
ERR_FAIL_COND(!env);
|
||||
|
||||
env->camera_feed_id = p_camera_feed_id;
|
||||
}
|
||||
|
||||
void RasterizerSceneGLES2::environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality) {
|
||||
Environment *env = environment_owner.getornull(p_env);
|
||||
ERR_FAIL_COND(!env);
|
||||
@ -2843,6 +2851,7 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
|
||||
// clear color
|
||||
|
||||
Color clear_color(0, 0, 0, 1);
|
||||
Ref<CameraFeed> feed;
|
||||
|
||||
if (storage->frame.current_rt && storage->frame.current_rt->flags[RasterizerStorage::RENDER_TARGET_TRANSPARENT]) {
|
||||
clear_color = Color(0, 0, 0, 0);
|
||||
@ -2855,6 +2864,9 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
|
||||
} else if (env->bg_mode == VS::ENV_BG_CANVAS || env->bg_mode == VS::ENV_BG_COLOR || env->bg_mode == VS::ENV_BG_COLOR_SKY) {
|
||||
clear_color = env->bg_color;
|
||||
storage->frame.clear_request = false;
|
||||
} else if (env->bg_mode == VS::ENV_BG_CAMERA_FEED) {
|
||||
feed = CameraServer::get_singleton()->get_feed_by_id(env->camera_feed_id);
|
||||
storage->frame.clear_request = false;
|
||||
} else {
|
||||
storage->frame.clear_request = false;
|
||||
}
|
||||
@ -2891,7 +2903,66 @@ void RasterizerSceneGLES2::render_scene(const Transform &p_cam_transform, const
|
||||
env_radiance_tex = sky->radiance;
|
||||
}
|
||||
} break;
|
||||
case VS::ENV_BG_CAMERA_FEED: {
|
||||
if (feed.is_valid() && (feed->get_base_width() > 0) && (feed->get_base_height() > 0)) {
|
||||
// copy our camera feed to our background
|
||||
|
||||
glDisable(GL_BLEND);
|
||||
glDepthMask(GL_FALSE);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_CULL_FACE);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_NO_ALPHA, true);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_DISPLAY_TRANSFORM, true);
|
||||
|
||||
if (feed->get_datatype() == CameraFeed::FEED_RGB) {
|
||||
RID camera_RGBA = feed->get_texture(CameraServer::FEED_RGBA_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_RGBA, 0);
|
||||
|
||||
} else if (feed->get_datatype() == CameraFeed::FEED_YCbCr) {
|
||||
RID camera_YCbCr = feed->get_texture(CameraServer::FEED_YCbCr_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_YCbCr, 0);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, true);
|
||||
|
||||
} else if (feed->get_datatype() == CameraFeed::FEED_YCbCr_Sep) {
|
||||
RID camera_Y = feed->get_texture(CameraServer::FEED_Y_IMAGE);
|
||||
RID camera_CbCr = feed->get_texture(CameraServer::FEED_CbCr_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_Y, 0);
|
||||
VS::get_singleton()->texture_bind(camera_CbCr, 1);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::SEP_CBCR_TEXTURE, true);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, true);
|
||||
};
|
||||
|
||||
storage->shaders.copy.bind();
|
||||
storage->shaders.copy.set_uniform(CopyShaderGLES2::DISPLAY_TRANSFORM, feed->get_transform());
|
||||
|
||||
storage->bind_quad_array();
|
||||
glDrawArrays(GL_TRIANGLE_FAN, 0, 4);
|
||||
glDisableVertexAttribArray(VS::ARRAY_VERTEX);
|
||||
glDisableVertexAttribArray(VS::ARRAY_TEX_UV);
|
||||
glBindBuffer(GL_ARRAY_BUFFER, 0);
|
||||
|
||||
// turn off everything used
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::SEP_CBCR_TEXTURE, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::YCBCR_TO_RGB, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_NO_ALPHA, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES2::USE_DISPLAY_TRANSFORM, false);
|
||||
|
||||
//restore
|
||||
glEnable(GL_BLEND);
|
||||
glDepthMask(GL_TRUE);
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_CULL_FACE);
|
||||
} else {
|
||||
// don't have a feed, just show greenscreen :)
|
||||
clear_color = Color(0.0, 1.0, 0.0, 1.0);
|
||||
}
|
||||
} break;
|
||||
default: {
|
||||
// FIXME: implement other background modes
|
||||
} break;
|
||||
|
@ -354,6 +354,8 @@ public:
|
||||
float bg_energy;
|
||||
float sky_ambient;
|
||||
|
||||
int camera_feed_id;
|
||||
|
||||
Color ambient_color;
|
||||
float ambient_energy;
|
||||
float ambient_sky_contribution;
|
||||
@ -381,6 +383,7 @@ public:
|
||||
sky_custom_fov(0.0),
|
||||
bg_energy(1.0),
|
||||
sky_ambient(0),
|
||||
camera_feed_id(0),
|
||||
ambient_energy(1.0),
|
||||
ambient_sky_contribution(0.0),
|
||||
canvas_max_layer(0),
|
||||
@ -413,6 +416,7 @@ public:
|
||||
virtual void environment_set_bg_energy(RID p_env, float p_energy);
|
||||
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer);
|
||||
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0);
|
||||
virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id);
|
||||
|
||||
virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
|
||||
virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
|
||||
|
@ -968,6 +968,15 @@ uint32_t RasterizerStorageGLES2::texture_get_texid(RID p_texture) const {
|
||||
return texture->tex_id;
|
||||
}
|
||||
|
||||
void RasterizerStorageGLES2::texture_bind(RID p_texture, uint32_t p_texture_no) {
|
||||
Texture *texture = texture_owner.getornull(p_texture);
|
||||
|
||||
ERR_FAIL_COND(!texture);
|
||||
|
||||
glActiveTexture(GL_TEXTURE0 + p_texture_no);
|
||||
glBindTexture(texture->target, texture->tex_id);
|
||||
}
|
||||
|
||||
uint32_t RasterizerStorageGLES2::texture_get_width(RID p_texture) const {
|
||||
Texture *texture = texture_owner.getornull(p_texture);
|
||||
|
||||
|
@ -350,6 +350,7 @@ public:
|
||||
virtual uint32_t texture_get_height(RID p_texture) const;
|
||||
virtual uint32_t texture_get_depth(RID p_texture) const;
|
||||
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth);
|
||||
virtual void texture_bind(RID p_texture, uint32_t p_texture_no);
|
||||
|
||||
virtual void texture_set_path(RID p_texture, const String &p_path);
|
||||
virtual String texture_get_path(RID p_texture) const;
|
||||
|
@ -28,8 +28,15 @@ varying vec2 uv_interp;
|
||||
#endif
|
||||
varying vec2 uv2_interp;
|
||||
|
||||
// These definitions are here because the shader-wrapper builder does
|
||||
// not understand `#elif defined()`
|
||||
#ifdef USE_DISPLAY_TRANSFORM
|
||||
#endif
|
||||
|
||||
#ifdef USE_COPY_SECTION
|
||||
uniform highp vec4 copy_section;
|
||||
#elif defined(USE_DISPLAY_TRANSFORM)
|
||||
uniform highp mat4 display_transform;
|
||||
#endif
|
||||
|
||||
void main() {
|
||||
@ -48,6 +55,8 @@ void main() {
|
||||
#ifdef USE_COPY_SECTION
|
||||
uv_interp = copy_section.xy + uv_interp * copy_section.zw;
|
||||
gl_Position.xy = (copy_section.xy + (gl_Position.xy * 0.5 + 0.5) * copy_section.zw) * 2.0 - 1.0;
|
||||
#elif defined(USE_DISPLAY_TRANSFORM)
|
||||
uv_interp = (display_transform * vec4(uv_in, 1.0, 1.0)).xy;
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -88,6 +97,10 @@ uniform samplerCube source_cube; // texunit:0
|
||||
uniform sampler2D source; // texunit:0
|
||||
#endif
|
||||
|
||||
#ifdef SEP_CBCR_TEXTURE
|
||||
uniform sampler2D CbCr; //texunit:1
|
||||
#endif
|
||||
|
||||
varying vec2 uv2_interp;
|
||||
|
||||
#ifdef USE_MULTIPLIER
|
||||
@ -145,10 +158,26 @@ void main() {
|
||||
|
||||
#elif defined(USE_CUBEMAP)
|
||||
vec4 color = textureCube(source_cube, normalize(cube_interp));
|
||||
#elif defined(SEP_CBCR_TEXTURE)
|
||||
vec4 color;
|
||||
color.r = texture2D(source, uv_interp).r;
|
||||
color.gb = texture2D(CbCr, uv_interp).rg - vec2(0.5, 0.5);
|
||||
color.a = 1.0;
|
||||
#else
|
||||
vec4 color = texture2D(source, uv_interp);
|
||||
#endif
|
||||
|
||||
#ifdef YCBCR_TO_RGB
|
||||
// YCbCr -> RGB conversion
|
||||
|
||||
// Using BT.601, the standard for SDTV, provided here as a reference
|
||||
color.rgb = mat3(
|
||||
vec3(1.00000, 1.00000, 1.00000),
|
||||
vec3(0.00000, -0.34413, 1.77200),
|
||||
vec3(1.40200, -0.71414, 0.00000)) *
|
||||
color.rgb;
|
||||
#endif
|
||||
|
||||
#ifdef USE_NO_ALPHA
|
||||
color.a = 1.0;
|
||||
#endif
|
||||
|
@ -34,6 +34,7 @@
|
||||
#include "core/os/os.h"
|
||||
#include "core/project_settings.h"
|
||||
#include "rasterizer_canvas_gles3.h"
|
||||
#include "servers/camera/camera_feed.h"
|
||||
#include "servers/visual/visual_server_raster.h"
|
||||
|
||||
#ifndef GLES_OVER_GL
|
||||
@ -830,6 +831,12 @@ void RasterizerSceneGLES3::environment_set_ambient_light(RID p_env, const Color
|
||||
env->ambient_energy = p_energy;
|
||||
env->ambient_sky_contribution = p_sky_contribution;
|
||||
}
|
||||
void RasterizerSceneGLES3::environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) {
|
||||
Environment *env = environment_owner.getornull(p_env);
|
||||
ERR_FAIL_COND(!env);
|
||||
|
||||
env->camera_feed_id = p_camera_feed_id;
|
||||
}
|
||||
|
||||
void RasterizerSceneGLES3::environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality) {
|
||||
|
||||
@ -4342,6 +4349,7 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
|
||||
Color clear_color(0, 0, 0, 0);
|
||||
|
||||
RasterizerStorageGLES3::Sky *sky = NULL;
|
||||
Ref<CameraFeed> feed;
|
||||
GLuint env_radiance_tex = 0;
|
||||
|
||||
if (state.debug_draw == VS::VIEWPORT_DEBUG_DRAW_OVERDRAW) {
|
||||
@ -4376,6 +4384,9 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
|
||||
clear_color = env->bg_color.to_linear();
|
||||
storage->frame.clear_request = false;
|
||||
|
||||
} else if (env->bg_mode == VS::ENV_BG_CAMERA_FEED) {
|
||||
feed = CameraServer::get_singleton()->get_feed_by_id(env->camera_feed_id);
|
||||
storage->frame.clear_request = false;
|
||||
} else {
|
||||
storage->frame.clear_request = false;
|
||||
}
|
||||
@ -4426,6 +4437,63 @@ void RasterizerSceneGLES3::render_scene(const Transform &p_cam_transform, const
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_CULL_FACE);
|
||||
break;
|
||||
case VS::ENV_BG_CAMERA_FEED:
|
||||
if (feed.is_valid() && (feed->get_base_width() > 0) && (feed->get_base_height() > 0)) {
|
||||
// copy our camera feed to our background
|
||||
|
||||
glDisable(GL_BLEND);
|
||||
glDepthMask(GL_FALSE);
|
||||
glDisable(GL_DEPTH_TEST);
|
||||
glDisable(GL_CULL_FACE);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::USE_DISPLAY_TRANSFORM, true);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::DISABLE_ALPHA, true);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::SRGB_TO_LINEAR, true);
|
||||
|
||||
if (feed->get_datatype() == CameraFeed::FEED_RGB) {
|
||||
RID camera_RGBA = feed->get_texture(CameraServer::FEED_RGBA_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_RGBA, 0);
|
||||
} else if (feed->get_datatype() == CameraFeed::FEED_YCbCr) {
|
||||
RID camera_YCbCr = feed->get_texture(CameraServer::FEED_YCbCr_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_YCbCr, 0);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, true);
|
||||
|
||||
} else if (feed->get_datatype() == CameraFeed::FEED_YCbCr_Sep) {
|
||||
RID camera_Y = feed->get_texture(CameraServer::FEED_Y_IMAGE);
|
||||
RID camera_CbCr = feed->get_texture(CameraServer::FEED_CbCr_IMAGE);
|
||||
|
||||
VS::get_singleton()->texture_bind(camera_Y, 0);
|
||||
VS::get_singleton()->texture_bind(camera_CbCr, 1);
|
||||
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::SEP_CBCR_TEXTURE, true);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, true);
|
||||
};
|
||||
|
||||
storage->shaders.copy.bind();
|
||||
storage->shaders.copy.set_uniform(CopyShaderGLES3::DISPLAY_TRANSFORM, feed->get_transform());
|
||||
|
||||
_copy_screen(true, true);
|
||||
|
||||
//turn off everything used
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::USE_DISPLAY_TRANSFORM, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::DISABLE_ALPHA, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::SRGB_TO_LINEAR, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::SEP_CBCR_TEXTURE, false);
|
||||
storage->shaders.copy.set_conditional(CopyShaderGLES3::YCBCR_TO_SRGB, false);
|
||||
|
||||
//restore
|
||||
glEnable(GL_BLEND);
|
||||
glDepthMask(GL_TRUE);
|
||||
glEnable(GL_DEPTH_TEST);
|
||||
glEnable(GL_CULL_FACE);
|
||||
} else {
|
||||
// don't have a feed, just show greenscreen :)
|
||||
clear_color = Color(0.0, 1.0, 0.0, 1.0);
|
||||
}
|
||||
break;
|
||||
default: {
|
||||
}
|
||||
}
|
||||
|
@ -376,6 +376,8 @@ public:
|
||||
float bg_energy;
|
||||
float sky_ambient;
|
||||
|
||||
int camera_feed_id;
|
||||
|
||||
Color ambient_color;
|
||||
float ambient_energy;
|
||||
float ambient_sky_contribution;
|
||||
@ -461,6 +463,7 @@ public:
|
||||
sky_custom_fov(0.0),
|
||||
bg_energy(1.0),
|
||||
sky_ambient(0),
|
||||
camera_feed_id(0),
|
||||
ambient_energy(1.0),
|
||||
ambient_sky_contribution(0.0),
|
||||
canvas_max_layer(0),
|
||||
@ -542,6 +545,7 @@ public:
|
||||
virtual void environment_set_bg_energy(RID p_env, float p_energy);
|
||||
virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer);
|
||||
virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0);
|
||||
virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id);
|
||||
|
||||
virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
|
||||
virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_amount, VS::EnvironmentDOFBlurQuality p_quality);
|
||||
|
@ -1437,6 +1437,15 @@ uint32_t RasterizerStorageGLES3::texture_get_texid(RID p_texture) const {
|
||||
|
||||
return texture->tex_id;
|
||||
}
|
||||
void RasterizerStorageGLES3::texture_bind(RID p_texture, uint32_t p_texture_no) {
|
||||
|
||||
Texture *texture = texture_owner.getornull(p_texture);
|
||||
|
||||
ERR_FAIL_COND(!texture);
|
||||
|
||||
glActiveTexture(GL_TEXTURE0 + p_texture_no);
|
||||
glBindTexture(texture->target, texture->tex_id);
|
||||
}
|
||||
uint32_t RasterizerStorageGLES3::texture_get_width(RID p_texture) const {
|
||||
|
||||
Texture *texture = texture_owner.get(p_texture);
|
||||
|
@ -358,6 +358,7 @@ public:
|
||||
virtual uint32_t texture_get_height(RID p_texture) const;
|
||||
virtual uint32_t texture_get_depth(RID p_texture) const;
|
||||
virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth);
|
||||
virtual void texture_bind(RID p_texture, uint32_t p_texture_no);
|
||||
|
||||
virtual void texture_set_path(RID p_texture, const String &p_path);
|
||||
virtual String texture_get_path(RID p_texture) const;
|
||||
|
@ -18,10 +18,19 @@ out vec2 uv_interp;
|
||||
|
||||
out vec2 uv2_interp;
|
||||
|
||||
// These definitions are here because the shader-wrapper builder does
|
||||
// not understand `#elif defined()`
|
||||
#ifdef USE_DISPLAY_TRANSFORM
|
||||
#endif
|
||||
|
||||
#ifdef USE_COPY_SECTION
|
||||
|
||||
uniform vec4 copy_section;
|
||||
|
||||
#elif defined(USE_DISPLAY_TRANSFORM)
|
||||
|
||||
uniform highp mat4 display_transform;
|
||||
|
||||
#endif
|
||||
|
||||
void main() {
|
||||
@ -44,6 +53,9 @@ void main() {
|
||||
|
||||
uv_interp = copy_section.xy + uv_interp * copy_section.zw;
|
||||
gl_Position.xy = (copy_section.xy + (gl_Position.xy * 0.5 + 0.5) * copy_section.zw) * 2.0 - 1.0;
|
||||
#elif defined(USE_DISPLAY_TRANSFORM)
|
||||
|
||||
uv_interp = (display_transform * vec4(uv_in, 1.0, 1.0)).xy;
|
||||
#endif
|
||||
}
|
||||
|
||||
@ -73,6 +85,8 @@ uniform highp vec4 asym_proj;
|
||||
#endif
|
||||
#ifdef USE_TEXTURE2DARRAY
|
||||
#endif
|
||||
#ifdef YCBCR_TO_SRGB
|
||||
#endif
|
||||
|
||||
#ifdef USE_CUBEMAP
|
||||
uniform samplerCube source_cube; //texunit:0
|
||||
@ -84,6 +98,10 @@ uniform sampler2DArray source_2d_array; //texunit:0
|
||||
uniform sampler2D source; //texunit:0
|
||||
#endif
|
||||
|
||||
#ifdef SEP_CBCR_TEXTURE
|
||||
uniform sampler2D CbCr; //texunit:1
|
||||
#endif
|
||||
|
||||
/* clang-format on */
|
||||
|
||||
#if defined(USE_TEXTURE3D) || defined(USE_TEXTURE2DARRAY)
|
||||
@ -166,14 +184,30 @@ void main() {
|
||||
vec4 color = textureLod(source_3d, vec3(uv_interp, layer), 0.0);
|
||||
#elif defined(USE_TEXTURE2DARRAY)
|
||||
vec4 color = textureLod(source_2d_array, vec3(uv_interp, layer), 0.0);
|
||||
#elif defined(SEP_CBCR_TEXTURE)
|
||||
vec4 color;
|
||||
color.r = textureLod(source, uv_interp, 0.0).r;
|
||||
color.gb = textureLod(CbCr, uv_interp, 0.0).rg - vec2(0.5, 0.5);
|
||||
color.a = 1.0;
|
||||
#else
|
||||
vec4 color = textureLod(source, uv_interp, 0.0);
|
||||
#endif
|
||||
|
||||
#ifdef LINEAR_TO_SRGB
|
||||
//regular Linear -> SRGB conversion
|
||||
// regular Linear -> SRGB conversion
|
||||
vec3 a = vec3(0.055);
|
||||
color.rgb = mix((vec3(1.0) + a) * pow(color.rgb, vec3(1.0 / 2.4)) - a, 12.92 * color.rgb, lessThan(color.rgb, vec3(0.0031308)));
|
||||
|
||||
#elif defined(YCBCR_TO_SRGB)
|
||||
|
||||
// YCbCr -> SRGB conversion
|
||||
// Using BT.709, which is the standard for HDTV
|
||||
color.rgb = mat3(
|
||||
vec3(1.00000, 1.00000, 1.00000),
|
||||
vec3(0.00000, -0.18732, 1.85560),
|
||||
vec3(1.57481, -0.46813, 0.00000)) *
|
||||
color.rgb;
|
||||
|
||||
#endif
|
||||
|
||||
#ifdef SRGB_TO_LINEAR
|
||||
|
@ -115,6 +115,17 @@ void ARVRInterfaceGDNative::set_anchor_detection_is_enabled(bool p_enable) {
|
||||
interface->set_anchor_detection_is_enabled(data, p_enable);
|
||||
}
|
||||
|
||||
int ARVRInterfaceGDNative::get_camera_feed_id() {
|
||||
|
||||
ERR_FAIL_COND_V(interface == NULL, 0);
|
||||
|
||||
if ((interface->version.major > 1) || ((interface->version.major) == 1 && (interface->version.minor >= 1))) {
|
||||
return (unsigned int)interface->get_camera_feed_id(data);
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
bool ARVRInterfaceGDNative::is_stereo() {
|
||||
bool stereo;
|
||||
|
||||
|
@ -66,6 +66,7 @@ public:
|
||||
/** specific to AR **/
|
||||
virtual bool get_anchor_detection_is_enabled() const;
|
||||
virtual void set_anchor_detection_is_enabled(bool p_enable);
|
||||
virtual int get_camera_feed_id();
|
||||
|
||||
/** rendering and internal **/
|
||||
virtual Size2 get_render_targetsize();
|
||||
|
@ -64,6 +64,7 @@ typedef struct {
|
||||
// only in 1.1 onwards
|
||||
godot_int (*get_external_texture_for_eye)(void *, godot_int);
|
||||
void (*notification)(void *, godot_int);
|
||||
godot_int (*get_camera_feed_id)(void *);
|
||||
} godot_arvr_interface_gdnative;
|
||||
|
||||
void GDAPI godot_arvr_register_interface(const godot_arvr_interface_gdnative *p_interface);
|
||||
|
@ -176,6 +176,9 @@ Error OS_Android::initialize(const VideoMode &p_desired, int p_video_driver, int
|
||||
input = memnew(InputDefault);
|
||||
input->set_fallback_mapping("Default Android Gamepad");
|
||||
|
||||
///@TODO implement a subclass for Android and instantiate that instead
|
||||
camera_server = memnew(CameraServer);
|
||||
|
||||
//power_manager = memnew(PowerAndroid);
|
||||
|
||||
return OK;
|
||||
@ -193,6 +196,9 @@ void OS_Android::delete_main_loop() {
|
||||
}
|
||||
|
||||
void OS_Android::finalize() {
|
||||
|
||||
memdelete(camera_server);
|
||||
|
||||
memdelete(input);
|
||||
}
|
||||
|
||||
|
@ -39,6 +39,7 @@
|
||||
#include "main/input_default.h"
|
||||
//#include "power_android.h"
|
||||
#include "servers/audio_server.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual/rasterizer.h"
|
||||
|
||||
class GodotJavaWrapper;
|
||||
@ -77,6 +78,8 @@ private:
|
||||
|
||||
VisualServer *visual_server;
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
mutable String data_dir_cache;
|
||||
|
||||
//AudioDriverAndroid audio_driver_android;
|
||||
|
@ -133,6 +133,8 @@ Error OS_Haiku::initialize(const VideoMode &p_desired, int p_video_driver, int p
|
||||
window->Show();
|
||||
visual_server->init();
|
||||
|
||||
camera_server = memnew(CameraServer);
|
||||
|
||||
AudioDriverManager::initialize(p_audio_driver);
|
||||
|
||||
return OK;
|
||||
@ -148,6 +150,8 @@ void OS_Haiku::finalize() {
|
||||
visual_server->finish();
|
||||
memdelete(visual_server);
|
||||
|
||||
memdelete(camera_server);
|
||||
|
||||
memdelete(input);
|
||||
|
||||
#if defined(OPENGL_ENABLED)
|
||||
|
@ -38,6 +38,7 @@
|
||||
#include "haiku_direct_window.h"
|
||||
#include "main/input_default.h"
|
||||
#include "servers/audio_server.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual_server.h"
|
||||
|
||||
class OS_Haiku : public OS_Unix {
|
||||
@ -49,6 +50,7 @@ private:
|
||||
VisualServer *visual_server;
|
||||
VideoMode current_video_mode;
|
||||
int video_driver_index;
|
||||
CameraServer *camera_server;
|
||||
|
||||
#ifdef MEDIA_KIT_ENABLED
|
||||
AudioDriverMediaKit driver_media_kit;
|
||||
|
@ -14,6 +14,7 @@ iphone_lib = [
|
||||
'in_app_store.mm',
|
||||
'icloud.mm',
|
||||
'ios.mm',
|
||||
'camera_ios.mm',
|
||||
]
|
||||
|
||||
env_ios = env.Clone()
|
||||
|
47  platform/iphone/camera_ios.h  Normal file
@@ -0,0 +1,47 @@
|
||||
/*************************************************************************/
|
||||
/* camera_ios.h */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#ifndef CAMERAIOS_H
|
||||
#define CAMERAIOS_H
|
||||
|
||||
///@TODO this is a near duplicate of CameraOSX, we should find a way to combine those to minimise code duplication!!!!
|
||||
// If you fix something here, make sure you fix it there as well!
|
||||
|
||||
#include "servers/camera_server.h"
|
||||
|
||||
class CameraIOS : public CameraServer {
|
||||
public:
|
||||
CameraIOS();
|
||||
~CameraIOS();
|
||||
|
||||
void update_feeds();
|
||||
};
|
||||
|
||||
#endif /* CAMERAIOS_H */
|
429  platform/iphone/camera_ios.mm  Normal file
@@ -0,0 +1,429 @@
|
||||
/*************************************************************************/
|
||||
/* camera_ios.mm */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
///@TODO this is a near duplicate of CameraOSX, we should find a way to combine those to minimise code duplication!!!!
|
||||
// If you fix something here, make sure you fix it there as well!
|
||||
|
||||
#include "camera_ios.h"
|
||||
#include "servers/camera/camera_feed.h"
|
||||
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
#import <UIKit/UIKit.h>
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// MyCaptureSession - This is a little helper class so we can capture our frames
|
||||
|
||||
@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
|
||||
Ref<CameraFeed> feed;
|
||||
size_t width[2];
|
||||
size_t height[2];
|
||||
PoolVector<uint8_t> img_data[2];
|
||||
|
||||
AVCaptureDeviceInput *input;
|
||||
AVCaptureVideoDataOutput *output;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation MyCaptureSession
|
||||
|
||||
- (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device {
|
||||
if (self = [super init]) {
|
||||
NSError *error;
|
||||
feed = p_feed;
|
||||
width[0] = 0;
|
||||
height[0] = 0;
|
||||
width[1] = 0;
|
||||
height[1] = 0;
|
||||
|
||||
// prepare our device
|
||||
[p_device lockForConfiguration:&error];
|
||||
|
||||
[p_device setFocusMode:AVCaptureFocusModeLocked];
|
||||
[p_device setExposureMode:AVCaptureExposureModeLocked];
|
||||
[p_device setWhiteBalanceMode:AVCaptureWhiteBalanceModeLocked];
|
||||
|
||||
[p_device unlockForConfiguration];
|
||||
|
||||
[self beginConfiguration];
|
||||
|
||||
// setup our capture
|
||||
self.sessionPreset = AVCaptureSessionPreset1280x720;
|
||||
|
||||
input = [AVCaptureDeviceInput deviceInputWithDevice:p_device error:&error];
|
||||
if (!input) {
|
||||
print_line("Couldn't get input device for camera");
|
||||
} else {
|
||||
[self addInput:input];
|
||||
}
|
||||
|
||||
output = [AVCaptureVideoDataOutput new];
|
||||
if (!output) {
|
||||
print_line("Couldn't get output device for camera");
|
||||
} else {
|
||||
NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
|
||||
output.videoSettings = settings;
|
||||
|
||||
// discard if the data output queue is blocked (as we process the still image)
|
||||
[output setAlwaysDiscardsLateVideoFrames:YES];
|
||||
|
||||
// Now set ourselves as the delegate to receive new frames. Note that we're doing this on the main thread at the moment; we may need to change this.
|
||||
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
|
||||
|
||||
[self addOutput:output];
|
||||
}
|
||||
|
||||
[self commitConfiguration];
|
||||
|
||||
// kick off our session..
|
||||
[self startRunning];
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)cleanup {
|
||||
// stop running
|
||||
[self stopRunning];
|
||||
|
||||
// cleanup
|
||||
[self beginConfiguration];
|
||||
|
||||
if (input) {
|
||||
[self removeInput:input];
|
||||
// don't release this
|
||||
input = nil;
|
||||
}
|
||||
|
||||
if (output) {
|
||||
[self removeOutput:output];
|
||||
[output setSampleBufferDelegate:nil queue:NULL];
|
||||
[output release];
|
||||
output = nil;
|
||||
}
|
||||
|
||||
[self commitConfiguration];
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
// bye bye
|
||||
[super dealloc];
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
|
||||
// This gets called every time our camera has a new image for us to process.
|
||||
// May need to investigate a way to throttle this if we get more images than we're rendering frames.
|
||||
|
||||
// For now, version 1, we're just doing the bare minimum to make this work...
|
||||
|
||||
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
// int width = CVPixelBufferGetWidth(pixelBuffer);
|
||||
// int height = CVPixelBufferGetHeight(pixelBuffer);
|
||||
|
||||
// The documentation says we need to lock this, though it's not in the samples.
// We need to lock our base address so we can access our pixel buffers; better safe than sorry?
|
||||
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
||||
|
||||
// get our buffers
|
||||
unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
|
||||
unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
|
||||
if (dataY == NULL) {
|
||||
print_line("Couldn't access Y pixel buffer data");
|
||||
} else if (dataCbCr == NULL) {
|
||||
print_line("Couldn't access CbCr pixel buffer data");
|
||||
} else {
|
||||
UIInterfaceOrientation orientation = [[UIApplication sharedApplication] statusBarOrientation];
|
||||
Ref<Image> img[2];
|
||||
|
||||
{
|
||||
// do Y
|
||||
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
|
||||
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
|
||||
int _bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
|
||||
|
||||
if ((width[0] != new_width) || (height[0] != new_height)) {
|
||||
// printf("Camera Y plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
|
||||
|
||||
width[0] = new_width;
|
||||
height[0] = new_height;
|
||||
img_data[0].resize(new_width * new_height);
|
||||
}
|
||||
|
||||
PoolVector<uint8_t>::Write w = img_data[0].write();
|
||||
memcpy(w.ptr(), dataY, new_width * new_height);
|
||||
|
||||
img[0].instance();
|
||||
img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
|
||||
}
|
||||
|
||||
{
|
||||
// do CbCr
|
||||
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
|
||||
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
|
||||
int bytes_per_row = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
|
||||
|
||||
if ((width[1] != new_width) || (height[1] != new_height)) {
|
||||
// printf("Camera CbCr plane %i, %i - %i\n", new_width, new_height, bytes_per_row);
|
||||
|
||||
width[1] = new_width;
|
||||
height[1] = new_height;
|
||||
img_data[1].resize(2 * new_width * new_height);
|
||||
}
|
||||
|
||||
PoolVector<uint8_t>::Write w = img_data[1].write();
|
||||
memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);
|
||||
|
||||
///TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
|
||||
img[1].instance();
|
||||
img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
|
||||
}
|
||||
|
||||
// set our texture...
|
||||
feed->set_YCbCr_imgs(img[0], img[1]);
|
||||
|
||||
// Update our matrix to match the orientation. Before changing anything
// here, be aware that the project orientation settings must match your Xcode
// settings or this will go wrong!
|
||||
Transform2D display_transform;
|
||||
switch (orientation) {
|
||||
case UIInterfaceOrientationPortrait: {
|
||||
display_transform = Transform2D(0.0, -1.0, -1.0, 0.0, 1.0, 1.0);
|
||||
} break;
|
||||
case UIInterfaceOrientationLandscapeRight: {
|
||||
display_transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
|
||||
} break;
|
||||
case UIInterfaceOrientationLandscapeLeft: {
|
||||
display_transform = Transform2D(-1.0, 0.0, 0.0, 1.0, 1.0, 0.0);
|
||||
} break;
|
||||
default: {
|
||||
display_transform = Transform2D(0.0, 1.0, 1.0, 0.0, 0.0, 0.0);
|
||||
} break;
|
||||
}
|
||||
|
||||
//TODO: this is correct for the camera on the back; I have a feeling this needs to be inverted for the camera on the front!
|
||||
feed->set_transform(display_transform);
|
||||
}
|
||||
|
||||
// and unlock
|
||||
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraFeedIOS - Subclass for camera feeds in iOS
|
||||
|
||||
class CameraFeedIOS : public CameraFeed {
|
||||
private:
|
||||
bool is_arkit; // if true this feed is updated through ARKit (should only have one and not yet implemented)
|
||||
AVCaptureDevice *device;
|
||||
MyCaptureSession *capture_session;
|
||||
|
||||
public:
|
||||
bool get_is_arkit() const;
|
||||
AVCaptureDevice *get_device() const;
|
||||
|
||||
CameraFeedIOS();
|
||||
~CameraFeedIOS();
|
||||
|
||||
void set_device(AVCaptureDevice *p_device);
|
||||
|
||||
bool activate_feed();
|
||||
void deactivate_feed();
|
||||
};
|
||||
|
||||
bool CameraFeedIOS::get_is_arkit() const {
|
||||
return is_arkit;
|
||||
};
|
||||
|
||||
AVCaptureDevice *CameraFeedIOS::get_device() const {
|
||||
return device;
|
||||
};
|
||||
|
||||
CameraFeedIOS::CameraFeedIOS() {
|
||||
capture_session = NULL;
|
||||
device = NULL;
|
||||
transform = Transform2D(1.0, 0.0, 0.0, 1.0, 0.0, 0.0); /* should re-orientate this based on device orientation */
|
||||
};
|
||||
|
||||
void CameraFeedIOS::set_device(AVCaptureDevice *p_device) {
|
||||
device = p_device;
|
||||
if (device == NULL) {
|
||||
///@TODO finish this!
|
||||
is_arkit = true;
|
||||
name = "ARKit";
|
||||
position = CameraFeed::FEED_BACK;
|
||||
} else {
|
||||
is_arkit = false;
|
||||
[device retain];
|
||||
|
||||
// get some info
|
||||
NSString *device_name = p_device.localizedName;
|
||||
name = device_name.UTF8String;
|
||||
position = CameraFeed::FEED_UNSPECIFIED;
|
||||
if ([p_device position] == AVCaptureDevicePositionBack) {
|
||||
position = CameraFeed::FEED_BACK;
|
||||
} else if ([p_device position] == AVCaptureDevicePositionFront) {
|
||||
position = CameraFeed::FEED_FRONT;
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
CameraFeedIOS::~CameraFeedIOS() {
|
||||
if (capture_session != NULL) {
|
||||
[capture_session release];
|
||||
capture_session = NULL;
|
||||
};
|
||||
|
||||
if (device != NULL) {
|
||||
[device release];
|
||||
device = NULL;
|
||||
};
|
||||
};
|
||||
|
||||
bool CameraFeedIOS::activate_feed() {
|
||||
if (is_arkit) {
|
||||
///@TODO to implement;
|
||||
} else {
|
||||
if (capture_session) {
|
||||
// already recording!
|
||||
} else {
|
||||
// start camera capture
|
||||
capture_session = [[MyCaptureSession alloc] initForFeed:this andDevice:device];
|
||||
};
|
||||
};
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
void CameraFeedIOS::deactivate_feed() {
|
||||
// end camera capture if we have one
|
||||
if (capture_session) {
|
||||
[capture_session cleanup];
|
||||
[capture_session release];
|
||||
capture_session = NULL;
|
||||
};
|
||||
};
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// MyDeviceNotifications - This is a little helper class that gets notifications
// when devices are connected/disconnected
|
||||
|
||||
@interface MyDeviceNotifications : NSObject {
|
||||
CameraIOS *camera_server;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation MyDeviceNotifications
|
||||
|
||||
- (void)devices_changed:(NSNotification *)notification {
|
||||
camera_server->update_feeds();
|
||||
}
|
||||
|
||||
- (id)initForServer:(CameraIOS *)p_server {
|
||||
if (self = [super init]) {
|
||||
camera_server = p_server;
|
||||
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasConnectedNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
// remove notifications
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
|
||||
|
||||
[super dealloc];
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
MyDeviceNotifications *device_notifications = nil;
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraIOS - Subclass for our camera server on iPhone
|
||||
|
||||
void CameraIOS::update_feeds() {
|
||||
// Enumerate the available capture devices; this uses AVCaptureDeviceDiscoverySession,
// which replaces the deprecated [AVCaptureDevice devices] approach.
|
||||
|
||||
AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:[NSArray arrayWithObjects:AVCaptureDeviceTypeBuiltInTelephotoCamera, AVCaptureDeviceTypeBuiltInDualCamera, AVCaptureDeviceTypeBuiltInTrueDepthCamera] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified];
|
||||
|
||||
// remove devices that are gone..
|
||||
for (int i = feeds.size() - 1; i >= 0; i--) {
|
||||
Ref<CameraFeedIOS> feed = (Ref<CameraFeedIOS>)feeds[i];
|
||||
|
||||
if (feed->get_is_arkit()) {
|
||||
// ignore, this is our arkit entry
|
||||
} else if (![session.devices containsObject:feed->get_device()]) {
|
||||
// remove it from our array, this will also destroy it ;)
|
||||
remove_feed(feed);
|
||||
};
|
||||
};
|
||||
|
||||
// add new devices..
|
||||
for (AVCaptureDevice *device in session.devices) {
|
||||
bool found = false;
|
||||
for (int i = 0; i < feeds.size() && !found; i++) {
|
||||
Ref<CameraFeedIOS> feed = (Ref<CameraFeedIOS>)feeds[i];
|
||||
if (feed->get_device() == device) {
|
||||
found = true;
|
||||
};
|
||||
};
|
||||
|
||||
if (!found) {
|
||||
Ref<CameraFeedIOS> newfeed;
|
||||
newfeed.instance();
|
||||
newfeed->set_device(device);
|
||||
add_feed(newfeed);
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
CameraIOS::CameraIOS() {
|
||||
[AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
|
||||
completionHandler:^(BOOL granted) {
|
||||
if (granted) {
|
||||
// Find available cameras we have at this time
|
||||
update_feeds();
|
||||
|
||||
// should only have one of these....
|
||||
device_notifications = [[MyDeviceNotifications alloc] initForServer:this];
|
||||
} else {
|
||||
print_line("No access to cameras!");
|
||||
}
|
||||
}];
|
||||
};
|
||||
|
||||
CameraIOS::~CameraIOS() {
|
||||
[device_notifications release];
|
||||
};
|
@ -144,6 +144,7 @@ def configure(env):
|
||||
'-framework', 'CoreAudio',
|
||||
'-framework', 'CoreGraphics',
|
||||
'-framework', 'CoreMedia',
|
||||
'-framework', 'CoreVideo',
|
||||
'-framework', 'CoreMotion',
|
||||
'-framework', 'Foundation',
|
||||
'-framework', 'GameController',
|
||||
|
@ -167,6 +167,8 @@ Error OSIPhone::initialize(const VideoMode &p_desired, int p_video_driver, int p
|
||||
|
||||
input = memnew(InputDefault);
|
||||
|
||||
camera_server = memnew(CameraIOS);
|
||||
|
||||
#ifdef GAME_CENTER_ENABLED
|
||||
game_center = memnew(GameCenter);
|
||||
Engine::get_singleton()->add_singleton(Engine::Singleton("GameCenter", game_center));
|
||||
@ -361,6 +363,11 @@ void OSIPhone::finalize() {
|
||||
if (main_loop) // should not happen?
|
||||
memdelete(main_loop);
|
||||
|
||||
if (camera_server) {
|
||||
memdelete(camera_server);
|
||||
camera_server = NULL;
|
||||
}
|
||||
|
||||
visual_server->finish();
|
||||
memdelete(visual_server);
|
||||
// memdelete(rasterizer);
|
||||
|
@ -37,6 +37,7 @@
|
||||
#include "drivers/coreaudio/audio_driver_coreaudio.h"
|
||||
#include "drivers/unix/os_unix.h"
|
||||
|
||||
#include "camera_ios.h"
|
||||
#include "game_center.h"
|
||||
#include "icloud.h"
|
||||
#include "in_app_store.h"
|
||||
@ -60,6 +61,8 @@ private:
|
||||
|
||||
AudioDriverCoreAudio audio_driver;
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
#ifdef GAME_CENTER_ENABLED
|
||||
GameCenter *game_center;
|
||||
#endif
|
||||
|
@ -942,6 +942,8 @@ Error OS_JavaScript::initialize(const VideoMode &p_desired, int p_video_driver,
|
||||
VisualServer *visual_server = memnew(VisualServerRaster());
|
||||
input = memnew(InputDefault);
|
||||
|
||||
camera_server = memnew(CameraServer);
|
||||
|
||||
EMSCRIPTEN_RESULT result;
|
||||
#define EM_CHECK(ev) \
|
||||
if (result != EMSCRIPTEN_RESULT_SUCCESS) \
|
||||
@ -1076,6 +1078,7 @@ void OS_JavaScript::delete_main_loop() {
|
||||
|
||||
void OS_JavaScript::finalize() {
|
||||
|
||||
memdelete(camera_server);
|
||||
memdelete(input);
|
||||
}
|
||||
|
||||
|
@ -35,6 +35,7 @@
|
||||
#include "drivers/unix/os_unix.h"
|
||||
#include "main/input_default.h"
|
||||
#include "servers/audio_server.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual/rasterizer.h"
|
||||
|
||||
#include <emscripten/html5.h>
|
||||
@ -65,6 +66,8 @@ class OS_JavaScript : public OS_Unix {
|
||||
int64_t sync_wait_time;
|
||||
int64_t last_sync_check_time;
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
static EM_BOOL fullscreen_change_callback(int p_event_type, const EmscriptenFullscreenChangeEvent *p_event, void *p_user_data);
|
||||
|
||||
static EM_BOOL keydown_callback(int p_event_type, const EmscriptenKeyboardEvent *p_event, void *p_user_data);
|
||||
|
@ -13,6 +13,7 @@ files = [
|
||||
'dir_access_osx.mm',
|
||||
'joypad_osx.cpp',
|
||||
'power_osx.cpp',
|
||||
'camera_osx.mm',
|
||||
]
|
||||
|
||||
prog = env.add_program('#bin/godot', files)
|
||||
|
47  platform/osx/camera_osx.h  Normal file
@@ -0,0 +1,47 @@
|
||||
/*************************************************************************/
|
||||
/* camera_osx.h */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#ifndef CAMERAOSX_H
|
||||
#define CAMERAOSX_H
|
||||
|
||||
///@TODO this is a near duplicate of CameraIOS, we should find a way to combine those to minimise code duplication!!!!
// If you fix something here, make sure you fix it there as well!
|
||||
|
||||
#include "servers/camera_server.h"
|
||||
|
||||
class CameraOSX : public CameraServer {
|
||||
public:
|
||||
CameraOSX();
|
||||
~CameraOSX();
|
||||
|
||||
void update_feeds();
|
||||
};
|
||||
|
||||
#endif /* CAMERAOSX_H */
|
362
platform/osx/camera_osx.mm
Normal file
@ -0,0 +1,362 @@
|
||||
/*************************************************************************/
|
||||
/* camera_osx.mm */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
///@TODO this is a near duplicate of CameraIOS, we should find a way to combine those to minimise code duplication!!!!
// If you fix something here, make sure you fix it there as well!
|
||||
|
||||
#include "camera_osx.h"
|
||||
#include "servers/camera/camera_feed.h"
|
||||
#import <AVFoundation/AVFoundation.h>
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// MyCaptureSession - This is a little helper class so we can capture our frames
|
||||
|
||||
@interface MyCaptureSession : AVCaptureSession <AVCaptureVideoDataOutputSampleBufferDelegate> {
|
||||
Ref<CameraFeed> feed;
|
||||
size_t width[2];
|
||||
size_t height[2];
|
||||
PoolVector<uint8_t> img_data[2];
|
||||
|
||||
AVCaptureDeviceInput *input;
|
||||
AVCaptureVideoDataOutput *output;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation MyCaptureSession
|
||||
|
||||
- (id)initForFeed:(Ref<CameraFeed>)p_feed andDevice:(AVCaptureDevice *)p_device {
|
||||
if (self = [super init]) {
|
||||
NSError *error;
|
||||
feed = p_feed;
|
||||
width[0] = 0;
|
||||
height[0] = 0;
|
||||
width[1] = 0;
|
||||
height[1] = 0;
|
||||
|
||||
[self beginConfiguration];
|
||||
|
||||
input = [AVCaptureDeviceInput deviceInputWithDevice:p_device error:&error];
|
||||
if (!input) {
|
||||
print_line("Couldn't get input device for camera");
|
||||
} else {
|
||||
[self addInput:input];
|
||||
}
|
||||
|
||||
output = [AVCaptureVideoDataOutput new];
|
||||
if (!output) {
|
||||
print_line("Couldn't get output device for camera");
|
||||
} else {
|
||||
NSDictionary *settings = @{ (NSString *)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange) };
|
||||
output.videoSettings = settings;
|
||||
|
||||
// discard if the data output queue is blocked (as we process the still image)
|
||||
[output setAlwaysDiscardsLateVideoFrames:YES];
|
||||
|
||||
// now set ourselves as the delegate to receive new frames.
|
||||
[output setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
|
||||
|
||||
// this takes ownership
|
||||
[self addOutput:output];
|
||||
}
|
||||
|
||||
[self commitConfiguration];
|
||||
|
||||
// kick off our session..
|
||||
[self startRunning];
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)cleanup {
|
||||
// stop running
|
||||
[self stopRunning];
|
||||
|
||||
// cleanup
|
||||
[self beginConfiguration];
|
||||
|
||||
// remove input
|
||||
if (input) {
|
||||
[self removeInput:input];
|
||||
// don't release this
|
||||
input = NULL;
|
||||
}
|
||||
|
||||
// free up our output
|
||||
if (output) {
|
||||
[self removeOutput:output];
|
||||
[output setSampleBufferDelegate:nil queue:NULL];
|
||||
[output release];
|
||||
output = NULL;
|
||||
}
|
||||
|
||||
[self commitConfiguration];
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
// bye bye
|
||||
[super dealloc];
|
||||
}
|
||||
|
||||
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
|
||||
// This gets called every time our camera has a new image for us to process.
|
||||
// May need to investigate a way to throttle this if we get more images than we're rendering frames..
|
||||
|
||||
// For now, version 1, we're just doing the bare minimum to make this work...
|
||||
CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
|
||||
// int _width = CVPixelBufferGetWidth(pixelBuffer);
|
||||
// int _height = CVPixelBufferGetHeight(pixelBuffer);
|
||||
|
||||
// The documentation pages say we need to lock this, although it's not done in the samples;
// lock our base address so we can access our pixel buffers, better safe than sorry.
|
||||
CVPixelBufferLockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
||||
|
||||
// get our buffers
|
||||
unsigned char *dataY = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
|
||||
unsigned char *dataCbCr = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
|
||||
if (dataY == NULL) {
|
||||
print_line("Couldn't access Y pixel buffer data");
|
||||
} else if (dataCbCr == NULL) {
|
||||
print_line("Couldn't access CbCr pixel buffer data");
|
||||
} else {
|
||||
Ref<Image> img[2];
|
||||
|
||||
{
|
||||
// do Y
|
||||
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 0);
|
||||
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
|
||||
|
||||
if ((width[0] != new_width) || (height[0] != new_height)) {
|
||||
width[0] = new_width;
|
||||
height[0] = new_height;
|
||||
img_data[0].resize(new_width * new_height);
|
||||
}
|
||||
|
||||
PoolVector<uint8_t>::Write w = img_data[0].write();
|
||||
memcpy(w.ptr(), dataY, new_width * new_height);
|
||||
|
||||
img[0].instance();
|
||||
img[0]->create(new_width, new_height, 0, Image::FORMAT_R8, img_data[0]);
|
||||
}
|
||||
|
||||
{
|
||||
// do CbCr
|
||||
int new_width = CVPixelBufferGetWidthOfPlane(pixelBuffer, 1);
|
||||
int new_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
|
||||
|
||||
if ((width[1] != new_width) || (height[1] != new_height)) {
|
||||
width[1] = new_width;
|
||||
height[1] = new_height;
|
||||
img_data[1].resize(2 * new_width * new_height);
|
||||
}
|
||||
|
||||
PoolVector<uint8_t>::Write w = img_data[1].write();
|
||||
memcpy(w.ptr(), dataCbCr, 2 * new_width * new_height);
|
||||
|
||||
///TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
|
||||
img[1].instance();
|
||||
img[1]->create(new_width, new_height, 0, Image::FORMAT_RG8, img_data[1]);
|
||||
}
|
||||
|
||||
// set our texture...
|
||||
feed->set_YCbCr_imgs(img[0], img[1]);
|
||||
}
|
||||
|
||||
// and unlock
|
||||
CVPixelBufferUnlockBaseAddress(pixelBuffer, kCVPixelBufferLock_ReadOnly);
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraFeedOSX - Subclass for camera feeds in OSX
|
||||
|
||||
class CameraFeedOSX : public CameraFeed {
|
||||
private:
|
||||
AVCaptureDevice *device;
|
||||
MyCaptureSession *capture_session;
|
||||
|
||||
public:
|
||||
AVCaptureDevice *get_device() const;
|
||||
|
||||
CameraFeedOSX();
|
||||
~CameraFeedOSX();
|
||||
|
||||
void set_device(AVCaptureDevice *p_device);
|
||||
|
||||
bool activate_feed();
|
||||
void deactivate_feed();
|
||||
};
|
||||
|
||||
AVCaptureDevice *CameraFeedOSX::get_device() const {
|
||||
return device;
|
||||
};
|
||||
|
||||
CameraFeedOSX::CameraFeedOSX() {
|
||||
device = NULL;
|
||||
capture_session = NULL;
|
||||
};
|
||||
|
||||
void CameraFeedOSX::set_device(AVCaptureDevice *p_device) {
|
||||
device = p_device;
|
||||
[device retain];
|
||||
|
||||
// get some info
|
||||
NSString *device_name = p_device.localizedName;
|
||||
name = device_name.UTF8String;
|
||||
position = CameraFeed::FEED_UNSPECIFIED;
|
||||
if ([p_device position] == AVCaptureDevicePositionBack) {
|
||||
position = CameraFeed::FEED_BACK;
|
||||
} else if ([p_device position] == AVCaptureDevicePositionFront) {
|
||||
position = CameraFeed::FEED_FRONT;
|
||||
};
|
||||
};
|
||||
|
||||
CameraFeedOSX::~CameraFeedOSX() {
|
||||
if (capture_session != NULL) {
|
||||
[capture_session release];
|
||||
capture_session = NULL;
|
||||
};
|
||||
|
||||
if (device != NULL) {
|
||||
[device release];
|
||||
device = NULL;
|
||||
};
|
||||
};
|
||||
|
||||
bool CameraFeedOSX::activate_feed() {
|
||||
if (capture_session) {
|
||||
// already recording!
|
||||
} else {
|
||||
// start camera capture
|
||||
capture_session = [[MyCaptureSession alloc] initForFeed:this andDevice:device];
|
||||
};
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
void CameraFeedOSX::deactivate_feed() {
|
||||
// end camera capture if we have one
|
||||
if (capture_session) {
|
||||
[capture_session cleanup];
|
||||
[capture_session release];
|
||||
capture_session = NULL;
|
||||
};
|
||||
};
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// MyDeviceNotifications - This is a little helper class that gets notifications
// when devices are connected/disconnected
|
||||
|
||||
@interface MyDeviceNotifications : NSObject {
|
||||
CameraOSX *camera_server;
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
@implementation MyDeviceNotifications
|
||||
|
||||
- (void)devices_changed:(NSNotification *)notification {
|
||||
camera_server->update_feeds();
|
||||
}
|
||||
|
||||
- (id)initForServer:(CameraOSX *)p_server {
|
||||
if (self = [super init]) {
|
||||
camera_server = p_server;
|
||||
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasConnectedNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(devices_changed:) name:AVCaptureDeviceWasDisconnectedNotification object:nil];
|
||||
};
|
||||
return self;
|
||||
}
|
||||
|
||||
- (void)dealloc {
|
||||
// remove notifications
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasConnectedNotification object:nil];
|
||||
[[NSNotificationCenter defaultCenter] removeObserver:self name:AVCaptureDeviceWasDisconnectedNotification object:nil];
|
||||
|
||||
[super dealloc];
|
||||
}
|
||||
|
||||
@end
|
||||
|
||||
MyDeviceNotifications *device_notifications = nil;
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraOSX - Subclass for our camera server on OSX
|
||||
|
||||
void CameraOSX::update_feeds() {
|
||||
NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo];
|
||||
|
||||
// remove devices that are gone..
|
||||
for (int i = feeds.size() - 1; i >= 0; i--) {
|
||||
Ref<CameraFeedOSX> feed = (Ref<CameraFeedOSX>)feeds[i];
|
||||
|
||||
if (![devices containsObject:feed->get_device()]) {
|
||||
// remove it from our array, this will also destroy it ;)
|
||||
remove_feed(feed);
|
||||
};
|
||||
};
|
||||
|
||||
// add new devices..
|
||||
for (AVCaptureDevice *device in devices) {
|
||||
bool found = false;
|
||||
for (int i = 0; i < feeds.size() && !found; i++) {
|
||||
Ref<CameraFeedOSX> feed = (Ref<CameraFeedOSX>)feeds[i];
|
||||
if (feed->get_device() == device) {
|
||||
found = true;
|
||||
};
|
||||
};
|
||||
|
||||
if (!found) {
|
||||
Ref<CameraFeedOSX> newfeed;
|
||||
newfeed.instance();
|
||||
newfeed->set_device(device);
|
||||
|
||||
// assume display camera so inverse
|
||||
Transform2D transform = Transform2D(-1.0, 0.0, 0.0, -1.0, 1.0, 1.0);
|
||||
newfeed->set_transform(transform);
|
||||
|
||||
add_feed(newfeed);
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
CameraOSX::CameraOSX() {
|
||||
// Find available cameras we have at this time
|
||||
update_feeds();
|
||||
|
||||
// should only have one of these....
|
||||
device_notifications = [[MyDeviceNotifications alloc] initForServer:this];
|
||||
};
|
||||
|
||||
CameraOSX::~CameraOSX() {
|
||||
[device_notifications release];
|
||||
};
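For context, the platform layer creates this class as the CameraServer singleton during OS initialization (see the OS_OSX changes further down); after that, other parts of the engine can enumerate and activate the feeds it registered. A minimal consumer-side sketch, using only methods introduced in this commit:

	// Sketch: enumerating and activating feeds registered by a platform backend.
	CameraServer *cs = CameraServer::get_singleton();
	for (int i = 0; i < cs->get_feed_count(); i++) {
		Ref<CameraFeed> feed = cs->get_feed(i);
		print_line("Feed " + itos(feed->get_id()) + ": " + feed->get_name());
		feed->set_active(true); // on OSX this starts the MyCaptureSession above
	}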
|
@ -128,7 +128,7 @@ def configure(env):
|
||||
|
||||
env.Prepend(CPPPATH=['#platform/osx'])
|
||||
env.Append(CPPFLAGS=['-DOSX_ENABLED', '-DUNIX_ENABLED', '-DGLES_ENABLED', '-DAPPLE_STYLE_KEYS', '-DCOREAUDIO_ENABLED', '-DCOREMIDI_ENABLED'])
|
||||
env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'OpenGL', '-framework', 'AGL', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-lz', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'CoreVideo'])
|
||||
env.Append(LINKFLAGS=['-framework', 'Cocoa', '-framework', 'Carbon', '-framework', 'OpenGL', '-framework', 'AGL', '-framework', 'AudioUnit', '-framework', 'CoreAudio', '-framework', 'CoreMIDI', '-lz', '-framework', 'IOKit', '-framework', 'ForceFeedback', '-framework', 'AVFoundation', '-framework', 'CoreMedia', '-framework', 'CoreVideo'])
|
||||
env.Append(LIBS=['pthread'])
|
||||
|
||||
env.Append(CCFLAGS=['-mmacosx-version-min=10.9'])
|
||||
|
@ -31,6 +31,7 @@
|
||||
#ifndef OS_OSX_H
|
||||
#define OS_OSX_H
|
||||
|
||||
#include "camera_osx.h"
|
||||
#include "core/os/input.h"
|
||||
#include "crash_handler_osx.h"
|
||||
#include "drivers/coreaudio/audio_driver_coreaudio.h"
|
||||
@ -73,6 +74,8 @@ public:
|
||||
//Rasterizer *rasterizer;
|
||||
VisualServer *visual_server;
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
List<String> args;
|
||||
MainLoop *main_loop;
|
||||
|
||||
|
@ -1542,6 +1542,8 @@ Error OS_OSX::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
|
||||
visual_server->init();
|
||||
AudioDriverManager::initialize(p_audio_driver);
|
||||
|
||||
camera_server = memnew(CameraOSX);
|
||||
|
||||
input = memnew(InputDefault);
|
||||
joypad_osx = memnew(JoypadOSX);
|
||||
|
||||
@ -1573,6 +1575,11 @@ void OS_OSX::finalize() {
|
||||
|
||||
delete_main_loop();
|
||||
|
||||
if (camera_server) {
|
||||
memdelete(camera_server);
|
||||
camera_server = NULL;
|
||||
}
|
||||
|
||||
memdelete(joypad_osx);
|
||||
memdelete(input);
|
||||
|
||||
|
@ -302,6 +302,10 @@ Error OS_UWP::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
|
||||
}
|
||||
|
||||
visual_server->init();
|
||||
|
||||
///@TODO implement a subclass for UWP and instantiate that instead
|
||||
camera_server = memnew(CameraServer);
|
||||
|
||||
input = memnew(InputDefault);
|
||||
|
||||
joypad = ref new JoypadUWP(input);
|
||||
@ -400,6 +404,8 @@ void OS_UWP::finalize() {
|
||||
|
||||
memdelete(input);
|
||||
|
||||
memdelete(camera_server);
|
||||
|
||||
joypad = nullptr;
|
||||
}
|
||||
|
||||
|
@ -41,6 +41,7 @@
|
||||
#include "main/input_default.h"
|
||||
#include "power_uwp.h"
|
||||
#include "servers/audio_server.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual/rasterizer.h"
|
||||
#include "servers/visual_server.h"
|
||||
|
||||
@ -95,6 +96,8 @@ private:
|
||||
VisualServer *visual_server;
|
||||
int pressrc;
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
ContextEGL_UWP *gl_context;
|
||||
Windows::UI::Core::CoreWindow ^ window;
|
||||
|
||||
|
@ -8,6 +8,7 @@ import platform_windows_builders
|
||||
|
||||
common_win = [
|
||||
"godot_windows.cpp",
|
||||
"camera_win.cpp",
|
||||
"context_gl_windows.cpp",
|
||||
"crash_handler_windows.cpp",
|
||||
"os_windows.cpp",
|
||||
|
94
platform/windows/camera_win.cpp
Normal file
@ -0,0 +1,94 @@
|
||||
/*************************************************************************/
|
||||
/* camera_win.cpp */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#include "camera_win.h"
|
||||
|
||||
///@TODO sorry guys, I got about 80% through implementing this using DirectShow, only to find out Microsoft deprecated half the API and its replacement is as confusing as they could make it.
// Joey suggested looking into libuvc, which offers a more direct route to webcams over USB and looks very promising, but it wouldn't compile on Windows for me...
// I've gutted the classes in which I implemented DirectShow, just to have a skeleton for someone to work on; mail me for more details or if you want a copy.
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraFeedWindows - Subclass for our camera feed on windows
|
||||
|
||||
/// @TODO need to implement this
|
||||
|
||||
class CameraFeedWindows : public CameraFeed {
|
||||
private:
|
||||
protected:
|
||||
public:
|
||||
CameraFeedWindows();
|
||||
virtual ~CameraFeedWindows();
|
||||
|
||||
bool activate_feed();
|
||||
void deactivate_feed();
|
||||
};
|
||||
|
||||
CameraFeedWindows::CameraFeedWindows(){
|
||||
///@TODO implement this, should store information about our available camera
|
||||
};
|
||||
|
||||
CameraFeedWindows::~CameraFeedWindows() {
|
||||
// make sure we stop recording if we are!
|
||||
if (is_active()) {
|
||||
deactivate_feed();
|
||||
};
|
||||
|
||||
///@TODO free up anything used by this
|
||||
};
|
||||
|
||||
bool CameraFeedWindows::activate_feed() {
|
||||
///@TODO this should activate our camera and start the process of capturing frames
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
///@TODO we should probably have a callback method here that is called by the camera API whenever it provides a frame, and which then calls back into the CameraServer to update our texture
|
||||
|
||||
void CameraFeedWindows::deactivate_feed(){
|
||||
///@TODO this should deactivate our camera and stop the process of capturing frames
|
||||
};
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////
|
||||
// CameraWindows - Subclass for our camera server on windows
|
||||
|
||||
void CameraWindows::add_active_cameras(){
|
||||
///@TODO scan through any active cameras and create CameraFeedWindows objects for them
|
||||
};
|
||||
|
||||
CameraWindows::CameraWindows() {
|
||||
// Find cameras active right now
|
||||
add_active_cameras();
|
||||
|
||||
// need to add something that will react to devices being connected/removed...
|
||||
};
|
||||
|
||||
CameraWindows::~CameraWindows(){
|
||||
|
||||
};
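A hedged sketch of how the skeleton above could eventually hand frames to the engine. The capture layer mentioned here is purely hypothetical (no Windows capture API is wired up in this commit); only add_feed() and CameraFeed::set_RGB_img() are real integration points provided by this change:

	// Hypothetical sketch, not part of this commit: what a finished backend
	// would do once a capture API (Media Foundation, libuvc, ...) is chosen.
	// Inside CameraWindows::add_active_cameras(), for each detected device:
	Ref<CameraFeedWindows> feed;
	feed.instance();
	add_feed(feed); // makes the feed visible through the CameraServer
	// ...and from the capture callback, after converting the raw buffer to an RGB8 Ref<Image>:
	// feed->set_RGB_img(rgb_frame); // pushes the frame into the feed's texture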
|
46
platform/windows/camera_win.h
Normal file
@ -0,0 +1,46 @@
|
||||
/*************************************************************************/
|
||||
/* camera_win.h */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#ifndef CAMERAWIN_H
|
||||
#define CAMERAWIN_H
|
||||
|
||||
#include "servers/camera/camera_feed.h"
|
||||
#include "servers/camera_server.h"
|
||||
|
||||
class CameraWindows : public CameraServer {
|
||||
private:
|
||||
void add_active_cameras();
|
||||
|
||||
public:
|
||||
CameraWindows();
|
||||
~CameraWindows();
|
||||
};
|
||||
|
||||
#endif /* CAMERAWIN_H */
|
@ -1358,6 +1358,8 @@ Error OS_Windows::initialize(const VideoMode &p_desired, int p_video_driver, int
|
||||
|
||||
power_manager = memnew(PowerWindows);
|
||||
|
||||
camera_server = memnew(CameraWindows);
|
||||
|
||||
AudioDriverManager::initialize(p_audio_driver);
|
||||
|
||||
TRACKMOUSEEVENT tme;
|
||||
@ -1517,6 +1519,7 @@ void OS_Windows::finalize() {
|
||||
|
||||
memdelete(joypad);
|
||||
memdelete(input);
|
||||
memdelete(camera_server);
|
||||
touch_state.clear();
|
||||
|
||||
visual_server->finish();
|
||||
|
@ -31,6 +31,7 @@
|
||||
#ifndef OS_WINDOWS_H
|
||||
#define OS_WINDOWS_H
|
||||
|
||||
#include "camera_win.h"
|
||||
#include "context_gl_windows.h"
|
||||
#include "core/os/input.h"
|
||||
#include "core/os/os.h"
|
||||
@ -108,6 +109,7 @@ class OS_Windows : public OS {
|
||||
ContextGL_Windows *gl_context;
|
||||
#endif
|
||||
VisualServer *visual_server;
|
||||
CameraWindows *camera_server;
|
||||
int pressrc;
|
||||
HDC hDC; // Private GDI Device Context
|
||||
HINSTANCE hInstance; // Holds The Instance Of The Application
|
||||
|
@ -583,6 +583,9 @@ Error OS_X11::initialize(const VideoMode &p_desired, int p_video_driver, int p_a
|
||||
|
||||
AudioDriverManager::initialize(p_audio_driver);
|
||||
|
||||
///@TODO implement a subclass for Linux and instantiate that instead
|
||||
camera_server = memnew(CameraServer);
|
||||
|
||||
input = memnew(InputDefault);
|
||||
|
||||
window_has_focus = true; // Set focus to true at init
|
||||
@ -783,6 +786,8 @@ void OS_X11::finalize() {
|
||||
|
||||
memdelete(input);
|
||||
|
||||
memdelete(camera_server);
|
||||
|
||||
visual_server->finish();
|
||||
memdelete(visual_server);
|
||||
//memdelete(rasterizer);
|
||||
|
@ -42,6 +42,7 @@
|
||||
#include "main/input_default.h"
|
||||
#include "power_x11.h"
|
||||
#include "servers/audio_server.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual/rasterizer.h"
|
||||
#include "servers/visual_server.h"
|
||||
//#include "servers/visual/visual_server_wrap_mt.h"
|
||||
@ -146,6 +147,8 @@ class OS_X11 : public OS_Unix {
|
||||
void get_key_modifier_state(unsigned int p_x11_state, Ref<InputEventWithModifiers> state);
|
||||
void flush_mouse_motion();
|
||||
|
||||
CameraServer *camera_server;
|
||||
|
||||
MouseMode mouse_mode;
|
||||
Point2i center;
|
||||
|
||||
|
@ -646,6 +646,7 @@ void register_scene_types() {
|
||||
ClassDB::register_class<GradientTexture>();
|
||||
ClassDB::register_class<ProxyTexture>();
|
||||
ClassDB::register_class<AnimatedTexture>();
|
||||
ClassDB::register_class<CameraTexture>();
|
||||
ClassDB::register_class<CubeMap>();
|
||||
ClassDB::register_virtual_class<TextureLayered>();
|
||||
ClassDB::register_class<Texture3D>();
|
||||
|
@ -111,6 +111,11 @@ void Environment::set_ambient_light_sky_contribution(float p_energy) {
|
||||
VS::get_singleton()->environment_set_ambient_light(environment, ambient_color, ambient_energy, ambient_sky_contribution);
|
||||
}
|
||||
|
||||
void Environment::set_camera_feed_id(int p_camera_feed_id) {
|
||||
camera_feed_id = p_camera_feed_id;
|
||||
VS::get_singleton()->environment_set_camera_feed_id(environment, camera_feed_id);
|
||||
};
|
||||
|
||||
Environment::BGMode Environment::get_background() const {
|
||||
|
||||
return bg_mode;
|
||||
@ -165,6 +170,10 @@ float Environment::get_ambient_light_sky_contribution() const {
|
||||
|
||||
return ambient_sky_contribution;
|
||||
}
|
||||
int Environment::get_camera_feed_id(void) const {
|
||||
|
||||
return camera_feed_id;
|
||||
}
|
||||
|
||||
void Environment::set_tonemapper(ToneMapper p_tone_mapper) {
|
||||
|
||||
@ -321,6 +330,12 @@ void Environment::_validate_property(PropertyInfo &property) const {
|
||||
}
|
||||
}
|
||||
|
||||
if (property.name == "background_camera_feed_id") {
|
||||
if (bg_mode != BG_CAMERA_FEED) {
|
||||
property.usage = PROPERTY_USAGE_NOEDITOR;
|
||||
}
|
||||
}
|
||||
|
||||
static const char *hide_prefixes[] = {
|
||||
"fog_",
|
||||
"auto_exposure_",
|
||||
@ -946,6 +961,7 @@ void Environment::_bind_methods() {
|
||||
ClassDB::bind_method(D_METHOD("set_ambient_light_color", "color"), &Environment::set_ambient_light_color);
|
||||
ClassDB::bind_method(D_METHOD("set_ambient_light_energy", "energy"), &Environment::set_ambient_light_energy);
|
||||
ClassDB::bind_method(D_METHOD("set_ambient_light_sky_contribution", "energy"), &Environment::set_ambient_light_sky_contribution);
|
||||
ClassDB::bind_method(D_METHOD("set_camera_feed_id", "camera_feed_id"), &Environment::set_camera_feed_id);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("get_background"), &Environment::get_background);
|
||||
ClassDB::bind_method(D_METHOD("get_sky"), &Environment::get_sky);
|
||||
@ -959,9 +975,10 @@ void Environment::_bind_methods() {
|
||||
ClassDB::bind_method(D_METHOD("get_ambient_light_color"), &Environment::get_ambient_light_color);
|
||||
ClassDB::bind_method(D_METHOD("get_ambient_light_energy"), &Environment::get_ambient_light_energy);
|
||||
ClassDB::bind_method(D_METHOD("get_ambient_light_sky_contribution"), &Environment::get_ambient_light_sky_contribution);
|
||||
ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &Environment::get_camera_feed_id);
|
||||
|
||||
ADD_GROUP("Background", "background_");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "background_mode", PROPERTY_HINT_ENUM, "Clear Color,Custom Color,Sky,Color+Sky,Canvas,Keep"), "set_background", "get_background");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "background_mode", PROPERTY_HINT_ENUM, "Clear Color,Custom Color,Sky,Color+Sky,Canvas,Keep,Camera Feed"), "set_background", "get_background");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "background_sky", PROPERTY_HINT_RESOURCE_TYPE, "Sky"), "set_sky", "get_sky");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::REAL, "background_sky_custom_fov", PROPERTY_HINT_RANGE, "0,180,0.1"), "set_sky_custom_fov", "get_sky_custom_fov");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::BASIS, "background_sky_orientation"), "set_sky_orientation", "get_sky_orientation");
|
||||
@ -970,6 +987,7 @@ void Environment::_bind_methods() {
|
||||
ADD_PROPERTY(PropertyInfo(Variant::COLOR, "background_color"), "set_bg_color", "get_bg_color");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::REAL, "background_energy", PROPERTY_HINT_RANGE, "0,16,0.01"), "set_bg_energy", "get_bg_energy");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "background_canvas_max_layer", PROPERTY_HINT_RANGE, "-1000,1000,1"), "set_canvas_max_layer", "get_canvas_max_layer");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "background_camera_feed_id", PROPERTY_HINT_RANGE, "1,10,1"), "set_camera_feed_id", "get_camera_feed_id");
|
||||
ADD_GROUP("Ambient Light", "ambient_light_");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::COLOR, "ambient_light_color"), "set_ambient_light_color", "get_ambient_light_color");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::REAL, "ambient_light_energy", PROPERTY_HINT_RANGE, "0,16,0.01"), "set_ambient_light_energy", "get_ambient_light_energy");
|
||||
@ -1265,6 +1283,7 @@ void Environment::_bind_methods() {
|
||||
BIND_ENUM_CONSTANT(BG_SKY);
|
||||
BIND_ENUM_CONSTANT(BG_COLOR_SKY);
|
||||
BIND_ENUM_CONSTANT(BG_CANVAS);
|
||||
BIND_ENUM_CONSTANT(BG_CAMERA_FEED);
|
||||
BIND_ENUM_CONSTANT(BG_MAX);
|
||||
|
||||
BIND_ENUM_CONSTANT(GLOW_BLEND_MODE_ADDITIVE);
|
||||
@ -1310,6 +1329,7 @@ Environment::Environment() :
|
||||
ambient_energy = 1.0;
|
||||
//ambient_sky_contribution = 1.0;
|
||||
set_ambient_light_sky_contribution(1.0);
|
||||
set_camera_feed_id(1);
|
||||
|
||||
tone_mapper = TONE_MAPPER_LINEAR;
|
||||
tonemap_exposure = 1.0;
|
||||
|
@ -49,6 +49,7 @@ public:
|
||||
BG_COLOR_SKY,
|
||||
BG_CANVAS,
|
||||
BG_KEEP,
|
||||
BG_CAMERA_FEED,
|
||||
BG_MAX
|
||||
};
|
||||
|
||||
@ -98,6 +99,7 @@ private:
|
||||
Color ambient_color;
|
||||
float ambient_energy;
|
||||
float ambient_sky_contribution;
|
||||
int camera_feed_id;
|
||||
|
||||
ToneMapper tone_mapper;
|
||||
float tonemap_exposure;
|
||||
@ -192,6 +194,7 @@ public:
|
||||
void set_ambient_light_color(const Color &p_color);
|
||||
void set_ambient_light_energy(float p_energy);
|
||||
void set_ambient_light_sky_contribution(float p_energy);
|
||||
void set_camera_feed_id(int p_camera_feed_id);
|
||||
|
||||
BGMode get_background() const;
|
||||
Ref<Sky> get_sky() const;
|
||||
@ -205,6 +208,7 @@ public:
|
||||
Color get_ambient_light_color() const;
|
||||
float get_ambient_light_energy() const;
|
||||
float get_ambient_light_sky_contribution() const;
|
||||
int get_camera_feed_id(void) const;
|
||||
|
||||
void set_tonemapper(ToneMapper p_tone_mapper);
|
||||
ToneMapper get_tonemapper() const;
|
||||
|
@ -36,6 +36,7 @@
|
||||
#include "core/os/os.h"
|
||||
#include "mesh.h"
|
||||
#include "scene/resources/bit_map.h"
|
||||
#include "servers/camera/camera_feed.h"
|
||||
|
||||
Size2 Texture::get_size() const {
|
||||
|
||||
@ -2498,3 +2499,107 @@ String ResourceFormatLoaderTextureLayered::get_resource_type(const String &p_pat
|
||||
return "TextureArray";
|
||||
return "";
|
||||
}
|
||||
|
||||
void CameraTexture::_bind_methods() {
|
||||
ClassDB::bind_method(D_METHOD("set_camera_feed_id", "feed_id"), &CameraTexture::set_camera_feed_id);
|
||||
ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &CameraTexture::get_camera_feed_id);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("set_which_feed", "which_feed"), &CameraTexture::set_which_feed);
|
||||
ClassDB::bind_method(D_METHOD("get_which_feed"), &CameraTexture::get_which_feed);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("set_camera_active", "active"), &CameraTexture::set_camera_active);
|
||||
ClassDB::bind_method(D_METHOD("get_camera_active"), &CameraTexture::get_camera_active);
|
||||
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "camera_feed_id"), "set_camera_feed_id", "get_camera_feed_id");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::INT, "which_feed"), "set_which_feed", "get_which_feed");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "camera_is_active"), "set_camera_active", "get_camera_active");
|
||||
}
|
||||
|
||||
int CameraTexture::get_width() const {
|
||||
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
|
||||
if (feed.is_valid()) {
|
||||
return feed->get_base_width();
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
int CameraTexture::get_height() const {
|
||||
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
|
||||
if (feed.is_valid()) {
|
||||
return feed->get_base_height();
|
||||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
bool CameraTexture::has_alpha() const {
|
||||
return false;
|
||||
}
|
||||
|
||||
RID CameraTexture::get_rid() const {
|
||||
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
|
||||
if (feed.is_valid()) {
|
||||
return feed->get_texture(which_feed);
|
||||
} else {
|
||||
return RID();
|
||||
}
|
||||
}
|
||||
|
||||
void CameraTexture::set_flags(uint32_t p_flags) {
|
||||
// not supported
|
||||
}
|
||||
|
||||
uint32_t CameraTexture::get_flags() const {
|
||||
// not supported
|
||||
return 0;
|
||||
}
|
||||
|
||||
Ref<Image> CameraTexture::get_data() const {
|
||||
// not (yet) supported
|
||||
return Ref<Image>();
|
||||
}
|
||||
|
||||
void CameraTexture::set_camera_feed_id(int p_new_id) {
|
||||
camera_feed_id = p_new_id;
|
||||
_change_notify();
|
||||
}
|
||||
|
||||
int CameraTexture::get_camera_feed_id() const {
|
||||
return camera_feed_id;
|
||||
}
|
||||
|
||||
void CameraTexture::set_which_feed(CameraServer::FeedImage p_which) {
|
||||
which_feed = p_which;
|
||||
_change_notify();
|
||||
}
|
||||
|
||||
CameraServer::FeedImage CameraTexture::get_which_feed() const {
|
||||
return which_feed;
|
||||
}
|
||||
|
||||
void CameraTexture::set_camera_active(bool p_active) {
|
||||
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
|
||||
if (feed.is_valid()) {
|
||||
feed->set_active(p_active);
|
||||
_change_notify();
|
||||
}
|
||||
}
|
||||
|
||||
bool CameraTexture::get_camera_active() const {
|
||||
Ref<CameraFeed> feed = CameraServer::get_singleton()->get_feed_by_id(camera_feed_id);
|
||||
if (feed.is_valid()) {
|
||||
return feed->is_active();
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
CameraTexture::CameraTexture() {
|
||||
camera_feed_id = 0;
|
||||
which_feed = CameraServer::FEED_RGBA_IMAGE;
|
||||
}
|
||||
|
||||
CameraTexture::~CameraTexture() {
|
||||
// nothing to do here yet
|
||||
}
|
||||
|
@ -39,6 +39,7 @@
|
||||
#include "core/resource.h"
|
||||
#include "scene/resources/curve.h"
|
||||
#include "scene/resources/gradient.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual_server.h"
|
||||
|
||||
/**
|
||||
@ -740,4 +741,38 @@ public:
|
||||
~AnimatedTexture();
|
||||
};
|
||||
|
||||
class CameraTexture : public Texture {
|
||||
GDCLASS(CameraTexture, Texture)
|
||||
|
||||
private:
|
||||
int camera_feed_id;
|
||||
CameraServer::FeedImage which_feed;
|
||||
|
||||
protected:
|
||||
static void _bind_methods();
|
||||
|
||||
public:
|
||||
virtual int get_width() const;
|
||||
virtual int get_height() const;
|
||||
virtual RID get_rid() const;
|
||||
virtual bool has_alpha() const;
|
||||
|
||||
virtual void set_flags(uint32_t p_flags);
|
||||
virtual uint32_t get_flags() const;
|
||||
|
||||
virtual Ref<Image> get_data() const;
|
||||
|
||||
void set_camera_feed_id(int p_new_id);
|
||||
int get_camera_feed_id() const;
|
||||
|
||||
void set_which_feed(CameraServer::FeedImage p_which);
|
||||
CameraServer::FeedImage get_which_feed() const;
|
||||
|
||||
void set_camera_active(bool p_active);
|
||||
bool get_camera_active() const;
|
||||
|
||||
CameraTexture();
|
||||
~CameraTexture();
|
||||
};
|
||||
|
||||
#endif
|
||||
|
@ -6,6 +6,7 @@ env.servers_sources = []
|
||||
env.add_source_files(env.servers_sources, "*.cpp")
|
||||
|
||||
SConscript('arvr/SCsub')
|
||||
SConscript('camera/SCsub')
|
||||
SConscript('physics/SCsub')
|
||||
SConscript('physics_2d/SCsub')
|
||||
SConscript('visual/SCsub')
|
||||
|
@ -56,6 +56,7 @@ void ARVRInterface::_bind_methods() {
|
||||
// but we do have properties specific to AR....
|
||||
ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &ARVRInterface::get_anchor_detection_is_enabled);
|
||||
ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &ARVRInterface::set_anchor_detection_is_enabled);
|
||||
ClassDB::bind_method(D_METHOD("get_camera_feed_id"), &ARVRInterface::get_camera_feed_id);
|
||||
|
||||
ADD_GROUP("AR", "ar_");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "ar_is_anchor_detection_enabled"), "set_anchor_detection_is_enabled", "get_anchor_detection_is_enabled");
|
||||
@ -136,3 +137,9 @@ bool ARVRInterface::get_anchor_detection_is_enabled() const {
|
||||
void ARVRInterface::set_anchor_detection_is_enabled(bool p_enable){
|
||||
// don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc.
|
||||
};
|
||||
|
||||
int ARVRInterface::get_camera_feed_id() {
|
||||
// don't return anything here; AR interfaces that display a camera feed need to override this and return the feed id of their camera
|
||||
|
||||
return 0;
|
||||
};
|
||||
|
@ -101,6 +101,7 @@ public:
|
||||
/** specific to AR **/
|
||||
virtual bool get_anchor_detection_is_enabled() const;
|
||||
virtual void set_anchor_detection_is_enabled(bool p_enable);
|
||||
virtual int get_camera_feed_id();
|
||||
|
||||
/** rendering and internal **/
|
||||
|
||||
|
7
servers/camera/SCsub
Normal file
@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env python
|
||||
|
||||
Import('env')
|
||||
|
||||
env.add_source_files(env.servers_sources, "*.cpp")
|
||||
|
||||
Export('env')
|
266
servers/camera/camera_feed.cpp
Normal file
@ -0,0 +1,266 @@
|
||||
/*************************************************************************/
|
||||
/* camera_feed.cpp */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#include "camera_feed.h"
|
||||
#include "servers/visual_server.h"
|
||||
|
||||
void CameraFeed::_bind_methods() {
|
||||
// The setters prefixed with _ are only exposed so we can have feeds through GDNative!
|
||||
// They should not be called by the end user.
|
||||
|
||||
ClassDB::bind_method(D_METHOD("get_id"), &CameraFeed::get_id);
|
||||
ClassDB::bind_method(D_METHOD("get_name"), &CameraFeed::get_name);
|
||||
ClassDB::bind_method(D_METHOD("_set_name", "name"), &CameraFeed::set_name);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("is_active"), &CameraFeed::is_active);
|
||||
ClassDB::bind_method(D_METHOD("set_active", "active"), &CameraFeed::set_active);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("get_position"), &CameraFeed::get_position);
|
||||
ClassDB::bind_method(D_METHOD("_set_position", "position"), &CameraFeed::set_position);
|
||||
|
||||
// Note, for transform some feeds may override what the user sets (such as ARKit)
|
||||
ClassDB::bind_method(D_METHOD("get_transform"), &CameraFeed::get_transform);
|
||||
ClassDB::bind_method(D_METHOD("set_transform", "transform"), &CameraFeed::set_transform);
|
||||
|
||||
ClassDB::bind_method(D_METHOD("_set_RGB_img", "rgb_img"), &CameraFeed::set_RGB_img);
|
||||
ClassDB::bind_method(D_METHOD("_set_YCbCr_img", "ycbcr_img"), &CameraFeed::set_YCbCr_img);
|
||||
ClassDB::bind_method(D_METHOD("_set_YCbCr_imgs", "y_img", "cbcr_img"), &CameraFeed::set_YCbCr_imgs);
|
||||
ClassDB::bind_method(D_METHOD("_allocate_texture", "width", "height", "format", "texture_type", "data_type"), &CameraFeed::allocate_texture);
|
||||
|
||||
ADD_GROUP("Feed", "feed_");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "feed_is_active"), "set_active", "is_active");
|
||||
ADD_PROPERTY(PropertyInfo(Variant::TRANSFORM2D, "feed_transform"), "set_transform", "get_transform");
|
||||
|
||||
BIND_ENUM_CONSTANT(FEED_NOIMAGE);
|
||||
BIND_ENUM_CONSTANT(FEED_RGB);
|
||||
BIND_ENUM_CONSTANT(FEED_YCbCr);
|
||||
BIND_ENUM_CONSTANT(FEED_YCbCr_Sep);
|
||||
|
||||
BIND_ENUM_CONSTANT(FEED_UNSPECIFIED);
|
||||
BIND_ENUM_CONSTANT(FEED_FRONT);
|
||||
BIND_ENUM_CONSTANT(FEED_BACK);
|
||||
}
|
||||
|
||||
int CameraFeed::get_id() const {
|
||||
return id;
|
||||
}
|
||||
|
||||
bool CameraFeed::is_active() const {
|
||||
return active;
|
||||
}
|
||||
|
||||
void CameraFeed::set_active(bool p_is_active) {
|
||||
if (p_is_active == active) {
|
||||
// all good
|
||||
} else if (p_is_active) {
|
||||
// attempt to activate this feed
|
||||
if (activate_feed()) {
|
||||
print_line("Activate " + name);
|
||||
active = true;
|
||||
}
|
||||
} else {
|
||||
// just deactivate it
|
||||
deactivate_feed();
|
||||
print_line("Deactivate " + name);
|
||||
active = false;
|
||||
}
|
||||
}
|
||||
|
||||
String CameraFeed::get_name() const {
|
||||
return name;
|
||||
}
|
||||
|
||||
void CameraFeed::set_name(String p_name) {
|
||||
name = p_name;
|
||||
}
|
||||
|
||||
int CameraFeed::get_base_width() const {
|
||||
return base_width;
|
||||
}
|
||||
|
||||
int CameraFeed::get_base_height() const {
|
||||
return base_height;
|
||||
}
|
||||
|
||||
CameraFeed::FeedDataType CameraFeed::get_datatype() const {
|
||||
return datatype;
|
||||
}
|
||||
|
||||
CameraFeed::FeedPosition CameraFeed::get_position() const {
|
||||
return position;
|
||||
}
|
||||
|
||||
void CameraFeed::set_position(CameraFeed::FeedPosition p_position) {
|
||||
position = p_position;
|
||||
}
|
||||
|
||||
Transform2D CameraFeed::get_transform() const {
|
||||
return transform;
|
||||
}
|
||||
|
||||
void CameraFeed::set_transform(const Transform2D &p_transform) {
|
||||
transform = p_transform;
|
||||
}
|
||||
|
||||
RID CameraFeed::get_texture(CameraServer::FeedImage p_which) {
|
||||
return texture[p_which];
|
||||
}
|
||||
|
||||
CameraFeed::CameraFeed() {
|
||||
// initialize our feed
|
||||
id = CameraServer::get_singleton()->get_free_id();
base_width = 0;
base_height = 0;
name = "???";
|
||||
active = false;
|
||||
datatype = CameraFeed::FEED_RGB;
|
||||
position = CameraFeed::FEED_UNSPECIFIED;
|
||||
transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
|
||||
|
||||
// create a texture object
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
texture[CameraServer::FEED_Y_IMAGE] = vs->texture_create(); // also used for RGBA
|
||||
texture[CameraServer::FEED_CbCr_IMAGE] = vs->texture_create();
|
||||
}
|
||||
|
||||
CameraFeed::CameraFeed(String p_name, FeedPosition p_position) {
|
||||
// initialize our feed
|
||||
id = CameraServer::get_singleton()->get_free_id();
|
||||
base_width = 0;
|
||||
base_height = 0;
|
||||
name = p_name;
|
||||
active = false;
|
||||
datatype = CameraFeed::FEED_NOIMAGE;
|
||||
position = p_position;
|
||||
transform = Transform2D(1.0, 0.0, 0.0, -1.0, 0.0, 1.0);
|
||||
|
||||
// create a texture object
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
texture[CameraServer::FEED_Y_IMAGE] = vs->texture_create(); // also used for RGBA
|
||||
texture[CameraServer::FEED_CbCr_IMAGE] = vs->texture_create();
|
||||
}
|
||||
|
||||
CameraFeed::~CameraFeed() {
|
||||
// Free our textures
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
vs->free(texture[CameraServer::FEED_Y_IMAGE]);
|
||||
vs->free(texture[CameraServer::FEED_CbCr_IMAGE]);
|
||||
}
|
||||
|
||||
void CameraFeed::set_RGB_img(Ref<Image> p_rgb_img) {
|
||||
if (active) {
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
|
||||
int new_width = p_rgb_img->get_width();
|
||||
int new_height = p_rgb_img->get_height();
|
||||
|
||||
if ((base_width != new_width) || (base_height != new_height)) {
|
||||
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
|
||||
base_width = new_width;
|
||||
base_height = new_height;
|
||||
|
||||
vs->texture_allocate(texture[CameraServer::FEED_RGBA_IMAGE], new_width, new_height, 0, Image::FORMAT_RGB8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAGS_DEFAULT);
|
||||
}
|
||||
|
||||
vs->texture_set_data(texture[CameraServer::FEED_RGBA_IMAGE], p_rgb_img);
|
||||
datatype = CameraFeed::FEED_RGB;
|
||||
}
|
||||
}
|
||||
|
||||
void CameraFeed::set_YCbCr_img(Ref<Image> p_ycbcr_img) {
|
||||
if (active) {
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
|
||||
int new_width = p_ycbcr_img->get_width();
|
||||
int new_height = p_ycbcr_img->get_height();
|
||||
|
||||
if ((base_width != new_width) || (base_height != new_height)) {
|
||||
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
|
||||
base_width = new_width;
|
||||
base_height = new_height;
|
||||
|
||||
vs->texture_allocate(texture[CameraServer::FEED_RGBA_IMAGE], new_width, new_height, 0, Image::FORMAT_RGB8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAGS_DEFAULT);
|
||||
}
|
||||
|
||||
vs->texture_set_data(texture[CameraServer::FEED_RGBA_IMAGE], p_ycbcr_img);
|
||||
datatype = CameraFeed::FEED_YCbCr;
|
||||
}
|
||||
}
|
||||
|
||||
void CameraFeed::set_YCbCr_imgs(Ref<Image> p_y_img, Ref<Image> p_cbcr_img) {
|
||||
if (active) {
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
|
||||
///@TODO investigate whether we can use thirdparty/misc/yuv2rgb.h here to convert our YUV data to RGB, our shader approach is potentially faster though..
|
||||
// Wondering about including that into multiple projects, may cause issues.
|
||||
// That said, if we convert to RGB, we could enable using texture resources again...
|
||||
|
||||
int new_y_width = p_y_img->get_width();
|
||||
int new_y_height = p_y_img->get_height();
|
||||
int new_cbcr_width = p_cbcr_img->get_width();
|
||||
int new_cbcr_height = p_cbcr_img->get_height();
|
||||
|
||||
if ((base_width != new_y_width) || (base_height != new_y_height)) {
|
||||
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
|
||||
base_width = new_y_width;
|
||||
base_height = new_y_height;
|
||||
|
||||
vs->texture_allocate(texture[CameraServer::FEED_Y_IMAGE], new_y_width, new_y_height, 0, Image::FORMAT_R8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAG_USED_FOR_STREAMING);
|
||||
|
||||
///@TODO GLES2 doesn't support FORMAT_RG8, need to do some form of conversion
|
||||
vs->texture_allocate(texture[CameraServer::FEED_CbCr_IMAGE], new_cbcr_width, new_cbcr_height, 0, Image::FORMAT_RG8, VS::TEXTURE_TYPE_2D, VS::TEXTURE_FLAG_USED_FOR_STREAMING);
|
||||
}
|
||||
|
||||
vs->texture_set_data(texture[CameraServer::FEED_Y_IMAGE], p_y_img);
|
||||
vs->texture_set_data(texture[CameraServer::FEED_CbCr_IMAGE], p_cbcr_img);
|
||||
datatype = CameraFeed::FEED_YCbCr_Sep;
|
||||
}
|
||||
}
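The split Y and CbCr planes uploaded above are recombined on the GPU when the feed is used as the environment background; per pixel that combination is, to a good approximation, the standard full-range BT.601 mapping. A CPU-side illustration (assumption: BT.601 coefficients; this is not code from this commit):

	// Illustrative only: full-range BT.601 YCbCr -> RGB, the kind of conversion
	// applied in a shader to the FEED_Y_IMAGE / FEED_CbCr_IMAGE pair.
	static void ycbcr_to_rgb(uint8_t y, uint8_t cb, uint8_t cr, uint8_t &r, uint8_t &g, uint8_t &b) {
		float Y = y;
		float Cb = cb - 128.0f;
		float Cr = cr - 128.0f;
		r = CLAMP(int(Y + 1.402f * Cr), 0, 255);
		g = CLAMP(int(Y - 0.344136f * Cb - 0.714136f * Cr), 0, 255);
		b = CLAMP(int(Y + 1.772f * Cb), 0, 255);
	}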
|
||||
|
||||
void CameraFeed::allocate_texture(int p_width, int p_height, Image::Format p_format, VisualServer::TextureType p_texture_type, FeedDataType p_data_type) {
|
||||
VisualServer *vs = VisualServer::get_singleton();
|
||||
|
||||
if ((base_width != p_width) || (base_height != p_height)) {
|
||||
// We're assuming here that our camera image doesn't change around formats etc, allocate the whole lot...
|
||||
base_width = p_width;
|
||||
base_height = p_height;
|
||||
|
||||
vs->texture_allocate(texture[0], p_width, p_height, 0, p_format, p_texture_type, VS::TEXTURE_FLAGS_DEFAULT);
|
||||
}
|
||||
|
||||
datatype = p_data_type;
|
||||
}
|
||||
|
||||
bool CameraFeed::activate_feed() {
|
||||
// nothing to do here
|
||||
return true;
|
||||
}
|
||||
|
||||
void CameraFeed::deactivate_feed() {
|
||||
// nothing to do here
|
||||
}
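The activate_feed()/deactivate_feed() defaults above are no-ops; platform backends override them and then push frames through the set_*_img() helpers each time the camera delivers an image. A condensed sketch of that pattern (mirroring what camera_osx.mm does above):

	// Condensed sketch of the platform-feed pattern used by the OSX backend.
	class CameraFeedExample : public CameraFeed {
	public:
		virtual bool activate_feed() {
			// start the platform capture session here; return false on failure
			return true;
		}
		virtual void deactivate_feed() {
			// stop the platform capture session here
		}
		void frame_arrived(Ref<Image> p_y_plane, Ref<Image> p_cbcr_plane) {
			// hand the new frame to the engine; textures are (re)allocated when the size changes
			set_YCbCr_imgs(p_y_plane, p_cbcr_plane);
		}
	};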
|
115
servers/camera/camera_feed.h
Normal file
@ -0,0 +1,115 @@
|
||||
/*************************************************************************/
|
||||
/* camera_feed.h */
|
||||
/*************************************************************************/
|
||||
/* This file is part of: */
|
||||
/* GODOT ENGINE */
|
||||
/* http://www.godotengine.org */
|
||||
/*************************************************************************/
|
||||
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
|
||||
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
|
||||
/* */
|
||||
/* Permission is hereby granted, free of charge, to any person obtaining */
|
||||
/* a copy of this software and associated documentation files (the */
|
||||
/* "Software"), to deal in the Software without restriction, including */
|
||||
/* without limitation the rights to use, copy, modify, merge, publish, */
|
||||
/* distribute, sublicense, and/or sell copies of the Software, and to */
|
||||
/* permit persons to whom the Software is furnished to do so, subject to */
|
||||
/* the following conditions: */
|
||||
/* */
|
||||
/* The above copyright notice and this permission notice shall be */
|
||||
/* included in all copies or substantial portions of the Software. */
|
||||
/* */
|
||||
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
|
||||
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
|
||||
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
|
||||
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
|
||||
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
|
||||
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
|
||||
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
|
||||
/*************************************************************************/
|
||||
|
||||
#ifndef CAMERA_FEED_H
|
||||
#define CAMERA_FEED_H
|
||||
|
||||
#include "core/image.h"
|
||||
#include "core/math/transform_2d.h"
|
||||
#include "servers/camera_server.h"
|
||||
#include "servers/visual_server.h"
|
||||
|
||||
/**
|
||||
@author Bastiaan Olij <mux213@gmail.com>
|
||||
|
||||
The camera server is a singleton object that gives access to the various
|
||||
camera feeds that can be used as the background for our environment.
|
||||
**/
|
||||
|
||||
class CameraFeed : public Reference {
|
||||
GDCLASS(CameraFeed, Reference);
|
||||
|
||||
public:
|
||||
enum FeedDataType {
|
||||
FEED_NOIMAGE, // we don't have an image yet
|
||||
FEED_RGB, // our texture will contain a normal RGB texture that can be used directly
|
||||
FEED_YCbCr, // our texture will contain a YCbCr texture that needs to be converted to RGB before output
|
||||
FEED_YCbCr_Sep // our camera is split into two textures, first plane contains Y data, second plane contains CbCr data
|
||||
};
|
||||
|
||||
enum FeedPosition {
|
||||
FEED_UNSPECIFIED, // we have no idea
|
||||
FEED_FRONT, // this is a camera on the front of the device
|
||||
FEED_BACK // this is a camera on the back of the device
|
||||
};
|
||||
|
||||
private:
|
||||
int id; // unique id for this, for internal use in case feeds are removed
|
||||
int base_width;
|
||||
int base_height;
|
||||
|
||||
protected:
|
||||
String name; // name of our camera feed
|
||||
FeedDataType datatype; // type of texture data stored
|
||||
FeedPosition position; // position of camera on the device
|
||||
Transform2D transform; // display transform
|
||||
|
||||
bool active; // only when active do we actually update the camera texture each frame
|
||||
RID texture[CameraServer::FEED_IMAGES]; // texture images needed for this
|
||||
|
||||
static void _bind_methods();
|
||||
|
||||
public:
|
||||
int get_id() const;
|
||||
bool is_active() const;
|
||||
void set_active(bool p_is_active);
|
||||
|
||||
String get_name() const;
|
||||
void set_name(String p_name);
|
||||
|
||||
int get_base_width() const;
|
||||
int get_base_height() const;
|
||||
|
||||
FeedPosition get_position() const;
|
||||
void set_position(FeedPosition p_position);
|
||||
|
||||
Transform2D get_transform() const;
|
||||
void set_transform(const Transform2D &p_transform);
|
||||
|
||||
RID get_texture(CameraServer::FeedImage p_which);
|
||||
|
||||
CameraFeed();
|
||||
CameraFeed(String p_name, FeedPosition p_position = CameraFeed::FEED_UNSPECIFIED);
|
||||
virtual ~CameraFeed();
|
||||
|
||||
FeedDataType get_datatype() const;
|
||||
void set_RGB_img(Ref<Image> p_rgb_img);
|
||||
void set_YCbCr_img(Ref<Image> p_ycbcr_img);
|
||||
void set_YCbCr_imgs(Ref<Image> p_y_img, Ref<Image> p_cbcr_img);
|
||||
void allocate_texture(int p_width, int p_height, Image::Format p_format, VisualServer::TextureType p_texture_type, FeedDataType p_data_type);
|
||||
|
||||
virtual bool activate_feed();
|
||||
virtual void deactivate_feed();
|
||||
};
|
||||
|
||||
VARIANT_ENUM_CAST(CameraFeed::FeedDataType);
|
||||
VARIANT_ENUM_CAST(CameraFeed::FeedPosition);
|
||||
|
||||
#endif /* !CAMERA_FEED_H */
|
169
servers/camera_server.cpp
Normal file
@ -0,0 +1,169 @@
/*************************************************************************/
/* camera_server.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/

#include "camera_server.h"
#include "servers/camera/camera_feed.h"
#include "visual_server.h"

////////////////////////////////////////////////////////
// CameraServer

void CameraServer::_bind_methods() {
	ClassDB::bind_method(D_METHOD("get_feed", "index"), &CameraServer::get_feed);
	ClassDB::bind_method(D_METHOD("get_feed_count"), &CameraServer::get_feed_count);
	ClassDB::bind_method(D_METHOD("feeds"), &CameraServer::get_feeds);

	ClassDB::bind_method(D_METHOD("add_feed", "feed"), &CameraServer::add_feed);
	ClassDB::bind_method(D_METHOD("remove_feed", "feed"), &CameraServer::remove_feed);

	ADD_SIGNAL(MethodInfo("camera_feed_added", PropertyInfo(Variant::INT, "id")));
	ADD_SIGNAL(MethodInfo("camera_feed_removed", PropertyInfo(Variant::INT, "id")));

	BIND_ENUM_CONSTANT(FEED_RGBA_IMAGE);
	BIND_ENUM_CONSTANT(FEED_YCbCr_IMAGE);
	BIND_ENUM_CONSTANT(FEED_Y_IMAGE);
	BIND_ENUM_CONSTANT(FEED_CbCr_IMAGE);
};

CameraServer *CameraServer::singleton = NULL;

CameraServer *CameraServer::get_singleton() {
	return singleton;
};

int CameraServer::get_free_id() {
	bool id_exists = true;
	int newid = 0;

	// find a free id
	while (id_exists) {
		newid++;
		id_exists = false;
		for (int i = 0; i < feeds.size() && !id_exists; i++) {
			if (feeds[i]->get_id() == newid) {
				id_exists = true;
			};
		};
	};

	return newid;
};

int CameraServer::get_feed_index(int p_id) {
	for (int i = 0; i < feeds.size(); i++) {
		if (feeds[i]->get_id() == p_id) {
			return i;
		};
	};

	return -1;
};

Ref<CameraFeed> CameraServer::get_feed_by_id(int p_id) {
	int index = get_feed_index(p_id);

	if (index == -1) {
		return NULL;
	} else {
		return feeds[index];
	}
};

void CameraServer::add_feed(const Ref<CameraFeed> &p_feed) {
	// add our feed
	feeds.push_back(p_feed);

	// record for debugging
#ifdef DEBUG_ENABLED
	print_line("Registered camera " + p_feed->get_name() + " with id " + itos(p_feed->get_id()) + " position " + itos(p_feed->get_position()) + " at index " + itos(feeds.size() - 1));
#endif

	// let whoever is interested know
	emit_signal("camera_feed_added", p_feed->get_id());
};

void CameraServer::remove_feed(const Ref<CameraFeed> &p_feed) {
	for (int i = 0; i < feeds.size(); i++) {
		if (feeds[i] == p_feed) {
			int feed_id = p_feed->get_id();

			// record for debugging
#ifdef DEBUG_ENABLED
			print_line("Removed camera " + p_feed->get_name() + " with id " + itos(feed_id) + " position " + itos(p_feed->get_position()));
#endif

			// remove it from our array, if this results in our feed being unreferenced it will be destroyed
			feeds.remove(i);

			// let whoever is interested know
			emit_signal("camera_feed_removed", feed_id);
			return;
		};
	};
};

Ref<CameraFeed> CameraServer::get_feed(int p_index) {
	ERR_FAIL_INDEX_V(p_index, feeds.size(), NULL);

	return feeds[p_index];
};

int CameraServer::get_feed_count() {
	return feeds.size();
};

Array CameraServer::get_feeds() {
	Array return_feeds;
	int cc = get_feed_count();
	return_feeds.resize(cc);

	for (int i = 0; i < feeds.size(); i++) {
		return_feeds[i] = get_feed(i);
	};

	return return_feeds;
};

RID CameraServer::feed_texture(int p_id, CameraServer::FeedImage p_texture) {
	int index = get_feed_index(p_id);
	ERR_FAIL_COND_V(index == -1, RID());

	Ref<CameraFeed> feed = get_feed(index);

	return feed->get_texture(p_texture);
};

CameraServer::CameraServer() {
	singleton = this;
};

CameraServer::~CameraServer() {
	singleton = NULL;
};
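For context, add_feed()/remove_feed() above are what platform code calls when a camera device appears or disappears. A registration sketch, reusing the hypothetical CameraFeedExample subclass from earlier (how the feed's id gets assigned is not shown in this file, so it is left implicit here):

// somewhere in platform-specific camera code (sketch)
Ref<CameraFeedExample> feed;
feed.instance();
feed->set_name("Front camera"); // hypothetical device name
feed->set_position(CameraFeed::FEED_FRONT);
CameraServer::get_singleton()->add_feed(feed);

// later, when the device goes away:
CameraServer::get_singleton()->remove_feed(feed);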
96
servers/camera_server.h
Normal file
@ -0,0 +1,96 @@
/*************************************************************************/
/* camera_server.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/

#ifndef CAMERA_SERVER_H
#define CAMERA_SERVER_H

#include "core/object.h"
#include "core/os/thread_safe.h"
#include "core/reference.h"
#include "core/rid.h"
#include "core/variant.h"

/**
	@author Bastiaan Olij <mux213@gmail.com>

	The camera server is a singleton object that gives access to the various
	camera feeds that can be used as the background for our environment.
**/

class CameraFeed;

class CameraServer : public Object {
	GDCLASS(CameraServer, Object);
	_THREAD_SAFE_CLASS_

public:
	enum FeedImage {
		FEED_RGBA_IMAGE = 0,
		FEED_YCbCr_IMAGE = 0,
		FEED_Y_IMAGE = 0,
		FEED_CbCr_IMAGE = 1,
		FEED_IMAGES = 2
	};

private:
protected:
	Vector<Ref<CameraFeed> > feeds;

	static CameraServer *singleton;

	static void _bind_methods();

public:
	static CameraServer *get_singleton();

	// Right now we identify our feed by its ID when it's used in the background.
	// May see if we can change this to relying purely on CameraFeed objects or by name.
	int get_free_id();
	int get_feed_index(int p_id);
	Ref<CameraFeed> get_feed_by_id(int p_id);

	// add and remove feeds
	void add_feed(const Ref<CameraFeed> &p_feed);
	void remove_feed(const Ref<CameraFeed> &p_feed);

	// get our feeds
	Ref<CameraFeed> get_feed(int p_idx);
	int get_feed_count();
	Array get_feeds();

	RID feed_texture(int p_id, FeedImage p_texture);

	CameraServer();
	~CameraServer();
};

VARIANT_ENUM_CAST(CameraServer::FeedImage);

#endif /* CAMERA_SERVER_H */
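Consumers go the other way: they ask the singleton what feeds exist and activate the one they want. A short sketch of enumerating feeds from C++ using only the API declared above (assumes servers/camera_server.h and servers/camera/camera_feed.h are included; picking the back camera is just an example criterion):

CameraServer *server = CameraServer::get_singleton();
for (int i = 0; i < server->get_feed_count(); i++) {
	Ref<CameraFeed> feed = server->get_feed(i);
	if (feed->get_position() == CameraFeed::FEED_BACK) {
		feed->set_active(true); // start capturing frames from this camera
		break;
	}
}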
@ -54,6 +54,8 @@
#include "audio/effects/audio_effect_stereo_enhance.h"
#include "audio/effects/audio_stream_generator.h"
#include "audio_server.h"
#include "camera/camera_feed.h"
#include "camera_server.h"
#include "core/script_debugger_remote.h"
#include "physics/physics_server_sw.h"
#include "physics_2d/physics_2d_server_sw.h"
@ -114,6 +116,7 @@ void register_server_types() {
	ClassDB::register_virtual_class<PhysicsServer>();
	ClassDB::register_virtual_class<Physics2DServer>();
	ClassDB::register_class<ARVRServer>();
	ClassDB::register_class<CameraServer>();

	shader_types = memnew(ShaderTypes);

@ -169,6 +172,8 @@ void register_server_types() {
		ClassDB::register_virtual_class<AudioEffectSpectrumAnalyzerInstance>();
	}

	ClassDB::register_class<CameraFeed>();

	ClassDB::register_virtual_class<Physics2DDirectBodyState>();
	ClassDB::register_virtual_class<Physics2DDirectSpaceState>();
	ClassDB::register_virtual_class<Physics2DShapeQueryResult>();
@ -208,4 +213,5 @@ void register_server_singletons() {
	Engine::get_singleton()->add_singleton(Engine::Singleton("PhysicsServer", PhysicsServer::get_singleton()));
	Engine::get_singleton()->add_singleton(Engine::Singleton("Physics2DServer", Physics2DServer::get_singleton()));
	Engine::get_singleton()->add_singleton(Engine::Singleton("ARVRServer", ARVRServer::get_singleton()));
	Engine::get_singleton()->add_singleton(Engine::Singleton("CameraServer", CameraServer::get_singleton()));
}
@ -60,6 +60,7 @@ public:
	virtual void environment_set_bg_energy(RID p_env, float p_energy) = 0;
	virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer) = 0;
	virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) = 0;
	virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) = 0;

	virtual void environment_set_dof_blur_near(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) = 0;
	virtual void environment_set_dof_blur_far(RID p_env, bool p_enable, float p_distance, float p_transition, float p_far_amount, VS::EnvironmentDOFBlurQuality p_quality) = 0;
@ -204,6 +205,7 @@ public:
	virtual uint32_t texture_get_height(RID p_texture) const = 0;
	virtual uint32_t texture_get_depth(RID p_texture) const = 0;
	virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) = 0;
	virtual void texture_bind(RID p_texture, uint32_t p_texture_no) = 0;

	virtual void texture_set_path(RID p_texture, const String &p_path) = 0;
	virtual String texture_get_path(RID p_texture) const = 0;
@ -159,6 +159,7 @@ public:
	BIND1RC(uint32_t, texture_get_height, RID)
	BIND1RC(uint32_t, texture_get_depth, RID)
	BIND4(texture_set_size_override, RID, int, int, int)
	BIND2(texture_bind, RID, uint32_t)

	BIND3(texture_set_detect_3d_callback, RID, TextureDetectCallback, void *)
	BIND3(texture_set_detect_srgb_callback, RID, TextureDetectCallback, void *)
@ -503,6 +504,7 @@ public:
	BIND2(environment_set_bg_energy, RID, float)
	BIND2(environment_set_canvas_max_layer, RID, int)
	BIND4(environment_set_ambient_light, RID, const Color &, float, float)
	BIND2(environment_set_camera_feed_id, RID, int)
	BIND7(environment_set_ssr, RID, bool, int, float, float, float, bool)
	BIND13(environment_set_ssao, RID, bool, float, float, float, float, float, float, float, const Color &, EnvironmentSSAOQuality, EnvironmentSSAOBlur, float)

@ -95,6 +95,7 @@ public:
	FUNC1RC(uint32_t, texture_get_height, RID)
	FUNC1RC(uint32_t, texture_get_depth, RID)
	FUNC4(texture_set_size_override, RID, int, int, int)
	FUNC2(texture_bind, RID, uint32_t)

	FUNC3(texture_set_detect_3d_callback, RID, TextureDetectCallback, void *)
	FUNC3(texture_set_detect_srgb_callback, RID, TextureDetectCallback, void *)
@ -430,6 +431,7 @@ public:
	FUNC2(environment_set_bg_energy, RID, float)
	FUNC2(environment_set_canvas_max_layer, RID, int)
	FUNC4(environment_set_ambient_light, RID, const Color &, float, float)
	FUNC2(environment_set_camera_feed_id, RID, int)
	FUNC7(environment_set_ssr, RID, bool, int, float, float, float, bool)
	FUNC13(environment_set_ssao, RID, bool, float, float, float, float, float, float, float, const Color &, EnvironmentSSAOQuality, EnvironmentSSAOBlur, float)
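The BINDn entries added above and the FUNCn entries added to the thread-safe wrapper follow the existing pattern used for every VisualServer call: the former generates a direct forwarding method, the latter a wrapper that can queue the call for the server thread when the VisualServer runs threaded. Conceptually, FUNC2(environment_set_camera_feed_id, RID, int) produces a method of roughly this shape (a sketch of the generated code, not the literal macro expansion; the member names visual_server, server_thread and command_queue are assumptions about the surrounding class):

void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) {
	if (Thread::get_caller_id() != server_thread) {
		// called from another thread: queue the call for the server thread
		command_queue.push(visual_server, &VisualServer::environment_set_camera_feed_id, p_env, p_camera_feed_id);
	} else {
		// already on the server thread: call straight through
		visual_server->environment_set_camera_feed_id(p_env, p_camera_feed_id);
	}
}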
@ -1676,6 +1676,7 @@ void VisualServer::_bind_methods() {
	ClassDB::bind_method(D_METHOD("texture_set_path", "texture", "path"), &VisualServer::texture_set_path);
	ClassDB::bind_method(D_METHOD("texture_get_path", "texture"), &VisualServer::texture_get_path);
	ClassDB::bind_method(D_METHOD("texture_set_shrink_all_x2_on_set_data", "shrink"), &VisualServer::texture_set_shrink_all_x2_on_set_data);
	ClassDB::bind_method(D_METHOD("texture_bind", "texture", "number"), &VisualServer::texture_bind);

	ClassDB::bind_method(D_METHOD("texture_debug_usage"), &VisualServer::_texture_debug_usage_bind);
	ClassDB::bind_method(D_METHOD("textures_keep_original", "enable"), &VisualServer::textures_keep_original);

@ -140,6 +140,7 @@ public:
	virtual uint32_t texture_get_height(RID p_texture) const = 0;
	virtual uint32_t texture_get_depth(RID p_texture) const = 0;
	virtual void texture_set_size_override(RID p_texture, int p_width, int p_height, int p_depth_3d) = 0;
	virtual void texture_bind(RID p_texture, uint32_t p_texture_no) = 0;

	virtual void texture_set_path(RID p_texture, const String &p_path) = 0;
	virtual String texture_get_path(RID p_texture) const = 0;
@ -707,6 +708,7 @@ public:
		ENV_BG_COLOR_SKY,
		ENV_BG_CANVAS,
		ENV_BG_KEEP,
		ENV_BG_CAMERA_FEED,
		ENV_BG_MAX
	};

@ -718,6 +720,7 @@ public:
	virtual void environment_set_bg_energy(RID p_env, float p_energy) = 0;
	virtual void environment_set_canvas_max_layer(RID p_env, int p_max_layer) = 0;
	virtual void environment_set_ambient_light(RID p_env, const Color &p_color, float p_energy = 1.0, float p_sky_contribution = 0.0) = 0;
	virtual void environment_set_camera_feed_id(RID p_env, int p_camera_feed_id) = 0;

	//set default SSAO options
	//set default SSR options
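Taken together, ENV_BG_CAMERA_FEED and environment_set_camera_feed_id are what let an AR interface put the camera image behind the rendered scene. A wiring sketch (env is assumed to be an existing environment RID and feed_id a value obtained from ARVRInterface::get_camera_feed_id(); environment_set_background is the pre-existing VisualServer call for selecting the background mode):

VisualServer *vs = VisualServer::get_singleton();
vs->environment_set_background(env, VisualServer::ENV_BG_CAMERA_FEED); // show a camera feed as the background
vs->environment_set_camera_feed_id(env, feed_id); // select which feed to display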