@@ -1,6 +1,10 @@
 use crate::{
-    camera::CameraProjection, prelude::Image, render_asset::RenderAssets,
-    render_resource::TextureView, view::ExtractedWindows,
+    camera::CameraProjection,
+    prelude::Image,
+    primitives::{Line, Plane},
+    render_asset::RenderAssets,
+    render_resource::TextureView,
+    view::ExtractedWindows,
 };
 use bevy_asset::{AssetEvent, Assets, Handle};
 use bevy_ecs::{
@@ -12,7 +16,7 @@ use bevy_ecs::{
     reflect::ReflectComponent,
     system::{QuerySet, Res},
 };
-use bevy_math::{Mat4, UVec2, Vec2, Vec3};
+use bevy_math::{Mat4, UVec2, Vec2, Vec3, Vec4};
 use bevy_reflect::{Reflect, ReflectDeserialize};
 use bevy_transform::components::GlobalTransform;
 use bevy_utils::HashSet;
@@ -138,6 +142,74 @@ impl Camera {
             None
         }
     }
+
+    /// Given a position in screen space, compute the world-space line that corresponds to it.
+    pub fn screen_to_world_ray(
+        &self,
+        pos_screen: Vec2,
+        windows: &Windows,
+        images: &Assets<Image>,
+        camera_transform: &GlobalTransform,
+    ) -> Line {
+        let camera_to_world = camera_transform.compute_matrix();
+        let window_size = self.target.get_logical_size(windows, images).unwrap();
+        let projection_matrix = self.projection_matrix;
+
+        // Normalized device coordinate cursor position, from (-1, -1, -1) to (1, 1, 1)
+        let cursor_ndc = (pos_screen / window_size) * 2.0 - Vec2::ONE;
+        let cursor_pos_ndc_near: Vec3 = cursor_ndc.extend(-1.0);
+        let cursor_pos_ndc_far: Vec3 = cursor_ndc.extend(1.0);
+
+        // Use the near and far NDC points to generate a ray in world space. This is more
+        // robust than using the camera's location as the ray origin, because an orthographic
+        // camera has its focal point at infinity!
+        let ndc_to_world: Mat4 = camera_to_world * projection_matrix.inverse();
+        let cursor_pos_near: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_near);
+        let cursor_pos_far: Vec3 = ndc_to_world.project_point3(cursor_pos_ndc_far);
+        let ray_direction = cursor_pos_far - cursor_pos_near;
+        Line::from_point_direction(cursor_pos_near, ray_direction)
+    }
+
+    /// Given a position in screen space and a plane in world space, compute the point on that
+    /// plane that the screen-space position corresponds to. In 2D, use `screen_to_point_2d`.
+    pub fn screen_to_point_on_plane(
+        &self,
+        pos_screen: Vec2,
+        plane: Plane,
+        windows: &Windows,
+        images: &Assets<Image>,
+        camera_transform: &GlobalTransform,
+    ) -> Option<Vec3> {
+        let world_ray = self.screen_to_world_ray(pos_screen, windows, images, camera_transform);
+        // A denominator of zero means the ray is parallel to the plane and never intersects it.
+        let denominator = world_ray.direction.dot(plane.normal());
+        if denominator.abs() < f32::EPSILON {
+            None
+        } else {
+            // Signed distance from the ray origin to the plane, measured along the ray direction.
+            let distance = -plane.normal_d().dot(world_ray.point.extend(1.0)) / denominator;
+            Some(world_ray.point + world_ray.direction * distance)
+        }
+    }
+
+    /// Computes the world position for a given screen position.
+    /// The output will always be on the XY plane with Z at zero. It is designed for 2D, but
+    /// also works with a 3D camera. For more flexibility in 3D, use `screen_to_point_on_plane`.
+    pub fn screen_to_point_2d(
+        &self,
+        pos_screen: Vec2,
+        windows: &Windows,
+        images: &Assets<Image>,
+        camera_transform: &GlobalTransform,
+    ) -> Option<Vec3> {
+        self.screen_to_point_on_plane(
+            pos_screen,
+            Plane::new(Vec4::new(0.0, 0.0, 1.0, 0.0)),
+            windows,
+            images,
+            camera_transform,
+        )
+    }
 }
 
 #[allow(clippy::type_complexity)]
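
For context, below is a minimal usage sketch (not part of the commit) showing how the new methods might be called from a user-side system. The system name, the primary-window cursor lookup, and the ground-plane value are illustrative assumptions; only `screen_to_point_2d`, `screen_to_point_on_plane`, and their parameters come from the diff above, and the import path for `Plane` assumes the existing `bevy::render::primitives` module.

use bevy::prelude::*;
use bevy::render::primitives::Plane;

// Hypothetical system for illustration; the name and query layout are not part of the PR.
fn log_cursor_world_position(
    windows: Res<Windows>,
    images: Res<Assets<Image>>,
    cameras: Query<(&Camera, &GlobalTransform)>,
) {
    // Use the primary window's cursor position, if the cursor is inside the window.
    let cursor_screen = match windows.get_primary().and_then(|w| w.cursor_position()) {
        Some(pos) => pos,
        None => return,
    };
    for (camera, camera_transform) in cameras.iter() {
        // 2D: project the cursor onto the XY plane at Z = 0.
        if let Some(point) =
            camera.screen_to_point_2d(cursor_screen, &windows, &images, camera_transform)
        {
            info!("cursor over XY plane at {:?}", point);
        }

        // 3D: project the cursor onto a ground plane (normal +Y, through the origin),
        // encoded as (normal, d) just like `Plane::new(Vec4)` in the diff.
        let ground = Plane::new(Vec4::new(0.0, 1.0, 0.0, 0.0));
        if let Some(point) = camera.screen_to_point_on_plane(
            cursor_screen,
            ground,
            &windows,
            &images,
            camera_transform,
        ) {
            info!("cursor over ground plane at {:?}", point);
        }
    }
}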