This is page 6 of 9. Use http://codebase.md/mehmetoguzderin/shaderc-vkrunner-mcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .devcontainer
│ ├── devcontainer.json
│ ├── docker-compose.yml
│ └── Dockerfile
├── .gitattributes
├── .github
│ └── workflows
│ └── build-push-image.yml
├── .gitignore
├── .vscode
│ └── mcp.json
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── LICENSE
├── README.adoc
├── shaderc-vkrunner-mcp.jpg
├── src
│ └── main.rs
└── vkrunner
├── .editorconfig
├── .gitignore
├── .gitlab-ci.yml
├── build.rs
├── Cargo.toml
├── COPYING
├── examples
│ ├── compute-shader.shader_test
│ ├── cooperative-matrix.shader_test
│ ├── depth-buffer.shader_test
│ ├── desc_set_and_binding.shader_test
│ ├── entrypoint.shader_test
│ ├── float-framebuffer.shader_test
│ ├── frexp.shader_test
│ ├── geometry.shader_test
│ ├── indices.shader_test
│ ├── layouts.shader_test
│ ├── properties.shader_test
│ ├── push-constants.shader_test
│ ├── require-subgroup-size.shader_test
│ ├── row-major.shader_test
│ ├── spirv.shader_test
│ ├── ssbo.shader_test
│ ├── tolerance.shader_test
│ ├── tricolore.shader_test
│ ├── ubo.shader_test
│ ├── vertex-data-piglit.shader_test
│ └── vertex-data.shader_test
├── include
│ ├── vk_video
│ │ ├── vulkan_video_codec_av1std_decode.h
│ │ ├── vulkan_video_codec_av1std_encode.h
│ │ ├── vulkan_video_codec_av1std.h
│ │ ├── vulkan_video_codec_h264std_decode.h
│ │ ├── vulkan_video_codec_h264std_encode.h
│ │ ├── vulkan_video_codec_h264std.h
│ │ ├── vulkan_video_codec_h265std_decode.h
│ │ ├── vulkan_video_codec_h265std_encode.h
│ │ ├── vulkan_video_codec_h265std.h
│ │ └── vulkan_video_codecs_common.h
│ └── vulkan
│ ├── vk_platform.h
│ ├── vulkan_core.h
│ └── vulkan.h
├── precompile-script.py
├── README.md
├── scripts
│ └── update-vulkan.sh
├── src
│ └── main.rs
├── test-build.sh
└── vkrunner
├── allocate_store.rs
├── buffer.rs
├── compiler
│ └── fake_process.rs
├── compiler.rs
├── config.rs
├── context.rs
├── enum_table.rs
├── env_var_test.rs
├── executor.rs
├── fake_vulkan.rs
├── features.rs
├── flush_memory.rs
├── format_table.rs
├── format.rs
├── half_float.rs
├── hex.rs
├── inspect.rs
├── lib.rs
├── logger.rs
├── make-enums.py
├── make-features.py
├── make-formats.py
├── make-pipeline-key-data.py
├── make-vulkan-funcs-data.py
├── parse_num.rs
├── pipeline_key_data.rs
├── pipeline_key.rs
├── pipeline_set.rs
├── requirements.rs
├── result.rs
├── script.rs
├── shader_stage.rs
├── slot.rs
├── small_float.rs
├── source.rs
├── stream.rs
├── temp_file.rs
├── tester.rs
├── tolerance.rs
├── util.rs
├── vbo.rs
├── vk.rs
├── vulkan_funcs_data.rs
├── vulkan_funcs.rs
├── window_format.rs
└── window.rs
```
# Files
--------------------------------------------------------------------------------
/vkrunner/vkrunner/requirements.rs:
--------------------------------------------------------------------------------
```rust
1 | // vkrunner
2 | //
3 | // Copyright (C) 2019 Intel Corporation
4 | // Copyright 2023 Neil Roberts
5 | //
6 | // Permission is hereby granted, free of charge, to any person obtaining a
7 | // copy of this software and associated documentation files (the "Software"),
8 | // to deal in the Software without restriction, including without limitation
9 | // the rights to use, copy, modify, merge, publish, distribute, sublicense,
10 | // and/or sell copies of the Software, and to permit persons to whom the
11 | // Software is furnished to do so, subject to the following conditions:
12 | //
13 | // The above copyright notice and this permission notice (including the next
14 | // paragraph) shall be included in all copies or substantial portions of the
15 | // Software.
16 | //
17 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
20 | // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
22 | // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
23 | // DEALINGS IN THE SOFTWARE.
24 |
25 | use crate::vk;
26 | use crate::vulkan_funcs;
27 | use crate::vulkan_funcs::{NEXT_PTR_OFFSET, FIRST_FEATURE_OFFSET};
28 | use crate::result;
29 | use std::mem;
30 | use std::collections::{HashMap, HashSet};
31 | use std::ffi::CStr;
32 | use std::convert::TryInto;
33 | use std::fmt;
34 | use std::cell::UnsafeCell;
35 | use std::ptr;
36 |
37 |
38 | #[derive(Debug)]
39 | struct Extension {
40 | // The name of the extension that provides this set of features
41 | // stored as a null-terminated byte sequence. It is stored this
42 | // way because that is how bindgen generates string literals from
43 | // headers.
44 | name_bytes: &'static [u8],
45 | // The size of the corresponding features struct
46 | struct_size: usize,
47 | // The enum for this struct
48 | struct_type: vk::VkStructureType,
49 | // List of feature names in this extension in the order they
50 | // appear in the features struct. The names are as written in the
51 | // struct definition.
52 | features: &'static [&'static str],
53 | }
54 |
55 | impl Extension {
56 | fn name(&self) -> &str {
57 | // The name comes from static data so it should always be
58 | // valid
59 | CStr::from_bytes_with_nul(self.name_bytes)
60 | .unwrap()
61 | .to_str()
62 | .unwrap()
63 | }
64 | }
65 |
66 | include!("features.rs");
67 |
68 | // Lazily generated extensions data used for passing to Vulkan
69 | struct LazyExtensions {
70 | // All of the null-terminated strings concatenated into a buffer
71 | strings: Vec<u8>,
72 | // Pointers to the start of each string
73 | pointers: Vec<* const u8>,
74 | }
75 |
76 | // Lazily generated structures data used for passing to Vulkan
77 | struct LazyStructures {
78 | // A buffer used to return a linked chain of structs that can be
79 | // accessed from C.
80 | list: Vec<u8>,
81 | }
82 |
83 | // Number of features in VkPhysicalDeviceFeatures. The struct is just
84 | // a series of VkBool32 so we should be able to safely calculate the
85 | // number based on the struct size.
86 | const N_PHYSICAL_DEVICE_FEATURES: usize =
87 | mem::size_of::<vk::VkPhysicalDeviceFeatures>()
88 | / mem::size_of::<vk::VkBool32>();
89 |
90 | // Lazily generated VkPhysicalDeviceFeatures struct to pass to Vulkan
91 | struct LazyBaseFeatures {
92 |     // Array big enough to store a VkPhysicalDeviceFeatures struct.
93 | // It’s easier to manipulate as an array instead of the actual
94 | // struct because we want to update the bools by index.
95 | bools: [vk::VkBool32; N_PHYSICAL_DEVICE_FEATURES],
96 | }
97 |
98 | #[derive(Debug, Default, Clone)]
99 | pub struct CooperativeMatrix {
100 | pub m_size: Option<u32>,
101 | pub n_size: Option<u32>,
102 | pub k_size: Option<u32>,
103 | pub a_type: Option<vk::VkComponentTypeKHR>,
104 | pub b_type: Option<vk::VkComponentTypeKHR>,
105 | pub c_type: Option<vk::VkComponentTypeKHR>,
106 | pub result_type: Option<vk::VkComponentTypeKHR>,
107 | pub saturating_accumulation: Option<vk::VkBool32>,
108 | pub scope: Option<vk::VkScopeKHR>,
109 |
110 | pub line: String,
111 | }
112 |
113 | #[derive(Debug)]
114 | pub struct Requirements {
115 | // Minimum vulkan version
116 | version: u32,
117 | // Set of extension names required
118 | extensions: HashSet<String>,
119 | // A map indexed by extension number. The value is an array of
120 | // bools representing whether we need each feature in the
121 | // extension’s feature struct.
122 | features: HashMap<usize, Box<[bool]>>,
123 | // An array of bools corresponding to each feature in the base
124 | // VkPhysicalDeviceFeatures struct
125 | base_features: [bool; N_BASE_FEATURES],
126 | // Required subgroup size to be used with
127 | // VkPipelineShaderStageRequiredSubgroupSizeCreateInfo.
128 | pub required_subgroup_size: Option<u32>,
129 |
130 | cooperative_matrix_reqs: Vec<CooperativeMatrix>,
131 |
132 | // The rest of the struct is lazily created from the above data
133 | // and shouldn’t be part of the PartialEq implementation.
134 |
135 | lazy_extensions: UnsafeCell<Option<LazyExtensions>>,
136 | lazy_structures: UnsafeCell<Option<LazyStructures>>,
137 | lazy_base_features: UnsafeCell<Option<LazyBaseFeatures>>,
138 | }
139 |
140 | /// Error returned by [Requirements::check]
141 | #[derive(Debug)]
142 | pub enum Error {
143 | EnumerateDeviceExtensionPropertiesFailed,
144 | ExtensionMissingNullTerminator,
145 | ExtensionInvalidUtf8,
146 | /// A required base feature from VkPhysicalDeviceFeatures is missing.
147 | MissingBaseFeature(usize),
148 | /// A required extension is missing. The string is the name of the
149 | /// extension.
150 | MissingExtension(String),
151 | /// A required feature is missing.
152 | MissingFeature { extension: usize, feature: usize },
153 | /// The API version reported by the driver is too low
154 | VersionTooLow { required_version: u32, actual_version: u32 },
155 | /// Required subgroup size out of supported range.
156 | RequiredSubgroupSizeInvalid { size: u32, min: u32, max: u32 },
157 | MissingCooperativeMatrixProperties(String),
158 | }
159 |
160 | impl Error {
161 | pub fn result(&self) -> result::Result {
162 | match self {
163 | Error::EnumerateDeviceExtensionPropertiesFailed => {
164 | result::Result::Fail
165 | },
166 | Error::ExtensionMissingNullTerminator => result::Result::Fail,
167 | Error::ExtensionInvalidUtf8 => result::Result::Fail,
168 | Error::MissingBaseFeature(_) => result::Result::Skip,
169 | Error::MissingExtension(_) => result::Result::Skip,
170 | Error::MissingFeature { .. } => result::Result::Skip,
171 | Error::VersionTooLow { .. } => result::Result::Skip,
172 | Error::RequiredSubgroupSizeInvalid { .. } => result::Result::Skip,
173 | Error::MissingCooperativeMatrixProperties(_) => result::Result::Skip,
174 | }
175 | }
176 | }
177 |
178 | impl fmt::Display for Error {
179 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
180 | match self {
181 | Error::EnumerateDeviceExtensionPropertiesFailed => {
182 | write!(f, "vkEnumerateDeviceExtensionProperties failed")
183 | },
184 | Error::ExtensionMissingNullTerminator => {
185 | write!(
186 | f,
187 | "NULL terminator missing in string returned from \
188 | vkEnumerateDeviceExtensionProperties"
189 | )
190 | },
191 | Error::ExtensionInvalidUtf8 => {
192 | write!(
193 | f,
194 | "Invalid UTF-8 in string returned from \
195 | vkEnumerateDeviceExtensionProperties"
196 | )
197 | },
198 | &Error::MissingBaseFeature(feature_num) => {
199 | write!(
200 | f,
201 | "Missing required feature: {}",
202 | BASE_FEATURES[feature_num],
203 | )
204 | },
205 | Error::MissingExtension(s) => {
206 | write!(f, "Missing required extension: {}", s)
207 | },
208 | &Error::MissingFeature { extension, feature } => {
209 | write!(
210 | f,
211 | "Missing required feature “{}” from extension “{}”",
212 | EXTENSIONS[extension].features[feature],
213 | EXTENSIONS[extension].name(),
214 | )
215 | },
216 | &Error::VersionTooLow { required_version, actual_version } => {
217 | let (req_major, req_minor, req_patch) =
218 | extract_version(required_version);
219 | let (actual_major, actual_minor, actual_patch) =
220 | extract_version(actual_version);
221 | write!(
222 | f,
223 | "Vulkan API version {}.{}.{} required but the driver \
224 | reported {}.{}.{}",
225 | req_major, req_minor, req_patch,
226 | actual_major, actual_minor, actual_patch,
227 | )
228 | },
229 | &Error::RequiredSubgroupSizeInvalid { size, min, max } => {
230 | write!(
231 | f,
232 | "Required subgroup size {} is not in the supported range {}-{}",
233 | size, min, max
234 | )
235 | },
236 | Error::MissingCooperativeMatrixProperties(s) => {
237 | write!(f, "Physical device can't fulfill cooperative matrix requirements: {}", s.trim())
238 | },
239 | }
240 | }
241 | }
242 |
243 | /// Convert a decomposed Vulkan version into an integer. This is the
244 | /// same as the `VK_MAKE_VERSION` macro in the Vulkan headers.
245 | pub const fn make_version(major: u32, minor: u32, patch: u32) -> u32 {
246 | (major << 22) | (minor << 12) | patch
247 | }
248 |
249 | /// Decompose a Vulkan version into its component major, minor and
250 | /// patch parts. This is the equivalent of the `VK_VERSION_MAJOR`,
251 | /// `VK_VERSION_MINOR` and `VK_VERSION_PATCH` macros in the Vulkan
252 | /// header.
253 | pub const fn extract_version(version: u32) -> (u32, u32, u32) {
254 | (version >> 22, (version >> 12) & 0x3ff, version & 0xfff)
255 | }
256 |
257 | impl Requirements {
258 | pub fn new() -> Requirements {
259 | Requirements {
260 | version: make_version(1, 0, 0),
261 | extensions: HashSet::new(),
262 | features: HashMap::new(),
263 | base_features: [false; N_BASE_FEATURES],
264 | required_subgroup_size: None,
265 | lazy_extensions: UnsafeCell::new(None),
266 | lazy_structures: UnsafeCell::new(None),
267 | lazy_base_features: UnsafeCell::new(None),
268 | cooperative_matrix_reqs: Vec::new(),
269 | }
270 | }
271 |
272 | /// Get the required Vulkan version that was previously set with
273 | /// [add_version](Requirements::add_version).
274 | pub fn version(&self) -> u32 {
275 | self.version
276 | }
277 |
278 | /// Set the minimum required Vulkan version.
279 | pub fn add_version(&mut self, major: u32, minor: u32, patch: u32) {
280 | self.version = std::cmp::max(self.version, make_version(major, minor, patch));
281 | }
282 |
283 | pub fn add_cooperative_matrix_req(&mut self, req: CooperativeMatrix) {
284 | self.cooperative_matrix_reqs.push(req);
285 | }
286 |
287 | fn update_lazy_extensions(&self) -> &[* const u8] {
288 | // SAFETY: The only case where lazy_extensions will be
289 | // modified is if it is None and the only way for that to
290 | // happen is if something modifies the requirements through a
291 | // mutable reference. If that is the case then at that point
292 | // there were no other references to the requirements so
293 | // nothing can be holding a reference to the lazy data.
294 | let extensions = unsafe {
295 | match &mut *self.lazy_extensions.get() {
296 | Some(data) => return data.pointers.as_ref(),
297 | option @ None => {
298 | option.insert(LazyExtensions {
299 | strings: Vec::new(),
300 | pointers: Vec::new(),
301 | })
302 | },
303 | }
304 | };
305 |
306 |         // Store a list of offsets into the strings buffer for
307 | // each extension. We can’t directly store the pointers yet
308 | // because the Vec will probably be reallocated while we are
309 | // adding to it.
310 | let mut offsets = Vec::<usize>::new();
311 |
312 | for extension in self.extensions.iter() {
313 | offsets.push(extensions.strings.len());
314 |
315 | extensions.strings.extend_from_slice(extension.as_bytes());
316 | // Add the null terminator
317 | extensions.strings.push(0);
318 | }
319 |
320 | let base_ptr = extensions.strings.as_ptr();
321 |
322 | extensions.pointers.reserve(offsets.len());
323 |
324 | for offset in offsets {
325 | // SAFETY: These are all valid offsets into the
326 |             // strings Vec so they should all be in the same
327 | // allocation and no overflow is possible.
328 | extensions.pointers.push(unsafe { base_ptr.add(offset) });
329 | }
330 |
331 | extensions.pointers.as_ref()
332 | }
333 |
334 | /// Return a reference to an array of pointers to C-style strings
335 | /// that can be passed to `vkCreateDevice`.
336 | pub fn c_extensions(&self) -> &[* const u8] {
337 | self.update_lazy_extensions()
338 | }
339 |
340 | // Make a linked list of features structures that is suitable for
341 | // passing to VkPhysicalDeviceFeatures2 or vkCreateDevice. All of
342 | // the bools are set to 0. The vec with the structs is returned as
343 | // well as a list of offsets to the bools along with the extension
344 | // number.
345 | fn make_empty_structures(
346 | &self
347 | ) -> (Vec<u8>, Vec<(usize, usize)>)
348 | {
349 | let mut structures = Vec::new();
350 |
351 | // Keep an array of offsets and corresponding extension num in a
352 | // vec for each structure that will be returned. The offset is
353 | // also used to update the pNext pointers. We have to do this
354 | // after filling the vec because the vec will probably
355 | // reallocate while we are adding to it which would invalidate
356 | // the pointers.
357 | let mut offsets = Vec::<(usize, usize)>::new();
358 |
359 | for (&extension_num, features) in self.features.iter() {
360 | let extension = &EXTENSIONS[extension_num];
361 |
362 | // Make sure the struct size is big enough to hold all of
363 | // the bools
364 | assert!(
365 | extension.struct_size
366 | >= (FIRST_FEATURE_OFFSET
367 | + features.len() * mem::size_of::<vk::VkBool32>())
368 | );
369 |
370 | let offset = structures.len();
371 | structures.resize(offset + extension.struct_size, 0);
372 |
373 | // The structure type is the first field in the structure
374 | let type_end = offset + mem::size_of::<vk::VkStructureType>();
375 | structures[offset..type_end]
376 | .copy_from_slice(&extension.struct_type.to_ne_bytes());
377 |
378 | offsets.push((offset + FIRST_FEATURE_OFFSET, extension_num));
379 | }
380 |
381 | let mut last_offset = 0;
382 |
383 | for &(offset, _) in &offsets[1..] {
384 | let offset = offset - FIRST_FEATURE_OFFSET;
385 |
386 | // SAFETY: The offsets are all valid offsets into the
387 | // structures vec so they should all be in the same
388 | // allocation and no overflow should occur.
389 | let ptr = unsafe { structures.as_ptr().add(offset) };
390 |
391 | let ptr_start = last_offset + NEXT_PTR_OFFSET;
392 | let ptr_end = ptr_start + mem::size_of::<*const u8>();
393 | structures[ptr_start..ptr_end]
394 | .copy_from_slice(&(ptr as usize).to_ne_bytes());
395 |
396 | last_offset = offset;
397 | }
398 |
399 | (structures, offsets)
400 | }
401 |
402 | fn update_lazy_structures(&self) -> &[u8] {
403 | // SAFETY: The only case where lazy_structures will be
404 | // modified is if it is None and the only way for that to
405 | // happen is if something modifies the requirements through a
406 | // mutable reference. If that is the case then at that point
407 | // there were no other references to the requirements so
408 | // nothing can be holding a reference to the lazy data.
409 | let (structures, offsets) = unsafe {
410 | match &mut *self.lazy_structures.get() {
411 | Some(data) => return data.list.as_slice(),
412 | option @ None => {
413 | let (list, offsets) = self.make_empty_structures();
414 |
415 | (option.insert(LazyStructures { list }), offsets)
416 | },
417 | }
418 | };
419 |
420 | for (offset, extension_num) in offsets {
421 | let features = &self.features[&extension_num];
422 |
423 | for (feature_num, &feature) in features.iter().enumerate() {
424 | let feature_start =
425 | offset
426 | + mem::size_of::<vk::VkBool32>()
427 | * feature_num;
428 | let feature_end =
429 | feature_start + mem::size_of::<vk::VkBool32>();
430 |
431 | structures.list[feature_start..feature_end]
432 | .copy_from_slice(&(feature as vk::VkBool32).to_ne_bytes());
433 | }
434 | }
435 |
436 | structures.list.as_slice()
437 | }
438 |
439 | /// Return a pointer to a linked list of feature structures that
440 | /// can be passed to `vkCreateDevice`, or `None` if no feature
441 | /// structs are required.
442 | pub fn c_structures(&self) -> Option<&[u8]> {
443 | if self.features.is_empty() {
444 | None
445 | } else {
446 | Some(self.update_lazy_structures())
447 | }
448 | }
449 |
450 | fn update_lazy_base_features(&self) -> &[vk::VkBool32] {
451 |         // SAFETY: The only case where lazy_base_features will be
452 | // modified is if it is None and the only way for that to
453 | // happen is if something modifies the requirements through a
454 | // mutable reference. If that is the case then at that point
455 | // there were no other references to the requirements so
456 | // nothing can be holding a reference to the lazy data.
457 | let base_features = unsafe {
458 | match &mut *self.lazy_base_features.get() {
459 | Some(data) => return &data.bools,
460 | option @ None => {
461 | option.insert(LazyBaseFeatures {
462 | bools: [0; N_PHYSICAL_DEVICE_FEATURES],
463 | })
464 | },
465 | }
466 | };
467 |
468 | for (feature_num, &feature) in self.base_features.iter().enumerate() {
469 | base_features.bools[feature_num] = feature as vk::VkBool32;
470 | }
471 |
472 | &base_features.bools
473 | }
474 |
475 | /// Return a pointer to a `VkPhysicalDeviceFeatures` struct that
476 | /// can be passed to `vkCreateDevice`.
477 | pub fn c_base_features(&self) -> &vk::VkPhysicalDeviceFeatures {
478 | unsafe {
479 | &*self.update_lazy_base_features().as_ptr().cast()
480 | }
481 | }
482 |
483 | fn add_extension_name(&mut self, name: &str) {
484 | // It would be nice to use get_or_insert_owned here if it
485 | // becomes stable. It’s probably better not to use
486 | // HashSet::replace directly because if it’s already in the
487 | // set then we’ll pointlessly copy the str slice and
488 | // immediately free it.
489 | if !self.extensions.contains(name) {
490 | self.extensions.insert(name.to_owned());
491 |             // SAFETY: self is mutably borrowed so there should be no other
492 | // reference to the lazy data
493 | unsafe {
494 | *self.lazy_extensions.get() = None;
495 | }
496 | }
497 | }
498 |
499 | /// Adds a requirement to the list of requirements.
500 | ///
501 | /// The name can be either a feature as written in the
502 | /// corresponding features struct or the name of an extension. If
503 | /// it is a feature it needs to be either the name of a field in
504 | /// the `VkPhysicalDeviceFeatures` struct or a field in any of the
505 | /// features structs of the extensions that vkrunner knows about.
506 | /// In the latter case the name of the corresponding extension
507 | /// will be automatically added as a requirement.
508 | pub fn add(&mut self, name: &str) {
509 | if let Some((extension_num, feature_num)) = find_feature(name) {
510 | let extension = &EXTENSIONS[extension_num];
511 |
512 | self.add_extension_name(extension.name());
513 |
514 | let features = self
515 | .features
516 | .entry(extension_num)
517 | .or_insert_with(|| {
518 | vec![false; extension.features.len()].into_boxed_slice()
519 | });
520 |
521 | if !features[feature_num] {
522 |                 // SAFETY: self is mutably borrowed so there should be no other
523 | // reference to the lazy data
524 | unsafe {
525 | *self.lazy_structures.get() = None;
526 | }
527 | features[feature_num] = true;
528 | }
529 | } else if let Some(num) = find_base_feature(name) {
530 | if !self.base_features[num] {
531 | self.base_features[num] = true;
532 |                 // SAFETY: self is mutably borrowed so there should be no other
533 | // reference to the lazy data
534 | unsafe {
535 | *self.lazy_structures.get() = None;
536 | }
537 | }
538 | } else {
539 | self.add_extension_name(name);
540 | }
541 | }
542 |
543 | fn check_base_features(
544 | &self,
545 | vkinst: &vulkan_funcs::Instance,
546 | device: vk::VkPhysicalDevice
547 | ) -> Result<(), Error> {
548 | let mut actual_features: [vk::VkBool32; N_BASE_FEATURES] =
549 | [0; N_BASE_FEATURES];
550 |
551 | unsafe {
552 | vkinst.vkGetPhysicalDeviceFeatures.unwrap()(
553 | device,
554 | actual_features.as_mut_ptr().cast()
555 | );
556 | }
557 |
558 | for (feature_num, &required) in self.base_features.iter().enumerate() {
559 | if required && actual_features[feature_num] == 0 {
560 | return Err(Error::MissingBaseFeature(feature_num));
561 | }
562 | }
563 |
564 | Ok(())
565 | }
566 |
567 | fn get_device_extensions(
568 | &self,
569 | vkinst: &vulkan_funcs::Instance,
570 | device: vk::VkPhysicalDevice
571 | ) -> Result<HashSet::<String>, Error> {
572 | let mut property_count = 0u32;
573 |
574 | let res = unsafe {
575 | vkinst.vkEnumerateDeviceExtensionProperties.unwrap()(
576 | device,
577 | std::ptr::null(), // layerName
578 | &mut property_count as *mut u32,
579 | std::ptr::null_mut(), // properties
580 | )
581 | };
582 |
583 | if res != vk::VK_SUCCESS {
584 | return Err(Error::EnumerateDeviceExtensionPropertiesFailed);
585 | }
586 |
587 | let mut extensions = Vec::<vk::VkExtensionProperties>::with_capacity(
588 | property_count as usize
589 | );
590 |
591 | unsafe {
592 | let res = vkinst.vkEnumerateDeviceExtensionProperties.unwrap()(
593 | device,
594 | std::ptr::null(), // layerName
595 | &mut property_count as *mut u32,
596 | extensions.as_mut_ptr(),
597 | );
598 |
599 | if res != vk::VK_SUCCESS {
600 | return Err(Error::EnumerateDeviceExtensionPropertiesFailed);
601 | }
602 |
603 | // SAFETY: The FFI call to
604 | // vkEnumerateDeviceExtensionProperties should have filled
605 | // the extensions array with valid values so we can safely
606 | // set the length to the capacity we allocated earlier
607 | extensions.set_len(property_count as usize);
608 | };
609 |
610 | let mut extensions_set = HashSet::new();
611 |
612 | for extension in extensions.iter() {
613 | let name = &extension.extensionName;
614 | // Make sure it has a NULL terminator
615 | if let None = name.iter().find(|&&b| b == 0) {
616 | return Err(Error::ExtensionMissingNullTerminator);
617 | }
618 | // SAFETY: we just checked that the array has a null terminator
619 | let name = unsafe { CStr::from_ptr(name.as_ptr()) };
620 | let name = match name.to_str() {
621 | Err(_) => {
622 | return Err(Error::ExtensionInvalidUtf8);
623 | },
624 | Ok(s) => s,
625 | };
626 | extensions_set.insert(name.to_owned());
627 | }
628 |
629 | Ok(extensions_set)
630 | }
631 |
632 | fn check_extensions(
633 | &self,
634 | vkinst: &vulkan_funcs::Instance,
635 | device: vk::VkPhysicalDevice
636 | ) -> Result<(), Error> {
637 | if self.extensions.is_empty() {
638 | return Ok(());
639 | }
640 |
641 | let actual_extensions = self.get_device_extensions(vkinst, device)?;
642 |
643 | for extension in self.extensions.iter() {
644 | if !actual_extensions.contains(extension) {
645 | return Err(Error::MissingExtension(extension.to_string()));
646 | }
647 | }
648 |
649 | Ok(())
650 | }
651 |
652 | fn check_structures(
653 | &self,
654 | vkinst: &vulkan_funcs::Instance,
655 | device: vk::VkPhysicalDevice
656 | ) -> Result<(), Error> {
657 | if self.features.is_empty() {
658 | return Ok(());
659 | }
660 |
661 | // If vkGetPhysicalDeviceFeatures2KHR isn’t available then we
662 | // can probably assume that none of the extensions are
663 | // available.
664 | let get_features = match vkinst.vkGetPhysicalDeviceFeatures2KHR {
665 | None => {
666 | // Find the first feature and report that as missing
667 | for (&extension, features) in self.features.iter() {
668 | for (feature, &enabled) in features.iter().enumerate() {
669 | if enabled {
670 | return Err(Error::MissingFeature {
671 | extension,
672 | feature,
673 | });
674 | }
675 | }
676 | }
677 | unreachable!("Requirements::features should be empty if no \
678 | features are required");
679 | },
680 | Some(func) => func,
681 | };
682 |
683 | let (mut actual_structures, offsets) = self.make_empty_structures();
684 |
685 | let mut features_query = vk::VkPhysicalDeviceFeatures2 {
686 | sType: vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
687 | pNext: actual_structures.as_mut_ptr().cast(),
688 | features: Default::default(),
689 | };
690 |
691 | unsafe {
692 | get_features(
693 | device,
694 | &mut features_query as *mut vk::VkPhysicalDeviceFeatures2,
695 | );
696 | }
697 |
698 | for (offset, extension_num) in offsets {
699 | let features = &self.features[&extension_num];
700 |
701 | for (feature_num, &feature) in features.iter().enumerate() {
702 | if !feature {
703 | continue;
704 | }
705 |
706 | let feature_start =
707 | offset
708 | + mem::size_of::<vk::VkBool32>()
709 | * feature_num;
710 | let feature_end =
711 | feature_start + mem::size_of::<vk::VkBool32>();
712 | let actual_value = vk::VkBool32::from_ne_bytes(
713 | actual_structures[feature_start..feature_end]
714 | .try_into()
715 | .unwrap()
716 | );
717 |
718 | if actual_value == 0 {
719 | return Err(Error::MissingFeature {
720 | extension: extension_num,
721 | feature: feature_num,
722 | });
723 | }
724 | }
725 | }
726 |
727 | Ok(())
728 | }
729 |
730 | fn check_version(
731 | &self,
732 | vkinst: &vulkan_funcs::Instance,
733 | device: vk::VkPhysicalDevice,
734 | ) -> Result<(), Error> {
735 | let actual_version = if self.version() >= make_version(1, 1, 0) {
736 | let mut subgroup_size_props = vk::VkPhysicalDeviceSubgroupSizeControlProperties {
737 | sType: vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SUBGROUP_SIZE_CONTROL_PROPERTIES,
738 | ..Default::default()
739 | };
740 | let mut props = vk::VkPhysicalDeviceProperties2 {
741 | sType: vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2,
742 | pNext: std::ptr::addr_of_mut!(subgroup_size_props).cast(),
743 | ..Default::default()
744 | };
745 |
746 | unsafe {
747 | vkinst.vkGetPhysicalDeviceProperties2.unwrap()(
748 | device,
749 | &mut props as *mut vk::VkPhysicalDeviceProperties2,
750 | );
751 | }
752 |
753 | if let Some(size) = self.required_subgroup_size {
754 | let min = subgroup_size_props.minSubgroupSize;
755 | let max = subgroup_size_props.maxSubgroupSize;
756 | if size < min || size > max {
757 | return Err(Error::RequiredSubgroupSizeInvalid {
758 | size, min, max
759 | });
760 | }
761 | }
762 |
763 | props.properties.apiVersion
764 | } else {
765 | let mut props = vk::VkPhysicalDeviceProperties::default();
766 |
767 | unsafe {
768 | vkinst.vkGetPhysicalDeviceProperties.unwrap()(
769 | device,
770 | &mut props as *mut vk::VkPhysicalDeviceProperties,
771 | );
772 | }
773 |
774 | props.apiVersion
775 | };
776 |
777 | if actual_version < self.version() {
778 | Err(Error::VersionTooLow {
779 | required_version: self.version(),
780 | actual_version,
781 | })
782 | } else {
783 | Ok(())
784 | }
785 | }
786 |
787 | fn check_cooperative_matrix(&self, vkinst: &vulkan_funcs::Instance, device: vk::VkPhysicalDevice) -> Result<(), Error> {
788 | if self.cooperative_matrix_reqs.len() == 0 {
789 | return Ok(());
790 | }
791 |
792 | let mut count = 0u32;
793 |
794 | unsafe {
795 | vkinst.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.unwrap()(
796 | device,
797 | ptr::addr_of_mut!(count),
798 | ptr::null_mut(),
799 | );
800 | }
801 |
802 | let mut props = Vec::<vk::VkCooperativeMatrixPropertiesKHR>::new();
803 | props.resize_with(count as usize, Default::default);
804 |
805 | for prop in &mut props {
806 | prop.sType = vk::VK_STRUCTURE_TYPE_COOPERATIVE_MATRIX_PROPERTIES_KHR;
807 | }
808 |
809 | unsafe {
810 | vkinst.vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR.unwrap()(
811 | device,
812 | ptr::addr_of_mut!(count),
813 | props.as_mut_ptr(),
814 | );
815 | }
816 |
817 | for req in &self.cooperative_matrix_reqs {
818 | let mut found = false;
819 | for prop in &props {
820 | if let Some(m) = req.m_size {
821 | if m != prop.MSize {
822 | continue
823 | }
824 | }
825 | if let Some(n) = req.n_size {
826 | if n != prop.NSize {
827 | continue
828 | }
829 | }
830 | if let Some(k) = req.k_size {
831 | if k != prop.KSize {
832 | continue
833 | }
834 | }
835 | if let Some(a) = req.a_type {
836 | if a != prop.AType {
837 | continue
838 | }
839 | }
840 | if let Some(b) = req.b_type {
841 | if b != prop.BType {
842 | continue
843 | }
844 | }
845 | if let Some(c) = req.c_type {
846 | if c != prop.CType {
847 | continue
848 | }
849 | }
850 | if let Some(result) = req.result_type {
851 | if result != prop.ResultType {
852 | continue
853 | }
854 | }
855 | if let Some(sa) = req.saturating_accumulation {
856 | if sa != prop.saturatingAccumulation {
857 | continue
858 | }
859 | }
860 | if let Some(scope) = req.scope {
861 | if scope != prop.scope {
862 | continue
863 | }
864 | }
865 | found = true;
866 | break
867 | }
868 | if !found {
869 | return Err(Error::MissingCooperativeMatrixProperties(req.line.clone()));
870 | }
871 | }
872 |
873 | Ok(())
874 | }
875 |
876 | pub fn check(
877 | &self,
878 | vkinst: &vulkan_funcs::Instance,
879 | device: vk::VkPhysicalDevice
880 | ) -> Result<(), Error> {
881 | self.check_base_features(vkinst, device)?;
882 | self.check_extensions(vkinst, device)?;
883 | self.check_structures(vkinst, device)?;
884 | self.check_version(vkinst, device)?;
885 | self.check_cooperative_matrix(vkinst, device)?;
886 |
887 | Ok(())
888 | }
889 | }
890 |
891 | // Looks for a feature with the given name. If found it returns the
892 | // index of the extension it was found in and the index of the feature
893 | // name.
894 | fn find_feature(name: &str) -> Option<(usize, usize)> {
895 | for (extension_num, extension) in EXTENSIONS.iter().enumerate() {
896 | for (feature_num, &feature) in extension.features.iter().enumerate() {
897 | if feature == name {
898 | return Some((extension_num, feature_num));
899 | }
900 | }
901 | }
902 |
903 | None
904 | }
905 |
906 | fn find_base_feature(name: &str) -> Option<usize> {
907 | BASE_FEATURES.iter().position(|&f| f == name)
908 | }
909 |
910 | impl PartialEq for Requirements {
911 | fn eq(&self, other: &Requirements) -> bool {
912 | self.version == other.version
913 | && self.extensions == other.extensions
914 | && self.features == other.features
915 | && self.base_features == other.base_features
916 | && self.required_subgroup_size == other.required_subgroup_size
917 | }
918 | }
919 |
920 | impl Eq for Requirements {
921 | }
922 |
923 | // Manual implementation of clone to avoid copying the lazy state
924 | impl Clone for Requirements {
925 | fn clone(&self) -> Requirements {
926 | Requirements {
927 | version: self.version,
928 | extensions: self.extensions.clone(),
929 | features: self.features.clone(),
930 | base_features: self.base_features.clone(),
931 | required_subgroup_size: self.required_subgroup_size,
932 | lazy_extensions: UnsafeCell::new(None),
933 | lazy_structures: UnsafeCell::new(None),
934 | lazy_base_features: UnsafeCell::new(None),
935 | cooperative_matrix_reqs: self.cooperative_matrix_reqs.clone(),
936 | }
937 | }
938 |
939 | fn clone_from(&mut self, source: &Requirements) {
940 | self.version = source.version;
941 | self.extensions.clone_from(&source.extensions);
942 | self.features.clone_from(&source.features);
943 | self.base_features.clone_from(&source.base_features);
944 | self.required_subgroup_size = source.required_subgroup_size;
945 | self.cooperative_matrix_reqs.clone_from(&source.cooperative_matrix_reqs);
946 |         // SAFETY: self is mutably borrowed so there should be no other
947 | // reference to the lazy data
948 | unsafe {
949 | *self.lazy_extensions.get() = None;
950 | *self.lazy_structures.get() = None;
951 | *self.lazy_base_features.get() = None;
952 | }
953 | }
954 | }
955 |
956 | #[cfg(test)]
957 | mod test {
958 | use super::*;
959 | use std::ffi::{c_char, c_void};
960 | use crate::fake_vulkan;
961 |
962 | fn get_struct_type(structure: &[u8]) -> vk::VkStructureType {
963 | let slice = &structure[0..mem::size_of::<vk::VkStructureType>()];
964 | let mut bytes = [0; mem::size_of::<vk::VkStructureType>()];
965 | bytes.copy_from_slice(slice);
966 | vk::VkStructureType::from_ne_bytes(bytes)
967 | }
968 |
969 | fn get_next_structure(structure: &[u8]) -> &[u8] {
970 | let slice = &structure[
971 | NEXT_PTR_OFFSET..NEXT_PTR_OFFSET + mem::size_of::<usize>()
972 | ];
973 | let mut bytes = [0; mem::size_of::<usize>()];
974 | bytes.copy_from_slice(slice);
975 | let pointer = usize::from_ne_bytes(bytes);
976 | let offset = pointer - structure.as_ptr() as usize;
977 | &structure[offset..]
978 | }
979 |
980 | fn find_bools_for_extension<'a, 'b>(
981 | mut structures: &'a [u8],
982 | extension: &'b Extension
983 | ) -> &'a [vk::VkBool32] {
984 | while !structures.is_empty() {
985 | let struct_type = get_struct_type(structures);
986 |
987 | if struct_type == extension.struct_type {
988 | unsafe {
989 | let bools_ptr = structures
990 | .as_ptr()
991 | .add(FIRST_FEATURE_OFFSET);
992 | return std::slice::from_raw_parts(
993 | bools_ptr as *const vk::VkBool32,
994 | extension.features.len()
995 | );
996 | }
997 | }
998 |
999 | structures = get_next_structure(structures);
1000 | }
1001 |
1002 | unreachable!("No structure found for extension “{}”", extension.name());
1003 | }
1004 |
1005 | unsafe fn extension_in_c_extensions(
1006 | reqs: &mut Requirements,
1007 | ext: &str
1008 | ) -> bool {
1009 | for &p in reqs.c_extensions().iter() {
1010 | if CStr::from_ptr(p as *const c_char).to_str().unwrap() == ext {
1011 | return true;
1012 | }
1013 | }
1014 |
1015 | false
1016 | }
1017 |
1018 | #[test]
1019 | fn test_all_features() {
1020 | let mut reqs = Requirements::new();
1021 |
1022 | for extension in EXTENSIONS.iter() {
1023 | for feature in extension.features.iter() {
1024 | reqs.add(feature);
1025 | }
1026 | }
1027 |
1028 | for feature in BASE_FEATURES.iter() {
1029 | reqs.add(feature);
1030 | }
1031 |
1032 | for (extension_num, extension) in EXTENSIONS.iter().enumerate() {
1033 | // All of the extensions should be in the set
1034 | assert!(reqs.extensions.contains(extension.name()));
1035 | // All of the features of every extension should be true
1036 | assert!(reqs.features[&extension_num].iter().all(|&b| b));
1037 | }
1038 |
1039 | // All of the base features should be enabled
1040 | assert!(reqs.base_features.iter().all(|&b| b));
1041 |
1042 | let base_features = reqs.c_base_features();
1043 |
1044 | assert_eq!(base_features.robustBufferAccess, 1);
1045 | assert_eq!(base_features.fullDrawIndexUint32, 1);
1046 | assert_eq!(base_features.imageCubeArray, 1);
1047 | assert_eq!(base_features.independentBlend, 1);
1048 | assert_eq!(base_features.geometryShader, 1);
1049 | assert_eq!(base_features.tessellationShader, 1);
1050 | assert_eq!(base_features.sampleRateShading, 1);
1051 | assert_eq!(base_features.dualSrcBlend, 1);
1052 | assert_eq!(base_features.logicOp, 1);
1053 | assert_eq!(base_features.multiDrawIndirect, 1);
1054 | assert_eq!(base_features.drawIndirectFirstInstance, 1);
1055 | assert_eq!(base_features.depthClamp, 1);
1056 | assert_eq!(base_features.depthBiasClamp, 1);
1057 | assert_eq!(base_features.fillModeNonSolid, 1);
1058 | assert_eq!(base_features.depthBounds, 1);
1059 | assert_eq!(base_features.wideLines, 1);
1060 | assert_eq!(base_features.largePoints, 1);
1061 | assert_eq!(base_features.alphaToOne, 1);
1062 | assert_eq!(base_features.multiViewport, 1);
1063 | assert_eq!(base_features.samplerAnisotropy, 1);
1064 | assert_eq!(base_features.textureCompressionETC2, 1);
1065 | assert_eq!(base_features.textureCompressionASTC_LDR, 1);
1066 | assert_eq!(base_features.textureCompressionBC, 1);
1067 | assert_eq!(base_features.occlusionQueryPrecise, 1);
1068 | assert_eq!(base_features.pipelineStatisticsQuery, 1);
1069 | assert_eq!(base_features.vertexPipelineStoresAndAtomics, 1);
1070 | assert_eq!(base_features.fragmentStoresAndAtomics, 1);
1071 | assert_eq!(base_features.shaderTessellationAndGeometryPointSize, 1);
1072 | assert_eq!(base_features.shaderImageGatherExtended, 1);
1073 | assert_eq!(base_features.shaderStorageImageExtendedFormats, 1);
1074 | assert_eq!(base_features.shaderStorageImageMultisample, 1);
1075 | assert_eq!(base_features.shaderStorageImageReadWithoutFormat, 1);
1076 | assert_eq!(base_features.shaderStorageImageWriteWithoutFormat, 1);
1077 | assert_eq!(base_features.shaderUniformBufferArrayDynamicIndexing, 1);
1078 | assert_eq!(base_features.shaderSampledImageArrayDynamicIndexing, 1);
1079 | assert_eq!(base_features.shaderStorageBufferArrayDynamicIndexing, 1);
1080 | assert_eq!(base_features.shaderStorageImageArrayDynamicIndexing, 1);
1081 | assert_eq!(base_features.shaderClipDistance, 1);
1082 | assert_eq!(base_features.shaderCullDistance, 1);
1083 | assert_eq!(base_features.shaderFloat64, 1);
1084 | assert_eq!(base_features.shaderInt64, 1);
1085 | assert_eq!(base_features.shaderInt16, 1);
1086 | assert_eq!(base_features.shaderResourceResidency, 1);
1087 | assert_eq!(base_features.shaderResourceMinLod, 1);
1088 | assert_eq!(base_features.sparseBinding, 1);
1089 | assert_eq!(base_features.sparseResidencyBuffer, 1);
1090 | assert_eq!(base_features.sparseResidencyImage2D, 1);
1091 | assert_eq!(base_features.sparseResidencyImage3D, 1);
1092 | assert_eq!(base_features.sparseResidency2Samples, 1);
1093 | assert_eq!(base_features.sparseResidency4Samples, 1);
1094 | assert_eq!(base_features.sparseResidency8Samples, 1);
1095 | assert_eq!(base_features.sparseResidency16Samples, 1);
1096 | assert_eq!(base_features.sparseResidencyAliased, 1);
1097 | assert_eq!(base_features.variableMultisampleRate, 1);
1098 | assert_eq!(base_features.inheritedQueries, 1);
1099 |
1100 | // All of the values should be set in the C structs
1101 | for extension in EXTENSIONS.iter() {
1102 | let structs = reqs.c_structures().unwrap();
1103 |
1104 | assert!(
1105 | find_bools_for_extension(structs, extension)
1106 | .iter()
1107 | .all(|&b| b == 1)
1108 | );
1109 |
1110 | assert!(unsafe { &*reqs.lazy_structures.get() }.is_some());
1111 | }
1112 |
1113 | // All of the extensions should be in the c_extensions
1114 | for extension in EXTENSIONS.iter() {
1115 | assert!(unsafe {
1116 | extension_in_c_extensions(&mut reqs, extension.name())
1117 | });
1118 | assert!(unsafe { &*reqs.lazy_extensions.get() }.is_some());
1119 | }
1120 |
1121 | // Sanity check that a made-up extension isn’t in c_extensions
1122 | assert!(!unsafe {
1123 | extension_in_c_extensions(&mut reqs, "not_a_real_ext")
1124 | });
1125 | }
1126 |
1127 | #[test]
1128 | fn test_version() {
1129 | let mut reqs = Requirements::new();
1130 | reqs.add_version(2, 1, 5);
1131 | assert_eq!(reqs.version(), 0x801005);
1132 | }
1133 |
1134 | #[test]
1135 | fn test_empty() {
1136 | let reqs = Requirements::new();
1137 |
1138 | assert!(unsafe { &*reqs.lazy_extensions.get() }.is_none());
1139 | assert!(unsafe { &*reqs.lazy_structures.get() }.is_none());
1140 |
1141 | let base_features_ptr = reqs.c_base_features()
1142 | as *const vk::VkPhysicalDeviceFeatures
1143 | as *const vk::VkBool32;
1144 |
1145 | unsafe {
1146 | let base_features = std::slice::from_raw_parts(
1147 | base_features_ptr,
1148 | N_BASE_FEATURES
1149 | );
1150 |
1151 | assert!(base_features.iter().all(|&b| b == 0));
1152 | }
1153 | }
1154 |
1155 | #[test]
1156 | fn test_eq() {
1157 | let mut reqs_a = Requirements::new();
1158 | let mut reqs_b = Requirements::new();
1159 |
1160 | assert_eq!(reqs_a, reqs_b);
1161 |
1162 | reqs_a.add("advancedBlendCoherentOperations");
1163 | assert_ne!(reqs_a, reqs_b);
1164 |
1165 | reqs_b.add("advancedBlendCoherentOperations");
1166 | assert_eq!(reqs_a, reqs_b);
1167 |
1168 | // Getting the C data shouldn’t affect the equality
1169 | reqs_a.c_structures();
1170 | reqs_a.c_base_features();
1171 | reqs_a.c_extensions();
1172 |
1173 | assert_eq!(reqs_a, reqs_b);
1174 |
1175 | // The order of adding shouldn’t matter
1176 | reqs_a.add("fake_extension");
1177 | reqs_a.add("another_fake_extension");
1178 |
1179 | reqs_b.add("another_fake_extension");
1180 | reqs_b.add("fake_extension");
1181 |
1182 | assert_eq!(reqs_a, reqs_b);
1183 |
1184 | reqs_a.add("wideLines");
1185 | assert_ne!(reqs_a, reqs_b);
1186 |
1187 | reqs_b.add("wideLines");
1188 | assert_eq!(reqs_a, reqs_b);
1189 |
1190 | reqs_a.add_version(3, 1, 2);
1191 | assert_ne!(reqs_a, reqs_b);
1192 |
1193 | reqs_b.add_version(3, 1, 2);
1194 | assert_eq!(reqs_a, reqs_b);
1195 | }
1196 |
1197 | #[test]
1198 | fn test_clone() {
1199 | let mut reqs = Requirements::new();
1200 |
1201 | reqs.add("wideLines");
1202 | reqs.add("fake_extension");
1203 | reqs.add("advancedBlendCoherentOperations");
1204 | assert_eq!(reqs.clone(), reqs);
1205 |
1206 | assert!(unsafe { &*reqs.lazy_structures.get() }.is_none());
1207 | assert_eq!(reqs.c_extensions().len(), 2);
1208 | assert_eq!(reqs.c_base_features().wideLines, 1);
1209 |
1210 | let empty = Requirements::new();
1211 |
1212 | reqs.clone_from(&empty);
1213 |
1214 | assert!(unsafe { &*reqs.lazy_structures.get() }.is_none());
1215 | assert_eq!(reqs.c_extensions().len(), 0);
1216 | assert_eq!(reqs.c_base_features().wideLines, 0);
1217 |
1218 | assert_eq!(reqs, empty);
1219 | }
1220 |
1221 | struct FakeVulkanData {
1222 | fake_vulkan: Box<fake_vulkan::FakeVulkan>,
1223 | _vklib: vulkan_funcs::Library,
1224 | vkinst: vulkan_funcs::Instance,
1225 | instance: vk::VkInstance,
1226 | device: vk::VkPhysicalDevice,
1227 | }
1228 |
1229 | impl FakeVulkanData {
1230 | fn new() -> FakeVulkanData {
1231 | let mut fake_vulkan = fake_vulkan::FakeVulkan::new();
1232 |
1233 | fake_vulkan.physical_devices.push(Default::default());
1234 |
1235 | fake_vulkan.set_override();
1236 | let vklib = vulkan_funcs::Library::new().unwrap();
1237 |
1238 | let mut instance = std::ptr::null_mut();
1239 |
1240 | unsafe {
1241 | let res = vklib.vkCreateInstance.unwrap()(
1242 | std::ptr::null(), // pCreateInfo
1243 | std::ptr::null(), // pAllocator
1244 | std::ptr::addr_of_mut!(instance),
1245 | );
1246 | assert_eq!(res, vk::VK_SUCCESS);
1247 | }
1248 |
1249 | extern "C" fn get_instance_proc(
1250 | func_name: *const c_char,
1251 | user_data: *const c_void,
1252 | ) -> *const c_void {
1253 | unsafe {
1254 | let fake_vulkan =
1255 | &*user_data.cast::<fake_vulkan::FakeVulkan>();
1256 | std::mem::transmute(
1257 | fake_vulkan.get_function(func_name.cast())
1258 | )
1259 | }
1260 | }
1261 |
1262 | let vkinst = unsafe {
1263 | vulkan_funcs::Instance::new(
1264 | get_instance_proc,
1265 | fake_vulkan.as_ref()
1266 | as *const fake_vulkan::FakeVulkan
1267 | as *const c_void,
1268 | )
1269 | };
1270 |
1271 | let mut device = std::ptr::null_mut();
1272 |
1273 | unsafe {
1274 | let mut count = 1;
1275 | let res = vkinst.vkEnumeratePhysicalDevices.unwrap()(
1276 | instance,
1277 | std::ptr::addr_of_mut!(count),
1278 | std::ptr::addr_of_mut!(device),
1279 | );
1280 |
1281 | assert_eq!(res, vk::VK_SUCCESS);
1282 | }
1283 |
1284 | FakeVulkanData {
1285 | fake_vulkan,
1286 | _vklib: vklib,
1287 | vkinst,
1288 | instance,
1289 | device,
1290 | }
1291 | }
1292 | }
1293 |
1294 | impl Drop for FakeVulkanData {
1295 | fn drop(&mut self) {
1296 | if !std::thread::panicking() {
1297 | unsafe {
1298 | self.vkinst.vkDestroyInstance.unwrap()(
1299 | self.instance,
1300 | std::ptr::null(), // allocator
1301 | );
1302 | }
1303 | }
1304 | }
1305 | }
1306 |
1307 | fn check_base_features<'a>(
1308 | reqs: &'a Requirements,
1309 | features: &vk::VkPhysicalDeviceFeatures
1310 | ) -> Result<(), Error> {
1311 | let mut data = FakeVulkanData::new();
1312 |
1313 | data.fake_vulkan.physical_devices[0].features = features.clone();
1314 |
1315 | reqs.check(&data.vkinst, data.device)
1316 | }
1317 |
1318 | #[test]
1319 | fn test_check_base_features() {
1320 | let mut features = Default::default();
1321 |
1322 | assert!(matches!(
1323 | check_base_features(&Requirements::new(), &features),
1324 | Ok(()),
1325 | ));
1326 |
1327 | features.geometryShader = vk::VK_TRUE;
1328 |
1329 | assert!(matches!(
1330 | check_base_features(&Requirements::new(), &features),
1331 | Ok(()),
1332 | ));
1333 |
1334 | let mut reqs = Requirements::new();
1335 | reqs.add("geometryShader");
1336 | assert!(matches!(
1337 | check_base_features(&reqs, &features),
1338 | Ok(()),
1339 | ));
1340 |
1341 | reqs.add("depthBounds");
1342 | match check_base_features(&reqs, &features) {
1343 | Ok(()) => unreachable!("Requirements::check was supposed to fail"),
1344 | Err(e) => {
1345 | assert!(matches!(e, Error::MissingBaseFeature(_)));
1346 | assert_eq!(
1347 | e.to_string(),
1348 | "Missing required feature: depthBounds"
1349 | );
1350 | assert_eq!(e.result(), result::Result::Skip);
1351 | },
1352 | }
1353 | }
1354 |
1355 | #[test]
1356 | fn test_check_extensions() {
1357 | let mut data = FakeVulkanData::new();
1358 | let mut reqs = Requirements::new();
1359 |
1360 | assert!(matches!(
1361 | reqs.check(&data.vkinst, data.device),
1362 | Ok(()),
1363 | ));
1364 |
1365 | reqs.add("fake_extension");
1366 |
1367 | match reqs.check(&data.vkinst, data.device) {
1368 | Ok(()) => unreachable!("expected extensions check to fail"),
1369 | Err(e) => {
1370 | assert!(matches!(
1371 | e,
1372 | Error::MissingExtension(_),
1373 | ));
1374 | assert_eq!(
1375 | e.to_string(),
1376 | "Missing required extension: fake_extension"
1377 | );
1378 | assert_eq!(e.result(), result::Result::Skip);
1379 | },
1380 | };
1381 |
1382 | data.fake_vulkan.physical_devices[0].add_extension("fake_extension");
1383 |
1384 | assert!(matches!(
1385 | reqs.check(&data.vkinst, data.device),
1386 | Ok(()),
1387 | ));
1388 |
1389 | // Add an extension via a feature
1390 | reqs.add("multiviewGeometryShader");
1391 |
1392 | match reqs.check(&data.vkinst, data.device) {
1393 | Ok(()) => unreachable!("expected extensions check to fail"),
1394 | Err(e) => {
1395 | assert!(matches!(
1396 | e,
1397 | Error::MissingExtension(_)
1398 | ));
1399 | assert_eq!(
1400 | e.to_string(),
1401 | "Missing required extension: VK_KHR_multiview",
1402 | );
1403 | assert_eq!(e.result(), result::Result::Skip);
1404 | },
1405 | };
1406 |
1407 | data.fake_vulkan.physical_devices[0].add_extension("VK_KHR_multiview");
1408 |
1409 | match reqs.check(&data.vkinst, data.device) {
1410 | Ok(()) => unreachable!("expected extensions check to fail"),
1411 | Err(e) => {
1412 | assert!(matches!(
1413 | e,
1414 | Error::MissingFeature { .. },
1415 | ));
1416 | assert_eq!(
1417 | e.to_string(),
1418 | "Missing required feature “multiviewGeometryShader” from \
1419 | extension “VK_KHR_multiview”",
1420 | );
1421 | assert_eq!(e.result(), result::Result::Skip);
1422 | },
1423 | };
1424 |
1425 |         // Make an invalid UTF-8 sequence
1426 | let extension_name = &mut data
1427 | .fake_vulkan
1428 | .physical_devices[0]
1429 | .extensions[0]
1430 | .extensionName;
1431 | extension_name[0] = -1i8 as c_char;
1432 | extension_name[1] = 0;
1433 |
1434 | match reqs.check(&data.vkinst, data.device) {
1435 | Ok(()) => unreachable!("expected extensions check to fail"),
1436 | Err(e) => {
1437 | assert_eq!(
1438 | e.to_string(),
1439 | "Invalid UTF-8 in string returned from \
1440 | vkEnumerateDeviceExtensionProperties"
1441 | );
1442 | assert!(matches!(e, Error::ExtensionInvalidUtf8));
1443 | assert_eq!(e.result(), result::Result::Fail);
1444 | },
1445 | };
1446 |
1447 | // No null-terminator in the extension
1448 | extension_name.fill(32);
1449 |
1450 | match reqs.check(&data.vkinst, data.device) {
1451 | Ok(()) => unreachable!("expected extensions check to fail"),
1452 | Err(e) => {
1453 | assert_eq!(
1454 | e.to_string(),
1455 | "NULL terminator missing in string returned from \
1456 | vkEnumerateDeviceExtensionProperties"
1457 | );
1458 | assert!(matches!(e, Error::ExtensionMissingNullTerminator));
1459 | assert_eq!(e.result(), result::Result::Fail);
1460 | },
1461 | };
1462 | }
1463 |
1464 | #[test]
1465 | fn test_check_structures() {
1466 | let mut data = FakeVulkanData::new();
1467 | let mut reqs = Requirements::new();
1468 |
1469 | reqs.add("multiview");
1470 | data.fake_vulkan.physical_devices[0].add_extension("VK_KHR_multiview");
1471 |
1472 | match reqs.check(&data.vkinst, data.device) {
1473 | Ok(()) => unreachable!("expected features check to fail"),
1474 | Err(e) => {
1475 | assert_eq!(
1476 | e.to_string(),
1477 | "Missing required feature “multiview” \
1478 | from extension “VK_KHR_multiview”",
1479 | );
1480 | assert!(matches!(
1481 | e,
1482 | Error::MissingFeature { .. },
1483 | ));
1484 | assert_eq!(e.result(), result::Result::Skip);
1485 | },
1486 | };
1487 |
1488 | data.fake_vulkan.physical_devices[0].multiview.multiview = vk::VK_TRUE;
1489 |
1490 | assert!(matches!(
1491 | reqs.check(&data.vkinst, data.device),
1492 | Ok(()),
1493 | ));
1494 |
1495 | reqs.add("shaderBufferInt64Atomics");
1496 | data.fake_vulkan.physical_devices[0].add_extension(
1497 | "VK_KHR_shader_atomic_int64"
1498 | );
1499 |
1500 | match reqs.check(&data.vkinst, data.device) {
1501 | Ok(()) => unreachable!("expected features check to fail"),
1502 | Err(e) => {
1503 | assert_eq!(
1504 | e.to_string(),
1505 | "Missing required feature “shaderBufferInt64Atomics” \
1506 | from extension “VK_KHR_shader_atomic_int64”",
1507 | );
1508 | assert!(matches!(
1509 | e,
1510 | Error::MissingFeature { .. },
1511 | ));
1512 | assert_eq!(e.result(), result::Result::Skip);
1513 | },
1514 | };
1515 |
1516 | data.fake_vulkan
1517 | .physical_devices[0]
1518 | .shader_atomic
1519 | .shaderBufferInt64Atomics =
1520 | vk::VK_TRUE;
1521 |
1522 | assert!(matches!(
1523 | reqs.check(&data.vkinst, data.device),
1524 | Ok(()),
1525 | ));
1526 | }
1527 |
1528 | #[test]
1529 | fn test_check_version() {
1530 | let mut data = FakeVulkanData::new();
1531 | let mut reqs = Requirements::new();
1532 |
1533 | reqs.add_version(1, 2, 0);
1534 | data.fake_vulkan.physical_devices[0].properties.apiVersion =
1535 | make_version(1, 1, 0);
1536 |
1537 | match reqs.check(&data.vkinst, data.device) {
1538 | Ok(()) => unreachable!("expected version check to fail"),
1539 | Err(e) => {
1540 |                 // The fake driver reports API version 1.1.0, which is
1541 |                 // lower than the required 1.2.0
1542 | assert_eq!(
1543 | e.to_string(),
1544 | "Vulkan API version 1.2.0 required but the driver \
1545 | reported 1.1.0",
1546 | );
1547 | assert!(matches!(
1548 | e,
1549 | Error::VersionTooLow { .. },
1550 | ));
1551 | assert_eq!(e.result(), result::Result::Skip);
1552 | },
1553 | };
1554 |
1555 | // Set a valid version
1556 | data.fake_vulkan.physical_devices[0].properties.apiVersion =
1557 | make_version(1, 3, 0);
1558 |
1559 | assert!(matches!(
1560 | reqs.check(&data.vkinst, data.device),
1561 | Ok(()),
1562 | ));
1563 | }
1564 | }
1565 |
```
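A minimal usage sketch (an editor addition, not a file in this repository): assuming it were placed inside the `mod test` block of `requirements.rs` above, it exercises the version packing helpers and the `add`/`c_base_features` flow. The names it adds ("geometryShader", "multiview", "fake_extension") are reused from the existing tests in that module.
```rust
// Hypothetical extra unit test; it compiles only inside the `mod test`
// block of vkrunner/vkrunner/requirements.rs, where `Requirements`,
// `make_version` and `extract_version` are in scope via `use super::*`.
#[test]
fn test_usage_sketch() {
    // Version packing follows VK_MAKE_VERSION: major in bits 22 and up,
    // minor in bits 12..=21, patch in bits 0..=11.
    assert_eq!(make_version(1, 2, 0), 0x40_2000);
    assert_eq!(extract_version(0x40_2000), (1, 2, 0));

    let mut reqs = Requirements::new();
    // A field of VkPhysicalDeviceFeatures becomes a base feature.
    reqs.add("geometryShader");
    // A field of an extension's feature struct also pulls in the
    // extension itself (here VK_KHR_multiview).
    reqs.add("multiview");
    // Any other name is treated as a plain extension name.
    reqs.add("fake_extension");
    // The required version only ever grows.
    reqs.add_version(1, 1, 0);

    assert_eq!(reqs.version(), make_version(1, 1, 0));
    assert!(reqs.extensions.contains("VK_KHR_multiview"));
    // The lazily built C data would be handed to vkCreateDevice; a real
    // device would be validated with reqs.check(&instance, device).
    assert_eq!(reqs.c_base_features().geometryShader, 1);
}
```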
--------------------------------------------------------------------------------
/vkrunner/vkrunner/tester.rs:
--------------------------------------------------------------------------------
```rust
1 | // vkrunner
2 | //
3 | // Copyright (C) 2018, 2023 Neil Roberts
4 | // Copyright (C) 2018 Intel Corporation
5 | // Copyright (C) 2019 Google LLC
6 | //
7 | // Permission is hereby granted, free of charge, to any person obtaining a
8 | // copy of this software and associated documentation files (the "Software"),
9 | // to deal in the Software without restriction, including without limitation
10 | // the rights to use, copy, modify, merge, publish, distribute, sublicense,
11 | // and/or sell copies of the Software, and to permit persons to whom the
12 | // Software is furnished to do so, subject to the following conditions:
13 | //
14 | // The above copyright notice and this permission notice (including the next
15 | // paragraph) shall be included in all copies or substantial portions of the
16 | // Software.
17 | //
18 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
19 | // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
20 | // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
21 | // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
22 | // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
23 | // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
24 | // DEALINGS IN THE SOFTWARE.
25 |
26 | use crate::window::Window;
27 | use crate::context::Context;
28 | use crate::pipeline_set::{PipelineSet, RectangleVertex};
29 | use crate::pipeline_key;
30 | use crate::script::{Script, BufferType, Operation};
31 | use crate::inspect::Inspector;
32 | use crate::vk;
33 | use crate::buffer::{self, MappedMemory, DeviceMemory, Buffer};
34 | use crate::flush_memory::{self, flush_memory};
35 | use crate::tolerance::Tolerance;
36 | use crate::slot;
37 | use crate::inspect;
38 | use std::fmt;
39 | use std::ptr;
40 | use std::mem;
41 | use std::rc::Rc;
42 | use std::ffi::c_int;
43 |
44 | #[derive(Debug)]
45 | pub struct CommandError {
46 | pub line_num: usize,
47 | pub error: Error,
48 | }
49 |
50 | #[derive(Debug)]
51 | pub enum Error {
52 | AllocateDescriptorSetsFailed,
53 | BeginCommandBufferFailed,
54 | EndCommandBufferFailed,
55 | ResetFencesFailed,
56 | QueueSubmitFailed,
57 | WaitForFencesFailed,
58 | ProbeFailed(ProbeFailedError),
59 | InvalidateMappedMemoryRangesFailed,
60 | BufferError(buffer::Error),
61 | FlushMemoryError(flush_memory::Error),
62 | CommandErrors(Vec<CommandError>),
63 | InvalidBufferBinding { desc_set: u32, binding: u32 },
64 | InvalidBufferOffset,
65 | SsboProbeFailed {
66 | slot_type: slot::Type,
67 | layout: slot::Layout,
68 | expected: Box<[u8]>,
69 | observed: Box<[u8]>,
70 | },
71 | }
72 |
73 | #[derive(Debug)]
74 | pub struct ProbeFailedError {
75 | x: u32,
76 | y: u32,
77 | expected: [f64; 4],
78 | observed: [f64; 4],
79 | n_components: usize,
80 | }
81 |
82 | #[derive(Debug, Copy, Clone, PartialEq, Eq)]
83 | enum State {
84 | /// Any rendering or computing has finished and we can read the
85 | /// buffers.
86 | Idle,
87 | /// The command buffer has begun
88 | CommandBuffer,
89 | /// The render pass has begun
90 | RenderPass,
91 | }
92 |
93 | impl fmt::Display for Error {
94 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
95 | match self {
96 | Error::AllocateDescriptorSetsFailed => {
97 | write!(f, "vkAllocateDescriptorSets failed")
98 | },
99 | Error::BeginCommandBufferFailed => {
100 | write!(f, "vkBeginCommandBuffer failed")
101 | },
102 | Error::EndCommandBufferFailed => {
103 | write!(f, "vkEndCommandBuffer failed")
104 | },
105 | Error::ResetFencesFailed => {
106 | write!(f, "vkResetFences failed")
107 | },
108 | Error::QueueSubmitFailed => {
109 | write!(f, "vkQueueSubmit failed")
110 | },
111 | Error::WaitForFencesFailed => {
112 | write!(f, "vkWaitForFences failed")
113 | },
114 | Error::InvalidateMappedMemoryRangesFailed => {
115 |                 write!(f, "vkInvalidateMappedMemoryRanges failed")
116 | },
117 | Error::ProbeFailed(e) => e.fmt(f),
118 | &Error::SsboProbeFailed {
119 | slot_type,
120 | layout,
121 | ref expected,
122 | ref observed
123 | } => {
124 | write!(
125 | f,
126 | "SSBO probe failed\n\
127 | \x20 Reference:",
128 | )?;
129 | write_slot(f, slot_type, layout, expected)?;
130 | write!(
131 | f,
132 | "\n\
133 | \x20 Observed: ",
134 | )?;
135 | write_slot(f, slot_type, layout, observed)?;
136 |
137 | Ok(())
138 | },
139 | Error::BufferError(e) => e.fmt(f),
140 | Error::FlushMemoryError(e) => e.fmt(f),
141 | Error::CommandErrors(errors) => {
142 | for (num, e) in errors.iter().enumerate() {
143 | if num > 0 {
144 | writeln!(f)?;
145 | }
146 | write!(f, "line {}: ", e.line_num)?;
147 | e.error.fmt(f)?;
148 | }
149 | Ok(())
150 | },
151 | Error::InvalidBufferBinding { desc_set, binding } => {
152 | write!(f, "Invalid buffer binding: {}:{}", desc_set, binding)
153 | },
154 | Error::InvalidBufferOffset => {
155 | write!(f, "Invalid buffer offset")
156 | },
157 | }
158 | }
159 | }
160 |
161 | fn write_slot(
162 | f: &mut fmt::Formatter,
163 | slot_type: slot::Type,
164 | layout: slot::Layout,
165 | values: &[u8],
166 | ) -> fmt::Result {
167 | let base_type = slot_type.base_type();
168 | let base_type_size = base_type.size();
169 |
170 | for offset in slot_type.offsets(layout) {
171 | let values = &values[offset..offset + base_type_size];
172 | write!(f, " {}", slot::BaseTypeInSlice::new(base_type, values))?;
173 | }
174 |
175 | Ok(())
176 | }
177 |
178 | fn format_pixel(f: &mut fmt::Formatter, pixel: &[f64]) -> fmt::Result {
179 | for component in pixel {
180 | write!(f, " {}", component)?;
181 | }
182 |
183 | Ok(())
184 | }
185 |
186 | impl fmt::Display for ProbeFailedError {
187 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
188 | write!(
189 | f,
190 | "Probe color at ({},{})\n\
191 | \x20 Expected:",
192 | self.x,
193 | self.y,
194 | )?;
195 | format_pixel(f, &self.expected[0..self.n_components])?;
196 | write!(
197 | f,
198 | "\n\
199 | \x20 Observed:"
200 | )?;
201 | format_pixel(f, &self.observed[0..self.n_components])
202 | }
203 | }
204 |
205 | impl From<buffer::Error> for Error {
206 | fn from(e: buffer::Error) -> Error {
207 | Error::BufferError(e)
208 | }
209 | }
210 |
211 | impl From<flush_memory::Error> for Error {
212 | fn from(e: flush_memory::Error) -> Error {
213 | Error::FlushMemoryError(e)
214 | }
215 | }
216 |
217 | #[derive(Debug)]
218 | struct DescriptorSetVec<'a> {
219 | handles: Vec<vk::VkDescriptorSet>,
220 | // needed for the destructor
221 | pipeline_set: &'a PipelineSet,
222 | window: &'a Window,
223 | }
224 |
225 | impl<'a> DescriptorSetVec<'a> {
226 | fn new(
227 | window: &'a Window,
228 | pipeline_set: &'a PipelineSet,
229 | ) -> Result<DescriptorSetVec<'a>, Error> {
230 | let layouts = pipeline_set.descriptor_set_layouts();
231 | let mut handles = Vec::with_capacity(layouts.len());
232 |
233 | if !layouts.is_empty() {
234 | let allocate_info = vk::VkDescriptorSetAllocateInfo {
235 | sType: vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
236 | pNext: ptr::null(),
237 | descriptorPool: pipeline_set.descriptor_pool().unwrap(),
238 | descriptorSetCount: layouts.len() as u32,
239 | pSetLayouts: layouts.as_ptr(),
240 | };
241 |
242 | let res = unsafe {
243 | window.device().vkAllocateDescriptorSets.unwrap()(
244 | window.vk_device(),
245 | ptr::addr_of!(allocate_info),
246 | handles.as_mut_ptr(),
247 | )
248 | };
249 |
250 | if res == vk::VK_SUCCESS {
251 | // SAFETY: We ensured the buffer had the right
252 | // capacity when we constructed it and the call to
253 | // vkAllocateDescriptorSets should have filled it with
254 | // valid values.
255 | unsafe {
256 | handles.set_len(layouts.len());
257 | }
258 | } else {
259 | return Err(Error::AllocateDescriptorSetsFailed);
260 | }
261 | }
262 |
263 | Ok(DescriptorSetVec {
264 | handles,
265 | pipeline_set,
266 | window,
267 | })
268 | }
269 | }
270 |
271 | impl<'a> Drop for DescriptorSetVec<'a> {
272 | fn drop(&mut self) {
273 | if self.handles.is_empty() {
274 | return;
275 | }
276 |
277 | unsafe {
278 | self.window.device().vkFreeDescriptorSets.unwrap()(
279 | self.window.vk_device(),
280 | self.pipeline_set.descriptor_pool().unwrap(),
281 | self.handles.len() as u32,
282 | self.handles.as_ptr(),
283 | );
284 | }
285 | }
286 | }
287 |
288 | #[derive(Debug)]
289 | struct TestBuffer {
290 | map: MappedMemory,
291 | memory: DeviceMemory,
292 | buffer: Buffer,
293 | size: usize,
294 | // true if the buffer has been modified through the CPU-mapped
295 | // memory since the last command buffer submission.
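    // Such buffers are flushed by flush_buffers() before the next submission.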
296 | pending_write: bool,
297 |
298 | }
299 |
300 | impl TestBuffer {
301 | fn new(
302 | context: Rc<Context>,
303 | size: usize,
304 | usage: vk::VkBufferUsageFlagBits,
305 | ) -> Result<TestBuffer, Error> {
306 | let buffer = Buffer::new(Rc::clone(&context), size, usage)?;
307 |
308 | let memory = DeviceMemory::new_buffer(
309 | Rc::clone(&context),
310 | vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
311 | buffer.buffer,
312 | )?;
313 |
314 | let map = MappedMemory::new(context, memory.memory)?;
315 |
316 | Ok(TestBuffer { map, memory, buffer, size, pending_write: false })
317 | }
318 | }
319 |
320 | fn allocate_buffer_objects(
321 | window: &Window,
322 | script: &Script,
323 | ) -> Result<Vec<TestBuffer>, Error> {
324 | let mut buffers = Vec::with_capacity(script.buffers().len());
325 |
326 | for script_buffer in script.buffers().iter() {
327 | let usage = match script_buffer.buffer_type {
328 | BufferType::Ubo => vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
329 | BufferType::Ssbo => vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
330 | };
331 |
332 | buffers.push(TestBuffer::new(
333 | Rc::clone(window.context()),
334 | script_buffer.size,
335 | usage,
336 | )?);
337 | }
338 |
339 | Ok(buffers)
340 | }
341 |
342 | fn write_descriptor_sets(
343 | window: &Window,
344 | script: &Script,
345 | buffers: &[TestBuffer],
346 | descriptor_sets: &[vk::VkDescriptorSet],
347 | ) {
348 | let script_buffers = script.buffers();
349 |
350 | let buffer_infos = buffers.iter()
351 | .map(|buffer| vk::VkDescriptorBufferInfo {
352 | buffer: buffer.buffer.buffer,
353 | offset: 0,
354 | range: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
355 | })
356 | .collect::<Vec<_>>();
357 |
358 | let writes = script_buffers.iter()
359 | .enumerate()
360 | .map(|(buffer_num, buffer)| vk::VkWriteDescriptorSet {
361 | sType: vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
362 | pNext: ptr::null(),
363 | dstSet: descriptor_sets[buffer.desc_set as usize],
364 | dstBinding: buffer.binding,
365 | dstArrayElement: 0,
366 | descriptorCount: 1,
367 | descriptorType: match buffer.buffer_type {
368 | BufferType::Ubo => vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
369 | BufferType::Ssbo => vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
370 | },
371 | pBufferInfo: buffer_infos[buffer_num..].as_ptr(),
372 | pImageInfo: ptr::null(),
373 | pTexelBufferView: ptr::null(),
374 | })
375 | .collect::<Vec<_>>();
376 |
377 | unsafe {
378 | window.device().vkUpdateDescriptorSets.unwrap()(
379 | window.vk_device(),
380 | writes.len() as u32,
381 | writes.as_ptr(),
382 | 0, // descriptorCopyCount
383 | ptr::null(), // pDescriptorCopies
384 | );
385 | }
386 | }
387 |
388 | fn compare_pixel(
389 | pixel_a: &[f64],
390 | pixel_b: &[f64],
391 | tolerance: &Tolerance,
392 | ) -> bool {
393 | std::iter::zip(pixel_a, pixel_b)
394 | .enumerate()
395 | .all(|(component, (&a, &b))| tolerance.equal(component, a, b))
396 | }
397 |
398 | #[derive(Debug)]
399 | struct Tester<'a> {
400 | window: &'a Window,
401 | pipeline_set: &'a PipelineSet,
402 | script: &'a Script,
403 | buffer_objects: Vec<TestBuffer>,
404 | test_buffers: Vec<TestBuffer>,
405 | descriptor_sets: DescriptorSetVec<'a>,
406 | bound_pipeline: Option<usize>,
407 | bo_descriptor_set_bound: bool,
408 | first_render: bool,
409 | state: State,
410 | vbo_buffer: Option<TestBuffer>,
411 | index_buffer: Option<TestBuffer>,
412 | inspector: Option<Inspector>,
413 | }
414 |
415 | impl<'a> Tester<'a> {
416 | fn new(
417 | window: &'a Window,
418 | pipeline_set: &'a PipelineSet,
419 | script: &'a Script,
420 | inspector: Option<Inspector>,
421 | ) -> Result<Tester<'a>, Error> {
422 | let buffer_objects = allocate_buffer_objects(window, script)?;
423 | let descriptor_sets = DescriptorSetVec::new(window, pipeline_set)?;
424 |
425 | write_descriptor_sets(
426 | window,
427 | script,
428 | &buffer_objects,
429 | &descriptor_sets.handles,
430 | );
431 |
432 | Ok(Tester {
433 | window,
434 | pipeline_set,
435 | script,
436 | buffer_objects,
437 | test_buffers: Vec::new(),
438 | descriptor_sets,
439 | bound_pipeline: None,
440 | bo_descriptor_set_bound: false,
441 | first_render: true,
442 | state: State::Idle,
443 | vbo_buffer: None,
444 | index_buffer: None,
445 | inspector,
446 | })
447 | }
448 |
449 | fn add_ssbo_barriers(&mut self) {
450 | let barriers = self.buffer_objects.iter().enumerate()
451 | .filter_map(|(buffer_num, buffer)| {
452 | match self.script.buffers()[buffer_num].buffer_type {
453 | BufferType::Ssbo => Some(vk::VkBufferMemoryBarrier {
454 | sType: vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
455 | pNext: ptr::null(),
456 | srcAccessMask: vk::VK_ACCESS_SHADER_WRITE_BIT,
457 | dstAccessMask: vk::VK_ACCESS_HOST_READ_BIT,
458 | srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
459 | dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
460 | buffer: buffer.buffer.buffer,
461 | offset: 0,
462 | size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
463 | }),
464 | _ => None,
465 | }
466 | })
467 | .collect::<Vec<_>>();
468 |
469 | if !barriers.is_empty() {
470 | unsafe {
471 | self.window.device().vkCmdPipelineBarrier.unwrap()(
472 | self.window.context().command_buffer(),
473 | vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
474 | vk::VK_PIPELINE_STAGE_HOST_BIT,
475 | 0, // dependencyFlags
476 | 0, // memoryBarrierCount
477 | ptr::null(), // pMemoryBarriers
478 | barriers.len() as u32, // bufferMemoryBarrierCount
479 | barriers.as_ptr(), // pBufferMemoryBarriers
480 | 0, // imageMemoryBarrierCount
481 | ptr::null(), // pImageMemoryBarriers
482 | );
483 | }
484 | }
485 | }
486 |
487 | fn flush_buffers(&mut self) -> Result<(), Error> {
488 | for buffer in self.buffer_objects.iter_mut() {
489 | if !buffer.pending_write {
490 | continue;
491 | }
492 |
493 | buffer.pending_write = false;
494 |
495 | flush_memory(
496 | self.window.context(),
497 | buffer.memory.memory_type_index as usize,
498 | buffer.memory.memory,
499 | 0, // offset
500 | vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
501 | )?;
502 | }
503 |
504 | Ok(())
505 | }
506 |
507 | fn begin_command_buffer(&mut self) -> Result<(), Error> {
508 | let begin_command_buffer_info = vk::VkCommandBufferBeginInfo {
509 | sType: vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
510 | pNext: ptr::null(),
511 | flags: 0,
512 | pInheritanceInfo: ptr::null(),
513 | };
514 |
515 | let res = unsafe {
516 | self.window.device().vkBeginCommandBuffer.unwrap()(
517 | self.window.context().command_buffer(),
518 | ptr::addr_of!(begin_command_buffer_info),
519 | )
520 | };
521 |
522 | if res == vk::VK_SUCCESS {
523 | self.bound_pipeline = None;
524 | self.bo_descriptor_set_bound = false;
525 |
526 | Ok(())
527 | } else {
528 | Err(Error::BeginCommandBufferFailed)
529 | }
530 | }
531 |
532 | fn reset_fence(&self) -> Result<(), Error> {
533 | let fence = self.window.context().fence();
534 |
535 | let res = unsafe {
536 | self.window.device().vkResetFences.unwrap()(
537 | self.window.vk_device(),
538 | 1, // fenceCount,
539 | ptr::addr_of!(fence),
540 | )
541 | };
542 |
543 | if res == vk::VK_SUCCESS {
544 | Ok(())
545 | } else {
546 | Err(Error::ResetFencesFailed)
547 | }
548 | }
549 |
550 | fn queue_submit(&self) -> Result<(), Error> {
551 | let command_buffer = self.window.context().command_buffer();
552 | let wait_dst_stage_mask = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
553 |
554 | let submit_info = vk::VkSubmitInfo {
555 | sType: vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
556 | pNext: ptr::null(),
557 | waitSemaphoreCount: 0,
558 | pWaitSemaphores: ptr::null(),
559 | pWaitDstStageMask: ptr::addr_of!(wait_dst_stage_mask),
560 | commandBufferCount: 1,
561 | pCommandBuffers: ptr::addr_of!(command_buffer),
562 | signalSemaphoreCount: 0,
563 | pSignalSemaphores: ptr::null(),
564 | };
565 |
566 | let res = unsafe {
567 | self.window.device().vkQueueSubmit.unwrap()(
568 | self.window.context().queue(),
569 | 1, // submitCount
570 | ptr::addr_of!(submit_info),
571 | self.window.context().fence(),
572 | )
573 | };
574 |
575 | if res == vk::VK_SUCCESS {
576 | Ok(())
577 | } else {
578 | Err(Error::QueueSubmitFailed)
579 | }
580 | }
581 |
582 | fn wait_for_fence(&self) -> Result<(), Error> {
583 | let fence = self.window.context().fence();
584 |
585 | let res = unsafe {
586 | self.window.device().vkWaitForFences.unwrap()(
587 | self.window.vk_device(),
588 | 1, // fenceCount
589 | ptr::addr_of!(fence),
590 | vk::VK_TRUE, // waitAll
591 | u64::MAX, // timeout
592 | )
593 | };
594 |
595 | if res == vk::VK_SUCCESS {
596 | Ok(())
597 | } else {
598 | Err(Error::WaitForFencesFailed)
599 | }
600 | }
601 |
602 | fn invalidate_window_linear_memory(&self) -> Result<(), Error> {
603 | if !self.window.need_linear_memory_invalidate() {
604 | return Ok(());
605 | }
606 |
607 | let memory_range = vk::VkMappedMemoryRange {
608 | sType: vk::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
609 | pNext: ptr::null(),
610 | memory: self.window.linear_memory(),
611 | offset: 0,
612 | size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
613 | };
614 |
615 | let res = unsafe {
616 | self.window.device().vkInvalidateMappedMemoryRanges.unwrap()(
617 | self.window.vk_device(),
618 | 1, // memoryRangeCount
619 | ptr::addr_of!(memory_range),
620 | )
621 | };
622 |
623 | if res == vk::VK_SUCCESS {
624 | Ok(())
625 | } else {
626 | Err(Error::InvalidateMappedMemoryRangesFailed)
627 | }
628 | }
629 |
630 | fn invalidate_ssbos(&self) -> Result<(), Error> {
631 | let memory_properties = self.window.context().memory_properties();
632 |
633 | let memory_ranges = self.buffer_objects.iter()
634 | .enumerate()
635 | .filter_map(|(buffer_num, buffer)| {
636 | if self.script.buffers()[buffer_num].buffer_type
637 | != BufferType::Ssbo
638 | {
639 | return None;
640 | }
641 |
642 | let memory_type = &memory_properties
643 | .memoryTypes[buffer.memory.memory_type_index as usize];
644 |
645 | // We don’t need to do anything if the memory is
646 | // already coherent
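                // (vkInvalidateMappedMemoryRanges is only required for
                // non-coherent memory types.)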
647 | if memory_type.propertyFlags
648 | & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
649 | != 0
650 | {
651 | return None;
652 | }
653 |
654 | Some(vk::VkMappedMemoryRange {
655 | sType: vk::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
656 | pNext: ptr::null(),
657 | memory: buffer.memory.memory,
658 | offset: 0,
659 | size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
660 | })
661 | })
662 | .collect::<Vec<_>>();
663 |
664 | if memory_ranges.is_empty() {
665 | Ok(())
666 | } else {
667 | let res = unsafe {
668 | self.window.device().vkInvalidateMappedMemoryRanges.unwrap()(
669 | self.window.vk_device(),
670 | memory_ranges.len() as u32,
671 | memory_ranges.as_ptr(),
672 | )
673 | };
674 |
675 | if res == vk::VK_SUCCESS {
676 | Ok(())
677 | } else {
678 | Err(Error::InvalidateMappedMemoryRangesFailed)
679 | }
680 | }
681 | }
682 |
683 | fn end_command_buffer(&mut self) -> Result<(), Error> {
684 | self.flush_buffers()?;
685 | self.add_ssbo_barriers();
686 |
687 | let res = unsafe {
688 | self.window.device().vkEndCommandBuffer.unwrap()(
689 | self.window.context().command_buffer(),
690 | )
691 | };
692 |
693 | if res != vk::VK_SUCCESS {
694 | return Err(Error::EndCommandBufferFailed);
695 | }
696 |
697 | self.reset_fence()?;
698 | self.queue_submit()?;
699 | self.wait_for_fence()?;
700 | self.invalidate_window_linear_memory()?;
701 | self.invalidate_ssbos()?;
702 |
703 | Ok(())
704 | }
705 |
706 | fn begin_render_pass(&mut self) {
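        // `!self.first_render as usize` selects render pass 0 for the first
        // render of the test and render pass 1 for every render after that.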
707 | let render_pass_index = !self.first_render as usize;
708 | let render_pass = self.window.render_passes()[render_pass_index];
709 | let window_format = self.window.format();
710 |
711 | let render_pass_begin_info = vk::VkRenderPassBeginInfo {
712 | sType: vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
713 | pNext: ptr::null(),
714 | renderPass: render_pass,
715 | framebuffer: self.window.framebuffer(),
716 | renderArea: vk::VkRect2D {
717 | offset: vk::VkOffset2D { x: 0, y: 0 },
718 | extent: vk::VkExtent2D {
719 | width: window_format.width as u32,
720 | height: window_format.height as u32,
721 | },
722 | },
723 | clearValueCount: 0,
724 | pClearValues: ptr::null(),
725 | };
726 |
727 | unsafe {
728 | self.window.device().vkCmdBeginRenderPass.unwrap()(
729 | self.window.context().command_buffer(),
730 | ptr::addr_of!(render_pass_begin_info),
731 | vk::VK_SUBPASS_CONTENTS_INLINE,
732 | );
733 | }
734 |
735 | self.first_render = false;
736 | }
737 |
738 | fn add_render_finish_barrier(&self) {
739 | // Image barrier: transition the layout but also ensure:
740 | // - rendering is complete before vkCmdCopyImageToBuffer (below) and
741 | // before any future color attachment accesses
742 | // - the color attachment writes are visible to vkCmdCopyImageToBuffer
743 |         //   and to any future color attachment accesses
744 | let render_finish_barrier = vk::VkImageMemoryBarrier {
745 | sType: vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
746 | pNext: ptr::null(),
747 | srcAccessMask: vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
748 | dstAccessMask: vk::VK_ACCESS_TRANSFER_READ_BIT
749 | | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
750 | | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
751 | oldLayout: vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
752 | newLayout: vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
753 | srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
754 | dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
755 | image: self.window.color_image(),
756 | subresourceRange: vk::VkImageSubresourceRange {
757 | aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
758 | baseMipLevel: 0,
759 | levelCount: 1,
760 | baseArrayLayer: 0,
761 | layerCount: 1
762 | },
763 | };
764 |
765 | unsafe {
766 | self.window.device().vkCmdPipelineBarrier.unwrap()(
767 | self.window.context().command_buffer(),
768 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
769 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT
770 | | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
771 | 0, // dependencyFlags
772 | 0, // memoryBarrierCount
773 | ptr::null(), // pMemoryBarriers
774 | 0, // bufferMemoryBarrierCount
775 | ptr::null(), // pBufferMemoryBarriers
776 | 1, // imageMemoryBarrierCount
777 | ptr::addr_of!(render_finish_barrier),
778 | );
779 | }
780 | }
781 |
782 | fn add_copy_to_linear_buffer(&self) {
783 | let window_format = self.window.format();
784 |
785 | let copy_region = vk::VkBufferImageCopy {
786 | bufferOffset: 0,
787 | bufferRowLength: window_format.width as u32,
788 | bufferImageHeight: window_format.height as u32,
789 | imageSubresource: vk::VkImageSubresourceLayers {
790 | aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
791 | mipLevel: 0,
792 | baseArrayLayer: 0,
793 | layerCount: 1,
794 | },
795 | imageOffset: vk::VkOffset3D { x: 0, y: 0, z: 0 },
796 | imageExtent: vk::VkExtent3D {
797 | width: window_format.width as u32,
798 | height: window_format.height as u32,
799 | depth: 1 as u32
800 | },
801 | };
802 |
803 | unsafe {
804 | self.window.device().vkCmdCopyImageToBuffer.unwrap()(
805 | self.window.context().command_buffer(),
806 | self.window.color_image(),
807 | vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
808 | self.window.linear_buffer(),
809 | 1, // regionCount
810 | ptr::addr_of!(copy_region),
811 | );
812 | }
813 | }
814 |
815 | fn add_copy_finish_barrier(&self) {
816 | // Image barrier: transition the layout back but also ensure:
817 | // - the copy image operation (above) completes before any future color
818 | // attachment operations
819 | // No memory dependencies are needed because the first set of operations
820 | // are reads.
821 | let render_finish_barrier = vk::VkImageMemoryBarrier {
822 | sType: vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
823 | pNext: ptr::null(),
824 | srcAccessMask: 0,
825 | dstAccessMask: 0,
826 | oldLayout: vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
827 | newLayout: vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
828 | srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
829 | dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
830 | image: self.window.color_image(),
831 | subresourceRange: vk::VkImageSubresourceRange {
832 | aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
833 | baseMipLevel: 0,
834 | levelCount: 1,
835 | baseArrayLayer: 0,
836 | layerCount: 1
837 | },
838 | };
839 |
840 | unsafe {
841 | self.window.device().vkCmdPipelineBarrier.unwrap()(
842 | self.window.context().command_buffer(),
843 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
844 | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
845 | 0, // dependencyFlags
846 | 0, // memoryBarrierCount
847 | ptr::null(), // pMemoryBarriers
848 | 0, // bufferMemoryBarrierCount
849 | ptr::null(), // pBufferMemoryBarriers
850 | 1, // imageMemoryBarrierCount
851 | ptr::addr_of!(render_finish_barrier),
852 | );
853 | }
854 | }
855 |
856 | fn add_write_finish_buffer_memory_barrier(&self) {
857 | // Buffer barrier: ensure the device transfer writes have
858 | // completed before the host reads and are visible to host
859 | // reads.
860 | let write_finish_buffer_memory_barrier = vk::VkBufferMemoryBarrier {
861 | sType: vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
862 | pNext: ptr::null(),
863 | srcAccessMask: vk::VK_ACCESS_TRANSFER_WRITE_BIT,
864 | dstAccessMask: vk::VK_ACCESS_HOST_READ_BIT,
865 | srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
866 | dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
867 | buffer: self.window.linear_buffer(),
868 | offset: 0,
869 | size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
870 | };
871 |
872 | unsafe {
873 | self.window.device().vkCmdPipelineBarrier.unwrap()(
874 | self.window.context().command_buffer(),
875 | vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
876 | vk::VK_PIPELINE_STAGE_HOST_BIT,
877 | 0, // dependencyFlags
878 | 0, // memoryBarrierCount
879 | ptr::null(), // pMemoryBarriers
880 | 1, // bufferMemoryBarrierCount
881 | ptr::addr_of!(write_finish_buffer_memory_barrier),
882 | 0, // imageMemoryBarrierCount
883 | ptr::null(), // pImageMemoryBarriers
884 | );
885 | }
886 | }
887 |
888 | fn end_render_pass(&self) {
889 | unsafe {
890 | self.window.device().vkCmdEndRenderPass.unwrap()(
891 | self.window.context().command_buffer(),
892 | );
893 | }
894 |
895 | self.add_render_finish_barrier();
896 | self.add_copy_to_linear_buffer();
897 | self.add_copy_finish_barrier();
898 | self.add_write_finish_buffer_memory_barrier();
899 | }
900 |
901 | fn forward_state(&mut self) -> Result<(), Error> {
902 | match &self.state {
903 | State::Idle => {
904 | self.begin_command_buffer()?;
905 | self.state = State::CommandBuffer;
906 | },
907 | State::CommandBuffer => {
908 | self.begin_render_pass();
909 | self.state = State::RenderPass;
910 | },
911 | State::RenderPass => unreachable!(
912 | "Tried to advance after last state"
913 | ),
914 | }
915 |
916 | Ok(())
917 | }
918 |
919 | fn backward_state(&mut self) -> Result<(), Error> {
920 | match &self.state {
921 | State::Idle => unreachable!(
922 | "Tried to go backward to before the first state"
923 | ),
924 | State::CommandBuffer => {
925 | self.end_command_buffer()?;
926 | self.state = State::Idle;
927 | },
928 | State::RenderPass => {
929 | self.end_render_pass();
930 | self.state = State::CommandBuffer;
931 | },
932 | }
933 |
934 | Ok(())
935 | }
936 |
937 | fn goto_state(&mut self, state: State) -> Result<(), Error> {
938 | while (self.state as usize) < state as usize {
939 | self.forward_state()?;
940 | }
941 | while (self.state as usize) > state as usize {
942 | self.backward_state()?;
943 | }
944 |
945 | Ok(())
946 | }
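    // For example, the draw commands call goto_state(State::RenderPass) before
    // recording, dispatch_compute() only needs State::CommandBuffer, and the
    // probe commands call goto_state(State::Idle) so that the submitted work
    // has completed before the results are read back.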
947 |
948 | fn bind_bo_descriptor_set_at_binding_point(
949 | &self,
950 | binding_point: vk::VkPipelineBindPoint
951 | ) {
952 | unsafe {
953 | self.window.device().vkCmdBindDescriptorSets.unwrap()(
954 | self.window.context().command_buffer(),
955 | binding_point,
956 | self.pipeline_set.layout(),
957 | 0, // firstSet
958 | self.descriptor_sets.handles.len() as u32,
959 | self.descriptor_sets.handles.as_ptr(),
960 | 0, // dynamicOffsetCount
961 | ptr::null(), // pDynamicOffsets
962 | );
963 | }
964 | }
965 |
966 | fn bind_bo_descriptor_set(&mut self) {
967 | if self.bo_descriptor_set_bound
968 | || self.descriptor_sets.handles.is_empty()
969 | {
970 | return;
971 | }
972 |
973 | if self.pipeline_set.stages() & !vk::VK_SHADER_STAGE_COMPUTE_BIT != 0 {
974 | self.bind_bo_descriptor_set_at_binding_point(
975 | vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
976 | );
977 | }
978 |
979 | if self.pipeline_set.stages() & vk::VK_SHADER_STAGE_COMPUTE_BIT != 0 {
980 | self.bind_bo_descriptor_set_at_binding_point(
981 | vk::VK_PIPELINE_BIND_POINT_COMPUTE,
982 | );
983 | }
984 |
985 | self.bo_descriptor_set_bound = true;
986 | }
987 |
988 | fn bind_pipeline(&mut self, pipeline_num: usize) {
989 | if Some(pipeline_num) == self.bound_pipeline {
990 | return;
991 | }
992 |
993 | let key = &self.script.pipeline_keys()[pipeline_num];
994 |
995 | let bind_point = match key.pipeline_type() {
996 | pipeline_key::Type::Graphics => vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
997 | pipeline_key::Type::Compute => vk::VK_PIPELINE_BIND_POINT_COMPUTE,
998 | };
999 |
1000 | unsafe {
1001 | self.window.device().vkCmdBindPipeline.unwrap()(
1002 | self.window.context().command_buffer(),
1003 | bind_point,
1004 | self.pipeline_set.pipelines()[pipeline_num],
1005 | );
1006 | }
1007 |
1008 | self.bound_pipeline = Some(pipeline_num);
1009 | }
1010 |
1011 | fn get_buffer_object(
1012 | &mut self,
1013 | desc_set: u32,
1014 | binding: u32,
1015 | ) -> Result<&mut TestBuffer, Error> {
1016 | match self.script
1017 | .buffers()
1018 | .binary_search_by(|buffer| {
1019 | buffer.desc_set
1020 | .cmp(&desc_set)
1021 | .then_with(|| buffer.binding.cmp(&binding))
1022 | })
1023 | {
1024 | Ok(buffer_num) => Ok(&mut self.buffer_objects[buffer_num]),
1025 | Err(_) => Err(Error::InvalidBufferBinding { desc_set, binding }),
1026 | }
1027 | }
1028 |
1029 | fn get_vbo_buffer(&mut self) -> Result<Option<&TestBuffer>, Error> {
1030 | if let Some(ref buffer) = self.vbo_buffer {
1031 | Ok(Some(buffer))
1032 | } else if let Some(vbo) = self.script.vertex_data() {
1033 | let buffer = TestBuffer::new(
1034 | Rc::clone(self.window.context()),
1035 | vbo.raw_data().len(),
1036 | vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
1037 | )?;
1038 |
1039 | unsafe {
1040 | std::slice::from_raw_parts_mut(
1041 | buffer.map.pointer as *mut u8,
1042 | buffer.size
1043 | ).copy_from_slice(vbo.raw_data());
1044 | }
1045 |
1046 | flush_memory(
1047 | self.window.context(),
1048 | buffer.memory.memory_type_index as usize,
1049 | buffer.memory.memory,
1050 | 0, // offset
1051 | vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
1052 | )?;
1053 |
1054 | Ok(Some(&*self.vbo_buffer.insert(buffer)))
1055 | } else {
1056 | Ok(None)
1057 | }
1058 | }
1059 |
1060 | fn get_index_buffer(&mut self) -> Result<&TestBuffer, Error> {
1061 | match self.index_buffer {
1062 | Some(ref buffer) => Ok(buffer),
1063 | None => {
1064 | let indices = self.script.indices();
1065 |
1066 | let buffer = TestBuffer::new(
1067 | Rc::clone(self.window.context()),
1068 | indices.len() * mem::size_of::<u16>(),
1069 | vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
1070 | )?;
1071 |
1072 | unsafe {
1073 | std::slice::from_raw_parts_mut(
1074 | buffer.map.pointer as *mut u16,
1075 | indices.len(),
1076 | ).copy_from_slice(indices);
1077 | }
1078 |
1079 | flush_memory(
1080 | self.window.context(),
1081 | buffer.memory.memory_type_index as usize,
1082 | buffer.memory.memory,
1083 | 0, // offset
1084 | vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
1085 | )?;
1086 |
1087 | Ok(&*self.index_buffer.insert(buffer))
1088 | }
1089 | }
1090 | }
1091 |
1092 | fn draw_rect(
1093 | &mut self,
1094 | op: &Operation,
1095 | ) -> Result<(), Error> {
1096 | let &Operation::DrawRect { x, y, w, h, pipeline_key } = op else {
1097 | unreachable!("bad op");
1098 | };
1099 |
1100 | let buffer = TestBuffer::new(
1101 | Rc::clone(self.window.context()),
1102 | mem::size_of::<RectangleVertex>() * 4,
1103 | vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT
1104 | )?;
1105 |
1106 | self.goto_state(State::RenderPass)?;
1107 |
1108 | let mut v: *mut RectangleVertex = buffer.map.pointer.cast();
1109 |
1110 | unsafe {
1111 | *v = RectangleVertex {
1112 | x: x,
1113 | y: y,
1114 | z: 0.0,
1115 | };
1116 | v = v.add(1);
1117 |
1118 | *v = RectangleVertex {
1119 | x: x + w,
1120 | y: y,
1121 | z: 0.0,
1122 | };
1123 | v = v.add(1);
1124 |
1125 | *v = RectangleVertex {
1126 | x: x,
1127 | y: y + h,
1128 | z: 0.0,
1129 | };
1130 | v = v.add(1);
1131 |
1132 | *v = RectangleVertex {
1133 | x: x + w,
1134 | y: y + h,
1135 | z: 0.0,
1136 | };
1137 | }
1138 |
1139 | flush_memory(
1140 | self.window.context(),
1141 | buffer.memory.memory_type_index as usize,
1142 | buffer.memory.memory,
1143 | 0, // offset
1144 | vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
1145 | )?;
1146 |
1147 | self.bind_bo_descriptor_set();
1148 | self.bind_pipeline(pipeline_key);
1149 |
1150 | let command_buffer = self.window.context().command_buffer();
1151 | let buffer_handle = buffer.buffer.buffer;
1152 | let offset = 0;
1153 |
1154 | unsafe {
1155 | self.window.device().vkCmdBindVertexBuffers.unwrap()(
1156 | command_buffer,
1157 | 0, // firstBinding
1158 | 1, // bindingCount
1159 | ptr::addr_of!(buffer_handle),
1160 | ptr::addr_of!(offset),
1161 | );
1162 | self.window.device().vkCmdDraw.unwrap()(
1163 | command_buffer,
1164 | 4, // vertexCount
1165 | 1, // instanceCount
1166 | 0, // firstVertex
1167 |                 0, // firstInstance
1168 | );
1169 | }
1170 |
1171 | self.test_buffers.push(buffer);
1172 |
1173 | Ok(())
1174 | }
1175 |
1176 | fn draw_arrays(
1177 | &mut self,
1178 | op: &Operation,
1179 | ) -> Result<(), Error> {
1180 | let &Operation::DrawArrays {
1181 | indexed,
1182 | vertex_count,
1183 | instance_count,
1184 | first_vertex,
1185 | first_instance,
1186 | pipeline_key,
1187 | ..
1188 | } = op else {
1189 | unreachable!("bad op");
1190 | };
1191 |
1192 | self.goto_state(State::RenderPass)?;
1193 |
1194 | let context = Rc::clone(self.window.context());
1195 |
1196 | if let Some(buffer) = self.get_vbo_buffer()? {
1197 | let offset = 0;
1198 |
1199 | unsafe {
1200 | context.device().vkCmdBindVertexBuffers.unwrap()(
1201 | context.command_buffer(),
1202 | 0, // firstBinding
1203 | 1, // bindingCount
1204 | ptr::addr_of!(buffer.buffer.buffer),
1205 | ptr::addr_of!(offset)
1206 | );
1207 | }
1208 | }
1209 |
1210 | self.bind_bo_descriptor_set();
1211 | self.bind_pipeline(pipeline_key);
1212 |
1213 | if indexed {
1214 | let index_buffer = self.get_index_buffer()?;
1215 |
1216 | unsafe {
1217 | context.device().vkCmdBindIndexBuffer.unwrap()(
1218 | context.command_buffer(),
1219 | index_buffer.buffer.buffer,
1220 | 0, // offset
1221 | vk::VK_INDEX_TYPE_UINT16,
1222 | );
1223 | context.device().vkCmdDrawIndexed.unwrap()(
1224 | context.command_buffer(),
1225 | vertex_count,
1226 | instance_count,
1227 | 0, // firstIndex
1228 | first_vertex as i32,
1229 | first_instance,
1230 | );
1231 | }
1232 | } else {
1233 | unsafe {
1234 | context.device().vkCmdDraw.unwrap()(
1235 | context.command_buffer(),
1236 | vertex_count,
1237 | instance_count,
1238 | first_vertex,
1239 | first_instance,
1240 | );
1241 | }
1242 | }
1243 |
1244 | Ok(())
1245 | }
1246 |
1247 | fn dispatch_compute(
1248 | &mut self,
1249 | op: &Operation,
1250 | ) -> Result<(), Error> {
1251 | let &Operation::DispatchCompute { x, y, z, pipeline_key } = op else {
1252 | unreachable!("bad op");
1253 | };
1254 |
1255 | self.goto_state(State::CommandBuffer)?;
1256 |
1257 | self.bind_bo_descriptor_set();
1258 | self.bind_pipeline(pipeline_key);
1259 |
1260 | unsafe {
1261 | self.window.device().vkCmdDispatch.unwrap()(
1262 | self.window.context().command_buffer(),
1263 | x,
1264 | y,
1265 | z,
1266 | );
1267 | }
1268 |
1269 | Ok(())
1270 | }
1271 |
1272 | fn probe_rect(
1273 | &mut self,
1274 | op: &Operation,
1275 | ) -> Result<(), Error> {
1276 | let &Operation::ProbeRect {
1277 | n_components,
1278 | x,
1279 | y,
1280 | w,
1281 | h,
1282 | ref color,
1283 | ref tolerance,
1284 | } = op else {
1285 | unreachable!("bad op");
1286 | };
1287 |
1288 | // End the render to copy the framebuffer into the linear buffer
1289 | self.goto_state(State::Idle)?;
1290 |
1291 | let linear_memory_map: *const u8 =
1292 | self.window.linear_memory_map().cast();
1293 | let stride = self.window.linear_memory_stride();
1294 | let format = self.window.format().color_format;
1295 | let format_size = format.size();
1296 | let n_components = n_components as usize;
1297 |
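        // end_render_pass() copied the framebuffer into the window's linear
        // buffer, so pixel (x, y) is read from the mapped memory at offset
        // y * stride + x * format_size.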
1298 | for y_offset in 0..h {
1299 | let mut p = unsafe {
1300 | linear_memory_map.add(
1301 | (y_offset + y) as usize * stride + x as usize * format_size
1302 | )
1303 | };
1304 |
1305 | for x_offset in 0..w {
1306 | let source = unsafe {
1307 | std::slice::from_raw_parts(p, format_size)
1308 | };
1309 |
1310 | let pixel = format.load_pixel(source);
1311 |
1312 | if !compare_pixel(
1313 | &pixel[0..n_components],
1314 | &color[0..n_components],
1315 | tolerance,
1316 | ) {
1317 | return Err(Error::ProbeFailed(ProbeFailedError {
1318 | x: x + x_offset,
1319 | y: y + y_offset,
1320 | expected: color.clone(),
1321 | observed: pixel,
1322 | n_components,
1323 | }));
1324 | }
1325 |
1326 | unsafe {
1327 | p = p.add(format_size);
1328 | }
1329 | }
1330 | }
1331 |
1332 | Ok(())
1333 | }
1334 |
1335 | fn probe_ssbo(
1336 | &mut self,
1337 | op: &Operation,
1338 | ) -> Result<(), Error> {
1339 | let &Operation::ProbeSsbo {
1340 | desc_set,
1341 | binding,
1342 | comparison,
1343 | offset,
1344 | slot_type,
1345 | layout,
1346 | ref values,
1347 | ref tolerance,
1348 | } = op else {
1349 | unreachable!("bad op");
1350 | };
1351 |
1352 | self.goto_state(State::Idle)?;
1353 |
1354 | let buffer = self.get_buffer_object(desc_set, binding)?;
1355 |
1356 | let buffer_slice = unsafe {
1357 | std::slice::from_raw_parts(
1358 | buffer.map.pointer as *const u8,
1359 | buffer.size,
1360 | )
1361 | };
1362 |
1363 | let type_size = slot_type.size(layout);
1364 | let observed_stride = slot_type.array_stride(layout);
1365 | // The values are tightly packed in the operation buffer so we
1366 | // don’t want to use the observed_stride
1367 | let n_values = values.len() / type_size;
1368 |
1369 | if offset
1370 | + (n_values - 1) * observed_stride
1371 | + type_size
1372 | > buffer_slice.len()
1373 | {
1374 | return Err(Error::InvalidBufferOffset);
1375 | }
1376 |
1377 | let buffer_slice = &buffer_slice[offset..];
1378 |
1379 | for i in 0..n_values {
1380 | let observed = &buffer_slice[i * observed_stride
1381 | ..i * observed_stride + type_size];
1382 | let expected = &values[i * type_size..(i + 1) * type_size];
1383 |
1384 | if !comparison.compare(
1385 | tolerance,
1386 | slot_type,
1387 | layout,
1388 | observed,
1389 | expected,
1390 | ) {
1391 | return Err(Error::SsboProbeFailed {
1392 | slot_type,
1393 | layout,
1394 | expected: expected.into(),
1395 | observed: observed.into(),
1396 | });
1397 | }
1398 | }
1399 |
1400 | Ok(())
1401 | }
1402 |
1403 | fn set_push_command(
1404 | &mut self,
1405 | op: &Operation,
1406 | ) -> Result<(), Error> {
1407 | let &Operation::SetPushCommand { offset, ref data } = op else {
1408 | unreachable!("bad op");
1409 | };
1410 |
1411 | if (self.state as usize) < State::CommandBuffer as usize {
1412 | self.goto_state(State::CommandBuffer)?;
1413 | }
1414 |
1415 | unsafe {
1416 | self.window.device().vkCmdPushConstants.unwrap()(
1417 | self.window.context().command_buffer(),
1418 | self.pipeline_set.layout(),
1419 | self.pipeline_set.stages(),
1420 | offset as u32,
1421 | data.len() as u32,
1422 | data.as_ptr().cast(),
1423 | );
1424 | }
1425 |
1426 | Ok(())
1427 | }
1428 |
1429 | fn set_buffer_data(
1430 | &mut self,
1431 | op: &Operation,
1432 | ) -> Result<(), Error> {
1433 | let &Operation::SetBufferData {
1434 | desc_set,
1435 | binding,
1436 | offset,
1437 | ref data
1438 | } = op else {
1439 | unreachable!("bad op");
1440 | };
1441 |
1442 | let buffer = self.get_buffer_object(desc_set, binding)
1443 | .expect(
1444 | "The script parser should make a buffer mentioned by \
1445 | any buffer data command and the tester should make a \
1446 | buffer for every buffer described by the script"
1447 | );
1448 |
1449 | let buffer_slice = unsafe {
1450 | std::slice::from_raw_parts_mut(
1451 | (buffer.map.pointer as *mut u8).add(offset),
1452 | data.len(),
1453 | )
1454 | };
1455 |
1456 | buffer_slice.copy_from_slice(data);
1457 |
1458 | buffer.pending_write = true;
1459 |
1460 | Ok(())
1461 | }
1462 |
1463 | fn clear(
1464 | &mut self,
1465 | op: &Operation,
1466 | ) -> Result<(), Error> {
1467 | let &Operation::Clear { ref color, depth, stencil } = op else {
1468 | unreachable!("bad op");
1469 | };
1470 |
1471 | let window_format = self.window.format();
1472 |
1473 | let depth_stencil_flags = match window_format.depth_stencil_format {
1474 | Some(format) => format.depth_stencil_aspect_flags(),
1475 | None => 0,
1476 | };
1477 |
1478 | self.goto_state(State::RenderPass)?;
1479 |
1480 | let clear_attachments = [
1481 | vk::VkClearAttachment {
1482 | aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
1483 | colorAttachment: 0,
1484 | clearValue: vk::VkClearValue {
1485 | color: vk::VkClearColorValue {
1486 | float32: color.clone(),
1487 | },
1488 | },
1489 | },
1490 | vk::VkClearAttachment {
1491 | aspectMask: depth_stencil_flags,
1492 | colorAttachment: 0,
1493 | clearValue: vk::VkClearValue {
1494 | depthStencil: vk::VkClearDepthStencilValue {
1495 | depth,
1496 | stencil,
1497 | },
1498 | },
1499 | },
1500 | ];
1501 |
1502 | let clear_rect = vk::VkClearRect {
1503 | rect: vk::VkRect2D {
1504 | offset: vk::VkOffset2D { x: 0, y: 0 },
1505 | extent: vk::VkExtent2D {
1506 | width: self.window.format().width as u32,
1507 | height: self.window.format().height as u32,
1508 | },
1509 | },
1510 | baseArrayLayer: 0,
1511 | layerCount: 1,
1512 | };
1513 |
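        // Only the color attachment is cleared unless the window has a
        // depth/stencil format, in which case the second clear_attachments
        // entry is submitted as well.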
1514 | let n_attachments = 1 + (depth_stencil_flags != 0) as usize;
1515 |
1516 | unsafe {
1517 | self.window.device().vkCmdClearAttachments.unwrap()(
1518 | self.window.context().command_buffer(),
1519 | n_attachments as u32,
1520 | ptr::addr_of!(clear_attachments[0]),
1521 | 1, // rectCount
1522 | ptr::addr_of!(clear_rect),
1523 | );
1524 | }
1525 |
1526 | Ok(())
1527 | }
1528 |
1529 | fn run_operation(
1530 | &mut self,
1531 | op: &Operation,
1532 | ) -> Result<(), Error> {
1533 | match op {
1534 | Operation::DrawRect { .. } => self.draw_rect(op),
1535 | Operation::DrawArrays { .. } => self.draw_arrays(op),
1536 | Operation::DispatchCompute { .. } => self.dispatch_compute(op),
1537 | Operation::ProbeRect { .. } => self.probe_rect(op),
1538 | Operation::ProbeSsbo { .. } => self.probe_ssbo(op),
1539 | Operation::SetPushCommand { .. } => self.set_push_command(op),
1540 | Operation::SetBufferData { .. } => self.set_buffer_data(op),
1541 | Operation::Clear { .. } => self.clear(op),
1542 | }
1543 | }
1544 |
1545 | fn inspect(&self) {
1546 | let Some(inspector) = self.inspector.as_ref() else { return; };
1547 |
1548 | let buffers = self.buffer_objects
1549 | .iter()
1550 | .enumerate()
1551 | .map(|(buffer_num, buffer)| {
1552 | inspect::Buffer {
1553 | binding: self.script.buffers()[buffer_num].binding as c_int,
1554 | size: buffer.size,
1555 | data: buffer.map.pointer,
1556 | }
1557 | })
1558 | .collect::<Vec<_>>();
1559 |
1560 | let window_format = self.window.format();
1561 |
1562 | let data = inspect::Data {
1563 | color_buffer: inspect::Image {
1564 | width: window_format.width as c_int,
1565 | height: window_format.height as c_int,
1566 | stride: self.window.linear_memory_stride(),
1567 | format: window_format.color_format,
1568 | data: self.window.linear_memory_map(),
1569 | },
1570 | n_buffers: buffers.len(),
1571 | buffers: if buffers.is_empty() {
1572 | ptr::null()
1573 | } else {
1574 | buffers.as_ptr()
1575 | },
1576 | };
1577 |
1578 | inspector.inspect(&data);
1579 | }
1580 | }
1581 |
1582 | pub(crate) fn run(
1583 | window: &Window,
1584 | pipeline_set: &PipelineSet,
1585 | script: &Script,
1586 | inspector: Option<Inspector>,
1587 | ) -> Result<(), Error> {
1588 | let mut tester = Tester::new(window, pipeline_set, script, inspector)?;
1589 | let mut errors = Vec::new();
1590 |
1591 | for command in script.commands().iter() {
1592 | if let Err(e) = tester.run_operation(&command.op) {
1593 | errors.push(CommandError {
1594 | line_num: command.line_num,
1595 | error: e,
1596 | });
1597 | }
1598 | }
1599 |
1600 | if let Err(error) = tester.goto_state(State::Idle) {
1601 | let line_num = match script.commands().last() {
1602 | Some(command) => command.line_num,
1603 | None => 1,
1604 | };
1605 |
1606 | errors.push(CommandError { line_num, error });
1607 | }
1608 |
1609 | tester.inspect();
1610 |
1611 | if errors.is_empty() {
1612 | Ok(())
1613 | } else {
1614 | Err(Error::CommandErrors(errors))
1615 | }
1616 | }
1617 |
1618 | #[cfg(test)]
1619 | mod test {
1620 | use super::*;
1621 | use crate::fake_vulkan::{FakeVulkan, Command, HandleType, ClearAttachment};
1622 | use crate::requirements::Requirements;
1623 | use crate::logger::Logger;
1624 | use crate::source::Source;
1625 | use crate::window_format::WindowFormat;
1626 | use crate::config::Config;
1627 | use std::ffi::c_void;
1628 |
1629 | #[derive(Debug)]
1630 | struct TestData {
1631 | pipeline_set: PipelineSet,
1632 | window: Rc<Window>,
1633 | context: Rc<Context>,
1634 | fake_vulkan: Box<FakeVulkan>,
1635 | }
1636 |
1637 | impl TestData {
1638 | fn new_full(
1639 | source: &str,
1640 | inspector: Option<Inspector>,
1641 | ) -> Result<TestData, Error> {
1642 | let mut fake_vulkan = FakeVulkan::new();
1643 |
1644 | fake_vulkan.physical_devices.push(Default::default());
1645 | fake_vulkan.physical_devices[0].format_properties.insert(
1646 | vk::VK_FORMAT_B8G8R8A8_UNORM,
1647 | vk::VkFormatProperties {
1648 | linearTilingFeatures: 0,
1649 | optimalTilingFeatures:
1650 | vk::VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
1651 | | vk::VK_FORMAT_FEATURE_BLIT_SRC_BIT,
1652 | bufferFeatures: 0,
1653 | },
1654 | );
1655 | fake_vulkan.physical_devices[0].format_properties.insert(
1656 | vk::VK_FORMAT_D24_UNORM_S8_UINT,
1657 | vk::VkFormatProperties {
1658 | linearTilingFeatures: 0,
1659 | optimalTilingFeatures:
1660 | vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
1661 | bufferFeatures: 0,
1662 | },
1663 | );
1664 |
1665 | let memory_properties =
1666 | &mut fake_vulkan.physical_devices[0].memory_properties;
1667 | memory_properties.memoryTypes[0].propertyFlags =
1668 | vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
1669 | memory_properties.memoryTypeCount = 1;
1670 | fake_vulkan.memory_requirements.memoryTypeBits = 1;
1671 |
1672 | fake_vulkan.set_override();
1673 | let context = Rc::new(Context::new(
1674 | &Requirements::new(),
1675 | None, // device_id
1676 | ).unwrap());
1677 |
1678 | let source = Source::from_string(source.to_string());
1679 | let script = Script::load(&Config::new(), &source).unwrap();
1680 |
1681 | let window = Rc::new(Window::new(
1682 | Rc::clone(&context),
1683 | script.window_format(),
1684 | ).unwrap());
1685 |
1686 | let mut logger = Logger::new(None, ptr::null_mut());
1687 |
1688 | let pipeline_set = PipelineSet::new(
1689 | &mut logger,
1690 | Rc::clone(&window),
1691 | &script,
1692 | false, // show_disassembly
1693 | ).unwrap();
1694 |
1695 | run(
1696 | &window,
1697 | &pipeline_set,
1698 | &script,
1699 | inspector,
1700 | )?;
1701 |
1702 | Ok(TestData {
1703 | pipeline_set,
1704 | window,
1705 | context,
1706 | fake_vulkan,
1707 | })
1708 | }
1709 |
1710 | fn new(source: &str) -> Result<TestData, Error> {
1711 | TestData::new_full(
1712 | source,
1713 | None, // inspector
1714 | )
1715 | }
1716 | }
1717 |
1718 | #[test]
1719 | fn rectangle() {
1720 | let test_data = TestData::new(
1721 | "[test]\n\
1722 | draw rect -1 -1 2 2"
1723 | ).unwrap();
1724 |
1725 | let mut commands = test_data.fake_vulkan.commands.iter();
1726 |
1727 | let &Command::BeginRenderPass(ref begin_info) = commands.next().unwrap()
1728 | else { unreachable!("Bad command"); };
1729 |
1730 | assert_eq!(begin_info.renderPass, test_data.window.render_passes()[0]);
1731 | assert_eq!(begin_info.framebuffer, test_data.window.framebuffer());
1732 | assert_eq!(begin_info.renderArea.offset.x, 0);
1733 | assert_eq!(begin_info.renderArea.offset.y, 0);
1734 | assert_eq!(
1735 | begin_info.renderArea.extent.width as usize,
1736 | WindowFormat::default().width,
1737 | );
1738 | assert_eq!(
1739 | begin_info.renderArea.extent.height as usize,
1740 | WindowFormat::default().height,
1741 | );
1742 | assert_eq!(begin_info.clearValueCount, 0);
1743 |
1744 | let &Command::BindPipeline {
1745 | bind_point,
1746 | pipeline,
1747 | } = commands.next().unwrap()
1748 | else { unreachable!("Bad command"); };
1749 |
1750 | assert_eq!(test_data.pipeline_set.pipelines().len(), 1);
1751 | assert_eq!(test_data.pipeline_set.pipelines()[0], pipeline);
1752 | assert_eq!(bind_point, vk::VK_PIPELINE_BIND_POINT_GRAPHICS);
1753 |
1754 | let &Command::BindVertexBuffers {
1755 | first_binding,
1756 | ref buffers,
1757 | ref offsets,
1758 | } = commands.next().unwrap()
1759 | else { unreachable!("Bad command"); };
1760 |
1761 | assert_eq!(first_binding, 0);
1762 | assert_eq!(buffers.len(), 1);
1763 | assert_eq!(offsets, &[0]);
1764 |
1765 | let HandleType::Buffer { memory: Some(memory), .. } =
1766 | test_data.fake_vulkan.get_freed_handle(buffers[0]).data
1767 | else { unreachable!("Failed to get buffer memory"); };
1768 |
1769 | let HandleType::Memory { ref contents, .. } =
1770 | test_data.fake_vulkan.get_freed_handle(memory).data
1771 | else { unreachable!("Mismatched handle"); };
1772 |
1773 | let mut expected_contents = Vec::<u8>::new();
1774 | for component in [
1775 | -1f32, -1f32, 0f32,
1776 | 1f32, -1f32, 0f32,
1777 | -1f32, 1f32, 0f32,
1778 | 1f32, 1f32, 0f32,
1779 | ] {
1780 | expected_contents.extend(&component.to_ne_bytes());
1781 | }
1782 | assert_eq!(contents, &expected_contents);
1783 |
1784 | let &Command::Draw {
1785 | vertex_count,
1786 | instance_count,
1787 | first_vertex,
1788 | first_instance,
1789 | } = commands.next().unwrap()
1790 | else { unreachable!("Bad command"); };
1791 |
1792 | assert_eq!(vertex_count, 4);
1793 | assert_eq!(instance_count, 1);
1794 | assert_eq!(first_vertex, 0);
1795 | assert_eq!(first_instance, 0);
1796 |
1797 | assert!(matches!(commands.next(), Some(Command::EndRenderPass)));
1798 |
1799 | let &Command::PipelineBarrier {
1800 | ref image_memory_barriers,
1801 | ..
1802 | } = commands.next().unwrap()
1803 | else { unreachable!("Bad command"); };
1804 | assert_eq!(image_memory_barriers.len(), 1);
1805 | assert_eq!(
1806 | image_memory_barriers[0].image,
1807 | test_data.window.color_image()
1808 | );
1809 |
1810 | let &Command::CopyImageToBuffer {
1811 | src_image,
1812 | dst_buffer,
1813 | ..
1814 | } = commands.next().unwrap()
1815 | else { unreachable!("Bad command"); };
1816 |
1817 | assert_eq!(src_image, test_data.window.color_image());
1818 | assert_eq!(dst_buffer, test_data.window.linear_buffer());
1819 |
1820 | let &Command::PipelineBarrier {
1821 | ref image_memory_barriers,
1822 | ..
1823 | } = commands.next().unwrap()
1824 | else { unreachable!("Bad command"); };
1825 | assert_eq!(image_memory_barriers.len(), 1);
1826 | assert_eq!(
1827 | image_memory_barriers[0].image,
1828 | test_data.window.color_image()
1829 | );
1830 |
1831 | let &Command::PipelineBarrier {
1832 | ref buffer_memory_barriers,
1833 | ..
1834 | } = commands.next().unwrap()
1835 | else { unreachable!("Bad command"); };
1836 | assert_eq!(buffer_memory_barriers.len(), 1);
1837 | assert_eq!(
1838 | buffer_memory_barriers[0].buffer,
1839 | test_data.window.linear_buffer()
1840 | );
1841 |
1842 | assert!(commands.next().is_none());
1843 |
1844 | // There should only be one flush with the RectangleVertex vbo
1845 | assert_eq!(test_data.fake_vulkan.memory_flushes.len(), 1);
1846 |
1847 | assert_eq!(test_data.fake_vulkan.memory_invalidations.len(), 1);
1848 | assert_eq!(
1849 | test_data.fake_vulkan.memory_invalidations[0].memory,
1850 | test_data.window.linear_memory(),
1851 | );
1852 |
1853 | let HandleType::Fence { reset_count, wait_count } =
1854 | test_data.fake_vulkan.get_freed_handle(
1855 | test_data.context.fence()
1856 | ).data
1857 | else { unreachable!("Bad handle"); };
1858 |
1859 | assert_eq!(reset_count, 1);
1860 | assert_eq!(wait_count, 1);
1861 | }
1862 |
1863 | #[test]
1864 | fn vbo() {
1865 | let test_data = TestData::new(
1866 | "[vertex data]\n\
1867 | 0/R32_SFLOAT\n\
1868 | 1\n\
1869 | 2\n\
1870 | 3\n\
1871 | [test]\n\
1872 | draw arrays TRIANGLE_LIST 0 3"
1873 | ).unwrap();
1874 |
1875 | let mut commands = test_data.fake_vulkan.commands.iter();
1876 |
1877 | assert!(matches!(
1878 | commands.next(),
1879 | Some(Command::BeginRenderPass { .. })
1880 | ));
1881 |
1882 | let &Command::BindVertexBuffers {
1883 | first_binding,
1884 | ref buffers,
1885 | ref offsets,
1886 | } = commands.next().unwrap()
1887 | else { unreachable!("Bad command"); };
1888 |
1889 | assert_eq!(first_binding, 0);
1890 | assert_eq!(buffers.len(), 1);
1891 | assert_eq!(offsets, &[0]);
1892 |
1893 | let HandleType::Buffer { memory: Some(memory), .. } =
1894 | test_data.fake_vulkan.get_freed_handle(buffers[0]).data
1895 | else { unreachable!("Failed to get buffer memory"); };
1896 |
1897 | let HandleType::Memory { ref contents, .. } =
1898 | test_data.fake_vulkan.get_freed_handle(memory).data
1899 | else { unreachable!("Mismatched handle"); };
1900 |
1901 | let mut expected_contents = Vec::<u8>::new();
1902 | for component in [1f32, 2f32, 3f32] {
1903 | expected_contents.extend(&component.to_ne_bytes());
1904 | }
1905 | assert_eq!(contents, &expected_contents);
1906 |
1907 | assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));
1908 |
1909 | let &Command::Draw {
1910 | vertex_count,
1911 | instance_count,
1912 | first_vertex,
1913 | first_instance,
1914 | } = commands.next().unwrap()
1915 | else { unreachable!("Bad command"); };
1916 |
1917 | assert_eq!(vertex_count, 3);
1918 | assert_eq!(instance_count, 1);
1919 | assert_eq!(first_vertex, 0);
1920 | assert_eq!(first_instance, 0);
1921 | }
1922 |
1923 | #[test]
1924 | fn dispatch_compute() {
1925 | let test_data = TestData::new(
1926 | "[test]\n\
1927 | compute 1 2 3"
1928 | ).unwrap();
1929 |
1930 | let mut commands = test_data.fake_vulkan.commands.iter();
1931 |
1932 | assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));
1933 |
1934 | let &Command::Dispatch { x, y, z } = commands.next().unwrap()
1935 | else { unreachable!("Bad command"); };
1936 |
1937 | assert_eq!((x, y, z), (1, 2, 3));
1938 |
1939 | assert!(commands.next().is_none());
1940 | }
1941 |
1942 | #[test]
1943 | fn clear() {
1944 | let test_data = TestData::new(
1945 | "[test]\n\
1946 | clear color 1 2 3 4
1947 | clear"
1948 | ).unwrap();
1949 |
1950 | let mut commands = test_data.fake_vulkan.commands.iter();
1951 |
1952 | assert!(matches!(
1953 | commands.next(),
1954 | Some(Command::BeginRenderPass { .. })
1955 | ));
1956 |
1957 | let &Command::ClearAttachments {
1958 | ref attachments,
1959 | ref rects,
1960 | } = commands.next().unwrap()
1961 | else { unreachable!("Bad command"); };
1962 |
1963 | assert_eq!(attachments.len(), 1);
1964 |
1965 | match &attachments[0] {
1966 | &ClearAttachment::Color { attachment, value } => {
1967 | assert_eq!(attachment, 0);
1968 | assert_eq!(value, [1f32, 2f32, 3f32, 4f32]);
1969 | },
1970 |             _ => unreachable!("unexpected clear attachment type"),
1971 | }
1972 |
1973 | assert_eq!(rects.len(), 1);
1974 | assert_eq!(
1975 | rects[0].rect.extent.width as usize,
1976 | WindowFormat::default().width
1977 | );
1978 | assert_eq!(
1979 | rects[0].rect.extent.height as usize,
1980 | WindowFormat::default().height
1981 | );
1982 | }
1983 |
1984 | #[test]
1985 | fn clear_depth_stencil() {
1986 | let test_data = TestData::new(
1987 | "[require]\n\
1988 | depthstencil D24_UNORM_S8_UINT\n\
1989 | [test]\n\
1990 | clear depth 2.0\n\
1991 | clear stencil 5\n\
1992 | clear"
1993 | ).unwrap();
1994 |
1995 | let mut commands = test_data.fake_vulkan.commands.iter();
1996 |
1997 | assert!(matches!(
1998 | commands.next(),
1999 | Some(Command::BeginRenderPass { .. })
2000 | ));
2001 |
2002 | let &Command::ClearAttachments {
2003 | ref attachments,
2004 | ref rects,
2005 | } = commands.next().unwrap()
2006 | else { unreachable!("Bad command"); };
2007 |
2008 | assert_eq!(attachments.len(), 2);
2009 |
2010 | match &attachments[1] {
2011 | &ClearAttachment::DepthStencil { aspect_mask, value } => {
2012 | assert_eq!(
2013 | aspect_mask,
2014 | vk::VK_IMAGE_ASPECT_DEPTH_BIT
2015 | | vk::VK_IMAGE_ASPECT_STENCIL_BIT
2016 | );
2017 | assert_eq!(value.depth, 2.0);
2018 | assert_eq!(value.stencil, 5);
2019 | },
2020 |             _ => unreachable!("unexpected clear attachment type"),
2021 | }
2022 |
2023 | assert_eq!(rects.len(), 1);
2024 | assert_eq!(
2025 | rects[0].rect.extent.width as usize,
2026 | WindowFormat::default().width
2027 | );
2028 | assert_eq!(
2029 | rects[0].rect.extent.height as usize,
2030 | WindowFormat::default().height
2031 | );
2032 | }
2033 |
2034 | #[test]
2035 | fn push_constants() {
2036 | let test_data = TestData::new(
2037 | "[test]\n\
2038 | push uint8_t 1 12\n\
2039 | push u8vec2 2 13 14"
2040 | ).unwrap();
2041 |
2042 | let mut commands = test_data.fake_vulkan.commands.iter();
2043 |
2044 | let &Command::PushConstants {
2045 | layout,
2046 | stage_flags,
2047 | offset,
2048 | ref values,
2049 | } = commands.next().unwrap()
2050 | else { unreachable!("Bad command"); };
2051 |
2052 | assert_eq!(layout, test_data.pipeline_set.layout());
2053 | assert_eq!(stage_flags, 0);
2054 | assert_eq!(offset, 1);
2055 | assert_eq!(values.as_slice(), [12].as_slice());
2056 |
2057 | let &Command::PushConstants {
2058 | layout,
2059 | stage_flags,
2060 | offset,
2061 | ref values,
2062 | } = commands.next().unwrap()
2063 | else { unreachable!("Bad command"); };
2064 |
2065 | assert_eq!(layout, test_data.pipeline_set.layout());
2066 | assert_eq!(stage_flags, 0);
2067 | assert_eq!(offset, 2);
2068 | assert_eq!(values.as_slice(), [13, 14].as_slice());
2069 | }
2070 |
2071 | #[test]
2072 | fn set_buffer_data() {
2073 | let test_data = TestData::new(
2074 | "[fragment shader]\n\
2075 | 03 02 23 07\n\
2076 | [test]\n\
2077 | ssbo 5 subdata uint8_t 1 1 2 3\n\
2078 | # draw command to make it flush the memory\n\
2079 | draw rect -1 -1 2 2"
2080 | ).unwrap();
2081 |
2082 | let &Command::BindDescriptorSets {
2083 | first_set,
2084 | ref descriptor_sets,
2085 | ..
2086 | } = test_data.fake_vulkan.commands.iter().find(|command| {
2087 | matches!(command, Command::BindDescriptorSets { .. })
2088 | }).unwrap()
2089 | else { unreachable!() };
2090 |
2091 | assert_eq!(first_set, 0);
2092 | assert_eq!(descriptor_sets.len(), 1);
2093 |
2094 | let HandleType::DescriptorSet {
2095 | ref bindings
2096 | } = test_data.fake_vulkan.get_freed_handle(descriptor_sets[0]).data
2097 | else { unreachable!("bad handle"); };
2098 |
2099 | let descriptor_type = bindings[&5].descriptor_type;
2100 | assert_eq!(descriptor_type, vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);
2101 |
2102 | let buffer_handle = bindings[&5].info.buffer;
2103 |
2104 | let HandleType::Buffer {
2105 | memory: Some(memory_handle),
2106 | ..
2107 | } = test_data.fake_vulkan.get_freed_handle(buffer_handle).data
2108 | else { unreachable!("failed to get buffer memory"); };
2109 |
2110 | let HandleType::Memory {
2111 | ref contents,
2112 | ..
2113 | } = test_data.fake_vulkan.get_freed_handle(memory_handle).data
2114 | else { unreachable!("bad handle"); };
2115 |
2116 | assert_eq!(contents, &[0, 1, 2, 3]);
2117 |
2118 | test_data.fake_vulkan.memory_flushes.iter().find(|flush| {
2119 | flush.memory == memory_handle
2120 | }).expect("expected ssbo memory to be flushed");
2121 | }
2122 |
2123 | #[test]
2124 | fn probe_ssbo_success() {
2125 | TestData::new(
2126 | "[test]\n\
2127 | ssbo 5 subdata uint8_t 1 1 2 3\n\
2128 | probe ssbo u8vec4 5 0 == 0 1 2 3"
2129 | ).expect("expected ssbo probe to succeed");
2130 | }
2131 |
2132 | #[test]
2133 | fn probe_ssbo_fail() {
2134 | let error = TestData::new(
2135 | "[test]\n\
2136 | ssbo 5 subdata uint8_t 1 1 2 3\n\
2137 | probe ssbo u8vec4 5 0 == 0 1 2 4"
2138 | ).unwrap_err();
2139 |
2140 | assert_eq!(
2141 | &error.to_string(),
2142 | "line 3: SSBO probe failed\n\
2143 | \x20 Reference: 0 1 2 4\n\
2144 | \x20 Observed: 0 1 2 3",
2145 | );
2146 | }
2147 |
2148 | #[test]
2149 | fn probe_rect_success() {
2150 | TestData::new(
2151 | "[test]\n\
2152 | probe all rgba 0 0 0 0"
2153 | ).expect("expected probe to succeed");
2154 | }
2155 |
2156 | #[test]
2157 | fn probe_rect_fail() {
2158 | let error = TestData::new(
2159 | "[test]\n\
2160 | probe all rgba 1 0 0 0\n\
2161 | probe all rgba 1 2 0 0"
2162 | ).unwrap_err();
2163 |
2164 | assert_eq!(
2165 | &error.to_string(),
2166 | "line 2: Probe color at (0,0)\n\
2167 | \x20 Expected: 1 0 0 0\n\
2168 | \x20 Observed: 0 0 0 0\n\
2169 | line 3: Probe color at (0,0)\n\
2170 | \x20 Expected: 1 2 0 0\n\
2171 | \x20 Observed: 0 0 0 0"
2172 | );
2173 | }
2174 |
2175 | #[test]
2176 | fn indices() {
2177 | let test_data = TestData::new(
2178 | "[indices]\n\
2179 | 0 1 2\n\
2180 | [test]\n\
2181 | draw arrays indexed TRIANGLE_LIST 0 3"
2182 | ).unwrap();
2183 |
2184 | let mut commands = test_data.fake_vulkan.commands.iter();
2185 |
2186 |         println!("{:#?}", commands);
2187 |
2188 | assert!(matches!(
2189 | commands.next(),
2190 | Some(Command::BeginRenderPass { .. })
2191 | ));
2192 |
2193 | assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));
2194 |
2195 | let &Command::BindIndexBuffer {
2196 | buffer,
2197 | offset,
2198 | index_type,
2199 | } = commands.next().unwrap()
2200 | else { unreachable!("Bad command"); };
2201 |
2202 | assert_eq!(offset, 0);
2203 | assert_eq!(index_type, vk::VK_INDEX_TYPE_UINT16);
2204 |
2205 | let HandleType::Buffer { memory: Some(memory), .. } =
2206 | test_data.fake_vulkan.get_freed_handle(buffer).data
2207 | else { unreachable!("Failed to get buffer memory"); };
2208 |
2209 | let HandleType::Memory { ref contents, .. } =
2210 | test_data.fake_vulkan.get_freed_handle(memory).data
2211 | else { unreachable!("Mismatched handle"); };
2212 |
2213 | let mut expected_contents = Vec::<u8>::new();
2214 | for component in 0u16..3u16 {
2215 | expected_contents.extend(&component.to_ne_bytes());
2216 | }
2217 | assert_eq!(contents, &expected_contents);
2218 |
2219 | let &Command::DrawIndexed {
2220 | index_count,
2221 | instance_count,
2222 | first_index,
2223 | vertex_offset,
2224 | first_instance,
2225 | } = commands.next().unwrap()
2226 | else { unreachable!("Bad command"); };
2227 |
2228 | assert_eq!(index_count, 3);
2229 | assert_eq!(instance_count, 1);
2230 | assert_eq!(first_index, 0);
2231 | assert_eq!(vertex_offset, 0);
2232 | assert_eq!(first_instance, 0);
2233 | }
2234 |
2235 | extern "C" fn inspector_cb(data: &inspect::Data, user_data: *mut c_void) {
2236 | unsafe {
2237 | *(user_data as *mut bool) = true;
2238 | }
2239 |
2240 | let window_format = WindowFormat::default();
2241 |
2242 | assert_eq!(data.color_buffer.width as usize, window_format.width);
2243 | assert_eq!(data.color_buffer.height as usize, window_format.height);
2244 | assert!(data.color_buffer.stride >= window_format.width * 4);
2245 | assert_eq!(
2246 | data.color_buffer.format,
2247 | window_format.color_format,
2248 | );
2249 | assert!(!data.color_buffer.data.is_null());
2250 |
2251 | assert_eq!(data.n_buffers, 1);
2252 |
2253 | let buffer = unsafe { &*data.buffers };
2254 |
2255 | assert_eq!(buffer.binding, 5);
2256 | assert_eq!(buffer.size, 1024);
2257 | assert!(!buffer.data.is_null());
2258 | }
2259 |
2260 | #[test]
2261 | fn inspector() {
2262 | let mut inspector_called = false;
2263 |
2264 | let inspector = inspect::Inspector::new(
2265 | inspector_cb,
2266 | ptr::addr_of_mut!(inspector_called).cast(),
2267 | );
2268 |
2269 | TestData::new_full(
2270 | "[test]\n\
2271 | ssbo 5 1024",
2272 | Some(inspector),
2273 | ).expect("expected test to pass");
2274 |
2275 | assert!(inspector_called);
2276 | }
2277 | }
2278 |
```
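The tests above repeatedly use Rust's `let ... else` destructuring to pull fields out of one expected `Command` variant and bail out with `unreachable!()` otherwise. Below is a minimal, self-contained sketch of that pattern using a hypothetical `Command` enum (not the fake_vulkan types from the file above), only to illustrate how the refutable pattern and the `else` arm interact.

```rust
// Hypothetical stand-in for the recorded-command enum used in the tests.
#[derive(Debug)]
enum Command {
    PushConstants { offset: u32, values: Vec<u8> },
    Draw { vertex_count: u32 },
}

fn main() {
    let commands = vec![
        Command::PushConstants { offset: 1, values: vec![12] },
        Command::Draw { vertex_count: 3 },
    ];

    let mut iter = commands.iter();

    // Destructure the first command, panicking if it is not the expected
    // variant, just as the tests do with `unreachable!("Bad command")`.
    // `offset` is Copy so it binds by value; `ref values` borrows the Vec.
    let &Command::PushConstants { offset, ref values } = iter.next().unwrap()
    else { unreachable!("Bad command"); };

    assert_eq!(offset, 1);
    assert_eq!(values.as_slice(), [12].as_slice());
}
```

`let ... else` (stable since Rust 1.65) keeps the happy path flat: the bindings escape into the enclosing scope, and the `else` block must diverge, which is why `unreachable!()` fits naturally as the failure arm in these tests.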