# shaderc-vkrunner-mcp
This is page 5 of 7. Use http://codebase.md/mehmetoguzderin/shaderc-vkrunner-mcp?page={x} to view the full context.

# Directory Structure

```
├── .devcontainer
│   ├── devcontainer.json
│   ├── docker-compose.yml
│   └── Dockerfile
├── .gitattributes
├── .github
│   └── workflows
│       └── build-push-image.yml
├── .gitignore
├── .vscode
│   └── mcp.json
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── LICENSE
├── README.adoc
├── shaderc-vkrunner-mcp.jpg
├── src
│   └── main.rs
└── vkrunner
    ├── .editorconfig
    ├── .gitignore
    ├── .gitlab-ci.yml
    ├── build.rs
    ├── Cargo.toml
    ├── COPYING
    ├── examples
    │   ├── compute-shader.shader_test
    │   ├── cooperative-matrix.shader_test
    │   ├── depth-buffer.shader_test
    │   ├── desc_set_and_binding.shader_test
    │   ├── entrypoint.shader_test
    │   ├── float-framebuffer.shader_test
    │   ├── frexp.shader_test
    │   ├── geometry.shader_test
    │   ├── indices.shader_test
    │   ├── layouts.shader_test
    │   ├── properties.shader_test
    │   ├── push-constants.shader_test
    │   ├── require-subgroup-size.shader_test
    │   ├── row-major.shader_test
    │   ├── spirv.shader_test
    │   ├── ssbo.shader_test
    │   ├── tolerance.shader_test
    │   ├── tricolore.shader_test
    │   ├── ubo.shader_test
    │   ├── vertex-data-piglit.shader_test
    │   └── vertex-data.shader_test
    ├── include
    │   ├── vk_video
    │   │   ├── vulkan_video_codec_av1std_decode.h
    │   │   ├── vulkan_video_codec_av1std_encode.h
    │   │   ├── vulkan_video_codec_av1std.h
    │   │   ├── vulkan_video_codec_h264std_decode.h
    │   │   ├── vulkan_video_codec_h264std_encode.h
    │   │   ├── vulkan_video_codec_h264std.h
    │   │   ├── vulkan_video_codec_h265std_decode.h
    │   │   ├── vulkan_video_codec_h265std_encode.h
    │   │   ├── vulkan_video_codec_h265std.h
    │   │   └── vulkan_video_codecs_common.h
    │   └── vulkan
    │       ├── vk_platform.h
    │       ├── vulkan_core.h
    │       └── vulkan.h
    ├── precompile-script.py
    ├── README.md
    ├── scripts
    │   └── update-vulkan.sh
    ├── src
    │   └── main.rs
    ├── test-build.sh
    └── vkrunner
        ├── allocate_store.rs
        ├── buffer.rs
        ├── compiler
        │   └── fake_process.rs
        ├── compiler.rs
        ├── config.rs
        ├── context.rs
        ├── enum_table.rs
        ├── env_var_test.rs
        ├── executor.rs
        ├── fake_vulkan.rs
        ├── features.rs
        ├── flush_memory.rs
        ├── format_table.rs
        ├── format.rs
        ├── half_float.rs
        ├── hex.rs
        ├── inspect.rs
        ├── lib.rs
        ├── logger.rs
        ├── make-enums.py
        ├── make-features.py
        ├── make-formats.py
        ├── make-pipeline-key-data.py
        ├── make-vulkan-funcs-data.py
        ├── parse_num.rs
        ├── pipeline_key_data.rs
        ├── pipeline_key.rs
        ├── pipeline_set.rs
        ├── requirements.rs
        ├── result.rs
        ├── script.rs
        ├── shader_stage.rs
        ├── slot.rs
        ├── small_float.rs
        ├── source.rs
        ├── stream.rs
        ├── temp_file.rs
        ├── tester.rs
        ├── tolerance.rs
        ├── util.rs
        ├── vbo.rs
        ├── vk.rs
        ├── vulkan_funcs_data.rs
        ├── vulkan_funcs.rs
        ├── window_format.rs
        └── window.rs
```

# Files

--------------------------------------------------------------------------------
/vkrunner/vkrunner/tester.rs:
--------------------------------------------------------------------------------

```rust
// vkrunner
//
// Copyright (C) 2018, 2023 Neil Roberts
// Copyright (C) 2018 Intel Corporation
// Copyright (C) 2019 Google LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice (including the next
// paragraph) shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

use crate::window::Window;
use crate::context::Context;
use crate::pipeline_set::{PipelineSet, RectangleVertex};
use crate::pipeline_key;
use crate::script::{Script, BufferType, Operation};
use crate::inspect::Inspector;
use crate::vk;
use crate::buffer::{self, MappedMemory, DeviceMemory, Buffer};
use crate::flush_memory::{self, flush_memory};
use crate::tolerance::Tolerance;
use crate::slot;
use crate::inspect;
use std::fmt;
use std::ptr;
use std::mem;
use std::rc::Rc;
use std::ffi::c_int;

#[derive(Debug)]
pub struct CommandError {
    pub line_num: usize,
    pub error: Error,
}

#[derive(Debug)]
pub enum Error {
    AllocateDescriptorSetsFailed,
    BeginCommandBufferFailed,
    EndCommandBufferFailed,
    ResetFencesFailed,
    QueueSubmitFailed,
    WaitForFencesFailed,
    ProbeFailed(ProbeFailedError),
    InvalidateMappedMemoryRangesFailed,
    BufferError(buffer::Error),
    FlushMemoryError(flush_memory::Error),
    CommandErrors(Vec<CommandError>),
    InvalidBufferBinding { desc_set: u32, binding: u32 },
    InvalidBufferOffset,
    SsboProbeFailed {
        slot_type: slot::Type,
        layout: slot::Layout,
        expected: Box<[u8]>,
        observed: Box<[u8]>,
    },
}

#[derive(Debug)]
pub struct ProbeFailedError {
    x: u32,
    y: u32,
    expected: [f64; 4],
    observed: [f64; 4],
    n_components: usize,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum State {
    /// Any rendering or computing has finished and we can read the
    /// buffers.
    Idle,
    /// The command buffer has begun
    CommandBuffer,
    /// The render pass has begun
    RenderPass,
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Error::AllocateDescriptorSetsFailed => {
                write!(f, "vkAllocateDescriptorSets failed")
            },
            Error::BeginCommandBufferFailed => {
                write!(f, "vkBeginCommandBuffer failed")
            },
            Error::EndCommandBufferFailed => {
                write!(f, "vkEndCommandBuffer failed")
            },
            Error::ResetFencesFailed => {
                write!(f, "vkResetFences failed")
            },
            Error::QueueSubmitFailed => {
                write!(f, "vkQueueSubmit failed")
            },
            Error::WaitForFencesFailed => {
                write!(f, "vkWaitForFences failed")
            },
            Error::InvalidateMappedMemoryRangesFailed => {
                write!(f, "vkInvalidateMappedMemeroyRangesFailed failed")
            },
            Error::ProbeFailed(e) => e.fmt(f),
            &Error::SsboProbeFailed {
                slot_type,
                layout,
                ref expected,
                ref observed
            } => {
                write!(
                    f,
                    "SSBO probe failed\n\
                     \x20 Reference:",
                )?;
                write_slot(f, slot_type, layout, expected)?;
                write!(
                    f,
                    "\n\
                     \x20 Observed: ",
                )?;
                write_slot(f, slot_type, layout, observed)?;

                Ok(())
            },
            Error::BufferError(e) => e.fmt(f),
            Error::FlushMemoryError(e) => e.fmt(f),
            Error::CommandErrors(errors) => {
                for (num, e) in errors.iter().enumerate() {
                    if num > 0 {
                        writeln!(f)?;
                    }
                    write!(f, "line {}: ", e.line_num)?;
                    e.error.fmt(f)?;
                }
                Ok(())
            },
            Error::InvalidBufferBinding { desc_set, binding } => {
                write!(f, "Invalid buffer binding: {}:{}", desc_set, binding)
            },
            Error::InvalidBufferOffset => {
                write!(f, "Invalid buffer offset")
            },
        }
    }
}

fn write_slot(
    f: &mut fmt::Formatter,
    slot_type: slot::Type,
    layout: slot::Layout,
    values: &[u8],
) -> fmt::Result {
    let base_type = slot_type.base_type();
    let base_type_size = base_type.size();

    for offset in slot_type.offsets(layout) {
        let values = &values[offset..offset + base_type_size];
        write!(f, " {}", slot::BaseTypeInSlice::new(base_type, values))?;
    }

    Ok(())
}

fn format_pixel(f: &mut fmt::Formatter, pixel: &[f64]) -> fmt::Result {
    for component in pixel {
        write!(f, " {}", component)?;
    }

    Ok(())
}

impl fmt::Display for ProbeFailedError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(
            f,
            "Probe color at ({},{})\n\
            \x20 Expected:",
            self.x,
            self.y,
        )?;
        format_pixel(f, &self.expected[0..self.n_components])?;
        write!(
            f,
            "\n\
             \x20 Observed:"
        )?;
        format_pixel(f, &self.observed[0..self.n_components])
    }
}

impl From<buffer::Error> for Error {
    fn from(e: buffer::Error) -> Error {
        Error::BufferError(e)
    }
}

impl From<flush_memory::Error> for Error {
    fn from(e: flush_memory::Error) -> Error {
        Error::FlushMemoryError(e)
    }
}

#[derive(Debug)]
struct DescriptorSetVec<'a> {
    handles: Vec<vk::VkDescriptorSet>,
    // needed for the destructor
    pipeline_set: &'a PipelineSet,
    window: &'a Window,
}

impl<'a> DescriptorSetVec<'a> {
    fn new(
        window: &'a Window,
        pipeline_set: &'a PipelineSet,
    ) -> Result<DescriptorSetVec<'a>, Error> {
        let layouts = pipeline_set.descriptor_set_layouts();
        let mut handles = Vec::with_capacity(layouts.len());

        if !layouts.is_empty() {
            let allocate_info = vk::VkDescriptorSetAllocateInfo {
                sType: vk::VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
                pNext: ptr::null(),
                descriptorPool: pipeline_set.descriptor_pool().unwrap(),
                descriptorSetCount: layouts.len() as u32,
                pSetLayouts: layouts.as_ptr(),
            };

            let res = unsafe {
                window.device().vkAllocateDescriptorSets.unwrap()(
                    window.vk_device(),
                    ptr::addr_of!(allocate_info),
                    handles.as_mut_ptr(),
                )
            };

            if res == vk::VK_SUCCESS {
                // SAFETY: We ensured the buffer had the right
                // capacity when we constructed it and the call to
                // vkAllocateDescriptorSets should have filled it with
                // valid values.
                unsafe {
                    handles.set_len(layouts.len());
                }
            } else {
                return Err(Error::AllocateDescriptorSetsFailed);
            }
        }

        Ok(DescriptorSetVec {
            handles,
            pipeline_set,
            window,
        })
    }
}

impl<'a> Drop for DescriptorSetVec<'a> {
    fn drop(&mut self) {
        if self.handles.is_empty() {
            return;
        }

        unsafe {
            self.window.device().vkFreeDescriptorSets.unwrap()(
                self.window.vk_device(),
                self.pipeline_set.descriptor_pool().unwrap(),
                self.handles.len() as u32,
                self.handles.as_ptr(),
            );
        }
    }
}

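// A host-visible Vulkan buffer with its backing device memory and a
// persistent CPU mapping. Used for the script's UBOs and SSBOs as well
// as for vertex and index data.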
#[derive(Debug)]
struct TestBuffer {
    map: MappedMemory,
    memory: DeviceMemory,
    buffer: Buffer,
    size: usize,
    // true if the buffer has been modified through the CPU-mapped
    // memory since the last command buffer submission.
    pending_write: bool,
}

impl TestBuffer {
    fn new(
        context: Rc<Context>,
        size: usize,
        usage: vk::VkBufferUsageFlagBits,
    ) -> Result<TestBuffer, Error> {
        let buffer = Buffer::new(Rc::clone(&context), size, usage)?;

        let memory = DeviceMemory::new_buffer(
            Rc::clone(&context),
            vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT,
            buffer.buffer,
        )?;

        let map = MappedMemory::new(context, memory.memory)?;

        Ok(TestBuffer { map, memory, buffer, size, pending_write: false })
    }
}

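// Creates a TestBuffer for every buffer declared in the script, with a
// usage flag matching the declared buffer type (UBO or SSBO).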
fn allocate_buffer_objects(
    window: &Window,
    script: &Script,
) -> Result<Vec<TestBuffer>, Error> {
    let mut buffers = Vec::with_capacity(script.buffers().len());

    for script_buffer in script.buffers().iter() {
        let usage = match script_buffer.buffer_type {
            BufferType::Ubo => vk::VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
            BufferType::Ssbo => vk::VK_BUFFER_USAGE_STORAGE_BUFFER_BIT,
        };

        buffers.push(TestBuffer::new(
            Rc::clone(window.context()),
            script_buffer.size,
            usage,
        )?);
    }

    Ok(buffers)
}

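// Updates the allocated descriptor sets so that each descriptor points
// at the corresponding test buffer, using a descriptor type that
// matches the script's buffer type.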
fn write_descriptor_sets(
    window: &Window,
    script: &Script,
    buffers: &[TestBuffer],
    descriptor_sets: &[vk::VkDescriptorSet],
) {
    let script_buffers = script.buffers();

    let buffer_infos = buffers.iter()
        .map(|buffer| vk::VkDescriptorBufferInfo {
            buffer: buffer.buffer.buffer,
            offset: 0,
            range: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
        })
        .collect::<Vec<_>>();

    let writes = script_buffers.iter()
        .enumerate()
        .map(|(buffer_num, buffer)| vk::VkWriteDescriptorSet {
            sType: vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,
            pNext: ptr::null(),
            dstSet: descriptor_sets[buffer.desc_set as usize],
            dstBinding: buffer.binding,
            dstArrayElement: 0,
            descriptorCount: 1,
            descriptorType: match buffer.buffer_type {
                BufferType::Ubo => vk::VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
                BufferType::Ssbo => vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
            },
            pBufferInfo: buffer_infos[buffer_num..].as_ptr(),
            pImageInfo: ptr::null(),
            pTexelBufferView: ptr::null(),
        })
        .collect::<Vec<_>>();

    unsafe {
        window.device().vkUpdateDescriptorSets.unwrap()(
            window.vk_device(),
            writes.len() as u32,
            writes.as_ptr(),
            0, // descriptorCopyCount
            ptr::null(), // pDescriptorCopies
        );
    }
}

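// Returns true if every component of pixel_a matches the corresponding
// component of pixel_b within the per-component tolerance.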
fn compare_pixel(
    pixel_a: &[f64],
    pixel_b: &[f64],
    tolerance: &Tolerance,
) -> bool {
    std::iter::zip(pixel_a, pixel_b)
        .enumerate()
        .all(|(component, (&a, &b))| tolerance.equal(component, a, b))
}

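// Per-run state for executing the script's [test] commands: the buffer
// objects and their descriptor sets, the lazily created vertex and
// index buffers, and the current command buffer / render pass state.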
#[derive(Debug)]
struct Tester<'a> {
    window: &'a Window,
    pipeline_set: &'a PipelineSet,
    script: &'a Script,
    buffer_objects: Vec<TestBuffer>,
    test_buffers: Vec<TestBuffer>,
    descriptor_sets: DescriptorSetVec<'a>,
    bound_pipeline: Option<usize>,
    bo_descriptor_set_bound: bool,
    first_render: bool,
    state: State,
    vbo_buffer: Option<TestBuffer>,
    index_buffer: Option<TestBuffer>,
    inspector: Option<Inspector>,
}

impl<'a> Tester<'a> {
    fn new(
        window: &'a Window,
        pipeline_set: &'a PipelineSet,
        script: &'a Script,
        inspector: Option<Inspector>,
    ) -> Result<Tester<'a>, Error> {
        let buffer_objects = allocate_buffer_objects(window, script)?;
        let descriptor_sets = DescriptorSetVec::new(window, pipeline_set)?;

        write_descriptor_sets(
            window,
            script,
            &buffer_objects,
            &descriptor_sets.handles,
        );

        Ok(Tester {
            window,
            pipeline_set,
            script,
            buffer_objects,
            test_buffers: Vec::new(),
            descriptor_sets,
            bound_pipeline: None,
            bo_descriptor_set_bound: false,
            first_render: true,
            state: State::Idle,
            vbo_buffer: None,
            index_buffer: None,
            inspector,
        })
    }

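    // Records a buffer memory barrier for every SSBO so that shader
    // writes become visible to host reads once the command buffer
    // completes.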
    fn add_ssbo_barriers(&mut self) {
        let barriers = self.buffer_objects.iter().enumerate()
            .filter_map(|(buffer_num, buffer)| {
                match self.script.buffers()[buffer_num].buffer_type {
                    BufferType::Ssbo => Some(vk::VkBufferMemoryBarrier {
                        sType: vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
                        pNext: ptr::null(),
                        srcAccessMask: vk::VK_ACCESS_SHADER_WRITE_BIT,
                        dstAccessMask: vk::VK_ACCESS_HOST_READ_BIT,
                        srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
                        dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
                        buffer: buffer.buffer.buffer,
                        offset: 0,
                        size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
                    }),
                    _ => None,
                }
            })
            .collect::<Vec<_>>();

        if !barriers.is_empty() {
            unsafe {
                self.window.device().vkCmdPipelineBarrier.unwrap()(
                    self.window.context().command_buffer(),
                    vk::VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                    vk::VK_PIPELINE_STAGE_HOST_BIT,
                    0, // dependencyFlags
                    0, // memoryBarrierCount
                    ptr::null(), // pMemoryBarriers
                    barriers.len() as u32, // bufferMemoryBarrierCount
                    barriers.as_ptr(), // pBufferMemoryBarriers
                    0, // imageMemoryBarrierCount
                    ptr::null(), // pImageMemoryBarriers
                );
            }
        }
    }

    fn flush_buffers(&mut self) -> Result<(), Error> {
        for buffer in self.buffer_objects.iter_mut() {
            if !buffer.pending_write {
                continue;
            }

            buffer.pending_write = false;

            flush_memory(
                self.window.context(),
                buffer.memory.memory_type_index as usize,
                buffer.memory.memory,
                0, // offset
                vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
            )?;
        }

        Ok(())
    }

    fn begin_command_buffer(&mut self) -> Result<(), Error> {
        let begin_command_buffer_info = vk::VkCommandBufferBeginInfo {
            sType: vk::VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
            pNext: ptr::null(),
            flags: 0,
            pInheritanceInfo: ptr::null(),
        };

        let res = unsafe {
            self.window.device().vkBeginCommandBuffer.unwrap()(
                self.window.context().command_buffer(),
                ptr::addr_of!(begin_command_buffer_info),
            )
        };

        if res == vk::VK_SUCCESS {
            self.bound_pipeline = None;
            self.bo_descriptor_set_bound = false;

            Ok(())
        } else {
            Err(Error::BeginCommandBufferFailed)
        }
    }

    fn reset_fence(&self) -> Result<(), Error> {
        let fence = self.window.context().fence();

        let res = unsafe {
            self.window.device().vkResetFences.unwrap()(
                self.window.vk_device(),
                1, // fenceCount,
                ptr::addr_of!(fence),
            )
        };

        if res == vk::VK_SUCCESS {
            Ok(())
        } else {
            Err(Error::ResetFencesFailed)
        }
    }

    fn queue_submit(&self) -> Result<(), Error> {
        let command_buffer = self.window.context().command_buffer();
        let wait_dst_stage_mask = vk::VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;

        let submit_info = vk::VkSubmitInfo {
            sType: vk::VK_STRUCTURE_TYPE_SUBMIT_INFO,
            pNext: ptr::null(),
            waitSemaphoreCount: 0,
            pWaitSemaphores: ptr::null(),
            pWaitDstStageMask: ptr::addr_of!(wait_dst_stage_mask),
            commandBufferCount: 1,
            pCommandBuffers: ptr::addr_of!(command_buffer),
            signalSemaphoreCount: 0,
            pSignalSemaphores: ptr::null(),
        };

        let res = unsafe {
            self.window.device().vkQueueSubmit.unwrap()(
                self.window.context().queue(),
                1, // submitCount
                ptr::addr_of!(submit_info),
                self.window.context().fence(),
            )
        };

        if res == vk::VK_SUCCESS {
            Ok(())
        } else {
            Err(Error::QueueSubmitFailed)
        }
    }

    fn wait_for_fence(&self) -> Result<(), Error> {
        let fence = self.window.context().fence();

        let res = unsafe {
            self.window.device().vkWaitForFences.unwrap()(
                self.window.vk_device(),
                1, // fenceCount
                ptr::addr_of!(fence),
                vk::VK_TRUE, // waitAll
                u64::MAX, // timeout
            )
        };

        if res == vk::VK_SUCCESS {
            Ok(())
        } else {
            Err(Error::WaitForFencesFailed)
        }
    }

    fn invalidate_window_linear_memory(&self) -> Result<(), Error> {
        if !self.window.need_linear_memory_invalidate() {
            return Ok(());
        }

        let memory_range = vk::VkMappedMemoryRange {
            sType: vk::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
            pNext: ptr::null(),
            memory: self.window.linear_memory(),
            offset: 0,
            size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
        };

        let res = unsafe {
            self.window.device().vkInvalidateMappedMemoryRanges.unwrap()(
                self.window.vk_device(),
                1, // memoryRangeCount
                ptr::addr_of!(memory_range),
            )
        };

        if res == vk::VK_SUCCESS {
            Ok(())
        } else {
            Err(Error::InvalidateMappedMemoryRangesFailed)
        }
    }

    fn invalidate_ssbos(&self) -> Result<(), Error> {
        let memory_properties = self.window.context().memory_properties();

        let memory_ranges = self.buffer_objects.iter()
            .enumerate()
            .filter_map(|(buffer_num, buffer)| {
                if self.script.buffers()[buffer_num].buffer_type
                    != BufferType::Ssbo
                {
                    return None;
                }

                let memory_type = &memory_properties
                    .memoryTypes[buffer.memory.memory_type_index as usize];

                // We don’t need to do anything if the memory is
                // already coherent
                if memory_type.propertyFlags
                    & vk::VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
                    != 0
                {
                    return None;
                }

                Some(vk::VkMappedMemoryRange {
                    sType: vk::VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                    pNext: ptr::null(),
                    memory: buffer.memory.memory,
                    offset: 0,
                    size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
                })
            })
            .collect::<Vec<_>>();

        if memory_ranges.is_empty() {
            Ok(())
        } else {
            let res = unsafe {
                self.window.device().vkInvalidateMappedMemoryRanges.unwrap()(
                    self.window.vk_device(),
                    memory_ranges.len() as u32,
                    memory_ranges.as_ptr(),
                )
            };

            if res == vk::VK_SUCCESS {
                Ok(())
            } else {
                Err(Error::InvalidateMappedMemoryRangesFailed)
            }
        }
    }

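    // Flushes pending CPU writes, adds the SSBO barriers, ends and
    // submits the command buffer, waits on the fence and invalidates
    // the mapped memory so the host sees the device's writes.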
    fn end_command_buffer(&mut self) -> Result<(), Error> {
        self.flush_buffers()?;
        self.add_ssbo_barriers();

        let res = unsafe {
            self.window.device().vkEndCommandBuffer.unwrap()(
                self.window.context().command_buffer(),
            )
        };

        if res != vk::VK_SUCCESS {
            return Err(Error::EndCommandBufferFailed);
        }

        self.reset_fence()?;
        self.queue_submit()?;
        self.wait_for_fence()?;
        self.invalidate_window_linear_memory()?;
        self.invalidate_ssbos()?;

        Ok(())
    }

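    // Begins a render pass on the window's framebuffer. Render pass 0
    // is used for the first render of the test and render pass 1 for
    // every subsequent render.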
    fn begin_render_pass(&mut self) {
        let render_pass_index = !self.first_render as usize;
        let render_pass = self.window.render_passes()[render_pass_index];
        let window_format = self.window.format();

        let render_pass_begin_info = vk::VkRenderPassBeginInfo {
            sType: vk::VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
            pNext: ptr::null(),
            renderPass: render_pass,
            framebuffer: self.window.framebuffer(),
            renderArea: vk::VkRect2D {
                offset: vk::VkOffset2D { x: 0, y: 0 },
                extent: vk::VkExtent2D {
                    width: window_format.width as u32,
                    height: window_format.height as u32,
                },
            },
            clearValueCount: 0,
            pClearValues: ptr::null(),
        };

        unsafe {
            self.window.device().vkCmdBeginRenderPass.unwrap()(
                self.window.context().command_buffer(),
                ptr::addr_of!(render_pass_begin_info),
                vk::VK_SUBPASS_CONTENTS_INLINE,
            );
        }

        self.first_render = false;
    }

    fn add_render_finish_barrier(&self) {
        // Image barrier: transition the layout but also ensure:
        // - rendering is complete before vkCmdCopyImageToBuffer (below) and
        // before any future color attachment accesses
        // - the color attachment writes are visible to vkCmdCopyImageToBuffer
        // and to any future color attachment accesses
        let render_finish_barrier = vk::VkImageMemoryBarrier {
            sType: vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            pNext: ptr::null(),
            srcAccessMask: vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
            dstAccessMask: vk::VK_ACCESS_TRANSFER_READ_BIT
                | vk::VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT
                | vk::VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
            oldLayout: vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            newLayout: vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            image: self.window.color_image(),
            subresourceRange: vk::VkImageSubresourceRange {
                aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
                baseMipLevel: 0,
                levelCount: 1,
                baseArrayLayer: 0,
                layerCount: 1
            },
        };

        unsafe {
            self.window.device().vkCmdPipelineBarrier.unwrap()(
                self.window.context().command_buffer(),
                vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                vk::VK_PIPELINE_STAGE_TRANSFER_BIT
                    | vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                0, // dependencyFlags
                0, // memoryBarrierCount
                ptr::null(), // pMemoryBarriers
                0, // bufferMemoryBarrierCount
                ptr::null(), // pBufferMemoryBarriers
                1, // imageMemoryBarrierCount
                ptr::addr_of!(render_finish_barrier),
            );
        }
    }

    fn add_copy_to_linear_buffer(&self) {
        let window_format = self.window.format();

        let copy_region = vk::VkBufferImageCopy {
            bufferOffset: 0,
            bufferRowLength: window_format.width as u32,
            bufferImageHeight: window_format.height as u32,
            imageSubresource: vk::VkImageSubresourceLayers {
                aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
                mipLevel: 0,
                baseArrayLayer: 0,
                layerCount: 1,
            },
            imageOffset: vk::VkOffset3D { x: 0, y: 0, z: 0 },
            imageExtent: vk::VkExtent3D {
                width: window_format.width as u32,
                height: window_format.height as u32,
                depth: 1 as u32
            },
        };

        unsafe {
            self.window.device().vkCmdCopyImageToBuffer.unwrap()(
                self.window.context().command_buffer(),
                self.window.color_image(),
                vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                self.window.linear_buffer(),
                1, // regionCount
                ptr::addr_of!(copy_region),
            );
        }
    }

    fn add_copy_finish_barrier(&self) {
        // Image barrier: transition the layout back but also ensure:
        // - the copy image operation (above) completes before any future color
        // attachment operations
        // No memory dependencies are needed because the first set of operations
        // are reads.
        let render_finish_barrier = vk::VkImageMemoryBarrier {
            sType: vk::VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
            pNext: ptr::null(),
            srcAccessMask: 0,
            dstAccessMask: 0,
            oldLayout: vk::VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
            newLayout: vk::VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            image: self.window.color_image(),
            subresourceRange: vk::VkImageSubresourceRange {
                aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
                baseMipLevel: 0,
                levelCount: 1,
                baseArrayLayer: 0,
                layerCount: 1
            },
        };

        unsafe {
            self.window.device().vkCmdPipelineBarrier.unwrap()(
                self.window.context().command_buffer(),
                vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
                vk::VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                0, // dependencyFlags
                0, // memoryBarrierCount
                ptr::null(), // pMemoryBarriers
                0, // bufferMemoryBarrierCount
                ptr::null(), // pBufferMemoryBarriers
                1, // imageMemoryBarrierCount
                ptr::addr_of!(render_finish_barrier),
            );
        }
    }

    fn add_write_finish_buffer_memory_barrier(&self) {
        // Buffer barrier: ensure the device transfer writes have
        // completed before the host reads and are visible to host
        // reads.
        let write_finish_buffer_memory_barrier = vk::VkBufferMemoryBarrier {
            sType: vk::VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
            pNext: ptr::null(),
            srcAccessMask: vk::VK_ACCESS_TRANSFER_WRITE_BIT,
            dstAccessMask: vk::VK_ACCESS_HOST_READ_BIT,
            srcQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            dstQueueFamilyIndex: vk::VK_QUEUE_FAMILY_IGNORED as u32,
            buffer: self.window.linear_buffer(),
            offset: 0,
            size: vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
        };

        unsafe {
            self.window.device().vkCmdPipelineBarrier.unwrap()(
                self.window.context().command_buffer(),
                vk::VK_PIPELINE_STAGE_TRANSFER_BIT,
                vk::VK_PIPELINE_STAGE_HOST_BIT,
                0, // dependencyFlags
                0, // memoryBarrierCount
                ptr::null(), // pMemoryBarriers
                1, // bufferMemoryBarrierCount
                ptr::addr_of!(write_finish_buffer_memory_barrier),
                0, // imageMemoryBarrierCount
                ptr::null(), // pImageMemoryBarriers
            );
        }
    }

    fn end_render_pass(&self) {
        unsafe {
            self.window.device().vkCmdEndRenderPass.unwrap()(
                self.window.context().command_buffer(),
            );
        }

        self.add_render_finish_barrier();
        self.add_copy_to_linear_buffer();
        self.add_copy_finish_barrier();
        self.add_write_finish_buffer_memory_barrier();
    }

    fn forward_state(&mut self) -> Result<(), Error> {
        match &self.state {
            State::Idle => {
                self.begin_command_buffer()?;
                self.state = State::CommandBuffer;
            },
            State::CommandBuffer => {
                self.begin_render_pass();
                self.state = State::RenderPass;
            },
            State::RenderPass => unreachable!(
                "Tried to advance after last state"
            ),
        }

        Ok(())
    }

    fn backward_state(&mut self) -> Result<(), Error> {
        match &self.state {
            State::Idle => unreachable!(
                "Tried to go backward to before the first state"
            ),
            State::CommandBuffer => {
                self.end_command_buffer()?;
                self.state = State::Idle;
            },
            State::RenderPass => {
                self.end_render_pass();
                self.state = State::CommandBuffer;
            },
        }

        Ok(())
    }

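    // Walks the Idle → CommandBuffer → RenderPass state machine one
    // step at a time until the requested state is reached, beginning
    // or ending the command buffer and render pass as needed.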
    fn goto_state(&mut self, state: State) -> Result<(), Error> {
        while (self.state as usize) < state as usize {
            self.forward_state()?;
        }
        while (self.state as usize) > state as usize {
            self.backward_state()?;
        }

        Ok(())
    }

    fn bind_bo_descriptor_set_at_binding_point(
        &self,
        binding_point: vk::VkPipelineBindPoint
    ) {
        unsafe {
            self.window.device().vkCmdBindDescriptorSets.unwrap()(
                self.window.context().command_buffer(),
                binding_point,
                self.pipeline_set.layout(),
                0, // firstSet
                self.descriptor_sets.handles.len() as u32,
                self.descriptor_sets.handles.as_ptr(),
                0, // dynamicOffsetCount
                ptr::null(), // pDynamicOffsets
            );
        }
    }

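    // Binds the buffer-object descriptor sets, once per command
    // buffer, to the graphics and/or compute bind points depending on
    // which shader stages the pipeline set uses.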
    fn bind_bo_descriptor_set(&mut self) {
        if self.bo_descriptor_set_bound
            || self.descriptor_sets.handles.is_empty()
        {
            return;
        }

        if self.pipeline_set.stages() & !vk::VK_SHADER_STAGE_COMPUTE_BIT != 0 {
            self.bind_bo_descriptor_set_at_binding_point(
                vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
            );
        }

        if self.pipeline_set.stages() & vk::VK_SHADER_STAGE_COMPUTE_BIT != 0 {
            self.bind_bo_descriptor_set_at_binding_point(
                vk::VK_PIPELINE_BIND_POINT_COMPUTE,
            );
        }

        self.bo_descriptor_set_bound = true;
    }

    fn bind_pipeline(&mut self, pipeline_num: usize) {
        if Some(pipeline_num) == self.bound_pipeline {
            return;
        }

        let key = &self.script.pipeline_keys()[pipeline_num];

        let bind_point = match key.pipeline_type() {
            pipeline_key::Type::Graphics => vk::VK_PIPELINE_BIND_POINT_GRAPHICS,
            pipeline_key::Type::Compute => vk::VK_PIPELINE_BIND_POINT_COMPUTE,
        };

        unsafe {
            self.window.device().vkCmdBindPipeline.unwrap()(
                self.window.context().command_buffer(),
                bind_point,
                self.pipeline_set.pipelines()[pipeline_num],
            );
        }

        self.bound_pipeline = Some(pipeline_num);
    }

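    // Finds the test buffer for the given descriptor set and binding.
    // The binary search relies on the script's buffer list being
    // sorted by (desc_set, binding).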
    fn get_buffer_object(
        &mut self,
        desc_set: u32,
        binding: u32,
    ) -> Result<&mut TestBuffer, Error> {
        match self.script
            .buffers()
            .binary_search_by(|buffer| {
                buffer.desc_set
                    .cmp(&desc_set)
                    .then_with(|| buffer.binding.cmp(&binding))
            })
        {
            Ok(buffer_num) => Ok(&mut self.buffer_objects[buffer_num]),
            Err(_) => Err(Error::InvalidBufferBinding { desc_set, binding }),
        }
    }

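    // Lazily creates and fills a vertex buffer from the script's
    // vertex data, flushing the memory so the data is visible to the
    // device. Returns None if the script has no vertex data.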
    fn get_vbo_buffer(&mut self) -> Result<Option<&TestBuffer>, Error> {
        if let Some(ref buffer) = self.vbo_buffer {
            Ok(Some(buffer))
        } else if let Some(vbo) = self.script.vertex_data() {
            let buffer = TestBuffer::new(
                Rc::clone(self.window.context()),
                vbo.raw_data().len(),
                vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
            )?;

            unsafe {
                std::slice::from_raw_parts_mut(
                    buffer.map.pointer as *mut u8,
                    buffer.size
                ).copy_from_slice(vbo.raw_data());
            }

            flush_memory(
                self.window.context(),
                buffer.memory.memory_type_index as usize,
                buffer.memory.memory,
                0, // offset
                vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
            )?;

            Ok(Some(&*self.vbo_buffer.insert(buffer)))
        } else {
            Ok(None)
        }
    }

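    // Lazily creates and fills a 16-bit index buffer from the script's
    // index list, flushing the memory so the data is visible to the
    // device.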
    fn get_index_buffer(&mut self) -> Result<&TestBuffer, Error> {
        match self.index_buffer {
            Some(ref buffer) => Ok(buffer),
            None => {
                let indices = self.script.indices();

                let buffer = TestBuffer::new(
                    Rc::clone(self.window.context()),
                    indices.len() * mem::size_of::<u16>(),
                    vk::VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
                )?;

                unsafe {
                    std::slice::from_raw_parts_mut(
                        buffer.map.pointer as *mut u16,
                        indices.len(),
                    ).copy_from_slice(indices);
                }

                flush_memory(
                    self.window.context(),
                    buffer.memory.memory_type_index as usize,
                    buffer.memory.memory,
                    0, // offset
                    vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
                )?;

                Ok(&*self.index_buffer.insert(buffer))
            }
        }
    }

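    // Handles Operation::DrawRect: writes the rectangle's four corners
    // into a temporary vertex buffer, binds the descriptor sets and
    // the requested pipeline, and records a four-vertex draw.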
    fn draw_rect(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::DrawRect { x, y, w, h, pipeline_key } = op else {
            unreachable!("bad op");
        };

        let buffer = TestBuffer::new(
            Rc::clone(self.window.context()),
            mem::size_of::<RectangleVertex>() * 4,
            vk::VK_BUFFER_USAGE_VERTEX_BUFFER_BIT
        )?;

        self.goto_state(State::RenderPass)?;

        let mut v: *mut RectangleVertex = buffer.map.pointer.cast();

        unsafe {
            *v = RectangleVertex {
                x: x,
                y: y,
                z: 0.0,
            };
            v = v.add(1);

            *v = RectangleVertex {
                x: x + w,
                y: y,
                z: 0.0,
            };
            v = v.add(1);

            *v = RectangleVertex {
                x: x,
                y: y + h,
                z: 0.0,
            };
            v = v.add(1);

            *v = RectangleVertex {
                x: x + w,
                y: y + h,
                z: 0.0,
            };
        }

        flush_memory(
            self.window.context(),
            buffer.memory.memory_type_index as usize,
            buffer.memory.memory,
            0, // offset
            vk::VK_WHOLE_SIZE as vk::VkDeviceSize,
        )?;

        self.bind_bo_descriptor_set();
        self.bind_pipeline(pipeline_key);

        let command_buffer = self.window.context().command_buffer();
        let buffer_handle = buffer.buffer.buffer;
        let offset = 0;

        unsafe {
            self.window.device().vkCmdBindVertexBuffers.unwrap()(
                command_buffer,
                0, // firstBinding
                1, // bindingCount
                ptr::addr_of!(buffer_handle),
                ptr::addr_of!(offset),
            );
            self.window.device().vkCmdDraw.unwrap()(
                command_buffer,
                4, // vertexCount
                1, // instanceCount
                0, // firstVertex
                0, // firstInstance
            );
        }

        self.test_buffers.push(buffer);

        Ok(())
    }

    fn draw_arrays(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::DrawArrays {
            indexed,
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
            pipeline_key,
            ..
        } = op else {
            unreachable!("bad op");
        };

        self.goto_state(State::RenderPass)?;

        let context = Rc::clone(self.window.context());

        if let Some(buffer) = self.get_vbo_buffer()? {
            let offset = 0;

            unsafe {
                context.device().vkCmdBindVertexBuffers.unwrap()(
                    context.command_buffer(),
                    0, // firstBinding
                    1, // bindingCount
                    ptr::addr_of!(buffer.buffer.buffer),
                    ptr::addr_of!(offset)
                );
            }
        }

        self.bind_bo_descriptor_set();
        self.bind_pipeline(pipeline_key);

        if indexed {
            let index_buffer = self.get_index_buffer()?;

            unsafe {
                context.device().vkCmdBindIndexBuffer.unwrap()(
                    context.command_buffer(),
                    index_buffer.buffer.buffer,
                    0, // offset
                    vk::VK_INDEX_TYPE_UINT16,
                );
                context.device().vkCmdDrawIndexed.unwrap()(
                    context.command_buffer(),
                    vertex_count,
                    instance_count,
                    0, // firstIndex
                    first_vertex as i32,
                    first_instance,
                );
            }
        } else {
            unsafe {
                context.device().vkCmdDraw.unwrap()(
                    context.command_buffer(),
                    vertex_count,
                    instance_count,
                    first_vertex,
                    first_instance,
                );
            }
        }

        Ok(())
    }

    fn dispatch_compute(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::DispatchCompute { x, y, z, pipeline_key } = op else {
            unreachable!("bad op");
        };

        self.goto_state(State::CommandBuffer)?;

        self.bind_bo_descriptor_set();
        self.bind_pipeline(pipeline_key);

        unsafe {
            self.window.device().vkCmdDispatch.unwrap()(
                self.window.context().command_buffer(),
                x,
                y,
                z,
            );
        }

        Ok(())
    }

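    // Handles Operation::ProbeRect: returns to the idle state so the
    // framebuffer is copied into the linear buffer, then compares
    // every pixel in the rectangle against the expected color within
    // the given tolerance.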
    fn probe_rect(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::ProbeRect {
            n_components,
            x,
            y,
            w,
            h,
            ref color,
            ref tolerance,
        } = op else {
            unreachable!("bad op");
        };

        // End the render to copy the framebuffer into the linear buffer
        self.goto_state(State::Idle)?;

        let linear_memory_map: *const u8 =
            self.window.linear_memory_map().cast();
        let stride = self.window.linear_memory_stride();
        let format = self.window.format().color_format;
        let format_size = format.size();
        let n_components = n_components as usize;

        for y_offset in 0..h {
            let mut p = unsafe {
                linear_memory_map.add(
                    (y_offset + y) as usize * stride + x as usize * format_size
                )
            };

            for x_offset in 0..w {
                let source = unsafe {
                    std::slice::from_raw_parts(p, format_size)
                };

                let pixel = format.load_pixel(source);

                if !compare_pixel(
                    &pixel[0..n_components],
                    &color[0..n_components],
                    tolerance,
                ) {
                    return Err(Error::ProbeFailed(ProbeFailedError {
                        x: x + x_offset,
                        y: y + y_offset,
                        expected: color.clone(),
                        observed: pixel,
                        n_components,
                    }));
                }

                unsafe {
                    p = p.add(format_size);
                }
            }
        }

        Ok(())
    }

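    // Handles Operation::ProbeSsbo: returns to the idle state, then
    // compares the values in the mapped SSBO at the given offset
    // against the expected values using the requested comparison and
    // tolerance.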
    fn probe_ssbo(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::ProbeSsbo {
            desc_set,
            binding,
            comparison,
            offset,
            slot_type,
            layout,
            ref values,
            ref tolerance,
        } = op else {
            unreachable!("bad op");
        };

        self.goto_state(State::Idle)?;

        let buffer = self.get_buffer_object(desc_set, binding)?;

        let buffer_slice = unsafe {
            std::slice::from_raw_parts(
                buffer.map.pointer as *const u8,
                buffer.size,
            )
        };

        let type_size = slot_type.size(layout);
        let observed_stride = slot_type.array_stride(layout);
        // The values are tightly packed in the operation buffer so we
        // don’t want to use the observed_stride
        let n_values = values.len() / type_size;

        if offset
            + (n_values - 1) * observed_stride
            + type_size
            > buffer_slice.len()
        {
            return Err(Error::InvalidBufferOffset);
        }

        let buffer_slice = &buffer_slice[offset..];

        for i in 0..n_values {
            let observed = &buffer_slice[i * observed_stride
                                         ..i * observed_stride + type_size];
            let expected = &values[i * type_size..(i + 1) * type_size];

            if !comparison.compare(
                tolerance,
                slot_type,
                layout,
                observed,
                expected,
            ) {
                return Err(Error::SsboProbeFailed {
                    slot_type,
                    layout,
                    expected: expected.into(),
                    observed: observed.into(),
                });
            }
        }

        Ok(())
    }

    fn set_push_command(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::SetPushCommand { offset, ref data } = op else {
            unreachable!("bad op");
        };

        if (self.state as usize) < State::CommandBuffer as usize {
            self.goto_state(State::CommandBuffer)?;
        }

        unsafe {
            self.window.device().vkCmdPushConstants.unwrap()(
                self.window.context().command_buffer(),
                self.pipeline_set.layout(),
                self.pipeline_set.stages(),
                offset as u32,
                data.len() as u32,
                data.as_ptr().cast(),
            );
        }

        Ok(())
    }

    fn set_buffer_data(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::SetBufferData {
            desc_set,
            binding,
            offset,
            ref data
        } = op else {
            unreachable!("bad op");
        };

        let buffer = self.get_buffer_object(desc_set, binding)
            .expect(
                "The script parser should make a buffer mentioned by \
                 any buffer data command and the tester should make a \
                 buffer for every buffer described by the script"
            );

        let buffer_slice = unsafe {
            std::slice::from_raw_parts_mut(
                (buffer.map.pointer as *mut u8).add(offset),
                data.len(),
            )
        };

        buffer_slice.copy_from_slice(data);

        buffer.pending_write = true;

        Ok(())
    }

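    // Handles Operation::Clear: clears the color attachment and, if
    // the window has a depth/stencil format, the depth/stencil
    // attachment as well, using vkCmdClearAttachments.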
    fn clear(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        let &Operation::Clear { ref color, depth, stencil } = op else {
            unreachable!("bad op");
        };

        let window_format = self.window.format();

        let depth_stencil_flags = match window_format.depth_stencil_format {
            Some(format) => format.depth_stencil_aspect_flags(),
            None => 0,
        };

        self.goto_state(State::RenderPass)?;

        let clear_attachments = [
            vk::VkClearAttachment {
                aspectMask: vk::VK_IMAGE_ASPECT_COLOR_BIT,
                colorAttachment: 0,
                clearValue: vk::VkClearValue {
                    color: vk::VkClearColorValue {
                        float32: color.clone(),
                    },
                },
            },
            vk::VkClearAttachment {
                aspectMask: depth_stencil_flags,
                colorAttachment: 0,
                clearValue: vk::VkClearValue {
                    depthStencil: vk::VkClearDepthStencilValue {
                        depth,
                        stencil,
                    },
                },
            },
        ];

        let clear_rect = vk::VkClearRect {
            rect: vk::VkRect2D {
                offset: vk::VkOffset2D { x: 0, y: 0 },
                extent: vk::VkExtent2D {
                    width: self.window.format().width as u32,
                    height: self.window.format().height as u32,
                },
            },
            baseArrayLayer: 0,
            layerCount: 1,
        };

        let n_attachments = 1 + (depth_stencil_flags != 0) as usize;

        unsafe {
            self.window.device().vkCmdClearAttachments.unwrap()(
                self.window.context().command_buffer(),
                n_attachments as u32,
                ptr::addr_of!(clear_attachments[0]),
                1, // rectCount
                ptr::addr_of!(clear_rect),
            );
        }

        Ok(())
    }

    fn run_operation(
        &mut self,
        op: &Operation,
    ) -> Result<(), Error> {
        match op {
            Operation::DrawRect { .. } => self.draw_rect(op),
            Operation::DrawArrays { .. } => self.draw_arrays(op),
            Operation::DispatchCompute { .. } => self.dispatch_compute(op),
            Operation::ProbeRect { .. } => self.probe_rect(op),
            Operation::ProbeSsbo { .. } => self.probe_ssbo(op),
            Operation::SetPushCommand { .. } => self.set_push_command(op),
            Operation::SetBufferData { .. } => self.set_buffer_data(op),
            Operation::Clear { .. } => self.clear(op),
        }
    }

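    // Passes the color buffer and the buffer objects to the inspector
    // callback, if one was provided.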
    fn inspect(&self) {
        let Some(inspector) = self.inspector.as_ref() else { return; };

        let buffers = self.buffer_objects
            .iter()
            .enumerate()
            .map(|(buffer_num, buffer)| {
                inspect::Buffer {
                    binding: self.script.buffers()[buffer_num].binding as c_int,
                    size: buffer.size,
                    data: buffer.map.pointer,
                }
            })
            .collect::<Vec<_>>();

        let window_format = self.window.format();

        let data = inspect::Data {
            color_buffer: inspect::Image {
                width: window_format.width as c_int,
                height: window_format.height as c_int,
                stride: self.window.linear_memory_stride(),
                format: window_format.color_format,
                data: self.window.linear_memory_map(),
            },
            n_buffers: buffers.len(),
            buffers: if buffers.is_empty() {
                ptr::null()
            } else {
                buffers.as_ptr()
            },
        };

        inspector.inspect(&data);
    }
}

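// Runs every command in the script, collecting errors per command line
// instead of stopping at the first failure, and returns them together
// as Error::CommandErrors.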
pub(crate) fn run(
    window: &Window,
    pipeline_set: &PipelineSet,
    script: &Script,
    inspector: Option<Inspector>,
) -> Result<(), Error> {
    let mut tester = Tester::new(window, pipeline_set, script, inspector)?;
    let mut errors = Vec::new();

    for command in script.commands().iter() {
        if let Err(e) = tester.run_operation(&command.op) {
            errors.push(CommandError {
                line_num: command.line_num,
                error: e,
            });
        }
    }

    if let Err(error) = tester.goto_state(State::Idle) {
        let line_num = match script.commands().last() {
            Some(command) => command.line_num,
            None => 1,
        };

        errors.push(CommandError { line_num, error });
    }

    tester.inspect();

    if errors.is_empty() {
        Ok(())
    } else {
        Err(Error::CommandErrors(errors))
    }
}

#[cfg(test)]
mod test {
    use super::*;
    use crate::fake_vulkan::{FakeVulkan, Command, HandleType, ClearAttachment};
    use crate::requirements::Requirements;
    use crate::logger::Logger;
    use crate::source::Source;
    use crate::window_format::WindowFormat;
    use crate::config::Config;
    use std::ffi::c_void;

    #[derive(Debug)]
    struct TestData {
        pipeline_set: PipelineSet,
        window: Rc<Window>,
        context: Rc<Context>,
        fake_vulkan: Box<FakeVulkan>,
    }

    impl TestData {
        fn new_full(
            source: &str,
            inspector: Option<Inspector>,
        ) -> Result<TestData, Error> {
            let mut fake_vulkan = FakeVulkan::new();

            fake_vulkan.physical_devices.push(Default::default());
            fake_vulkan.physical_devices[0].format_properties.insert(
                vk::VK_FORMAT_B8G8R8A8_UNORM,
                vk::VkFormatProperties {
                    linearTilingFeatures: 0,
                    optimalTilingFeatures:
                    vk::VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT
                        | vk::VK_FORMAT_FEATURE_BLIT_SRC_BIT,
                    bufferFeatures: 0,
                },
            );
            fake_vulkan.physical_devices[0].format_properties.insert(
                vk::VK_FORMAT_D24_UNORM_S8_UINT,
                vk::VkFormatProperties {
                    linearTilingFeatures: 0,
                    optimalTilingFeatures:
                    vk::VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT,
                    bufferFeatures: 0,
                },
            );

            let memory_properties =
                &mut fake_vulkan.physical_devices[0].memory_properties;
            memory_properties.memoryTypes[0].propertyFlags =
                vk::VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
            memory_properties.memoryTypeCount = 1;
            fake_vulkan.memory_requirements.memoryTypeBits = 1;

            fake_vulkan.set_override();
            let context = Rc::new(Context::new(
                &Requirements::new(),
                None, // device_id
            ).unwrap());

            let source = Source::from_string(source.to_string());
            let script = Script::load(&Config::new(), &source).unwrap();

            let window = Rc::new(Window::new(
                Rc::clone(&context),
                script.window_format(),
            ).unwrap());

            let mut logger = Logger::new(None, ptr::null_mut());

            let pipeline_set = PipelineSet::new(
                &mut logger,
                Rc::clone(&window),
                &script,
                false, // show_disassembly
            ).unwrap();

            run(
                &window,
                &pipeline_set,
                &script,
                inspector,
            )?;

            Ok(TestData {
                pipeline_set,
                window,
                context,
                fake_vulkan,
            })
        }

        fn new(source: &str) -> Result<TestData, Error> {
            TestData::new_full(
                source,
                None, // inspector
            )
        }
    }

    #[test]
    fn rectangle() {
        let test_data = TestData::new(
            "[test]\n\
             draw rect -1 -1 2 2"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        let &Command::BeginRenderPass(ref begin_info) = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(begin_info.renderPass, test_data.window.render_passes()[0]);
        assert_eq!(begin_info.framebuffer, test_data.window.framebuffer());
        assert_eq!(begin_info.renderArea.offset.x, 0);
        assert_eq!(begin_info.renderArea.offset.y, 0);
        assert_eq!(
            begin_info.renderArea.extent.width as usize,
            WindowFormat::default().width,
        );
        assert_eq!(
            begin_info.renderArea.extent.height as usize,
            WindowFormat::default().height,
        );
        assert_eq!(begin_info.clearValueCount, 0);

        let &Command::BindPipeline {
            bind_point,
            pipeline,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(test_data.pipeline_set.pipelines().len(), 1);
        assert_eq!(test_data.pipeline_set.pipelines()[0], pipeline);
        assert_eq!(bind_point, vk::VK_PIPELINE_BIND_POINT_GRAPHICS);

        let &Command::BindVertexBuffers {
            first_binding,
            ref buffers,
            ref offsets,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(first_binding, 0);
        assert_eq!(buffers.len(), 1);
        assert_eq!(offsets, &[0]);

        let HandleType::Buffer { memory: Some(memory), .. } =
            test_data.fake_vulkan.get_freed_handle(buffers[0]).data
        else { unreachable!("Failed to get buffer memory"); };

        let HandleType::Memory { ref contents, .. } =
            test_data.fake_vulkan.get_freed_handle(memory).data
        else { unreachable!("Mismatched handle"); };

        let mut expected_contents = Vec::<u8>::new();
        for component in [
            -1f32, -1f32, 0f32,
            1f32, -1f32, 0f32,
            -1f32, 1f32, 0f32,
            1f32, 1f32, 0f32,
        ] {
            expected_contents.extend(&component.to_ne_bytes());
        }
        assert_eq!(contents, &expected_contents);

        let &Command::Draw {
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(vertex_count, 4);
        assert_eq!(instance_count, 1);
        assert_eq!(first_vertex, 0);
        assert_eq!(first_instance, 0);

        assert!(matches!(commands.next(), Some(Command::EndRenderPass)));

        let &Command::PipelineBarrier {
            ref image_memory_barriers,
            ..
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };
        assert_eq!(image_memory_barriers.len(), 1);
        assert_eq!(
            image_memory_barriers[0].image,
            test_data.window.color_image()
        );

        let &Command::CopyImageToBuffer {
            src_image,
            dst_buffer,
            ..
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(src_image, test_data.window.color_image());
        assert_eq!(dst_buffer, test_data.window.linear_buffer());

        let &Command::PipelineBarrier {
            ref image_memory_barriers,
            ..
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };
        assert_eq!(image_memory_barriers.len(), 1);
        assert_eq!(
            image_memory_barriers[0].image,
            test_data.window.color_image()
        );

        let &Command::PipelineBarrier {
            ref buffer_memory_barriers,
            ..
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };
        assert_eq!(buffer_memory_barriers.len(), 1);
        assert_eq!(
            buffer_memory_barriers[0].buffer,
            test_data.window.linear_buffer()
        );

        assert!(commands.next().is_none());

        // There should only be one flush with the RectangleVertex vbo
        assert_eq!(test_data.fake_vulkan.memory_flushes.len(), 1);

        assert_eq!(test_data.fake_vulkan.memory_invalidations.len(), 1);
        assert_eq!(
            test_data.fake_vulkan.memory_invalidations[0].memory,
            test_data.window.linear_memory(),
        );

        let HandleType::Fence { reset_count, wait_count } =
            test_data.fake_vulkan.get_freed_handle(
                test_data.context.fence()
            ).data
        else { unreachable!("Bad handle"); };

        assert_eq!(reset_count, 1);
        assert_eq!(wait_count, 1);
    }

    #[test]
    fn vbo() {
        let test_data = TestData::new(
            "[vertex data]\n\
             0/R32_SFLOAT\n\
             1\n\
             2\n\
             3\n\
             [test]\n\
             draw arrays TRIANGLE_LIST 0 3"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        assert!(matches!(
            commands.next(),
            Some(Command::BeginRenderPass { .. })
        ));

        let &Command::BindVertexBuffers {
            first_binding,
            ref buffers,
            ref offsets,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(first_binding, 0);
        assert_eq!(buffers.len(), 1);
        assert_eq!(offsets, &[0]);

        let HandleType::Buffer { memory: Some(memory), .. } =
            test_data.fake_vulkan.get_freed_handle(buffers[0]).data
        else { unreachable!("Failed to get buffer memory"); };

        let HandleType::Memory { ref contents, .. } =
            test_data.fake_vulkan.get_freed_handle(memory).data
        else { unreachable!("Mismatched handle"); };

        let mut expected_contents = Vec::<u8>::new();
        for component in [1f32, 2f32, 3f32] {
            expected_contents.extend(&component.to_ne_bytes());
        }
        assert_eq!(contents, &expected_contents);

        assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));

        let &Command::Draw {
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(vertex_count, 3);
        assert_eq!(instance_count, 1);
        assert_eq!(first_vertex, 0);
        assert_eq!(first_instance, 0);
    }

    #[test]
    fn dispatch_compute() {
        let test_data = TestData::new(
            "[test]\n\
             compute 1 2 3"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));

        let &Command::Dispatch { x, y, z } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!((x, y, z), (1, 2, 3));

        assert!(commands.next().is_none());
    }

    #[test]
    fn clear() {
        let test_data = TestData::new(
            "[test]\n\
             clear color 1 2 3 4
             clear"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        assert!(matches!(
            commands.next(),
            Some(Command::BeginRenderPass { .. })
        ));

        let &Command::ClearAttachments {
            ref attachments,
            ref rects,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(attachments.len(), 1);

        match &attachments[0] {
            &ClearAttachment::Color { attachment, value } => {
                assert_eq!(attachment, 0);
                assert_eq!(value, [1f32, 2f32, 3f32, 4f32]);
            },
            _ => unreachable!("unexepected clear attachment type"),
        }

        assert_eq!(rects.len(), 1);
        assert_eq!(
            rects[0].rect.extent.width as usize,
            WindowFormat::default().width
        );
        assert_eq!(
            rects[0].rect.extent.height as usize,
            WindowFormat::default().height
        );
    }

    #[test]
    fn clear_depth_stencil() {
        let test_data = TestData::new(
            "[require]\n\
             depthstencil D24_UNORM_S8_UINT\n\
             [test]\n\
             clear depth 2.0\n\
             clear stencil 5\n\
             clear"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        assert!(matches!(
            commands.next(),
            Some(Command::BeginRenderPass { .. })
        ));

        let &Command::ClearAttachments {
            ref attachments,
            ref rects,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(attachments.len(), 2);

        match &attachments[1] {
            &ClearAttachment::DepthStencil { aspect_mask, value } => {
                assert_eq!(
                    aspect_mask,
                    vk::VK_IMAGE_ASPECT_DEPTH_BIT
                        | vk::VK_IMAGE_ASPECT_STENCIL_BIT
                );
                assert_eq!(value.depth, 2.0);
                assert_eq!(value.stencil, 5);
            },
            _ => unreachable!("unexepected clear attachment type"),
        }

        assert_eq!(rects.len(), 1);
        assert_eq!(
            rects[0].rect.extent.width as usize,
            WindowFormat::default().width
        );
        assert_eq!(
            rects[0].rect.extent.height as usize,
            WindowFormat::default().height
        );
    }

    #[test]
    fn push_constants() {
        let test_data = TestData::new(
            "[test]\n\
             push uint8_t 1 12\n\
             push u8vec2 2 13 14"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        let &Command::PushConstants {
            layout,
            stage_flags,
            offset,
            ref values,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(layout, test_data.pipeline_set.layout());
        assert_eq!(stage_flags, 0);
        assert_eq!(offset, 1);
        assert_eq!(values.as_slice(), [12].as_slice());

        let &Command::PushConstants {
            layout,
            stage_flags,
            offset,
            ref values,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(layout, test_data.pipeline_set.layout());
        assert_eq!(stage_flags, 0);
        assert_eq!(offset, 2);
        assert_eq!(values.as_slice(), [13, 14].as_slice());
    }

    #[test]
    fn set_buffer_data() {
        let test_data = TestData::new(
            "[fragment shader]\n\
             03 02 23 07\n\
             [test]\n\
             ssbo 5 subdata uint8_t 1 1 2 3\n\
             # draw command to make it flush the memory\n\
             draw rect -1 -1 2 2"
        ).unwrap();

        let &Command::BindDescriptorSets {
            first_set,
            ref descriptor_sets,
            ..
        } = test_data.fake_vulkan.commands.iter().find(|command| {
            matches!(command, Command::BindDescriptorSets { .. })
        }).unwrap()
        else { unreachable!() };

        assert_eq!(first_set, 0);
        assert_eq!(descriptor_sets.len(), 1);

        let HandleType::DescriptorSet {
            ref bindings
        } = test_data.fake_vulkan.get_freed_handle(descriptor_sets[0]).data
        else { unreachable!("bad handle"); };

        let descriptor_type = bindings[&5].descriptor_type;
        assert_eq!(descriptor_type, vk::VK_DESCRIPTOR_TYPE_STORAGE_BUFFER);

        let buffer_handle = bindings[&5].info.buffer;

        let HandleType::Buffer {
            memory: Some(memory_handle),
            ..
        } = test_data.fake_vulkan.get_freed_handle(buffer_handle).data
        else { unreachable!("failed to get buffer memory"); };

        let HandleType::Memory {
            ref contents,
            ..
        } = test_data.fake_vulkan.get_freed_handle(memory_handle).data
        else { unreachable!("bad handle"); };

        assert_eq!(contents, &[0, 1, 2, 3]);

        test_data.fake_vulkan.memory_flushes.iter().find(|flush| {
            flush.memory == memory_handle
        }).expect("expected ssbo memory to be flushed");
    }

    #[test]
    fn probe_ssbo_success() {
        TestData::new(
            "[test]\n\
             ssbo 5 subdata uint8_t 1 1 2 3\n\
             probe ssbo u8vec4 5 0 == 0 1 2 3"
        ).expect("expected ssbo probe to succeed");
    }

    #[test]
    fn probe_ssbo_fail() {
        let error = TestData::new(
            "[test]\n\
             ssbo 5 subdata uint8_t 1 1 2 3\n\
             probe ssbo u8vec4 5 0 == 0 1 2 4"
        ).unwrap_err();

        assert_eq!(
            &error.to_string(),
            "line 3: SSBO probe failed\n\
             \x20 Reference: 0 1 2 4\n\
             \x20 Observed:  0 1 2 3",
        );
    }

    #[test]
    fn probe_rect_success() {
        TestData::new(
            "[test]\n\
             probe all rgba 0 0 0 0"
        ).expect("expected probe to succeed");
    }

    #[test]
    fn probe_rect_fail() {
        let error = TestData::new(
            "[test]\n\
             probe all rgba 1 0 0 0\n\
             probe all rgba 1 2 0 0"
        ).unwrap_err();

        assert_eq!(
            &error.to_string(),
            "line 2: Probe color at (0,0)\n\
             \x20 Expected: 1 0 0 0\n\
             \x20 Observed: 0 0 0 0\n\
             line 3: Probe color at (0,0)\n\
             \x20 Expected: 1 2 0 0\n\
             \x20 Observed: 0 0 0 0"
        );
    }

    #[test]
    fn indices() {
        let test_data = TestData::new(
            "[indices]\n\
             0 1 2\n\
             [test]\n\
             draw arrays indexed TRIANGLE_LIST 0 3"
        ).unwrap();

        let mut commands = test_data.fake_vulkan.commands.iter();

        println!("{:#?}",commands);

        assert!(matches!(
            commands.next(),
            Some(Command::BeginRenderPass { .. })
        ));

        assert!(matches!(commands.next(), Some(Command::BindPipeline { .. })));

        let &Command::BindIndexBuffer {
            buffer,
            offset,
            index_type,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(offset, 0);
        assert_eq!(index_type, vk::VK_INDEX_TYPE_UINT16);

        let HandleType::Buffer { memory: Some(memory), .. } =
            test_data.fake_vulkan.get_freed_handle(buffer).data
        else { unreachable!("Failed to get buffer memory"); };

        let HandleType::Memory { ref contents, .. } =
            test_data.fake_vulkan.get_freed_handle(memory).data
        else { unreachable!("Mismatched handle"); };

        let mut expected_contents = Vec::<u8>::new();
        for component in 0u16..3u16 {
            expected_contents.extend(&component.to_ne_bytes());
        }
        assert_eq!(contents, &expected_contents);

        let &Command::DrawIndexed {
            index_count,
            instance_count,
            first_index,
            vertex_offset,
            first_instance,
        } = commands.next().unwrap()
        else { unreachable!("Bad command"); };

        assert_eq!(index_count, 3);
        assert_eq!(instance_count, 1);
        assert_eq!(first_index, 0);
        assert_eq!(vertex_offset, 0);
        assert_eq!(first_instance, 0);
    }

    extern "C" fn inspector_cb(data: &inspect::Data, user_data: *mut c_void) {
        unsafe {
            *(user_data as *mut bool) = true;
        }

        let window_format = WindowFormat::default();

        assert_eq!(data.color_buffer.width as usize, window_format.width);
        assert_eq!(data.color_buffer.height as usize, window_format.height);
        assert!(data.color_buffer.stride >= window_format.width * 4);
        assert_eq!(
            data.color_buffer.format,
            window_format.color_format,
        );
        assert!(!data.color_buffer.data.is_null());

        assert_eq!(data.n_buffers, 1);

        let buffer = unsafe { &*data.buffers };

        assert_eq!(buffer.binding, 5);
        assert_eq!(buffer.size, 1024);
        assert!(!buffer.data.is_null());
    }

    #[test]
    fn inspector() {
        let mut inspector_called = false;

        let inspector = inspect::Inspector::new(
            inspector_cb,
            ptr::addr_of_mut!(inspector_called).cast(),
        );

        TestData::new_full(
            "[test]\n\
             ssbo 5 1024",
            Some(inspector),
        ).expect("expected test to pass");

        assert!(inspector_called);
    }
}

```

--------------------------------------------------------------------------------
/vkrunner/vkrunner/fake_vulkan.rs:
--------------------------------------------------------------------------------

```rust
// vkrunner
//
// Copyright 2023 Neil Roberts
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice (including the next
// paragraph) shall be included in all copies or substantial portions of the
// Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.

//! Sets up a fake Vulkan driver which can be manipulated to report
//! different extensions and features. This is only built in test
//! configurations and is intended to help with unit testing.
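//!
//! A rough setup sketch, based on how the unit tests elsewhere in this
//! crate drive the fake driver (the requirements and device configuration
//! shown here are illustrative, not prescriptive):
//!
//! ```ignore
//! let mut fake_vulkan = FakeVulkan::new();
//! // Report a single physical device with default properties.
//! fake_vulkan.physical_devices.push(Default::default());
//! // Route the next Library/Context creation through the fake driver.
//! fake_vulkan.set_override();
//! let context = Context::new(&Requirements::new(), None).unwrap();
//! ```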

use crate::vk;
use crate::vulkan_funcs;
use crate::requirements;
use std::cell::Cell;
use std::mem;
use std::mem::transmute;
use std::ffi::{c_char, CStr, c_void};
use std::ptr;
use std::cmp::min;
use std::collections::{HashMap, VecDeque};

// Pointer to the current FakeVulkan instance that was created in
// this thread. There can only be one instance per thread.
thread_local! {
    static CURRENT_FAKE_VULKAN: Cell<Option<*mut FakeVulkan>> = Cell::new(None);
}

fn add_extension_to_vec(
    extension_vec: &mut Vec<vk::VkExtensionProperties>,
    ext: &str,
) {
    let old_len = extension_vec.len();
    extension_vec.resize(old_len + 1, Default::default());
    let props = extension_vec.last_mut().unwrap();
    for (i, b) in ext
        .bytes()
        .take(min(props.extensionName.len() - 1, ext.len()))
        .enumerate()
    {
        props.extensionName[i] = b as c_char;
    }
}

/// A structure containing the physical device infos that will be
/// reported by the driver.
#[derive(Debug, Clone)]
pub struct PhysicalDeviceInfo {
    pub properties: vk::VkPhysicalDeviceProperties,
    pub memory_properties: vk::VkPhysicalDeviceMemoryProperties,
    pub features: vk::VkPhysicalDeviceFeatures,
    pub queue_families: Vec<vk::VkQueueFamilyProperties>,
    pub extensions: Vec<vk::VkExtensionProperties>,
    pub format_properties: HashMap<vk::VkFormat, vk::VkFormatProperties>,
    // Two arbitrary extension feature sets to report when asked
    pub shader_atomic: vk::VkPhysicalDeviceShaderAtomicInt64FeaturesKHR,
    pub multiview: vk::VkPhysicalDeviceMultiviewFeaturesKHR,
}

impl PhysicalDeviceInfo {
    pub fn add_extension(&mut self, ext: &str) {
        add_extension_to_vec(&mut self.extensions, ext);
    }
}

impl Default for PhysicalDeviceInfo {
    fn default() -> PhysicalDeviceInfo {
        PhysicalDeviceInfo {
            properties: vk::VkPhysicalDeviceProperties {
                apiVersion: requirements::make_version(1, 0, 0),
                driverVersion: 0,
                vendorID: 0xfa4eed,
                deviceID: 0xfa4ede,
                deviceType: vk::VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU,
                deviceName: [0; 256],
                pipelineCacheUUID: *b"fakevulkan123456",
                limits: Default::default(),
                sparseProperties: Default::default(),
            },
            memory_properties: Default::default(),
            format_properties: HashMap::new(),
            features: Default::default(),
            queue_families: vec![vk::VkQueueFamilyProperties {
                queueFlags: vk::VK_QUEUE_GRAPHICS_BIT,
                queueCount: 1,
                timestampValidBits: 32,
                minImageTransferGranularity: Default::default(),
            }],
            extensions: Vec::new(),
            shader_atomic: Default::default(),
            multiview: Default::default(),
        }
    }
}

const ATOMIC_TYPE: vk::VkStructureType =
    vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
const MULTIVIEW_TYPE: vk::VkStructureType =
    vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES_KHR;

#[derive(Debug, Clone)]
pub struct GraphicsPipelineCreateInfo {
    pub create_info: vk::VkGraphicsPipelineCreateInfo,
    pub bindings: Vec<vk::VkVertexInputBindingDescription>,
    pub attribs: Vec<vk::VkVertexInputAttributeDescription>,
}

impl GraphicsPipelineCreateInfo {
    fn new(
        create_info: &vk::VkGraphicsPipelineCreateInfo
    ) -> GraphicsPipelineCreateInfo {
        let vertex_input_state = unsafe {
            &*create_info.pVertexInputState
        };
        let bindings = vec_from_raw_parts(
            vertex_input_state.pVertexBindingDescriptions,
            vertex_input_state.vertexBindingDescriptionCount as usize,
        );
        let attribs = vec_from_raw_parts(
            vertex_input_state.pVertexAttributeDescriptions,
            vertex_input_state.vertexAttributeDescriptionCount as usize,
        );

        GraphicsPipelineCreateInfo {
            create_info: create_info.clone(),
            bindings,
            attribs,
        }
    }
}

#[derive(Debug)]
pub enum PipelineCreateInfo {
    Graphics(GraphicsPipelineCreateInfo),
    Compute(vk::VkComputePipelineCreateInfo),
}

#[derive(Debug, Clone)]
pub struct PipelineLayoutCreateInfo {
    pub create_info: vk::VkPipelineLayoutCreateInfo,
    pub push_constant_ranges: Vec<vk::VkPushConstantRange>,
    pub layouts: Vec<vk::VkDescriptorSetLayout>,
}

#[derive(Debug)]
// It would be nice to just store the VkClearAttachment directly but
// that can’t derive Debug because it is a union and it would be
// annoying to have to manually implement Debug.
pub enum ClearAttachment {
    Color {
        attachment: u32,
        value: [f32; 4],
    },
    DepthStencil {
        aspect_mask: vk::VkImageAspectFlags,
        value: vk::VkClearDepthStencilValue,
    },
}
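
// A hedged sketch (not part of the original source) of how a raw
// vk::VkClearAttachment might map onto the enum above. The union field
// names (aspectMask, colorAttachment, clearValue.color.float32 and
// clearValue.depthStencil) are assumed to follow the C API:
//
//     let clear = if raw.aspectMask & vk::VK_IMAGE_ASPECT_COLOR_BIT != 0 {
//         ClearAttachment::Color {
//             attachment: raw.colorAttachment,
//             value: unsafe { raw.clearValue.color.float32 },
//         }
//     } else {
//         ClearAttachment::DepthStencil {
//             aspect_mask: raw.aspectMask,
//             value: unsafe { raw.clearValue.depthStencil },
//         }
//     };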

#[derive(Debug)]
#[allow(dead_code)]
pub enum Command {
    BeginRenderPass(vk::VkRenderPassBeginInfo),
    EndRenderPass,
    BindPipeline {
        bind_point: vk::VkPipelineBindPoint,
        pipeline: vk::VkPipeline,
    },
    BindVertexBuffers {
        first_binding: u32,
        buffers: Vec<vk::VkBuffer>,
        offsets: Vec<vk::VkDeviceSize>,
    },
    BindIndexBuffer {
        buffer: vk::VkBuffer,
        offset: vk::VkDeviceSize,
        index_type: vk::VkIndexType,
    },
    Draw {
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    },
    DrawIndexed {
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    },
    Dispatch {
        x: u32,
        y: u32,
        z: u32,
    },
    ClearAttachments {
        attachments: Vec<ClearAttachment>,
        rects: Vec<vk::VkClearRect>,
    },
    PipelineBarrier {
        src_stage_mask: vk::VkPipelineStageFlags,
        dst_stage_mask: vk::VkPipelineStageFlags,
        dependency_flags: vk::VkDependencyFlags,
        memory_barriers: Vec<vk::VkMemoryBarrier>,
        buffer_memory_barriers: Vec<vk::VkBufferMemoryBarrier>,
        image_memory_barriers: Vec<vk::VkImageMemoryBarrier>,
    },
    CopyImageToBuffer {
        src_image: vk::VkImage,
        src_image_layout: vk::VkImageLayout,
        dst_buffer: vk::VkBuffer,
        regions: Vec<vk::VkBufferImageCopy>,
    },
    PushConstants {
        layout: vk::VkPipelineLayout,
        stage_flags: vk::VkShaderStageFlags,
        offset: u32,
        values: Vec<u8>,
    },
    BindDescriptorSets {
        pipeline_bind_point: vk::VkPipelineBindPoint,
        layout: vk::VkPipelineLayout,
        first_set: u32,
        descriptor_sets: Vec<vk::VkDescriptorSet>,
    },
}

#[derive(Debug)]
pub enum HandleType {
    Instance,
    Device,
    CommandPool,
    CommandBuffer {
        command_pool: usize,
        commands: Vec<Command>,
        begun: bool,
    },
    Fence {
        reset_count: usize,
        wait_count: usize,
    },
    Memory {
        contents: Vec<u8>,
        mapped: bool,
    },
    RenderPass { attachments: Vec<vk::VkAttachmentDescription> },
    Image,
    ImageView,
    Buffer {
        create_info: vk::VkBufferCreateInfo,
        memory: Option<vk::VkDeviceMemory>,
    },
    Framebuffer,
    ShaderModule { code: Vec<u32> },
    PipelineCache,
    DescriptorPool,
    DescriptorSetLayout { bindings: Vec<vk::VkDescriptorSetLayoutBinding> },
    PipelineLayout(PipelineLayoutCreateInfo),
    Pipeline(PipelineCreateInfo),
    DescriptorSet { bindings: HashMap<u32, Binding> },
}

#[derive(Debug)]
pub struct Binding {
    pub descriptor_type: vk::VkDescriptorType,
    pub info: vk::VkDescriptorBufferInfo,
}

#[derive(Debug)]
pub struct Handle {
    pub freed: bool,
    pub data: HandleType,
}

fn vec_from_raw_parts<T: std::clone::Clone>(data: *const T, len: usize) -> Vec<T> {
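    // The explicit zero-length check below matters because Vulkan
    // create-info structs may pass a null pointer alongside a zero count,
    // while std::slice::from_raw_parts requires a non-null pointer.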
    if len > 0 {
        unsafe {
            std::slice::from_raw_parts(data, len).to_vec()
        }
    } else {
        Vec::new()
    }
}

/// A fake Vulkan driver. Note that there can only be one FakeVulkan
/// instance per thread because it uses thread-local storage to find
/// the current fake driver when the fake vkCreateInstance is called.
/// The FakeVulkan should always be stored in a Box so that its
/// address can be tracked in [CURRENT_FAKE_VULKAN].
#[derive(Debug)]
pub struct FakeVulkan {
    pub physical_devices: Vec<PhysicalDeviceInfo>,
    pub instance_extensions: Vec<vk::VkExtensionProperties>,

    // A fake set of requirements to return from the next call to
    // vkGetBufferMemoryRequirements or vkGetImageMemoryRequirements.
    pub memory_requirements: vk::VkMemoryRequirements,

    /// Whether to claim that the vkEnumerateInstanceVersion function
    /// is available.
    pub has_enumerate_instance_version: bool,

    /// Log of calls to vkFlushMappedMemoryRanges
    pub memory_flushes: Vec<vk::VkMappedMemoryRange>,
    /// Log of calls to vkInvalidateMappedMemoryRanges
    pub memory_invalidations: Vec<vk::VkMappedMemoryRange>,

    /// All of the commands from the command buffers that were
    /// submitted with vkQueueSubmit
    pub commands: Vec<Command>,

    handles: Vec<Handle>,

    // Queue of values to return instead of VK_SUCCESS to simulate
    // function call failures. This is indexed by the function name
    // and the value is a queue of override values to return.
    result_queue: HashMap<String, VecDeque<vk::VkResult>>,
}

impl FakeVulkan {
    pub fn new() -> Box<FakeVulkan> {
        let mut fake_vulkan = Box::new(FakeVulkan {
            physical_devices: Vec::new(),
            instance_extensions: Vec::new(),
            memory_requirements: Default::default(),
            handles: Vec::new(),
            has_enumerate_instance_version: false,
            result_queue: HashMap::new(),
            memory_flushes: Vec::new(),
            memory_invalidations: Vec::new(),
            commands: Vec::new(),
        });

        CURRENT_FAKE_VULKAN.with(|f| {
            let old_value = f.replace(Some(
                fake_vulkan.as_mut() as *mut FakeVulkan
            ));

            // There can only be one FakeVulkan instance per thread at a time
            assert!(old_value.is_none());
        });

        fake_vulkan
    }

    pub fn current() -> &'static mut FakeVulkan {
        unsafe { &mut *CURRENT_FAKE_VULKAN.with(|f| f.get().unwrap()) }
    }

    fn next_result(&mut self, func_name: &str) -> vk::VkResult {
        match self.result_queue.get_mut(func_name) {
            Some(queue) => match queue.pop_front() {
                Some(res) => res,
                None => vk::VK_SUCCESS,
            },
            None => vk::VK_SUCCESS,
        }
    }

    /// Queue a VkResult to return the next time the named function is
    /// called. The value will be used only once and after that the
    /// function will revert to always returning VK_SUCCESS. This can
    /// be called multiple times to queue multiple results before
    /// reverting.
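    ///
    /// A hypothetical usage sketch (the function name and error code are
    /// only examples):
    ///
    /// ```ignore
    /// // Make the next vkCreateFence call fail once.
    /// fake_vulkan.queue_result(
    ///     "vkCreateFence".to_string(),
    ///     vk::VK_ERROR_OUT_OF_HOST_MEMORY,
    /// );
    /// ```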
    pub fn queue_result(&mut self, func_name: String, result: vk::VkResult) {
        self.result_queue
            .entry(func_name)
            .or_insert_with(Default::default)
            .push_back(result);
    }

    /// Sets the get_proc_addr override on the [vulkan_funcs] so that
    /// it will use this FakeVulkan driver the next time a
    /// [Library](vulkan_funcs::Library) is created.
    pub fn set_override(&self) {
        vulkan_funcs::override_get_instance_proc_addr(
            ptr::addr_of!(*self).cast(),
            Some(FakeVulkan::get_instance_proc_addr),
        );
    }

    pub fn add_instance_extension(&mut self, ext: &str) {
        add_extension_to_vec(&mut self.instance_extensions, ext);
    }

    pub fn get_function(&self, name: *const c_char) -> vk::PFN_vkVoidFunction {
        let name = unsafe { CStr::from_ptr(name).to_str().unwrap() };

        match name {
            "vkGetDeviceProcAddr" => unsafe {
                transmute::<vk::PFN_vkGetDeviceProcAddr, _>(
                    Some(FakeVulkan::get_device_proc_addr)
                )
            },
            "vkCreateInstance" => unsafe {
                transmute::<vk::PFN_vkCreateInstance, _>(
                    Some(FakeVulkan::create_instance)
                )
            },
            "vkEnumeratePhysicalDevices" => unsafe {
                transmute::<vk::PFN_vkEnumeratePhysicalDevices, _>(
                    Some(FakeVulkan::enumerate_physical_devices)
                )
            },
            "vkGetPhysicalDeviceMemoryProperties" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceMemoryProperties, _>(
                    Some(FakeVulkan::get_physical_device_memory_properties)
                )
            },
            "vkGetPhysicalDeviceFormatProperties" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceFormatProperties, _>(
                    Some(FakeVulkan::get_physical_device_format_properties)
                )
            },
            "vkGetPhysicalDeviceProperties" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceProperties, _>(
                    Some(FakeVulkan::get_physical_device_properties)
                )
            },
            "vkGetPhysicalDeviceProperties2" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceProperties2, _>(
                    Some(FakeVulkan::get_physical_device_properties2)
                )
            },
            "vkGetPhysicalDeviceFeatures" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceFeatures, _>(
                    Some(FakeVulkan::get_physical_device_features)
                )
            },
            "vkGetPhysicalDeviceQueueFamilyProperties" => unsafe {
                type T = vk::PFN_vkGetPhysicalDeviceQueueFamilyProperties;
                transmute::<T, _>(Some(
                    FakeVulkan::get_physical_device_queue_family_properties
                ))
            },
            "vkGetDeviceQueue" => unsafe {
                transmute::<vk::PFN_vkGetDeviceQueue, _>(
                    Some(FakeVulkan::get_device_queue)
                )
            },
            "vkEnumerateInstanceExtensionProperties" => unsafe {
                transmute::<vk::PFN_vkEnumerateInstanceExtensionProperties, _>(
                    Some(FakeVulkan::enumerate_instance_extension_properties)
                )
            },
            "vkEnumerateDeviceExtensionProperties" => unsafe {
                transmute::<vk::PFN_vkEnumerateDeviceExtensionProperties, _>(
                    Some(FakeVulkan::enumerate_device_extension_properties)
                )
            },
            "vkCreateDevice" => unsafe {
                transmute::<vk::PFN_vkCreateDevice, _>(
                    Some(FakeVulkan::create_device)
                )
            },
            "vkDestroyInstance" => unsafe {
                transmute::<vk::PFN_vkDestroyInstance, _>(
                    Some(FakeVulkan::destroy_instance)
                )
            },
            "vkDestroyDevice" => unsafe {
                transmute::<vk::PFN_vkDestroyDevice, _>(
                    Some(FakeVulkan::destroy_device)
                )
            },
            "vkCreateCommandPool" => unsafe {
                transmute::<vk::PFN_vkCreateCommandPool, _>(
                    Some(FakeVulkan::create_command_pool)
                )
            },
            "vkDestroyCommandPool" => unsafe {
                transmute::<vk::PFN_vkDestroyCommandPool, _>(
                    Some(FakeVulkan::destroy_command_pool)
                )
            },
            "vkAllocateCommandBuffers" => unsafe {
                transmute::<vk::PFN_vkAllocateCommandBuffers, _>(
                    Some(FakeVulkan::allocate_command_buffers)
                )
            },
            "vkFreeCommandBuffers" => unsafe {
                transmute::<vk::PFN_vkFreeCommandBuffers, _>(
                    Some(FakeVulkan::free_command_buffers)
                )
            },
            "vkCreateFence" => unsafe {
                transmute::<vk::PFN_vkCreateFence, _>(
                    Some(FakeVulkan::create_fence)
                )
            },
            "vkDestroyFence" => unsafe {
                transmute::<vk::PFN_vkDestroyFence, _>(
                    Some(FakeVulkan::destroy_fence)
                )
            },
            "vkCreateRenderPass" => unsafe {
                transmute::<vk::PFN_vkCreateRenderPass, _>(
                    Some(FakeVulkan::create_render_pass)
                )
            },
            "vkDestroyRenderPass" => unsafe {
                transmute::<vk::PFN_vkDestroyRenderPass, _>(
                    Some(FakeVulkan::destroy_render_pass)
                )
            },
            "vkCreateImageView" => unsafe {
                transmute::<vk::PFN_vkCreateImageView, _>(
                    Some(FakeVulkan::create_image_view)
                )
            },
            "vkDestroyImageView" => unsafe {
                transmute::<vk::PFN_vkDestroyImageView, _>(
                    Some(FakeVulkan::destroy_image_view)
                )
            },
            "vkCreateImage" => unsafe {
                transmute::<vk::PFN_vkCreateImage, _>(
                    Some(FakeVulkan::create_image)
                )
            },
            "vkDestroyImage" => unsafe {
                transmute::<vk::PFN_vkDestroyImage, _>(
                    Some(FakeVulkan::destroy_image)
                )
            },
            "vkCreateBuffer" => unsafe {
                transmute::<vk::PFN_vkCreateBuffer, _>(
                    Some(FakeVulkan::create_buffer)
                )
            },
            "vkDestroyBuffer" => unsafe {
                transmute::<vk::PFN_vkDestroyBuffer, _>(
                    Some(FakeVulkan::destroy_buffer)
                )
            },
            "vkCreateFramebuffer" => unsafe {
                transmute::<vk::PFN_vkCreateFramebuffer, _>(
                    Some(FakeVulkan::create_framebuffer)
                )
            },
            "vkDestroyFramebuffer" => unsafe {
                transmute::<vk::PFN_vkDestroyFramebuffer, _>(
                    Some(FakeVulkan::destroy_framebuffer)
                )
            },
            "vkEnumerateInstanceVersion" => unsafe {
                if self.has_enumerate_instance_version {
                    transmute::<vk::PFN_vkEnumerateInstanceVersion, _>(
                        Some(FakeVulkan::enumerate_instance_version)
                    )
                } else {
                    None
                }
            },
            "vkGetPhysicalDeviceFeatures2KHR" => unsafe {
                transmute::<vk::PFN_vkGetPhysicalDeviceFeatures2, _>(
                    Some(FakeVulkan::get_physical_device_features2)
                )
            },
            "vkGetImageMemoryRequirements" => unsafe {
                transmute::<vk::PFN_vkGetImageMemoryRequirements, _>(
                    Some(FakeVulkan::get_image_memory_requirements)
                )
            },
            "vkGetBufferMemoryRequirements" => unsafe {
                transmute::<vk::PFN_vkGetBufferMemoryRequirements, _>(
                    Some(FakeVulkan::get_buffer_memory_requirements)
                )
            },
            "vkBindBufferMemory" => unsafe {
                transmute::<vk::PFN_vkBindBufferMemory, _>(
                    Some(FakeVulkan::bind_buffer_memory)
                )
            },
            "vkBindImageMemory" => unsafe {
                transmute::<vk::PFN_vkBindImageMemory, _>(
                    Some(FakeVulkan::bind_image_memory)
                )
            },
            "vkAllocateMemory" => unsafe {
                transmute::<vk::PFN_vkAllocateMemory, _>(
                    Some(FakeVulkan::allocate_memory)
                )
            },
            "vkFreeMemory" => unsafe {
                transmute::<vk::PFN_vkFreeMemory, _>(
                    Some(FakeVulkan::free_memory)
                )
            },
            "vkMapMemory" => unsafe {
                transmute::<vk::PFN_vkMapMemory, _>(
                    Some(FakeVulkan::map_memory)
                )
            },
            "vkUnmapMemory" => unsafe {
                transmute::<vk::PFN_vkUnmapMemory, _>(
                    Some(FakeVulkan::unmap_memory)
                )
            },
            "vkCreateShaderModule" => unsafe {
                transmute::<vk::PFN_vkCreateShaderModule, _>(
                    Some(FakeVulkan::create_shader_module)
                )
            },
            "vkDestroyShaderModule" => unsafe {
                transmute::<vk::PFN_vkDestroyShaderModule, _>(
                    Some(FakeVulkan::destroy_shader_module)
                )
            },
            "vkCreatePipelineCache" => unsafe {
                transmute::<vk::PFN_vkCreatePipelineCache, _>(
                    Some(FakeVulkan::create_pipeline_cache)
                )
            },
            "vkDestroyPipelineCache" => unsafe {
                transmute::<vk::PFN_vkDestroyPipelineCache, _>(
                    Some(FakeVulkan::destroy_pipeline_cache)
                )
            },
            "vkCreateDescriptorPool" => unsafe {
                transmute::<vk::PFN_vkCreateDescriptorPool, _>(
                    Some(FakeVulkan::create_descriptor_pool)
                )
            },
            "vkDestroyDescriptorPool" => unsafe {
                transmute::<vk::PFN_vkDestroyDescriptorPool, _>(
                    Some(FakeVulkan::destroy_descriptor_pool)
                )
            },
            "vkCreateDescriptorSetLayout" => unsafe {
                transmute::<vk::PFN_vkCreateDescriptorSetLayout, _>(
                    Some(FakeVulkan::create_descriptor_set_layout)
                )
            },
            "vkDestroyDescriptorSetLayout" => unsafe {
                transmute::<vk::PFN_vkDestroyDescriptorSetLayout, _>(
                    Some(FakeVulkan::destroy_descriptor_set_layout)
                )
            },
            "vkCreatePipelineLayout" => unsafe {
                transmute::<vk::PFN_vkCreatePipelineLayout, _>(
                    Some(FakeVulkan::create_pipeline_layout)
                )
            },
            "vkDestroyPipelineLayout" => unsafe {
                transmute::<vk::PFN_vkDestroyPipelineLayout, _>(
                    Some(FakeVulkan::destroy_pipeline_layout)
                )
            },
            "vkCreateGraphicsPipelines" => unsafe {
                transmute::<vk::PFN_vkCreateGraphicsPipelines, _>(
                    Some(FakeVulkan::create_graphics_pipelines)
                )
            },
            "vkCreateComputePipelines" => unsafe {
                transmute::<vk::PFN_vkCreateComputePipelines, _>(
                    Some(FakeVulkan::create_compute_pipelines)
                )
            },
            "vkDestroyPipeline" => unsafe {
                transmute::<vk::PFN_vkDestroyPipeline, _>(
                    Some(FakeVulkan::destroy_pipeline)
                )
            },
            "vkFlushMappedMemoryRanges" => unsafe {
                transmute::<vk::PFN_vkFlushMappedMemoryRanges, _>(
                    Some(FakeVulkan::flush_mapped_memory_ranges)
                )
            },
            "vkInvalidateMappedMemoryRanges" => unsafe {
                transmute::<vk::PFN_vkInvalidateMappedMemoryRanges, _>(
                    Some(FakeVulkan::invalidate_mapped_memory_ranges)
                )
            },
            "vkQueueSubmit" => unsafe {
                transmute::<vk::PFN_vkQueueSubmit, _>(
                    Some(FakeVulkan::queue_submit)
                )
            },
            "vkAllocateDescriptorSets" => unsafe {
                transmute::<vk::PFN_vkAllocateDescriptorSets, _>(
                    Some(FakeVulkan::allocate_descriptor_sets)
                )
            },
            "vkFreeDescriptorSets" => unsafe {
                transmute::<vk::PFN_vkFreeDescriptorSets, _>(
                    Some(FakeVulkan::free_descriptor_sets)
                )
            },
            "vkUpdateDescriptorSets" => unsafe {
                transmute::<vk::PFN_vkUpdateDescriptorSets, _>(
                    Some(FakeVulkan::update_descriptor_sets)
                )
            },
            "vkBeginCommandBuffer" => unsafe {
                transmute::<vk::PFN_vkBeginCommandBuffer, _>(
                    Some(FakeVulkan::begin_command_buffer)
                )
            },
            "vkEndCommandBuffer" => unsafe {
                transmute::<vk::PFN_vkEndCommandBuffer, _>(
                    Some(FakeVulkan::end_command_buffer)
                )
            },
            "vkCmdBeginRenderPass" => unsafe {
                transmute::<vk::PFN_vkCmdBeginRenderPass, _>(
                    Some(FakeVulkan::begin_render_pass)
                )
            },
            "vkCmdEndRenderPass" => unsafe {
                transmute::<vk::PFN_vkCmdEndRenderPass, _>(
                    Some(FakeVulkan::end_render_pass)
                )
            },
            "vkCmdBindPipeline" => unsafe {
                transmute::<vk::PFN_vkCmdBindPipeline, _>(
                    Some(FakeVulkan::bind_pipeline)
                )
            },
            "vkCmdBindVertexBuffers" => unsafe {
                transmute::<vk::PFN_vkCmdBindVertexBuffers, _>(
                    Some(FakeVulkan::bind_vertex_buffers)
                )
            },
            "vkCmdBindIndexBuffer" => unsafe {
                transmute::<vk::PFN_vkCmdBindIndexBuffer, _>(
                    Some(FakeVulkan::bind_index_buffer)
                )
            },
            "vkCmdDraw" => unsafe {
                transmute::<vk::PFN_vkCmdDraw, _>(
                    Some(FakeVulkan::draw)
                )
            },
            "vkCmdDrawIndexed" => unsafe {
                transmute::<vk::PFN_vkCmdDrawIndexed, _>(
                    Some(FakeVulkan::draw_indexed)
                )
            },
            "vkCmdDispatch" => unsafe {
                transmute::<vk::PFN_vkCmdDispatch, _>(
                    Some(FakeVulkan::dispatch)
                )
            },
            "vkCmdClearAttachments" => unsafe {
                transmute::<vk::PFN_vkCmdClearAttachments, _>(
                    Some(FakeVulkan::clear_attachments)
                )
            },
            "vkCmdPipelineBarrier" => unsafe {
                transmute::<vk::PFN_vkCmdPipelineBarrier, _>(
                    Some(FakeVulkan::pipeline_barrier)
                )
            },
            "vkCmdCopyImageToBuffer" => unsafe {
                transmute::<vk::PFN_vkCmdCopyImageToBuffer, _>(
                    Some(FakeVulkan::copy_image_to_buffer)
                )
            },
            "vkCmdPushConstants" => unsafe {
                transmute::<vk::PFN_vkCmdPushConstants, _>(
                    Some(FakeVulkan::push_constants)
                )
            },
            "vkCmdBindDescriptorSets" => unsafe {
                transmute::<vk::PFN_vkCmdBindDescriptorSets, _>(
                    Some(FakeVulkan::bind_descriptor_sets)
                )
            },
            "vkResetFences" => unsafe {
                transmute::<vk::PFN_vkResetFences, _>(
                    Some(FakeVulkan::reset_fences)
                )
            },
            "vkWaitForFences" => unsafe {
                transmute::<vk::PFN_vkWaitForFences, _>(
                    Some(FakeVulkan::wait_for_fences)
                )
            },
            _ => None,
        }
    }

    extern "C" fn get_instance_proc_addr(
        _instance: vk::VkInstance,
        name: *const c_char,
    ) -> vk::PFN_vkVoidFunction {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.get_function(name)
    }

    extern "C" fn get_device_proc_addr(
        _device: vk::VkDevice,
        name: *const c_char,
    ) -> vk::PFN_vkVoidFunction {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.get_function(name)
    }

    fn copy_with_count<T>(
        values: &[T],
        count_ptr: *mut u32,
        array_ptr: *mut T,
    ) where
        T: Clone,
    {
        if array_ptr.is_null() {
            unsafe {
                *count_ptr = values.len() as u32;
            }

            return;
        }

        let count = min(
            unsafe { *count_ptr } as usize,
            values.len(),
        );

        for (i, value) in values.iter().take(count).enumerate() {
            unsafe {
                *array_ptr.add(i) = value.clone();
            }
        }
    }
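
    // copy_with_count implements Vulkan's usual two-call enumeration
    // protocol: pass a null array pointer to query the count, then call
    // again with an allocated array. A rough caller-side sketch, with
    // `enumerate` standing in for any of the entry points below:
    //
    //     let mut count = 0u32;
    //     enumerate(&mut count, ptr::null_mut()); // first call: get count
    //     let mut items = vec![Default::default(); count as usize];
    //     enumerate(&mut count, items.as_mut_ptr()); // second call: fill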

    extern "C" fn enumerate_physical_devices(
        _instance: vk::VkInstance,
        physical_device_count: *mut u32,
        physical_devices: *mut vk::VkPhysicalDevice,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        if physical_devices.is_null() {
            unsafe {
                *physical_device_count =
                    fake_vulkan.physical_devices.len() as u32;
            }

            return fake_vulkan.next_result("vkEnumeratePhysicalDevices");
        }

        let count = min(
            unsafe { *physical_device_count as usize },
            fake_vulkan.physical_devices.len()
        );

        for i in 0..count {
            unsafe {
                // Store the device index as a pointer. We add 1 so
                // that it won’t be null.
                *physical_devices.add(i) =
                    fake_vulkan.index_to_physical_device(i);
            }
        }

        unsafe {
            *physical_device_count = count as u32;
        }

        fake_vulkan.next_result("vkEnumeratePhysicalDevices")
    }

    /// Get the physical device that would point to the device at the
    /// given index.
    pub fn index_to_physical_device(
        &self,
        index: usize,
    ) -> vk::VkPhysicalDevice {
        assert!(index < self.physical_devices.len());
        unsafe { transmute(index + 1) }
    }

    /// Get the index of the physical device represented by the
    /// VkPhysicalDevice pointer.
    pub fn physical_device_to_index(
        &self,
        physical_device: vk::VkPhysicalDevice
    ) -> usize {
        assert!(!physical_device.is_null());
        let index = unsafe { transmute::<_, usize>(physical_device) - 1 };
        assert!(index < self.physical_devices.len());
        index
    }
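
    // Worked example of the encoding above: index 0 becomes the pointer
    // value 1 (so it is never null), and physical_device_to_index subtracts
    // 1 to recover the original index.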

    extern "C" fn get_physical_device_memory_properties(
        physical_device: vk::VkPhysicalDevice,
        memory_properties_out: *mut vk::VkPhysicalDeviceMemoryProperties,
    ) {
        let fake_vulkan = FakeVulkan::current();

        unsafe {
            let device_num =
                fake_vulkan.physical_device_to_index(physical_device);
            let device = &fake_vulkan.physical_devices[device_num];
            *memory_properties_out = device.memory_properties.clone();
        }
    }

    extern "C" fn get_physical_device_properties(
        physical_device: vk::VkPhysicalDevice,
        properties_out: *mut vk::VkPhysicalDeviceProperties,
    ) {
        let fake_vulkan = FakeVulkan::current();

        unsafe {
            let device_num =
                fake_vulkan.physical_device_to_index(physical_device);
            let device = &fake_vulkan.physical_devices[device_num];
            *properties_out = device.properties.clone();
        }
    }

    extern "C" fn get_physical_device_properties2(
        physical_device: vk::VkPhysicalDevice,
        properties_out: *mut vk::VkPhysicalDeviceProperties2,
    ) {
        let fake_vulkan = FakeVulkan::current();
        let device_num = fake_vulkan.physical_device_to_index(physical_device);
        let device = &fake_vulkan.physical_devices[device_num];

        unsafe {
            assert_eq!(
                (*properties_out).sType,
                vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2
            );

            (*properties_out).properties = device.properties.clone();
        }
    }

    extern "C" fn get_physical_device_features(
        physical_device: vk::VkPhysicalDevice,
        features_out: *mut vk::VkPhysicalDeviceFeatures,
    ) {
        let fake_vulkan = FakeVulkan::current();

        unsafe {
            let device_num =
                fake_vulkan.physical_device_to_index(physical_device);
            let device = &fake_vulkan.physical_devices[device_num];
            *features_out = device.features.clone();
        }
    }

    extern "C" fn get_physical_device_format_properties(
        physical_device: vk::VkPhysicalDevice,
        format: vk::VkFormat,
        properties: *mut vk::VkFormatProperties,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let device_num =
            fake_vulkan.physical_device_to_index(physical_device);
        let device = &fake_vulkan.physical_devices[device_num];

        unsafe {
            *properties = device.format_properties[&format];
        }
    }

    fn extract_struct_data(
        ptr: *mut u8
    ) -> (vk::VkStructureType, *mut u8) {
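        // Read the common Vulkan struct header by copying raw bytes: sType
        // sits at offset 0 and the pNext pointer at NEXT_PTR_OFFSET. This
        // lets get_physical_device_features2 walk a pNext chain without
        // knowing each extension struct's concrete type.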
        let mut type_bytes =
            [0u8; mem::size_of::<vk::VkStructureType>()];
        unsafe {
            ptr.copy_to(type_bytes.as_mut_ptr(), type_bytes.len());
        }
        let mut next_bytes =
            [0u8; mem::size_of::<*mut u8>()];
        unsafe {
            ptr.add(vulkan_funcs::NEXT_PTR_OFFSET).copy_to(
                next_bytes.as_mut_ptr(), next_bytes.len()
            );
        }

        (
            vk::VkStructureType::from_ne_bytes(type_bytes),
            usize::from_ne_bytes(next_bytes) as *mut u8,
        )
    }

    extern "C" fn get_physical_device_features2(
        physical_device: vk::VkPhysicalDevice,
        features: *mut vk::VkPhysicalDeviceFeatures2,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let device_num = fake_vulkan.physical_device_to_index(physical_device);
        let device = &fake_vulkan.physical_devices[device_num];

        let (struct_type, mut struct_ptr) =
            FakeVulkan::extract_struct_data(features.cast());

        assert_eq!(
            struct_type,
            vk::VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2
        );

        while !struct_ptr.is_null() {
            let (struct_type, next_ptr) =
                FakeVulkan::extract_struct_data(struct_ptr);

            let to_copy = match struct_type {
                ATOMIC_TYPE => vec![
                    device.shader_atomic.shaderBufferInt64Atomics,
                    device.shader_atomic.shaderSharedInt64Atomics,
                ],
                MULTIVIEW_TYPE => vec![
                    device.multiview.multiview,
                    device.multiview.multiviewGeometryShader,
                    device.multiview.multiviewTessellationShader,
                ],
                _ => unreachable!("unexpected struct type {}", struct_type),
            };

            unsafe {
                std::ptr::copy(
                    to_copy.as_ptr(),
                    struct_ptr.add(vulkan_funcs::FIRST_FEATURE_OFFSET).cast(),
                    to_copy.len(),
                );
            }

            struct_ptr = next_ptr;
        }
    }

    extern "C" fn get_physical_device_queue_family_properties(
        physical_device: vk::VkPhysicalDevice,
        property_count_out: *mut u32,
        properties: *mut vk::VkQueueFamilyProperties,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let device_num = fake_vulkan.physical_device_to_index(physical_device);
        let device = &fake_vulkan.physical_devices[device_num];

        FakeVulkan::copy_with_count(
            &device.queue_families,
            property_count_out,
            properties,
        );
    }

    #[inline]
    pub fn make_queue(
        queue_family_index: u32,
        queue_index: u32
    ) -> vk::VkQueue {
        let queue =
            ((queue_family_index << 9) | (queue_index << 1) | 1) as usize;
        queue as vk::VkQueue
    }

    #[inline]
    pub fn unmake_queue(
        queue: vk::VkQueue,
    ) -> (u32, u32) {
        let queue = queue as usize;
        ((queue >> 9) as u32, ((queue >> 1) & 0xff) as u32)
    }
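
    // Worked example of the queue encoding: make_queue(2, 3) packs the
    // family index into bits 9 and up, the queue index into bits 1-8, and
    // sets the low bit so the handle is never null, giving
    // (2 << 9) | (3 << 1) | 1 = 1031; unmake_queue(1031) returns (2, 3).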

    extern "C" fn get_device_queue(
        _device: vk::VkDevice,
        queue_family_index: u32,
        queue_index: u32,
        queue_out: *mut vk::VkQueue,
    ) {
        unsafe {
            *queue_out = FakeVulkan::make_queue(
                queue_family_index,
                queue_index
            );
        }
    }

    extern "C" fn enumerate_instance_extension_properties(
        _layer_name: *const c_char,
        property_count: *mut u32,
        properties: *mut vk::VkExtensionProperties,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        FakeVulkan::copy_with_count(
            &fake_vulkan.instance_extensions,
            property_count,
            properties,
        );

        fake_vulkan.next_result("vkEnumerateInstanceExtensionProperties")
    }

    extern "C" fn enumerate_device_extension_properties(
        physical_device: vk::VkPhysicalDevice,
        _layer_name: *const c_char,
        property_count: *mut u32,
        properties: *mut vk::VkExtensionProperties,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let device_num = fake_vulkan.physical_device_to_index(physical_device);
        let device = &fake_vulkan.physical_devices[device_num];

        FakeVulkan::copy_with_count(
            &device.extensions,
            property_count,
            properties,
        );

        fake_vulkan.next_result("vkEnumerateDeviceExtensionProperties")
    }

    pub fn add_dispatchable_handle<T>(&mut self, data: HandleType) -> *mut T {
        self.handles.push(Handle {
            freed: false,
            data,
        });

        self.handles.len() as *mut T
    }

    #[cfg(target_pointer_width = "64")]
    pub fn add_handle<T>(&mut self, data: HandleType) -> *mut T {
        self.add_dispatchable_handle(data)
    }

    #[cfg(not(target_pointer_width = "64"))]
    pub fn add_handle(&mut self, data: HandleType) -> u64 {
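        // On targets without 64-bit pointers, non-dispatchable Vulkan
        // handles are plain u64 values, so return the 1-based index
        // directly instead of casting it to a pointer.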
        self.handles.push(Handle {
            freed: false,
            data,
        });

        self.handles.len() as u64
    }

    pub fn dispatchable_handle_to_index<T>(handle: *mut T) -> usize {
        let handle_num = handle as usize;

        assert!(handle_num > 0);

        handle_num - 1
    }

    #[cfg(target_pointer_width = "64")]
    pub fn handle_to_index<T>(handle: *mut T) -> usize {
        FakeVulkan::dispatchable_handle_to_index(handle)
    }

    #[cfg(not(target_pointer_width = "64"))]
    pub fn handle_to_index(handle: u64) -> usize {
        assert!(handle > 0);

        (handle - 1) as usize
    }

    pub fn get_dispatchable_handle<T>(&self, handle: *mut T) -> &Handle {
        let index = FakeVulkan::dispatchable_handle_to_index(handle);
        let handle = &self.handles[index];

        assert!(!handle.freed);

        handle
    }

    pub fn get_dispatchable_handle_mut<T>(
        &mut self,
        handle: *mut T,
    ) -> &mut Handle {
        let index = FakeVulkan::dispatchable_handle_to_index(handle);
        let handle = &mut self.handles[index];

        assert!(!handle.freed);

        handle
    }

    #[cfg(target_pointer_width = "64")]
    pub fn get_handle<T>(&self, handle: *mut T) -> &Handle {
        let handle = &self.handles[FakeVulkan::handle_to_index(handle)];

        assert!(!handle.freed);

        handle
    }

    #[cfg(not(target_pointer_width = "64"))]
    pub fn get_handle(&self, handle: u64) -> &Handle {
        let handle = &self.handles[FakeVulkan::handle_to_index(handle)];

        assert!(!handle.freed);

        handle
    }

    #[cfg(target_pointer_width = "64")]
    pub fn get_freed_handle<T>(&self, handle: *mut T) -> &Handle {
        &self.handles[FakeVulkan::handle_to_index(handle)]
    }

    #[cfg(not(target_pointer_width = "64"))]
    pub fn get_freed_handle(&self, handle: u64) -> &Handle {
        &self.handles[FakeVulkan::handle_to_index(handle)]
    }

    #[cfg(target_pointer_width = "64")]
    pub fn get_handle_mut<T>(&mut self, handle: *mut T) -> &mut Handle {
        let handle = &mut self.handles[FakeVulkan::handle_to_index(handle)];

        assert!(!handle.freed);

        handle
    }

    #[cfg(not(target_pointer_width = "64"))]
    pub fn get_handle_mut(&mut self, handle: u64) -> &mut Handle {
        let handle = &mut self.handles[FakeVulkan::handle_to_index(handle)];

        assert!(!handle.freed);

        handle
    }

    fn check_device(&self, device: vk::VkDevice) {
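        // Each check_* helper asserts that the handle is still live and
        // refers to the expected HandleType variant.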
        let handle = self.get_dispatchable_handle(device);
        assert!(matches!(handle.data, HandleType::Device));
    }

    fn check_command_pool(&self, command_pool: vk::VkCommandPool) {
        let handle = self.get_handle(command_pool);
        assert!(matches!(handle.data, HandleType::CommandPool));
    }

    fn check_image(&self, image: vk::VkImage) {
        let handle = self.get_handle(image);
        assert!(matches!(handle.data, HandleType::Image));
    }

    fn check_descriptor_pool(&self, descriptor_pool: vk::VkDescriptorPool) {
        let handle = self.get_handle(descriptor_pool);
        assert!(matches!(handle.data, HandleType::DescriptorPool));
    }

    fn check_image_view(&self, image_view: vk::VkImageView) {
        let handle = self.get_handle(image_view);
        assert!(matches!(handle.data, HandleType::ImageView));
    }

    fn check_pipeline_cache(&self, pipeline_cache: vk::VkPipelineCache) {
        let handle = self.get_handle(pipeline_cache);
        assert!(matches!(handle.data, HandleType::PipelineCache));
    }

    fn check_fence(&self, fence: vk::VkFence) {
        let handle = self.get_handle(fence);
        assert!(matches!(handle.data, HandleType::Fence { .. }));
    }

    fn check_framebuffer(&self, framebuffer: vk::VkFramebuffer) {
        let handle = self.get_handle(framebuffer);
        assert!(matches!(handle.data, HandleType::Framebuffer));
    }

    fn check_render_pass(&self, render_pass: vk::VkRenderPass) {
        let handle = self.get_handle(render_pass);
        assert!(matches!(handle.data, HandleType::RenderPass { .. }));
    }

    fn check_pipeline(&self, pipeline: vk::VkPipeline) {
        let handle = self.get_handle(pipeline);
        assert!(matches!(handle.data, HandleType::Pipeline { .. }));
    }

    fn check_buffer(&self, buffer: vk::VkBuffer) {
        let handle = self.get_handle(buffer);
        assert!(matches!(handle.data, HandleType::Buffer { .. }));
    }

    fn check_memory(&self, memory: vk::VkDeviceMemory) {
        let handle = self.get_handle(memory);
        assert!(matches!(handle.data, HandleType::Memory { .. }));
    }

    extern "C" fn create_instance(
        _create_info: *const vk::VkInstanceCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        instance_out: *mut vk::VkInstance,
    ) -> vk::VkResult {
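        // As with every vkCreate* entry point in this fake, take the
        // result configured for the function name first so tests can
        // inject failure codes, and only allocate a handle on VK_SUCCESS.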
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateInstance");

        if res != vk::VK_SUCCESS {
            return res;
        }

        unsafe {
            *instance_out =
                fake_vulkan.add_dispatchable_handle(HandleType::Instance);
        }

        res
    }

    extern "C" fn create_device(
        _physical_device: vk::VkPhysicalDevice,
        _create_info: *const vk::VkDeviceCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        device_out: *mut vk::VkDevice,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateDevice");

        if res != vk::VK_SUCCESS {
            return res;
        }

        unsafe {
            *device_out =
                fake_vulkan.add_dispatchable_handle(HandleType::Device);
        }

        res
    }

    extern "C" fn destroy_device(
        device: vk::VkDevice,
        _allocator: *const vk::VkAllocationCallbacks
    ) {
        let fake_vulkan = FakeVulkan::current();

        let handle = fake_vulkan.get_dispatchable_handle_mut(device);
        assert!(matches!(handle.data, HandleType::Device));
        handle.freed = true;
    }

    extern "C" fn destroy_instance(
        instance: vk::VkInstance,
        _allocator: *const vk::VkAllocationCallbacks
    ) {
        let fake_vulkan = FakeVulkan::current();

        let handle = fake_vulkan.get_dispatchable_handle_mut(instance);
        assert!(matches!(handle.data, HandleType::Instance));
        handle.freed = true;
    }

    extern "C" fn create_command_pool(
        device: vk::VkDevice,
        _create_info: *const vk::VkCommandPoolCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        command_pool_out: *mut vk::VkCommandPool,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateCommandPool");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *command_pool_out = fake_vulkan.add_handle(HandleType::CommandPool);
        }

        res
    }

    extern "C" fn destroy_command_pool(
        device: vk::VkDevice,
        command_pool: vk::VkCommandPool,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(command_pool);
        assert!(matches!(handle.data, HandleType::CommandPool));
        handle.freed = true;
    }

    extern "C" fn allocate_command_buffers(
        device: vk::VkDevice,
        allocate_info: *const vk::VkCommandBufferAllocateInfo,
        command_buffers: *mut vk::VkCommandBuffer,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkAllocateCommandBuffers");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let command_pool_handle = unsafe { (*allocate_info).commandPool };

        fake_vulkan.check_command_pool(command_pool_handle);

        let n_buffers = unsafe { (*allocate_info).commandBufferCount };

        for i in 0..(n_buffers as usize) {
            unsafe {
                *command_buffers.add(i) = fake_vulkan.add_dispatchable_handle(
                    HandleType::CommandBuffer {
                        command_pool: FakeVulkan::handle_to_index(
                            command_pool_handle
                        ),
                        commands: Vec::new(),
                        begun: false,
                    },
                );
            }
        }

        res
    }

    extern "C" fn free_command_buffers(
        device: vk::VkDevice,
        command_pool: vk::VkCommandPool,
        command_buffer_count: u32,
        command_buffers: *const vk::VkCommandBuffer,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        fake_vulkan.check_command_pool(command_pool);

        for i in 0..command_buffer_count as usize {
            let command_buffer = unsafe {
                *command_buffers.add(i)
            };

            let command_buffer_handle =
                fake_vulkan.get_dispatchable_handle_mut(command_buffer);

            match command_buffer_handle.data {
                HandleType::CommandBuffer { command_pool: handle_pool, .. } => {
                    assert_eq!(
                        handle_pool,
                        FakeVulkan::handle_to_index(command_pool),
                    );
                    command_buffer_handle.freed = true;
                },
                _ => unreachable!("mismatched handle"),
            }
        }
    }

    extern "C" fn create_fence(
        device: vk::VkDevice,
        _create_info: *const vk::VkFenceCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        fence_out: *mut vk::VkFence,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateFence");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *fence_out = fake_vulkan.add_handle(HandleType::Fence {
                reset_count: 0,
                wait_count: 0,
            });
        }

        res
    }

    extern "C" fn destroy_fence(
        device: vk::VkDevice,
        fence: vk::VkFence,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(fence);
        assert!(matches!(handle.data, HandleType::Fence { .. }));
        handle.freed = true;
    }

    extern "C" fn create_render_pass(
        device: vk::VkDevice,
        create_info: *const vk::VkRenderPassCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        render_pass_out: *mut vk::VkRenderPass,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateRenderPass");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            let create_info = &*create_info;

            *render_pass_out = fake_vulkan.add_handle(HandleType::RenderPass {
                attachments: vec_from_raw_parts(
                    create_info.pAttachments,
                    create_info.attachmentCount as usize,
                ),
            });
        }

        res
    }

    extern "C" fn destroy_render_pass(
        device: vk::VkDevice,
        render_pass: vk::VkRenderPass,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(render_pass);
        assert!(matches!(handle.data, HandleType::RenderPass { .. }));
        handle.freed = true;
    }

    extern "C" fn create_image_view(
        device: vk::VkDevice,
        create_info: *const vk::VkImageViewCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        image_view_out: *mut vk::VkImageView,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateImageView");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        fake_vulkan.check_image(unsafe { (*create_info).image });

        unsafe {
            *image_view_out = fake_vulkan.add_handle(HandleType::ImageView);
        }

        res
    }

    extern "C" fn destroy_image_view(
        device: vk::VkDevice,
        image_view: vk::VkImageView,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(image_view);
        assert!(matches!(handle.data, HandleType::ImageView));
        handle.freed = true;
    }

    extern "C" fn create_image(
        device: vk::VkDevice,
        _create_info: *const vk::VkImageCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        image_out: *mut vk::VkImage,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateImage");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *image_out = fake_vulkan.add_handle(HandleType::Image);
        }

        res
    }

    extern "C" fn destroy_image(
        device: vk::VkDevice,
        image: vk::VkImage,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(image);
        assert!(matches!(handle.data, HandleType::Image));
        handle.freed = true;
    }

    extern "C" fn create_buffer(
        device: vk::VkDevice,
        create_info: *const vk::VkBufferCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        buffer_out: *mut vk::VkBuffer,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateBuffer");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let create_info = unsafe { &*create_info };

        unsafe {
            *buffer_out = fake_vulkan.add_handle(
                HandleType::Buffer {
                    create_info: create_info.clone(),
                    memory: None,
                }
            );
        }

        res
    }

    extern "C" fn destroy_buffer(
        device: vk::VkDevice,
        buffer: vk::VkBuffer,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(buffer);
        assert!(matches!(handle.data, HandleType::Buffer { .. }));
        handle.freed = true;
    }

    extern "C" fn create_framebuffer(
        device: vk::VkDevice,
        create_info: *const vk::VkFramebufferCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        framebuffer_out: *mut vk::VkFramebuffer,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateFramebuffer");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let attachments = unsafe {
            let attachment_count = (*create_info).attachmentCount as usize;
            std::slice::from_raw_parts(
                (*create_info).pAttachments,
                attachment_count
            )
        };

        for &attachment in attachments {
            fake_vulkan.check_image_view(attachment);
        }

        unsafe {
            *framebuffer_out = fake_vulkan.add_handle(HandleType::Framebuffer);
        }

        res
    }

    extern "C" fn destroy_framebuffer(
        device: vk::VkDevice,
        framebuffer: vk::VkFramebuffer,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(framebuffer);
        assert!(matches!(handle.data, HandleType::Framebuffer));
        handle.freed = true;
    }

    extern "C" fn enumerate_instance_version(
        api_version: *mut u32
    ) -> vk::VkResult {
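        // The fake loader always reports Vulkan 1.1.0.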
        let fake_vulkan = FakeVulkan::current();
        unsafe { *api_version = requirements::make_version(1, 1, 0) }
        fake_vulkan.next_result("vkEnumerateInstanceVersion")
    }

    extern "C" fn get_buffer_memory_requirements(
        device: vk::VkDevice,
        buffer: vk::VkBuffer,
        memory_requirements: *mut vk::VkMemoryRequirements,
    ) {
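        // Report the requirements configured on the FakeVulkan, but with
        // the size overridden by the size the buffer was created with.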
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let size = match fake_vulkan.get_handle(buffer).data {
            HandleType::Buffer { ref create_info, .. } => create_info.size,
            _ => unreachable!("mismatched handle"),
        };

        unsafe {
            *memory_requirements = fake_vulkan.memory_requirements.clone();
            (*memory_requirements).size = size;
        }
    }

    extern "C" fn get_image_memory_requirements(
        device: vk::VkDevice,
        image: vk::VkImage,
        memory_requirements: *mut vk::VkMemoryRequirements,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);
        fake_vulkan.check_image(image);

        unsafe {
            *memory_requirements = fake_vulkan.memory_requirements;
        }
    }

    extern "C" fn allocate_memory(
        device: vk::VkDevice,
        allocate_info: *const vk::VkMemoryAllocateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        memory: *mut vk::VkDeviceMemory,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkAllocateMemory");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *memory = fake_vulkan.add_handle(HandleType::Memory {
                contents: vec![
                    0u8;
                    (*allocate_info).allocationSize as usize
                ],
                mapped: false,
            });
        }

        res
    }

    extern "C" fn free_memory(
        device: vk::VkDevice,
        memory: vk::VkDeviceMemory,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(memory);

        match handle.data {
            HandleType::Memory { mapped, .. } => assert!(!mapped),
            _ => unreachable!("mismatched handle"),
        }

        handle.freed = true;
    }

    extern "C" fn bind_buffer_memory(
        device: vk::VkDevice,
        buffer: vk::VkBuffer,
        memory: vk::VkDeviceMemory,
        _memory_offset: vk::VkDeviceSize,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkBindBufferMemory");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);
        fake_vulkan.check_memory(memory);

        let HandleType::Buffer { memory: ref mut buffer_memory, .. } =
            fake_vulkan.get_handle_mut(buffer).data
        else { unreachable!("mismatched handle"); };

        assert!(buffer_memory.is_none());

        *buffer_memory = Some(memory);

        res
    }

    extern "C" fn bind_image_memory(
        _device: vk::VkDevice,
        _image: vk::VkImage,
        _memory: vk::VkDeviceMemory,
        _memory_offset: vk::VkDeviceSize,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();
        fake_vulkan.next_result("vkBindImageMemory")
    }

    extern "C" fn map_memory(
        device: vk::VkDevice,
        memory: vk::VkDeviceMemory,
        offset: vk::VkDeviceSize,
        size: vk::VkDeviceSize,
        _flags: vk::VkMemoryMapFlags,
        data_out: *mut *mut c_void,
    ) -> vk::VkResult {
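        // Device memory is backed by a plain Vec<u8>, so mapping hands
        // out a pointer into that vector at the requested offset and
        // marks the allocation as mapped.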
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkMapMemory");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let HandleType::Memory { ref mut mapped, ref mut contents } =
            fake_vulkan.get_handle_mut(memory).data
        else { unreachable!("mismatched handle"); };

        assert!(!*mapped);
        assert!(
            size == vk::VK_WHOLE_SIZE as vk::VkDeviceSize
                || (offset + size) as usize <= contents.len()
        );

        unsafe {
            *data_out = contents[offset as usize..].as_mut_ptr().cast();
        }

        *mapped = true;

        res
    }

    extern "C" fn unmap_memory(
        device: vk::VkDevice,
        memory: vk::VkDeviceMemory
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let HandleType::Memory { ref mut mapped, .. } =
            fake_vulkan.get_handle_mut(memory).data
        else { unreachable!("mismatched handle"); };

        assert!(*mapped);
        *mapped = false;
    }

    extern "C" fn create_shader_module(
        device: vk::VkDevice,
        create_info: *const vk::VkShaderModuleCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        shader_module_out: *mut vk::VkShaderModule,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateShaderModule");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            assert_eq!((*create_info).codeSize % (u32::BITS as usize / 8), 0);

            let code = vec_from_raw_parts(
                (*create_info).pCode,
                (*create_info).codeSize / (u32::BITS as usize / 8),
            );

            *shader_module_out = fake_vulkan.add_handle(
                HandleType::ShaderModule { code }
            );
        }

        res
    }

    extern "C" fn destroy_shader_module(
        device: vk::VkDevice,
        shader_module: vk::VkShaderModule,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(shader_module);
        assert!(matches!(handle.data, HandleType::ShaderModule { .. }));
        handle.freed = true;
    }

    extern "C" fn create_pipeline_cache(
        device: vk::VkDevice,
        _create_info: *const vk::VkPipelineCacheCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        pipeline_cache_out: *mut vk::VkPipelineCache,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreatePipelineCache");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *pipeline_cache_out = fake_vulkan.add_handle(
                HandleType::PipelineCache
            );
        }

        res
    }

    extern "C" fn destroy_pipeline_cache(
        device: vk::VkDevice,
        pipeline_cache: vk::VkPipelineCache,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(pipeline_cache);
        assert!(matches!(handle.data, HandleType::PipelineCache));
        handle.freed = true;
    }

    extern "C" fn create_descriptor_pool(
        device: vk::VkDevice,
        _create_info: *const vk::VkDescriptorPoolCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        descriptor_pool_out: *mut vk::VkDescriptorPool,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateDescriptorPool");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            *descriptor_pool_out = fake_vulkan.add_handle(
                HandleType::DescriptorPool
            );
        }

        res
    }

    extern "C" fn destroy_descriptor_pool(
        device: vk::VkDevice,
        descriptor_pool: vk::VkDescriptorPool,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(descriptor_pool);
        assert!(matches!(handle.data, HandleType::DescriptorPool));
        handle.freed = true;
    }

    extern "C" fn create_descriptor_set_layout(
        device: vk::VkDevice,
        create_info: *const vk::VkDescriptorSetLayoutCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        descriptor_set_layout_out: *mut vk::VkDescriptorSetLayout,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateDescriptorSetLayout");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            let bindings = vec_from_raw_parts(
                (*create_info).pBindings,
                (*create_info).bindingCount as usize,
            );

            *descriptor_set_layout_out = fake_vulkan.add_handle(
                HandleType::DescriptorSetLayout { bindings }
            );
        }

        res
    }

    extern "C" fn destroy_descriptor_set_layout(
        device: vk::VkDevice,
        descriptor_set_layout: vk::VkDescriptorSetLayout,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(descriptor_set_layout);
        assert!(matches!(handle.data, HandleType::DescriptorSetLayout { .. }));
        handle.freed = true;
    }

    extern "C" fn create_pipeline_layout(
        device: vk::VkDevice,
        create_info: *const vk::VkPipelineLayoutCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        pipeline_layout_out: *mut vk::VkPipelineLayout,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreatePipelineLayout");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        unsafe {
            let push_constant_ranges = vec_from_raw_parts(
                (*create_info).pPushConstantRanges,
                (*create_info).pushConstantRangeCount as usize,
            );

            let layouts = vec_from_raw_parts(
                (*create_info).pSetLayouts,
                (*create_info).setLayoutCount as usize,
            );

            *pipeline_layout_out = fake_vulkan.add_handle(
                HandleType::PipelineLayout(PipelineLayoutCreateInfo {
                    create_info: (*create_info).clone(),
                    push_constant_ranges,
                    layouts,
                }),
            );
        }

        res
    }

    extern "C" fn destroy_pipeline_layout(
        device: vk::VkDevice,
        pipeline_layout: vk::VkPipelineLayout,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(pipeline_layout);
        assert!(matches!(handle.data, HandleType::PipelineLayout(_)));
        handle.freed = true;
    }

    extern "C" fn create_graphics_pipelines(
        device: vk::VkDevice,
        pipeline_cache: vk::VkPipelineCache,
        create_info_count: u32,
        create_infos: *const vk::VkGraphicsPipelineCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        pipelines_out: *mut vk::VkPipeline,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateGraphicsPipelines");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);
        fake_vulkan.check_pipeline_cache(pipeline_cache);

        for i in 0..create_info_count as usize {
            unsafe {
                let create_info = &*create_infos.add(i);

                *pipelines_out.add(i) = fake_vulkan.add_handle(
                    HandleType::Pipeline(
                        PipelineCreateInfo::Graphics(
                            GraphicsPipelineCreateInfo::new(create_info)
                        )
                    ),
                );
            }
        }

        res
    }

    extern "C" fn create_compute_pipelines(
        device: vk::VkDevice,
        pipeline_cache: vk::VkPipelineCache,
        create_info_count: u32,
        create_infos: *const vk::VkComputePipelineCreateInfo,
        _allocator: *const vk::VkAllocationCallbacks,
        pipelines_out: *mut vk::VkPipeline,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkCreateComputePipelines");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);
        fake_vulkan.check_pipeline_cache(pipeline_cache);

        for i in 0..create_info_count as usize {
            unsafe {
                *pipelines_out.add(i) = fake_vulkan.add_handle(
                    HandleType::Pipeline(PipelineCreateInfo::Compute(
                        (*create_infos.add(i)).clone()
                    )),
                );
            }
        }

        res
    }

    extern "C" fn destroy_pipeline(
        device: vk::VkDevice,
        pipeline: vk::VkPipeline,
        _allocator: *const vk::VkAllocationCallbacks,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let handle = fake_vulkan.get_handle_mut(pipeline);
        assert!(matches!(handle.data, HandleType::Pipeline { .. }));
        handle.freed = true;
    }

    extern "C" fn flush_mapped_memory_ranges(
        device: vk::VkDevice,
        memory_range_count: u32,
        memory_ranges: *const vk::VkMappedMemoryRange,
    ) -> vk::VkResult {
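        // Flushing is not emulated; the ranges are only recorded in
        // memory_flushes so that tests can assert they were requested.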
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkFlushMappedMemoryRanges");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let memory_ranges = unsafe {
            std::slice::from_raw_parts(
                memory_ranges,
                memory_range_count as usize,
            )
        };

        fake_vulkan.memory_flushes.extend_from_slice(memory_ranges);

        res
    }

    extern "C" fn invalidate_mapped_memory_ranges(
        device: vk::VkDevice,
        memory_range_count: u32,
        memory_ranges: *const vk::VkMappedMemoryRange,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkInvalidateMappedMemoryRanges");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let memory_ranges = unsafe {
            std::slice::from_raw_parts(
                memory_ranges,
                memory_range_count as usize,
            )
        };

        fake_vulkan.memory_invalidations.extend_from_slice(memory_ranges);

        res
    }

    extern "C" fn queue_submit(
        _queue: vk::VkQueue,
        submit_count: u32,
        submits: *const vk::VkSubmitInfo,
        fence: vk::VkFence,
    ) -> vk::VkResult {
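        // Submission drains the recorded commands of each command buffer
        // (which must already have been ended) into fake_vulkan.commands
        // so that tests can inspect them.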
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_fence(fence);

        let res = fake_vulkan.next_result("vkQueueSubmit");

        if res != vk::VK_SUCCESS {
            return res;
        }

        let submits = unsafe {
            std::slice::from_raw_parts(
                submits,
                submit_count as usize,
            )
        };

        for submit in submits.iter() {
            let command_buffers = unsafe {
                std::slice::from_raw_parts(
                    submit.pCommandBuffers,
                    submit.commandBufferCount as usize,
                )
            };

            for &command_buffer in command_buffers.iter() {
                let HandleType::CommandBuffer { ref mut commands, begun, .. } =
                    fake_vulkan.get_dispatchable_handle_mut(command_buffer).data
                else {
                    unreachable!("bad handle type")
                };

                assert!(!begun);

                let commands = mem::take(commands);
                fake_vulkan.commands.extend(commands);
            }
        }

        res
    }

    extern "C" fn allocate_descriptor_sets(
        device: vk::VkDevice,
        allocate_info: *const vk::VkDescriptorSetAllocateInfo,
        descriptor_sets: *mut vk::VkDescriptorSet,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkAllocateDescriptorSets");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        let descriptor_pool_handle = unsafe { (*allocate_info).descriptorPool };

        fake_vulkan.check_descriptor_pool(descriptor_pool_handle);

        let n_buffers = unsafe { (*allocate_info).descriptorSetCount };

        for i in 0..(n_buffers as usize) {
            unsafe {
                *descriptor_sets.add(i) = fake_vulkan.add_handle(
                    HandleType::DescriptorSet { bindings: HashMap::new() }
                );
            }
        }

        res
    }

    extern "C" fn free_descriptor_sets(
        device: vk::VkDevice,
        descriptor_pool: vk::VkDescriptorPool,
        descriptor_set_count: u32,
        descriptor_sets: *const vk::VkDescriptorSet,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkFreeDescriptorSets");

        if res != vk::VK_SUCCESS {
            return res;
        }

        fake_vulkan.check_device(device);

        fake_vulkan.check_descriptor_pool(descriptor_pool);

        for i in 0..descriptor_set_count as usize {
            let descriptor_set = unsafe {
                *descriptor_sets.add(i)
            };

            let descriptor_set_handle =
                fake_vulkan.get_handle_mut(descriptor_set);

            match descriptor_set_handle.data {
                HandleType::DescriptorSet { .. } => {
                    descriptor_set_handle.freed = true;
                },
                _ => unreachable!("mismatched handle"),
            }
        }

        res
    }

    extern "C" fn update_descriptor_sets(
        device: vk::VkDevice,
        descriptor_write_count: u32,
        descriptor_writes: *const vk::VkWriteDescriptorSet,
        _descriptor_copy_count: u32,
        _descriptor_copies: *const vk::VkCopyDescriptorSet,
    ) {
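        // Only single-descriptor buffer writes are supported: each write
        // is stored in the destination set's bindings map keyed by the
        // binding number, and descriptor copies are ignored.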
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let descriptor_writes = unsafe {
            std::slice::from_raw_parts(
                descriptor_writes,
                descriptor_write_count as usize,
            )
        };

        for write in descriptor_writes.iter() {
            assert_eq!(write.sType, vk::VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET);
            assert_eq!(write.descriptorCount, 1);

            let HandleType::DescriptorSet { ref mut bindings } =
                fake_vulkan.get_handle_mut(write.dstSet).data
            else { unreachable!("mismatched handle type"); };

            bindings.insert(
                write.dstBinding,
                Binding {
                    descriptor_type: write.descriptorType,
                    info: unsafe { &*write.pBufferInfo }.clone(),
                },
            );
        }
    }

    extern "C" fn begin_command_buffer(
        command_buffer: vk::VkCommandBuffer,
        _begin_info: *const vk::VkCommandBufferBeginInfo,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkBeginCommandBuffer");

        if res != vk::VK_SUCCESS {
            return res;
        }

        let HandleType::CommandBuffer { ref mut begun, .. } =
            fake_vulkan.get_dispatchable_handle_mut(command_buffer).data
        else { unreachable!("mismatched handle"); };

        assert!(!*begun);
        *begun = true;

        res
    }

    extern "C" fn end_command_buffer(
        command_buffer: vk::VkCommandBuffer,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        let res = fake_vulkan.next_result("vkEndCommandBuffer");

        if res != vk::VK_SUCCESS {
            return res;
        }

        let HandleType::CommandBuffer { ref mut begun, .. } =
            fake_vulkan.get_dispatchable_handle_mut(command_buffer).data
        else { unreachable!("mismatched handle"); };

        assert!(*begun);
        *begun = false;

        res
    }

    fn add_command(
        &mut self,
        command_buffer: vk::VkCommandBuffer,
        command: Command,
    ) {
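        // Every vkCmd* wrapper funnels through here; commands may only be
        // recorded between vkBeginCommandBuffer and vkEndCommandBuffer.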
        let HandleType::CommandBuffer { ref mut commands, begun, .. } =
            self.get_dispatchable_handle_mut(command_buffer).data
        else { unreachable!("mismatched handle"); };

        assert!(begun);

        commands.push(command);
    }

    extern "C" fn begin_render_pass(
        command_buffer: vk::VkCommandBuffer,
        render_pass_begin: *const vk::VkRenderPassBeginInfo,
        _contents: vk::VkSubpassContents,
    ) {
        let render_pass_begin = unsafe { &*render_pass_begin };

        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_render_pass(render_pass_begin.renderPass);
        fake_vulkan.check_framebuffer(render_pass_begin.framebuffer);

        fake_vulkan.add_command(
            command_buffer,
            Command::BeginRenderPass(render_pass_begin.clone()),
        );
    }

    extern "C" fn end_render_pass(
        command_buffer: vk::VkCommandBuffer,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.add_command(command_buffer, Command::EndRenderPass);
    }

    extern "C" fn bind_pipeline(
        command_buffer: vk::VkCommandBuffer,
        pipeline_bind_point: vk::VkPipelineBindPoint,
        pipeline: vk::VkPipeline,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_pipeline(pipeline);

        fake_vulkan.add_command(
            command_buffer,
            Command::BindPipeline {
                bind_point: pipeline_bind_point,
                pipeline,
            },
        );
    }

    extern "C" fn bind_vertex_buffers(
        command_buffer: vk::VkCommandBuffer,
        first_binding: u32,
        binding_count: u32,
        buffers: *const vk::VkBuffer,
        offsets: *const vk::VkDeviceSize,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let buffers = vec_from_raw_parts(
            buffers,
            binding_count as usize,
        );

        for &buffer in buffers.iter() {
            fake_vulkan.check_buffer(buffer);
        }

        let offsets = vec_from_raw_parts(
            offsets,
            binding_count as usize,
        );

        fake_vulkan.add_command(
            command_buffer,
            Command::BindVertexBuffers {
                first_binding,
                buffers,
                offsets,
            },
        );
    }

    extern "C" fn bind_index_buffer(
        command_buffer: vk::VkCommandBuffer,
        buffer: vk::VkBuffer,
        offset: vk::VkDeviceSize,
        index_type: vk::VkIndexType,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.add_command(
            command_buffer,
            Command::BindIndexBuffer {
                buffer,
                offset,
                index_type,
            },
        );
    }

    extern "C" fn draw(
        command_buffer: vk::VkCommandBuffer,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.add_command(
            command_buffer,
            Command::Draw {
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            },
        );
    }

    extern "C" fn draw_indexed(
        command_buffer: vk::VkCommandBuffer,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.add_command(
            command_buffer,
            Command::DrawIndexed {
                index_count,
                instance_count,
                first_index,
                vertex_offset,
                first_instance,
            },
        );
    }

    extern "C" fn dispatch(
        command_buffer: vk::VkCommandBuffer,
        x: u32,
        y: u32,
        z: u32
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.add_command(command_buffer, Command::Dispatch { x, y, z });
    }

    extern "C" fn clear_attachments(
        command_buffer: vk::VkCommandBuffer,
        attachment_count: u32,
        attachments: *const vk::VkClearAttachment,
        rect_count: u32,
        rects: *const vk::VkClearRect,
    ) {
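        // Translate each VkClearAttachment into the ClearAttachment enum,
        // separating color clears from depth/stencil clears based on the
        // aspect mask.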
        let fake_vulkan = FakeVulkan::current();

        let attachments = unsafe {
            std::slice::from_raw_parts(
                attachments,
                attachment_count as usize,
            )
        }.iter().map(|attachment| {
            if attachment.aspectMask == vk::VK_IMAGE_ASPECT_COLOR_BIT {
                ClearAttachment::Color {
                    attachment: attachment.colorAttachment,
                    value: unsafe {
                        attachment.clearValue.color.float32.clone()
                    },
                }
            } else {
                assert!(
                    attachment.aspectMask
                        & (vk::VK_IMAGE_ASPECT_DEPTH_BIT
                           | vk::VK_IMAGE_ASPECT_STENCIL_BIT)
                        != 0
                );
                assert_eq!(
                    attachment.aspectMask
                        & !(vk::VK_IMAGE_ASPECT_DEPTH_BIT
                            | vk::VK_IMAGE_ASPECT_STENCIL_BIT),
                    0,
                );
                ClearAttachment::DepthStencil {
                    aspect_mask: attachment.aspectMask,
                    value: unsafe {
                        attachment.clearValue.depthStencil
                    },
                }
            }
        }).collect::<Vec<ClearAttachment>>();

        let rects = vec_from_raw_parts(
            rects,
            rect_count as usize,
        );

        fake_vulkan.add_command(
            command_buffer,
            Command::ClearAttachments {
                attachments,
                rects,
            }
        );
    }

    extern "C" fn pipeline_barrier(
        command_buffer: vk::VkCommandBuffer,
        src_stage_mask: vk::VkPipelineStageFlags,
        dst_stage_mask: vk::VkPipelineStageFlags,
        dependency_flags: vk::VkDependencyFlags,
        memory_barrier_count: u32,
        memory_barriers: *const vk::VkMemoryBarrier,
        buffer_memory_barrier_count: u32,
        buffer_memory_barriers: *const vk::VkBufferMemoryBarrier,
        image_memory_barrier_count: u32,
        image_memory_barriers: *const vk::VkImageMemoryBarrier,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let memory_barriers = vec_from_raw_parts(
            memory_barriers,
            memory_barrier_count as usize,
        );

        let buffer_memory_barriers = vec_from_raw_parts(
            buffer_memory_barriers,
            buffer_memory_barrier_count as usize,
        );

        for barrier in buffer_memory_barriers.iter() {
            fake_vulkan.check_buffer(barrier.buffer);
        }

        let image_memory_barriers = vec_from_raw_parts(
            image_memory_barriers,
            image_memory_barrier_count as usize,
        );

        for barrier in image_memory_barriers.iter() {
            fake_vulkan.check_image(barrier.image);
        }

        fake_vulkan.add_command(
            command_buffer,
            Command::PipelineBarrier {
                src_stage_mask,
                dst_stage_mask,
                dependency_flags,
                memory_barriers,
                buffer_memory_barriers,
                image_memory_barriers,
            },
        );
    }

    extern "C" fn copy_image_to_buffer(
        command_buffer: vk::VkCommandBuffer,
        src_image: vk::VkImage,
        src_image_layout: vk::VkImageLayout,
        dst_buffer: vk::VkBuffer,
        region_count: u32,
        regions: *const vk::VkBufferImageCopy,
    ) {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_image(src_image);
        fake_vulkan.check_buffer(dst_buffer);

        let regions = vec_from_raw_parts(
            regions,
            region_count as usize,
        );

        fake_vulkan.add_command(
            command_buffer,
            Command::CopyImageToBuffer {
                src_image,
                src_image_layout,
                dst_buffer,
                regions,
            },
        );
    }

    extern "C" fn push_constants(
        command_buffer: vk::VkCommandBuffer,
        layout: vk::VkPipelineLayout,
        stage_flags: vk::VkShaderStageFlags,
        offset: u32,
        size: u32,
        values: *const c_void,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let values = vec_from_raw_parts(
            values as *const u8,
            size as usize,
        );

        fake_vulkan.add_command(
            command_buffer,
            Command::PushConstants {
                layout,
                stage_flags,
                offset,
                values,
            },
        );
    }

    extern "C" fn bind_descriptor_sets(
        command_buffer: vk::VkCommandBuffer,
        pipeline_bind_point: vk::VkPipelineBindPoint,
        layout: vk::VkPipelineLayout,
        first_set: u32,
        descriptor_set_count: u32,
        descriptor_sets: *const vk::VkDescriptorSet,
        _dynamic_offset_count: u32,
        _dynamic_offsets: *const u32,
    ) {
        let fake_vulkan = FakeVulkan::current();

        let descriptor_sets = vec_from_raw_parts(
            descriptor_sets,
            descriptor_set_count as usize,
        );

        fake_vulkan.add_command(
            command_buffer,
            Command::BindDescriptorSets {
                pipeline_bind_point,
                layout,
                first_set,
                descriptor_sets,
            },
        );
    }

    extern "C" fn reset_fences(
        device: vk::VkDevice,
        fence_count: u32,
        fences: *const vk::VkFence,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let res = fake_vulkan.next_result("vkResetFences");

        if res != vk::VK_SUCCESS {
            return res;
        }

        let fences = unsafe {
            std::slice::from_raw_parts(
                fences,
                fence_count as usize,
            )
        };

        for &fence in fences.iter() {
            let HandleType::Fence { ref mut reset_count, .. } =
                fake_vulkan.get_handle_mut(fence).data
            else { unreachable!("bad handle"); };

            *reset_count += 1;
        }

        res
    }

    extern "C" fn wait_for_fences(
        device: vk::VkDevice,
        fence_count: u32,
        fences: *const vk::VkFence,
        _wait_all: vk::VkBool32,
        _timeout: u64,
    ) -> vk::VkResult {
        let fake_vulkan = FakeVulkan::current();

        fake_vulkan.check_device(device);

        let res = fake_vulkan.next_result("vkWaitForFences");

        if res != vk::VK_SUCCESS {
            return res;
        }

        let fences = unsafe {
            std::slice::from_raw_parts(
                fences,
                fence_count as usize,
            )
        };

        for &fence in fences.iter() {
            let HandleType::Fence { ref mut wait_count, .. } =
                fake_vulkan.get_handle_mut(fence).data
            else { unreachable!("bad handle"); };

            *wait_count += 1;
        }

        res
    }
}

impl Drop for FakeVulkan {
    fn drop(&mut self) {
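        // Clear the thread-local CURRENT_FAKE_VULKAN pointer when the
        // fake is dropped. This is skipped while panicking so the
        // assertion below does not obscure the original test failure.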
        if !std::thread::panicking() {
            let old_value = CURRENT_FAKE_VULKAN.with(|f| f.replace(None));
            // There should only be one FakeVulkan at a time so the
            // one we just dropped should be the one that was set for
            // the current thread.
            assert_eq!(old_value.unwrap(), self as *mut FakeVulkan);
        }
    }
}

```