Merge branch 'master' into issue-142

Author: colin
Date: 2018-12-09 07:47:29 -06:00
Commit: 59dfc6e216
9 changed files with 4229 additions and 4186 deletions


@@ -5,7 +5,7 @@ os:
 language: rust
 before_script:
-  - rustup component add rustfmt-preview
+  - rustup component add rustfmt
 rust:
   - stable
@@ -15,3 +15,13 @@ script:
   - cargo build --manifest-path ash/Cargo.toml
   - cargo build --manifest-path examples/Cargo.toml
   - cargo build --manifest-path generator/Cargo.toml
+branches:
+  only:
+    # This is where pull requests from "bors r+" are built.
+    - staging
+    # This is where pull requests from "bors try" are built.
+    - trying
+    # Uncomment this to enable building pull requests.
+    - master


@@ -13,3 +13,12 @@ install:
 build_script:
   - cargo build --manifest-path ash/Cargo.toml
   - cargo build --manifest-path examples/Cargo.toml
+branches:
+  only:
+    # This is where pull requests from "bors r+" are built.
+    - staging
+    # This is where pull requests from "bors try" are built.
+    - trying
+    # Uncomment this to enable building pull requests.
+    - master


@@ -17,11 +17,7 @@ const LIB_PATH: &'static str = "vulkan-1.dll";
 #[cfg(all(
     unix,
-    not(any(
-        target_os = "macos",
-        target_os = "ios",
-        target_os = "android"
-    ))
+    not(any(target_os = "macos", target_os = "ios", target_os = "android"))
 ))]
 const LIB_PATH: &'static str = "libvulkan.so.1";
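For context, this constant is the library name handed to the dynamic loader when the entry point is constructed. A minimal sketch of that lookup, assuming the shared_library crate's DynamicLibrary API that ash uses here (simplified, not the actual loader code):

use std::path::Path;
use shared_library::dynamic_library::DynamicLibrary;

// Minimal sketch: open the Vulkan runtime named by the platform-specific
// LIB_PATH constant selected by the cfg blocks above.
fn open_vulkan_library() -> Result<DynamicLibrary, String> {
    DynamicLibrary::open(Some(Path::new(LIB_PATH)))
}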


@@ -1,6 +1,7 @@
 #[macro_use]
 extern crate lazy_static;
 extern crate shared_library;
 pub use device::Device;
 pub use entry::{Entry, InstanceError, LoadingError};
 pub use instance::Instance;
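These re-exports are the crate's public entry points; a minimal usage sketch, assuming the Entry::new constructor of this era (error handling elided, not part of this diff):

// Minimal sketch of consuming the re-exported types.
use ash::Entry;

fn main() {
    // Entry::new() loads the Vulkan library; failure surfaces as the
    // re-exported LoadingError type.
    let _entry = Entry::new().expect("Vulkan loader not found");
}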


@@ -2,6 +2,7 @@ use std::iter::Iterator;
 use std::marker::PhantomData;
 use std::mem::size_of;
 use std::os::raw::c_void;
+use std::{io, slice};
 use vk;
 /// `Align` handles dynamic alignment. The is useful for dynamic uniform buffers where
@@ -80,3 +81,58 @@ impl<'a, T: Copy + 'a> Iterator for AlignIter<'a, T> {
         }
     }
 }
+
+/// Decode SPIR-V from bytes.
+///
+/// This function handles SPIR-V of arbitrary endianness gracefully, and returns correctly aligned
+/// storage.
+///
+/// # Examples
+/// ```no_run
+/// // Decode SPIR-V from a file
+/// let mut file = std::fs::File::open("/path/to/shader.spv").unwrap();
+/// let words = ash::util::read_spv(&mut file).unwrap();
+/// ```
+/// ```
+/// // Decode SPIR-V from memory
+/// const SPIRV: &[u8] = &[
+/// // ...
+/// # 0x03, 0x02, 0x23, 0x07,
+/// ];
+/// let words = ash::util::read_spv(&mut std::io::Cursor::new(&SPIRV[..])).unwrap();
+/// ```
+pub fn read_spv<R: io::Read + io::Seek>(x: &mut R) -> io::Result<Vec<u32>> {
+    let size = x.seek(io::SeekFrom::End(0))?;
+    if size % 4 != 0 {
+        return Err(io::Error::new(
+            io::ErrorKind::InvalidData,
+            "input length not divisible by 4",
+        ));
+    }
+    if size > usize::max_value() as u64 {
+        return Err(io::Error::new(io::ErrorKind::InvalidData, "input too long"));
+    }
+    let words = (size / 4) as usize;
+    let mut result = Vec::<u32>::with_capacity(words);
+    x.seek(io::SeekFrom::Start(0))?;
+    unsafe {
+        x.read_exact(slice::from_raw_parts_mut(
+            result.as_mut_ptr() as *mut u8,
+            words * 4,
+        ))?;
+        result.set_len(words);
+    }
+    const MAGIC_NUMBER: u32 = 0x07230203;
+    if result.len() > 0 && result[0] == MAGIC_NUMBER.swap_bytes() {
+        for word in &mut result {
+            *word = word.swap_bytes();
+        }
+    }
+    if result.len() == 0 || result[0] != MAGIC_NUMBER {
+        return Err(io::Error::new(
+            io::ErrorKind::InvalidData,
+            "input missing SPIR-V magic number",
+        ));
+    }
+    Ok(result)
+}
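As a usage note (not part of this diff), the Vec<u32> returned by read_spv can be fed straight to a shader-module builder. A rough sketch, assuming an already-created ash `device` and a hypothetical shader path:

// Rough sketch: read_spv yields correctly aligned u32 words, which is what
// vk::ShaderModuleCreateInfo expects. `device` and the file path are assumed.
use ash::{util, vk};

let mut file = std::fs::File::open("shader.spv").expect("open SPIR-V file");
let words = util::read_spv(&mut file).expect("read_spv failed");
let create_info = vk::ShaderModuleCreateInfo::builder().code(&words);
let module = unsafe { device.create_shader_module(&create_info, None) }
    .expect("shader module creation failed");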

File diff suppressed because it is too large.

bors.toml (new file)

@@ -0,0 +1,6 @@
+status = [
+  "continuous-integration/travis-ci/push",
+  "continuous-integration/appveyor/branch"
+]
+timeout_sec = 18000 # 5 hours


@@ -105,6 +105,7 @@ pub fn record_submit_commandbuffer<D: DeviceV1_0, F: FnOnce(&D, vk::CommandBuffe
                 vk::CommandBufferResetFlags::RELEASE_RESOURCES,
             )
             .expect("Reset command buffer failed.");
         let command_buffer_begin_info = vk::CommandBufferBeginInfo::builder()
             .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
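For context, the begin info built here is consumed a few lines further down in the same helper, roughly as follows (simplified; the real function also records, ends, and submits the buffer):

// Sketch of how the one-time-submit begin info is used; not new code in this diff.
unsafe {
    device
        .begin_command_buffer(command_buffer, &command_buffer_begin_info)
        .expect("Begin commandbuffer");
}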


@@ -539,7 +539,8 @@ impl CommandExt for vkxml::Command {
             .map(|field| match field.basetype.as_str() {
                 "VkDevice" | "VkCommandBuffer" | "VkQueue" => true,
                 _ => false,
-            }).unwrap_or(false);
+            })
+            .unwrap_or(false);
         match self.name.as_str() {
             "vkGetInstanceProcAddr" => FunctionType::Static,
             "vkCreateInstance"
@@ -704,7 +705,8 @@ fn generate_function_pointers<'a>(
             } else {
                 return false;
             }
-        }).collect();
+        })
+        .collect();
     let params: Vec<Vec<(Ident, Tokens)>> = commands
         .iter()
@@ -716,9 +718,11 @@ fn generate_function_pointers<'a>(
                 let name = field.param_ident();
                 let ty = field.type_tokens();
                 (name, ty)
-            }).collect();
+            })
+            .collect();
             params
-        }).collect();
+        })
+        .collect();
     let params_names: Vec<Vec<_>> = params
         .iter()
@@ -727,7 +731,8 @@ fn generate_function_pointers<'a>(
             .iter()
             .map(|&(param_name, _)| param_name)
             .collect()
-        }).collect();
+        })
+        .collect();
     let param_names_ref = &params_names;
     let expanded_params: Vec<_> = params
         .iter()
@@ -738,7 +743,8 @@ fn generate_function_pointers<'a>(
             quote! {
                 #(#inner_params_iter,)*
             }
-        }).collect();
+        })
+        .collect();
     let expanded_params_unused: Vec<_> = params
         .iter()
         .map(|inner_params| {
@@ -749,7 +755,8 @@ fn generate_function_pointers<'a>(
             quote! {
                 #(#inner_params_iter,)*
             }
-        }).collect();
+        })
+        .collect();
     let expanded_params_ref = &expanded_params;
     let return_types: Vec<_> = commands
@@ -774,9 +781,11 @@ fn generate_function_pointers<'a>(
                 let name = field.param_ident();
                 let ty = field.type_tokens();
                 quote! { #name: #ty }
-            }).collect();
+            })
+            .collect();
             params
-        }).collect();
+        })
+        .collect();
     let signature_params_ref = &signature_params;
     let pfn_return_types: Vec<_> = pfn_commands
@@ -867,7 +876,8 @@ pub fn generate_extension_constants<'a>(
         .filter_map(|item| match item {
             vk_parse::ExtensionChild::Require { items, .. } => Some(items.iter()),
             _ => None,
-        }).flat_map(|iter| iter);
+        })
+        .flat_map(|iter| iter);
     let enum_tokens = items.filter_map(|item| match item {
         vk_parse::InterfaceItem::Enum(_enum) => {
             use vk_parse::EnumSpec;
@@ -937,7 +947,8 @@ pub fn generate_extension_commands<'a>(
             }))
         }
         _ => None,
-    }).flat_map(|iter| iter)
+    })
+    .flat_map(|iter| iter)
     .collect_vec();
     let name = format!("{}Fn", extension_name.to_camel_case());
     let ident = Ident::from(&name[2..]);
@@ -1051,7 +1062,8 @@ pub fn bitflags_impl_block(
             let variant_ident = constant.variant_ident(enum_name);
             let tokens = constant.to_tokens();
             (variant_ident, tokens)
-        }).collect_vec();
+        })
+        .collect_vec();
     let notations = constants.iter().map(|constant| {
         constant.notation().map(|n| {
@@ -1093,7 +1105,8 @@ pub fn generate_enum<'a>(
         .filter_map(|elem| match *elem {
             vkxml::EnumerationElement::Enum(ref constant) => Some(constant),
             _ => None,
-        }).collect_vec();
+        })
+        .collect_vec();
     let values = const_values.entry(ident.clone()).or_insert_with(Vec::new);
     for constant in &constants {
         const_cache.insert(constant.name.as_str());
@@ -1202,7 +1215,8 @@ fn is_static_array(field: &vkxml::Field) -> bool {
         .map(|ty| match ty {
             vkxml::ArrayType::Static => true,
             _ => false,
-        }).unwrap_or(false)
+        })
+        .unwrap_or(false)
 }
 pub fn derive_default(_struct: &vkxml::Struct) -> Option<Tokens> {
     let name = name_to_tokens(&_struct.name);
@@ -1369,11 +1383,13 @@ pub fn derive_setters(_struct: &vkxml::Struct) -> Option<Tokens> {
         .clone()
         .find(|field| field.param_ident().to_string() == "p_next")
     {
-        Some(p_next) => if p_next.type_tokens().to_string().starts_with("*const") {
-            (true, true)
-        } else {
-            (true, false)
-        },
+        Some(p_next) => {
+            if p_next.type_tokens().to_string().starts_with("*const") {
+                (true, true)
+            } else {
+                (true, false)
+            }
+        }
         None => (false, false),
     };
@@ -1390,7 +1406,8 @@ pub fn derive_setters(_struct: &vkxml::Struct) -> Option<Tokens> {
         // Associated _count members
         if field.array.is_some() {
             if let Some(ref array_size) = field.size {
-                if !array_size.starts_with("latexmath") && !nofilter_count_members
+                if !array_size.starts_with("latexmath")
+                    && !nofilter_count_members
                     .iter()
                     .any(|n| *n == &(_struct.name.clone() + "." + field_name))
                 {
@@ -1405,7 +1422,8 @@ pub fn derive_setters(_struct: &vkxml::Struct) -> Option<Tokens> {
             }
             None
-        }).collect();
+        })
+        .collect();
     let setters = members.clone().filter_map(|field| {
         let param_ident = field.param_ident();
@@ -1479,26 +1497,36 @@ pub fn derive_setters(_struct: &vkxml::Struct) -> Option<Tokens> {
             }
             let slice_param_ty_tokens;
-            let ptr_mutability;
+            let ptr;
             if param_ty_string.starts_with("*const ") {
-                slice_param_ty_tokens =
-                    "&'a [".to_string() + &param_ty_string[7..] + "]";
-                ptr_mutability = ".as_ptr()";
+                let slice_type = &param_ty_string[7..];
+                if slice_type == "c_void" {
+                    slice_param_ty_tokens = "&'a [u8]".to_string();
+                    ptr = ".as_ptr() as *const c_void";
+                } else {
+                    slice_param_ty_tokens = "&'a [".to_string() + slice_type + "]";
+                    ptr = ".as_ptr()";
+                }
             } else {
                 // *mut
-                slice_param_ty_tokens =
-                    "&'a mut [".to_string() + &param_ty_string[5..] + "]";
-                ptr_mutability = ".as_mut_ptr()";
+                let slice_type = &param_ty_string[5..];
+                if slice_type == "c_void" {
+                    slice_param_ty_tokens = "&'a mut [u8]".to_string();
+                    ptr = ".as_mut_ptr() as *mut c_void";
+                } else {
+                    slice_param_ty_tokens = "&'a mut [".to_string() + slice_type + "]";
+                    ptr = ".as_mut_ptr()";
+                }
             }
             let slice_param_ty_tokens = Term::intern(&slice_param_ty_tokens);
-            let ptr_mutability = Term::intern(ptr_mutability);
+            let ptr = Term::intern(ptr);
             match array_type {
                 vkxml::ArrayType::Dynamic => {
                     return Some(quote!{
                         pub fn #param_ident_short(mut self, #param_ident_short: #slice_param_ty_tokens) -> #name_builder<'a> {
                             self.inner.#array_size_ident = #param_ident_short.len() as _;
-                            self.inner.#param_ident = #param_ident_short#ptr_mutability;
+                            self.inner.#param_ident = #param_ident_short#ptr;
                             self
                         }
                     });
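To make the effect of this hunk concrete: a *const c_void pointer paired with a length field now gets a byte-slice setter on the generated builder instead of a raw-pointer setter. An illustrative shape, using vk::SpecializationInfo's data_size/p_data pair (illustrative only; the exact generated names may differ):

// Illustrative only; mirrors the quote! template above for a c_void field.
pub fn data(mut self, data: &'a [u8]) -> SpecializationInfoBuilder<'a> {
    self.inner.data_size = data.len() as _;
    self.inner.p_data = data.as_ptr() as *const c_void;
    self
}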
@@ -1770,11 +1798,13 @@ pub fn generate_feature<'a>(
                 } else {
                     None
                 }
-            }).collect()
+            })
+            .collect()
         } else {
             vec![]
         }
-    }).filter_map(|cmd_ref| commands.get(&cmd_ref.name))
+    })
+    .filter_map(|cmd_ref| commands.get(&cmd_ref.name))
     .fold(
         (Vec::new(), Vec::new(), Vec::new(), Vec::new()),
         |mut acc, &cmd_ref| {
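The fold that begins here accumulates the feature's commands into four buckets, one per dispatch level. Stripped of the generator's types, the pattern looks roughly like the following (a simplified sketch; `cmds` and `function_type` are placeholders, not the generator's real names):

// Hypothetical, simplified shape of the fold above: partition commands into
// (static, entry, device, instance) vectors in a single pass.
let (static_cmds, entry_cmds, device_cmds, instance_cmds) = cmds.iter().fold(
    (Vec::new(), Vec::new(), Vec::new(), Vec::new()),
    |mut acc, cmd| {
        match function_type(cmd) {
            FunctionType::Static => acc.0.push(cmd),
            FunctionType::Entry => acc.1.push(cmd),
            FunctionType::Device => acc.2.push(cmd),
            FunctionType::Instance => acc.3.push(cmd),
        }
        acc
    },
);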
@@ -1939,7 +1969,8 @@ pub fn generate_aliases_of_types<'a>(
         .filter_map(|child| match child {
             vk_parse::TypesChild::Type(ty) => Some((ty.name.as_ref()?, ty.alias.as_ref()?)),
             _ => None,
-        }).filter_map(|(name, alias)| {
+        })
+        .filter_map(|(name, alias)| {
             let name_ident = name_to_tokens(name);
             if ty_cache.contains(&name_ident) {
                 return None;
@@ -1965,7 +1996,8 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|item| match item {
             vk_parse::RegistryChild::Extensions(ref ext) => Some(&ext.children),
             _ => None,
-        }).nth(0)
+        })
+        .nth(0)
         .expect("extension");
     let mut ty_cache = HashSet::new();
     let aliases: Vec<_> = spec2
@@ -1976,7 +2008,8 @@ pub fn write_source_code(path: &Path) {
                 Some(generate_aliases_of_types(ty, &mut ty_cache))
             }
             _ => None,
-        }).collect();
+        })
+        .collect();
     let spec = vk_parse::parse_file_as_vkxml(path);
     let commands: HashMap<vkxml::Identifier, &vkxml::Command> = spec
@@ -1985,7 +2018,8 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|elem| match elem {
             vkxml::RegistryElement::Commands(ref cmds) => Some(cmds),
             _ => None,
-        }).flat_map(|cmds| cmds.elements.iter().map(|cmd| (cmd.name.clone(), cmd)))
+        })
+        .flat_map(|cmds| cmds.elements.iter().map(|cmd| (cmd.name.clone(), cmd)))
         .collect();
     let features: Vec<&vkxml::Feature> = spec
@@ -1994,7 +2028,8 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|elem| match elem {
             vkxml::RegistryElement::Features(ref features) => Some(features),
             _ => None,
-        }).flat_map(|features| features.elements.iter())
+        })
+        .flat_map(|features| features.elements.iter())
         .collect();
     let definitions: Vec<&vkxml::DefinitionsElement> = spec
@@ -2003,7 +2038,8 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|elem| match elem {
             vkxml::RegistryElement::Definitions(ref definitions) => Some(definitions),
             _ => None,
-        }).flat_map(|definitions| definitions.elements.iter())
+        })
+        .flat_map(|definitions| definitions.elements.iter())
         .collect();
     let enums: Vec<&vkxml::Enumeration> = spec
@@ -2012,12 +2048,14 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|elem| match elem {
             vkxml::RegistryElement::Enums(ref enums) => Some(enums),
             _ => None,
-        }).flat_map(|enums| {
+        })
+        .flat_map(|enums| {
             enums.elements.iter().filter_map(|_enum| match *_enum {
                 vkxml::EnumsElement::Enumeration(ref e) => Some(e),
                 _ => None,
             })
-        }).collect();
+        })
+        .collect();
     let constants: Vec<&vkxml::Constant> = spec
         .elements
@@ -2025,7 +2063,8 @@ pub fn write_source_code(path: &Path) {
         .filter_map(|elem| match elem {
             vkxml::RegistryElement::Constants(ref constants) => Some(constants),
             _ => None,
-        }).flat_map(|constants| constants.elements.iter())
+        })
+        .flat_map(|constants| constants.elements.iter())
         .collect();
     let mut fn_cache = HashSet::new();
@@ -2059,14 +2098,16 @@ pub fn write_source_code(path: &Path) {
                 &mut const_values,
                 &mut fn_cache,
             )
-        }).collect_vec();
+        })
+        .collect_vec();
     let union_types = definitions
         .iter()
         .filter_map(|def| match def {
             vkxml::DefinitionsElement::Union(ref union) => Some(union.name.as_str()),
             _ => None,
-        }).collect::<HashSet<&str>>();
+        })
+        .collect::<HashSet<&str>>();
     let definition_code: Vec<_> = definitions
         .into_iter()