/*
 * Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
 * documentation files (the “Software”), to deal in the Software without restriction, including without
 * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
 * of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
 * LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 * Copyright © 2020 Charles Giessen (charles@lunarg.com)
 */

#include "VkBootstrap.h"

#include <cassert> // for the assert in VulkanLibrary's unsupported-platform fallback
#include <cstdio>
#include <cstring>

#if defined(_WIN32)
#include <fcntl.h>
#define NOMINMAX
#include <windows.h>
#endif // _WIN32

#if defined(__linux__) || defined(__APPLE__)
#include <dlfcn.h>
#endif

#include <mutex>

namespace vkb {

namespace detail {

class VulkanFunctions {
	private:
	std::mutex init_mutex;
	struct VulkanLibrary {
#if defined(__linux__) || defined(__APPLE__)
		void* library;
#elif defined(_WIN32)
		HMODULE library;
#endif
		PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = VK_NULL_HANDLE;

		VulkanLibrary () {
#if defined(__linux__)
			library = dlopen ("libvulkan.so.1", RTLD_NOW | RTLD_LOCAL);
			if (!library) library = dlopen ("libvulkan.so", RTLD_NOW | RTLD_LOCAL);
#elif defined(__APPLE__)
			library = dlopen ("libvulkan.dylib", RTLD_NOW | RTLD_LOCAL);
			if (!library) library = dlopen ("libvulkan.1.dylib", RTLD_NOW | RTLD_LOCAL);
#elif defined(_WIN32)
			library = LoadLibrary (TEXT ("vulkan-1.dll"));
#else
			assert (false && "Unsupported platform");
#endif
			if (!library) return;
			load_func (ptr_vkGetInstanceProcAddr, "vkGetInstanceProcAddr");
		}

		template <typename T> void load_func (T& func_dest, const char* func_name) {
#if defined(__linux__) || defined(__APPLE__)
			func_dest = reinterpret_cast<T> (dlsym (library, func_name));
#elif defined(_WIN32)
			func_dest = reinterpret_cast<T> (GetProcAddress (library, func_name));
#endif
		}
		void close () {
#if defined(__linux__) || defined(__APPLE__)
			dlclose (library);
#elif defined(_WIN32)
			FreeLibrary (library);
#endif
			library = 0;
		}
	};
	VulkanLibrary& get_vulkan_library () {
		static VulkanLibrary lib;
		return lib;
	}
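	// Note: get_vulkan_library () above returns a function-local static, so the Vulkan
	// loader is opened lazily on first use (dlopen/LoadLibrary in VulkanLibrary's
	// constructor) and then stays loaded for the lifetime of the process; close ()
	// exists but nothing in this file calls it.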

	bool load_vulkan (PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr = nullptr) {
		if (fp_vkGetInstanceProcAddr != nullptr) {
			ptr_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr;
			return true;
		} else {
			auto& lib = get_vulkan_library ();
			ptr_vkGetInstanceProcAddr = lib.ptr_vkGetInstanceProcAddr;
			return lib.library != nullptr && lib.ptr_vkGetInstanceProcAddr != VK_NULL_HANDLE;
		}
	}

	template <typename T> void get_proc_addr (T& out_ptr, const char* func_name) {
		out_ptr = reinterpret_cast<T> (ptr_vkGetInstanceProcAddr (instance, func_name));
	}

	void init_pre_instance_funcs () {
		get_proc_addr (fp_vkEnumerateInstanceExtensionProperties, "vkEnumerateInstanceExtensionProperties");
		get_proc_addr (fp_vkEnumerateInstanceLayerProperties, "vkEnumerateInstanceLayerProperties");
		get_proc_addr (fp_vkEnumerateInstanceVersion, "vkEnumerateInstanceVersion");
		get_proc_addr (fp_vkCreateInstance, "vkCreateInstance");
	}

	public:
	PFN_vkGetInstanceProcAddr ptr_vkGetInstanceProcAddr = nullptr;
	VkInstance instance = nullptr;

	PFN_vkEnumerateInstanceExtensionProperties fp_vkEnumerateInstanceExtensionProperties = nullptr;
	PFN_vkEnumerateInstanceLayerProperties fp_vkEnumerateInstanceLayerProperties = nullptr;
	PFN_vkEnumerateInstanceVersion fp_vkEnumerateInstanceVersion = nullptr;
	PFN_vkCreateInstance fp_vkCreateInstance = nullptr;
	PFN_vkDestroyInstance fp_vkDestroyInstance = nullptr;

	PFN_vkEnumeratePhysicalDevices fp_vkEnumeratePhysicalDevices = nullptr;
	PFN_vkGetPhysicalDeviceFeatures fp_vkGetPhysicalDeviceFeatures = nullptr;
	PFN_vkGetPhysicalDeviceFeatures2 fp_vkGetPhysicalDeviceFeatures2 = nullptr;
	PFN_vkGetPhysicalDeviceFormatProperties fp_vkGetPhysicalDeviceFormatProperties = nullptr;
	PFN_vkGetPhysicalDeviceImageFormatProperties fp_vkGetPhysicalDeviceImageFormatProperties = nullptr;
	PFN_vkGetPhysicalDeviceProperties fp_vkGetPhysicalDeviceProperties = nullptr;
	PFN_vkGetPhysicalDeviceProperties2 fp_vkGetPhysicalDeviceProperties2 = nullptr;
	PFN_vkGetPhysicalDeviceQueueFamilyProperties fp_vkGetPhysicalDeviceQueueFamilyProperties = nullptr;
	PFN_vkGetPhysicalDeviceQueueFamilyProperties2 fp_vkGetPhysicalDeviceQueueFamilyProperties2 = nullptr;
	PFN_vkGetPhysicalDeviceMemoryProperties fp_vkGetPhysicalDeviceMemoryProperties = nullptr;
	PFN_vkGetPhysicalDeviceFormatProperties2 fp_vkGetPhysicalDeviceFormatProperties2 = nullptr;
	PFN_vkGetPhysicalDeviceMemoryProperties2 fp_vkGetPhysicalDeviceMemoryProperties2 = nullptr;

	PFN_vkCreateDevice fp_vkCreateDevice = nullptr;
	PFN_vkDestroyDevice fp_vkDestroyDevice = nullptr;
	PFN_vkEnumerateDeviceExtensionProperties fp_vkEnumerateDeviceExtensionProperties = nullptr;
	PFN_vkGetDeviceQueue fp_vkGetDeviceQueue = nullptr;

	PFN_vkCreateImageView fp_vkCreateImageView = nullptr;
	PFN_vkDestroyImageView fp_vkDestroyImageView = nullptr;

	PFN_vkDestroySurfaceKHR fp_vkDestroySurfaceKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceSupportKHR fp_vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceFormatsKHR fp_vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfacePresentModesKHR fp_vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
	PFN_vkCreateSwapchainKHR fp_vkCreateSwapchainKHR = nullptr;
	PFN_vkDestroySwapchainKHR fp_vkDestroySwapchainKHR = nullptr;
	PFN_vkGetSwapchainImagesKHR fp_vkGetSwapchainImagesKHR = nullptr;

	bool init_vulkan_funcs (PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
		std::lock_guard<std::mutex> lg (init_mutex);
		if (!load_vulkan (fp_vkGetInstanceProcAddr)) return false;
		init_pre_instance_funcs ();
		return true;
	}

	template <typename T> void get_inst_proc_addr (T& out_ptr, const char* func_name) {
		std::lock_guard<std::mutex> lg (init_mutex);
		get_proc_addr (out_ptr, func_name);
	}

	void init_instance_funcs (VkInstance inst) {
		std::lock_guard<std::mutex> lg (init_mutex);

		instance = inst;
		get_proc_addr (fp_vkDestroyInstance, "vkDestroyInstance");
		get_proc_addr (fp_vkEnumeratePhysicalDevices, "vkEnumeratePhysicalDevices");
		get_proc_addr (fp_vkGetPhysicalDeviceFeatures, "vkGetPhysicalDeviceFeatures");
		get_proc_addr (fp_vkGetPhysicalDeviceFeatures2, "vkGetPhysicalDeviceFeatures2");
		get_proc_addr (fp_vkGetPhysicalDeviceFormatProperties, "vkGetPhysicalDeviceFormatProperties");
		get_proc_addr (fp_vkGetPhysicalDeviceImageFormatProperties, "vkGetPhysicalDeviceImageFormatProperties");
		get_proc_addr (fp_vkGetPhysicalDeviceProperties, "vkGetPhysicalDeviceProperties");
		get_proc_addr (fp_vkGetPhysicalDeviceProperties2, "vkGetPhysicalDeviceProperties2");
		get_proc_addr (fp_vkGetPhysicalDeviceQueueFamilyProperties, "vkGetPhysicalDeviceQueueFamilyProperties");
		get_proc_addr (fp_vkGetPhysicalDeviceQueueFamilyProperties2, "vkGetPhysicalDeviceQueueFamilyProperties2");
		get_proc_addr (fp_vkGetPhysicalDeviceMemoryProperties, "vkGetPhysicalDeviceMemoryProperties");
		get_proc_addr (fp_vkGetPhysicalDeviceFormatProperties2, "vkGetPhysicalDeviceFormatProperties2");
		get_proc_addr (fp_vkGetPhysicalDeviceMemoryProperties2, "vkGetPhysicalDeviceMemoryProperties2");

		get_proc_addr (fp_vkCreateDevice, "vkCreateDevice");
		get_proc_addr (fp_vkDestroyDevice, "vkDestroyDevice");
		get_proc_addr (fp_vkEnumerateDeviceExtensionProperties, "vkEnumerateDeviceExtensionProperties");
		get_proc_addr (fp_vkGetDeviceQueue, "vkGetDeviceQueue");

		get_proc_addr (fp_vkCreateImageView, "vkCreateImageView");
		get_proc_addr (fp_vkDestroyImageView, "vkDestroyImageView");

		get_proc_addr (fp_vkDestroySurfaceKHR, "vkDestroySurfaceKHR");
		get_proc_addr (fp_vkGetPhysicalDeviceSurfaceSupportKHR, "vkGetPhysicalDeviceSurfaceSupportKHR");
		get_proc_addr (fp_vkGetPhysicalDeviceSurfaceFormatsKHR, "vkGetPhysicalDeviceSurfaceFormatsKHR");
		get_proc_addr (fp_vkGetPhysicalDeviceSurfacePresentModesKHR, "vkGetPhysicalDeviceSurfacePresentModesKHR");
		get_proc_addr (fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR, "vkGetPhysicalDeviceSurfaceCapabilitiesKHR");
		get_proc_addr (fp_vkCreateSwapchainKHR, "vkCreateSwapchainKHR");
		get_proc_addr (fp_vkDestroySwapchainKHR, "vkDestroySwapchainKHR");
		get_proc_addr (fp_vkGetSwapchainImagesKHR, "vkGetSwapchainImagesKHR");
	}
};

VulkanFunctions& vulkan_functions () {
	static VulkanFunctions v;
	return v;
}

// Helper for robustly executing the two-call pattern
template <typename T, typename F, typename... Ts>
auto get_vector (std::vector<T>& out, F&& f, Ts&&... ts) -> VkResult {
	uint32_t count = 0;
	VkResult err;
	do {
		err = f (ts..., &count, nullptr);
		if (err) {
			return err;
		}
		out.resize (count);
		err = f (ts..., &count, out.data ());
		out.resize (count);
	} while (err == VK_INCOMPLETE);
	return err;
}

template <typename T, typename F, typename... Ts>
auto get_vector_noerror (F&& f, Ts&&... ts) -> std::vector<T> {
	uint32_t count = 0;
	std::vector<T> results;
	f (ts..., &count, nullptr);
	results.resize (count);
	f (ts..., &count, results.data ());
	results.resize (count);
	return results;
}
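// Illustrative note (not part of the library API): both helpers above wrap the Vulkan
// "two-call" enumeration idiom - query the count with a null pointer, resize, then query
// again to fill the buffer; get_vector additionally retries while the second call keeps
// returning VK_INCOMPLETE. A typical call, as used later in this file for instance layers:
//
//   std::vector<VkLayerProperties> layers;
//   VkResult res = detail::get_vector<VkLayerProperties> (
//       layers, detail::vulkan_functions ().fp_vkEnumerateInstanceLayerProperties);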
} // namespace detail

const char* to_string_message_severity (VkDebugUtilsMessageSeverityFlagBitsEXT s) {
	switch (s) {
		case VkDebugUtilsMessageSeverityFlagBitsEXT::VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT:
			return "VERBOSE";
		case VkDebugUtilsMessageSeverityFlagBitsEXT::VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT:
			return "ERROR";
		case VkDebugUtilsMessageSeverityFlagBitsEXT::VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT:
			return "WARNING";
		case VkDebugUtilsMessageSeverityFlagBitsEXT::VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT:
			return "INFO";
		default:
			return "UNKNOWN";
	}
}
const char* to_string_message_type (VkDebugUtilsMessageTypeFlagsEXT s) {
	if (s == 7) return "General | Validation | Performance";
	if (s == 6) return "Validation | Performance";
	if (s == 5) return "General | Performance";
	if (s == 4 /*VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT*/) return "Performance";
	if (s == 3) return "General | Validation";
	if (s == 2 /*VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT*/) return "Validation";
	if (s == 1 /*VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT*/) return "General";
	return "Unknown";
}
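// The integer comparisons above cover every OR-combination of the three
// VkDebugUtilsMessageTypeFlagBitsEXT values (GENERAL = 0x1, VALIDATION = 0x2,
// PERFORMANCE = 0x4), so for example 6 means Validation | Performance and 7 means all three.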

VkResult create_debug_utils_messenger (VkInstance instance,
	PFN_vkDebugUtilsMessengerCallbackEXT debug_callback,
	VkDebugUtilsMessageSeverityFlagsEXT severity,
	VkDebugUtilsMessageTypeFlagsEXT type,
	VkDebugUtilsMessengerEXT* pDebugMessenger,
	VkAllocationCallbacks* allocation_callbacks) {

	if (debug_callback == nullptr) debug_callback = default_debug_callback;
	VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {};
	messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
	messengerCreateInfo.pNext = nullptr;
	messengerCreateInfo.messageSeverity = severity;
	messengerCreateInfo.messageType = type;
	messengerCreateInfo.pfnUserCallback = debug_callback;

	PFN_vkCreateDebugUtilsMessengerEXT createMessengerFunc;
	detail::vulkan_functions ().get_inst_proc_addr (createMessengerFunc, "vkCreateDebugUtilsMessengerEXT");

	if (createMessengerFunc != nullptr) {
		return createMessengerFunc (instance, &messengerCreateInfo, allocation_callbacks, pDebugMessenger);
	} else {
		return VK_ERROR_EXTENSION_NOT_PRESENT;
	}
}

void destroy_debug_utils_messenger (
	VkInstance instance, VkDebugUtilsMessengerEXT debugMessenger, VkAllocationCallbacks* allocation_callbacks) {

	PFN_vkDestroyDebugUtilsMessengerEXT deleteMessengerFunc;
	detail::vulkan_functions ().get_inst_proc_addr (deleteMessengerFunc, "vkDestroyDebugUtilsMessengerEXT");

	if (deleteMessengerFunc != nullptr) {
		deleteMessengerFunc (instance, debugMessenger, allocation_callbacks);
	}
}

VkBool32 default_debug_callback (VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
	VkDebugUtilsMessageTypeFlagsEXT messageType,
	const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData,
	void*) {
	auto ms = to_string_message_severity (messageSeverity);
	auto mt = to_string_message_type (messageType);
	printf ("[%s: %s]\n%s\n", ms, mt, pCallbackData->pMessage);

	return VK_FALSE;
}

namespace detail {
bool check_layer_supported (std::vector<VkLayerProperties> const& available_layers, const char* layer_name) {
	if (!layer_name) return false;
	for (const auto& layer_properties : available_layers) {
		if (strcmp (layer_name, layer_properties.layerName) == 0) {
			return true;
		}
	}
	return false;
}

bool check_layers_supported (std::vector<VkLayerProperties> const& available_layers,
	std::vector<const char*> const& layer_names) {
	bool all_found = true;
	for (const auto& layer_name : layer_names) {
		bool found = check_layer_supported (available_layers, layer_name);
		if (!found) all_found = false;
	}
	return all_found;
}

bool check_extension_supported (
	std::vector<VkExtensionProperties> const& available_extensions, const char* extension_name) {
	if (!extension_name) return false;
	for (const auto& extension_properties : available_extensions) {
		if (strcmp (extension_name, extension_properties.extensionName) == 0) {
			return true;
		}
	}
	return false;
}

bool check_extensions_supported (std::vector<VkExtensionProperties> const& available_extensions,
	std::vector<const char*> const& extension_names) {
	bool all_found = true;
	for (const auto& extension_name : extension_names) {
		bool found = check_extension_supported (available_extensions, extension_name);
		if (!found) all_found = false;
	}
	return all_found;
}

template <typename T>
void setup_pNext_chain (T& structure, std::vector<VkBaseOutStructure*> const& structs) {
	structure.pNext = nullptr;
	if (structs.size () <= 0) return;
	for (size_t i = 0; i < structs.size () - 1; i++) {
		structs.at (i)->pNext = structs.at (i + 1);
	}
	structure.pNext = structs.at (0);
}
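// For example, given structs = { &A, &B } the loop above links structure.pNext -> A -> B.
// The pNext of the last element is left untouched; the callers in this file zero-initialize
// every structure they chain, so the list remains null-terminated.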
const char* validation_layer_name = "VK_LAYER_KHRONOS_validation";

struct InstanceErrorCategory : std::error_category {
	const char* name () const noexcept override { return "vkb_instance"; }
	std::string message (int err) const override {
		return to_string (static_cast<InstanceError> (err));
	}
};
const InstanceErrorCategory instance_error_category;

struct PhysicalDeviceErrorCategory : std::error_category {
	const char* name () const noexcept override { return "vkb_physical_device"; }
	std::string message (int err) const override {
		return to_string (static_cast<PhysicalDeviceError> (err));
	}
};
const PhysicalDeviceErrorCategory physical_device_error_category;

struct QueueErrorCategory : std::error_category {
	const char* name () const noexcept override { return "vkb_queue"; }
	std::string message (int err) const override {
		return to_string (static_cast<QueueError> (err));
	}
};
const QueueErrorCategory queue_error_category;

struct DeviceErrorCategory : std::error_category {
	const char* name () const noexcept override { return "vkb_device"; }
	std::string message (int err) const override {
		return to_string (static_cast<DeviceError> (err));
	}
};
const DeviceErrorCategory device_error_category;

struct SwapchainErrorCategory : std::error_category {
	const char* name () const noexcept override { return "vkb_swapchain"; }
	std::string message (int err) const override {
		return to_string (static_cast<SwapchainError> (err));
	}
};
const SwapchainErrorCategory swapchain_error_category;

} // namespace detail

std::error_code make_error_code (InstanceError instance_error) {
	return { static_cast<int> (instance_error), detail::instance_error_category };
}
std::error_code make_error_code (PhysicalDeviceError physical_device_error) {
	return { static_cast<int> (physical_device_error), detail::physical_device_error_category };
}
std::error_code make_error_code (QueueError queue_error) {
	return { static_cast<int> (queue_error), detail::queue_error_category };
}
std::error_code make_error_code (DeviceError device_error) {
	return { static_cast<int> (device_error), detail::device_error_category };
}
std::error_code make_error_code (SwapchainError swapchain_error) {
	return { static_cast<int> (swapchain_error), detail::swapchain_error_category };
}
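// These overloads let the vkb error enums participate in <system_error>. A minimal usage
// sketch, relying only on what is defined in this file:
//
//   std::error_code ec = make_error_code (InstanceError::failed_create_instance);
//   // ec.message () == "failed_create_instance", ec.category ().name () == "vkb_instance"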

const char* to_string (InstanceError err) {
	switch (err) {
		case InstanceError::vulkan_unavailable:
			return "vulkan_unavailable";
		case InstanceError::vulkan_version_unavailable:
			return "vulkan_version_unavailable";
		case InstanceError::vulkan_version_1_1_unavailable:
			return "vulkan_version_1_1_unavailable";
		case InstanceError::vulkan_version_1_2_unavailable:
			return "vulkan_version_1_2_unavailable";
		case InstanceError::failed_create_debug_messenger:
			return "failed_create_debug_messenger";
		case InstanceError::failed_create_instance:
			return "failed_create_instance";
		case InstanceError::requested_layers_not_present:
			return "requested_layers_not_present";
		case InstanceError::requested_extensions_not_present:
			return "requested_extensions_not_present";
		case InstanceError::windowing_extensions_not_present:
			return "windowing_extensions_not_present";
		default:
			return "";
	}
}
const char* to_string (PhysicalDeviceError err) {
	switch (err) {
		case PhysicalDeviceError::no_surface_provided:
			return "no_surface_provided";
		case PhysicalDeviceError::failed_enumerate_physical_devices:
			return "failed_enumerate_physical_devices";
		case PhysicalDeviceError::no_physical_devices_found:
			return "no_physical_devices_found";
		case PhysicalDeviceError::no_suitable_device:
			return "no_suitable_device";
		default:
			return "";
	}
}
const char* to_string (QueueError err) {
	switch (err) {
		case QueueError::present_unavailable:
			return "present_unavailable";
		case QueueError::graphics_unavailable:
			return "graphics_unavailable";
		case QueueError::compute_unavailable:
			return "compute_unavailable";
		case QueueError::transfer_unavailable:
			return "transfer_unavailable";
		case QueueError::queue_index_out_of_range:
			return "queue_index_out_of_range";
		case QueueError::invalid_queue_family_index:
			return "invalid_queue_family_index";
		default:
			return "";
	}
}
const char* to_string (DeviceError err) {
	switch (err) {
		case DeviceError::failed_create_device:
			return "failed_create_device";
		default:
			return "";
	}
}
const char* to_string (SwapchainError err) {
	switch (err) {
		case SwapchainError::surface_handle_not_provided:
			return "surface_handle_not_provided";
		case SwapchainError::failed_query_surface_support_details:
			return "failed_query_surface_support_details";
		case SwapchainError::failed_create_swapchain:
			return "failed_create_swapchain";
		case SwapchainError::failed_get_swapchain_images:
			return "failed_get_swapchain_images";
		case SwapchainError::failed_create_swapchain_image_views:
			return "failed_create_swapchain_image_views";
		default:
			return "";
	}
}

detail::Result<SystemInfo> SystemInfo::get_system_info () {
	if (!detail::vulkan_functions ().init_vulkan_funcs (nullptr)) {
		return make_error_code (InstanceError::vulkan_unavailable);
	}
	return SystemInfo ();
}

detail::Result<SystemInfo> SystemInfo::get_system_info (PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
	// Using externally provided function pointers, assume the loader is available
	detail::vulkan_functions ().init_vulkan_funcs (fp_vkGetInstanceProcAddr);
	return SystemInfo ();
}

SystemInfo::SystemInfo () {
	auto available_layers_ret = detail::get_vector<VkLayerProperties> (
		this->available_layers, detail::vulkan_functions ().fp_vkEnumerateInstanceLayerProperties);
	if (available_layers_ret != VK_SUCCESS) {
		this->available_layers.clear ();
	}

	for (auto& layer : this->available_layers)
		if (strcmp (layer.layerName, detail::validation_layer_name) == 0)
			validation_layers_available = true;

	auto available_extensions_ret = detail::get_vector<VkExtensionProperties> (this->available_extensions,
		detail::vulkan_functions ().fp_vkEnumerateInstanceExtensionProperties,
		nullptr);
	if (available_extensions_ret != VK_SUCCESS) {
		this->available_extensions.clear ();
	}

	for (auto& ext : this->available_extensions)
		if (strcmp (ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0)
			debug_utils_available = true;

	for (auto& layer : this->available_layers) {
		std::vector<VkExtensionProperties> layer_extensions;
		auto layer_extensions_ret = detail::get_vector<VkExtensionProperties> (layer_extensions,
			detail::vulkan_functions ().fp_vkEnumerateInstanceExtensionProperties,
			layer.layerName);
		// only scan this layer's extension list if enumerating it succeeded
		if (layer_extensions_ret == VK_SUCCESS) {
			for (auto& ext : layer_extensions)
				if (strcmp (ext.extensionName, VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == 0)
					debug_utils_available = true;
		}
	}
}
bool SystemInfo::is_extension_available (const char* extension_name) const {
	if (!extension_name) return false;
	return detail::check_extension_supported (available_extensions, extension_name);
}
bool SystemInfo::is_layer_available (const char* layer_name) const {
	if (!layer_name) return false;
	return detail::check_layer_supported (available_layers, layer_name);
}
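// Hypothetical usage sketch: SystemInfo can be queried up front to decide what to request
// from an InstanceBuilder.
//
//   auto info_ret = SystemInfo::get_system_info ();
//   if (info_ret && info_ret.value ().validation_layers_available) {
//       // safe to request VK_LAYER_KHRONOS_validation
//   }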

void destroy_instance (Instance instance) {
	if (instance.instance != VK_NULL_HANDLE) {
		if (instance.debug_messenger != nullptr)
			destroy_debug_utils_messenger (instance.instance, instance.debug_messenger, instance.allocation_callbacks);
		detail::vulkan_functions ().fp_vkDestroyInstance (instance.instance, instance.allocation_callbacks);
	}
}

InstanceBuilder::InstanceBuilder (PFN_vkGetInstanceProcAddr fp_vkGetInstanceProcAddr) {
	info.fp_vkGetInstanceProcAddr = fp_vkGetInstanceProcAddr;
}
InstanceBuilder::InstanceBuilder () {}

detail::Result<Instance> InstanceBuilder::build () const {

	auto sys_info_ret = SystemInfo::get_system_info ();
	if (!sys_info_ret) return sys_info_ret.error ();
	auto system = sys_info_ret.value ();

	uint32_t api_version = VK_MAKE_VERSION (1, 0, 0);

	if (info.required_api_version > VK_MAKE_VERSION (1, 0, 0) ||
		info.desired_api_version > VK_MAKE_VERSION (1, 0, 0)) {
		PFN_vkEnumerateInstanceVersion pfn_vkEnumerateInstanceVersion =
			detail::vulkan_functions ().fp_vkEnumerateInstanceVersion;

		uint32_t queried_api_version = VK_MAKE_VERSION (1, 0, 0);
		if (pfn_vkEnumerateInstanceVersion != nullptr) {
			VkResult res = pfn_vkEnumerateInstanceVersion (&queried_api_version);
			// Should always return VK_SUCCESS
			if (res != VK_SUCCESS && info.required_api_version > 0)
				return make_error_code (InstanceError::vulkan_version_unavailable);
		}
		if (pfn_vkEnumerateInstanceVersion == nullptr || queried_api_version < info.required_api_version) {
			if (VK_VERSION_MINOR (info.required_api_version) == 2)
				return make_error_code (InstanceError::vulkan_version_1_2_unavailable);
			else if (VK_VERSION_MINOR (info.required_api_version))
				return make_error_code (InstanceError::vulkan_version_1_1_unavailable);
			else
				return make_error_code (InstanceError::vulkan_version_unavailable);
		}
		if (info.required_api_version > VK_MAKE_VERSION (1, 0, 0)) {
			api_version = info.required_api_version;
		} else if (info.desired_api_version > VK_MAKE_VERSION (1, 0, 0)) {
			if (queried_api_version >= info.desired_api_version)
				api_version = info.desired_api_version;
			else
				api_version = queried_api_version;
		}
	}

	VkApplicationInfo app_info = {};
	app_info.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
	app_info.pNext = nullptr;
	app_info.pApplicationName = info.app_name != nullptr ? info.app_name : "";
	app_info.applicationVersion = info.application_version;
	app_info.pEngineName = info.engine_name != nullptr ? info.engine_name : "";
	app_info.engineVersion = info.engine_version;
	app_info.apiVersion = api_version;

	std::vector<const char*> extensions;
	for (auto& ext : info.extensions)
		extensions.push_back (ext);
	if (info.debug_callback != nullptr && system.debug_utils_available) {
		extensions.push_back (VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
	}

	if (!info.headless_context) {
		auto check_add_window_ext = [&] (const char* name) -> bool {
			if (!detail::check_extension_supported (system.available_extensions, name))
				return false;
			extensions.push_back (name);
			return true;
		};
		bool khr_surface_added = check_add_window_ext ("VK_KHR_surface");
#if defined(_WIN32)
		bool added_window_exts = check_add_window_ext ("VK_KHR_win32_surface");
#elif defined(__ANDROID__)
		bool added_window_exts = check_add_window_ext ("VK_KHR_android_surface");
#elif defined(_DIRECT2DISPLAY)
		bool added_window_exts = check_add_window_ext ("VK_KHR_display");
#elif defined(__linux__)
		bool added_window_exts = check_add_window_ext ("VK_KHR_xcb_surface");
		added_window_exts = check_add_window_ext ("VK_KHR_xlib_surface") || added_window_exts;
		added_window_exts = check_add_window_ext ("VK_KHR_wayland_surface") || added_window_exts;
#elif defined(__APPLE__)
		bool added_window_exts = check_add_window_ext ("VK_KHR_metal_surface");
#endif
		if (!khr_surface_added || !added_window_exts)
			return make_error_code (InstanceError::windowing_extensions_not_present);
	}
	bool all_extensions_supported = detail::check_extensions_supported (system.available_extensions, extensions);
	if (!all_extensions_supported) {
		return make_error_code (InstanceError::requested_extensions_not_present);
	}

	std::vector<const char*> layers;
	for (auto& layer : info.layers)
		layers.push_back (layer);

	if (info.enable_validation_layers || (info.request_validation_layers && system.validation_layers_available)) {
		layers.push_back (detail::validation_layer_name);
	}
	bool all_layers_supported = detail::check_layers_supported (system.available_layers, layers);
	if (!all_layers_supported) {
		return make_error_code (InstanceError::requested_layers_not_present);
	}

	std::vector<VkBaseOutStructure*> pNext_chain;

	VkDebugUtilsMessengerCreateInfoEXT messengerCreateInfo = {};
	if (info.use_debug_messenger) {
		messengerCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
		messengerCreateInfo.pNext = nullptr;
		messengerCreateInfo.messageSeverity = info.debug_message_severity;
		messengerCreateInfo.messageType = info.debug_message_type;
		messengerCreateInfo.pfnUserCallback = info.debug_callback;
		pNext_chain.push_back (reinterpret_cast<VkBaseOutStructure*> (&messengerCreateInfo));
	}

	VkValidationFeaturesEXT features{};
	if (info.enabled_validation_features.size () != 0 || info.disabled_validation_features.size ()) {
		features.sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT;
		features.pNext = nullptr;
		features.enabledValidationFeatureCount =
			static_cast<uint32_t> (info.enabled_validation_features.size ());
		features.pEnabledValidationFeatures = info.enabled_validation_features.data ();
		features.disabledValidationFeatureCount =
			static_cast<uint32_t> (info.disabled_validation_features.size ());
		features.pDisabledValidationFeatures = info.disabled_validation_features.data ();
		pNext_chain.push_back (reinterpret_cast<VkBaseOutStructure*> (&features));
	}

	VkValidationFlagsEXT checks{};
	if (info.disabled_validation_checks.size () != 0) {
		checks.sType = VK_STRUCTURE_TYPE_VALIDATION_FLAGS_EXT;
		checks.pNext = nullptr;
		checks.disabledValidationCheckCount =
			static_cast<uint32_t> (info.disabled_validation_checks.size ());
		checks.pDisabledValidationChecks = info.disabled_validation_checks.data ();
		pNext_chain.push_back (reinterpret_cast<VkBaseOutStructure*> (&checks));
	}

	VkInstanceCreateInfo instance_create_info = {};
	instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
	detail::setup_pNext_chain (instance_create_info, pNext_chain);
	instance_create_info.flags = info.flags;
	instance_create_info.pApplicationInfo = &app_info;
	instance_create_info.enabledExtensionCount = static_cast<uint32_t> (extensions.size ());
	instance_create_info.ppEnabledExtensionNames = extensions.data ();
	instance_create_info.enabledLayerCount = static_cast<uint32_t> (layers.size ());
	instance_create_info.ppEnabledLayerNames = layers.data ();

	Instance instance;
	VkResult res = detail::vulkan_functions ().fp_vkCreateInstance (
		&instance_create_info, info.allocation_callbacks, &instance.instance);
	if (res != VK_SUCCESS)
		return detail::Result<Instance> (InstanceError::failed_create_instance, res);

	detail::vulkan_functions ().init_instance_funcs (instance.instance);

	if (info.use_debug_messenger) {
		res = create_debug_utils_messenger (instance.instance,
			info.debug_callback,
			info.debug_message_severity,
			info.debug_message_type,
			&instance.debug_messenger,
			info.allocation_callbacks);
		if (res != VK_SUCCESS) {
			return detail::Result<Instance> (InstanceError::failed_create_debug_messenger, res);
		}
	}

	if (info.headless_context) {
		instance.headless = true;
	}
	instance.allocation_callbacks = info.allocation_callbacks;
	instance.instance_version = api_version;
	instance.fp_vkGetInstanceProcAddr = detail::vulkan_functions ().ptr_vkGetInstanceProcAddr;
	return instance;
}
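// Typical builder usage, sketched for reference (every call below is defined in this file;
// the Result handling mirrors how build () itself checks SystemInfo::get_system_info ()):
//
//   vkb::InstanceBuilder builder;
//   auto inst_ret = builder.set_app_name ("Example App")
//                       .request_validation_layers (true)
//                       .use_default_debug_messenger ()
//                       .build ();
//   if (!inst_ret) { /* inspect inst_ret.error () */ }
//   vkb::Instance instance = inst_ret.value ();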

InstanceBuilder& InstanceBuilder::set_app_name (const char* app_name) {
	if (!app_name) return *this;
	info.app_name = app_name;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_engine_name (const char* engine_name) {
	if (!engine_name) return *this;
	info.engine_name = engine_name;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_app_version (uint32_t major, uint32_t minor, uint32_t patch) {
	info.application_version = VK_MAKE_VERSION (major, minor, patch);
	return *this;
}
InstanceBuilder& InstanceBuilder::set_engine_version (uint32_t major, uint32_t minor, uint32_t patch) {
	info.engine_version = VK_MAKE_VERSION (major, minor, patch);
	return *this;
}
InstanceBuilder& InstanceBuilder::require_api_version (uint32_t major, uint32_t minor, uint32_t patch) {
	info.required_api_version = VK_MAKE_VERSION (major, minor, patch);
	return *this;
}
InstanceBuilder& InstanceBuilder::desire_api_version (uint32_t major, uint32_t minor, uint32_t patch) {
	info.desired_api_version = VK_MAKE_VERSION (major, minor, patch);
	return *this;
}
InstanceBuilder& InstanceBuilder::enable_layer (const char* layer_name) {
	if (!layer_name) return *this;
	info.layers.push_back (layer_name);
	return *this;
}
InstanceBuilder& InstanceBuilder::enable_extension (const char* extension_name) {
	if (!extension_name) return *this;
	info.extensions.push_back (extension_name);
	return *this;
}
InstanceBuilder& InstanceBuilder::enable_validation_layers (bool enable_validation) {
	info.enable_validation_layers = enable_validation;
	return *this;
}
InstanceBuilder& InstanceBuilder::request_validation_layers (bool enable_validation) {
	info.request_validation_layers = enable_validation;
	return *this;
}
InstanceBuilder& InstanceBuilder::use_default_debug_messenger () {
	info.use_debug_messenger = true;
	info.debug_callback = default_debug_callback;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_debug_callback (PFN_vkDebugUtilsMessengerCallbackEXT callback) {
	info.use_debug_messenger = true;
	info.debug_callback = callback;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_headless (bool headless) {
	info.headless_context = headless;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_debug_messenger_severity (VkDebugUtilsMessageSeverityFlagsEXT severity) {
	info.debug_message_severity = severity;
	return *this;
}
InstanceBuilder& InstanceBuilder::add_debug_messenger_severity (VkDebugUtilsMessageSeverityFlagsEXT severity) {
	info.debug_message_severity = info.debug_message_severity | severity;
	return *this;
}
InstanceBuilder& InstanceBuilder::set_debug_messenger_type (VkDebugUtilsMessageTypeFlagsEXT type) {
	info.debug_message_type = type;
	return *this;
}
InstanceBuilder& InstanceBuilder::add_debug_messenger_type (VkDebugUtilsMessageTypeFlagsEXT type) {
	info.debug_message_type = info.debug_message_type | type;
	return *this;
}
InstanceBuilder& InstanceBuilder::add_validation_disable (VkValidationCheckEXT check) {
	info.disabled_validation_checks.push_back (check);
	return *this;
}
InstanceBuilder& InstanceBuilder::add_validation_feature_enable (VkValidationFeatureEnableEXT enable) {
	info.enabled_validation_features.push_back (enable);
	return *this;
}
InstanceBuilder& InstanceBuilder::add_validation_feature_disable (VkValidationFeatureDisableEXT disable) {
	info.disabled_validation_features.push_back (disable);
	return *this;
}
InstanceBuilder& InstanceBuilder::set_allocation_callbacks (VkAllocationCallbacks* callbacks) {
	info.allocation_callbacks = callbacks;
	return *this;
}

// ---- Physical Device ---- //

namespace detail {

std::vector<const char*> check_device_extension_support (
	VkPhysicalDevice device, std::vector<const char*> desired_extensions) {
	std::vector<VkExtensionProperties> available_extensions;
	auto available_extensions_ret = detail::get_vector<VkExtensionProperties> (
		available_extensions, detail::vulkan_functions ().fp_vkEnumerateDeviceExtensionProperties, device, nullptr);
	if (available_extensions_ret != VK_SUCCESS) return {};

	std::vector<const char*> extensions_to_enable;
	for (const auto& extension : available_extensions) {
		for (auto& req_ext : desired_extensions) {
			if (strcmp (req_ext, extension.extensionName) == 0) {
				extensions_to_enable.push_back (req_ext);
				break;
			}
		}
	}
	return extensions_to_enable;
}

bool supports_features (VkPhysicalDeviceFeatures supported, VkPhysicalDeviceFeatures requested) {
	// clang-format off
	if (requested.robustBufferAccess && !supported.robustBufferAccess) return false;
	if (requested.fullDrawIndexUint32 && !supported.fullDrawIndexUint32) return false;
	if (requested.imageCubeArray && !supported.imageCubeArray) return false;
	if (requested.independentBlend && !supported.independentBlend) return false;
	if (requested.geometryShader && !supported.geometryShader) return false;
	if (requested.tessellationShader && !supported.tessellationShader) return false;
	if (requested.sampleRateShading && !supported.sampleRateShading) return false;
	if (requested.dualSrcBlend && !supported.dualSrcBlend) return false;
	if (requested.logicOp && !supported.logicOp) return false;
	if (requested.multiDrawIndirect && !supported.multiDrawIndirect) return false;
	if (requested.drawIndirectFirstInstance && !supported.drawIndirectFirstInstance) return false;
	if (requested.depthClamp && !supported.depthClamp) return false;
	if (requested.depthBiasClamp && !supported.depthBiasClamp) return false;
	if (requested.fillModeNonSolid && !supported.fillModeNonSolid) return false;
	if (requested.depthBounds && !supported.depthBounds) return false;
	if (requested.wideLines && !supported.wideLines) return false;
	if (requested.largePoints && !supported.largePoints) return false;
	if (requested.alphaToOne && !supported.alphaToOne) return false;
	if (requested.multiViewport && !supported.multiViewport) return false;
	if (requested.samplerAnisotropy && !supported.samplerAnisotropy) return false;
	if (requested.textureCompressionETC2 && !supported.textureCompressionETC2) return false;
	if (requested.textureCompressionASTC_LDR && !supported.textureCompressionASTC_LDR) return false;
	if (requested.textureCompressionBC && !supported.textureCompressionBC) return false;
	if (requested.occlusionQueryPrecise && !supported.occlusionQueryPrecise) return false;
	if (requested.pipelineStatisticsQuery && !supported.pipelineStatisticsQuery) return false;
	if (requested.vertexPipelineStoresAndAtomics && !supported.vertexPipelineStoresAndAtomics) return false;
	if (requested.fragmentStoresAndAtomics && !supported.fragmentStoresAndAtomics) return false;
	if (requested.shaderTessellationAndGeometryPointSize && !supported.shaderTessellationAndGeometryPointSize) return false;
	if (requested.shaderImageGatherExtended && !supported.shaderImageGatherExtended) return false;
	if (requested.shaderStorageImageExtendedFormats && !supported.shaderStorageImageExtendedFormats) return false;
	if (requested.shaderStorageImageMultisample && !supported.shaderStorageImageMultisample) return false;
	if (requested.shaderStorageImageReadWithoutFormat && !supported.shaderStorageImageReadWithoutFormat) return false;
	if (requested.shaderStorageImageWriteWithoutFormat && !supported.shaderStorageImageWriteWithoutFormat) return false;
	if (requested.shaderUniformBufferArrayDynamicIndexing && !supported.shaderUniformBufferArrayDynamicIndexing) return false;
	if (requested.shaderSampledImageArrayDynamicIndexing && !supported.shaderSampledImageArrayDynamicIndexing) return false;
	if (requested.shaderStorageBufferArrayDynamicIndexing && !supported.shaderStorageBufferArrayDynamicIndexing) return false;
	if (requested.shaderStorageImageArrayDynamicIndexing && !supported.shaderStorageImageArrayDynamicIndexing) return false;
	if (requested.shaderClipDistance && !supported.shaderClipDistance) return false;
	if (requested.shaderCullDistance && !supported.shaderCullDistance) return false;
	if (requested.shaderFloat64 && !supported.shaderFloat64) return false;
	if (requested.shaderInt64 && !supported.shaderInt64) return false;
	if (requested.shaderInt16 && !supported.shaderInt16) return false;
	if (requested.shaderResourceResidency && !supported.shaderResourceResidency) return false;
	if (requested.shaderResourceMinLod && !supported.shaderResourceMinLod) return false;
	if (requested.sparseBinding && !supported.sparseBinding) return false;
	if (requested.sparseResidencyBuffer && !supported.sparseResidencyBuffer) return false;
	if (requested.sparseResidencyImage2D && !supported.sparseResidencyImage2D) return false;
	if (requested.sparseResidencyImage3D && !supported.sparseResidencyImage3D) return false;
	if (requested.sparseResidency2Samples && !supported.sparseResidency2Samples) return false;
	if (requested.sparseResidency4Samples && !supported.sparseResidency4Samples) return false;
	if (requested.sparseResidency8Samples && !supported.sparseResidency8Samples) return false;
	if (requested.sparseResidency16Samples && !supported.sparseResidency16Samples) return false;
	if (requested.sparseResidencyAliased && !supported.sparseResidencyAliased) return false;
	if (requested.variableMultisampleRate && !supported.variableMultisampleRate) return false;
	if (requested.inheritedQueries && !supported.inheritedQueries) return false;
	// clang-format on
	return true;
}

// finds the first queue which supports graphics operations. returns -1 if none is found
int get_graphics_queue_index (std::vector<VkQueueFamilyProperties> const& families) {
	for (size_t i = 0; i < families.size (); i++) {
		if (families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) return static_cast<int> (i);
	}
	return -1;
}
// finds a compute queue which is separate from the graphics queue and tries to find one
// without transfer support. returns -1 if none is found
int get_separate_compute_queue_index (std::vector<VkQueueFamilyProperties> const& families) {
	int compute = -1;
	for (size_t i = 0; i < families.size (); i++) {
		if ((families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) &&
			((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) {
			if ((families[i].queueFlags & VK_QUEUE_TRANSFER_BIT) == 0) {
				return static_cast<int> (i);
			} else {
				compute = static_cast<int> (i);
			}
		}
	}
	return compute;
}
// finds a transfer queue which is separate from the graphics queue and tries to find one
// without compute support. returns -1 if none is found
int get_separate_transfer_queue_index (std::vector<VkQueueFamilyProperties> const& families) {
	int transfer = -1;
	for (size_t i = 0; i < families.size (); i++) {
		if ((families[i].queueFlags & VK_QUEUE_TRANSFER_BIT) &&
			((families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0)) {
			if ((families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) == 0) {
				return static_cast<int> (i);
			} else {
				transfer = static_cast<int> (i);
			}
		}
	}
	return transfer;
}
// finds the first queue which supports only compute (not graphics or transfer). returns -1 if none is found
int get_dedicated_compute_queue_index (std::vector<VkQueueFamilyProperties> const& families) {
	for (size_t i = 0; i < families.size (); i++) {
		if ((families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) &&
			(families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 &&
			(families[i].queueFlags & VK_QUEUE_TRANSFER_BIT) == 0)
			return static_cast<int> (i);
	}
	return -1;
}
// finds the first queue which supports only transfer (not graphics or compute). returns -1 if none is found
int get_dedicated_transfer_queue_index (std::vector<VkQueueFamilyProperties> const& families) {
	for (size_t i = 0; i < families.size (); i++) {
		if ((families[i].queueFlags & VK_QUEUE_TRANSFER_BIT) &&
			(families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) == 0 &&
			(families[i].queueFlags & VK_QUEUE_COMPUTE_BIT) == 0)
			return static_cast<int> (i);
	}
	return -1;
}
// finds the first queue which supports presenting. returns -1 if none is found
int get_present_queue_index (VkPhysicalDevice const phys_device,
	VkSurfaceKHR const surface,
	std::vector<VkQueueFamilyProperties> const& families) {
	for (size_t i = 0; i < families.size (); i++) {
		VkBool32 presentSupport = false;
		if (surface != VK_NULL_HANDLE) {
			VkResult res = detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfaceSupportKHR (
				phys_device, static_cast<uint32_t> (i), surface, &presentSupport);
			if (res != VK_SUCCESS) return -1; // TODO: determine if this should fail another way
		}
		if (presentSupport == VK_TRUE) return static_cast<int> (i);
	}
	return -1;
}
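// Summary of the helpers above: the "dedicated" variants only accept a family that supports
// the requested capability and neither graphics nor the other capability, the "separate"
// variants merely require a non-graphics family (preferring one without the other
// capability), and every helper reports -1 when no family qualifies.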
} // namespace detail


PhysicalDeviceSelector::PhysicalDeviceDesc PhysicalDeviceSelector::populate_device_details (
	VkPhysicalDevice phys_device) const {
	PhysicalDeviceSelector::PhysicalDeviceDesc desc{};
	desc.phys_device = phys_device;
	auto queue_families = detail::get_vector_noerror<VkQueueFamilyProperties> (
		detail::vulkan_functions ().fp_vkGetPhysicalDeviceQueueFamilyProperties, phys_device);
	desc.queue_families = queue_families;

	detail::vulkan_functions ().fp_vkGetPhysicalDeviceProperties (phys_device, &desc.device_properties);
	detail::vulkan_functions ().fp_vkGetPhysicalDeviceFeatures (phys_device, &desc.device_features);
	detail::vulkan_functions ().fp_vkGetPhysicalDeviceMemoryProperties (phys_device, &desc.mem_properties);
	return desc;
}

PhysicalDeviceSelector::Suitable PhysicalDeviceSelector::is_device_suitable (PhysicalDeviceDesc pd) const {
	Suitable suitable = Suitable::yes;

	if (criteria.required_version > pd.device_properties.apiVersion) return Suitable::no;
	if (criteria.desired_version > pd.device_properties.apiVersion) suitable = Suitable::partial;

	bool dedicated_compute = detail::get_dedicated_compute_queue_index (pd.queue_families) >= 0;
	bool dedicated_transfer = detail::get_dedicated_transfer_queue_index (pd.queue_families) >= 0;
	bool separate_compute = detail::get_separate_compute_queue_index (pd.queue_families) >= 0;
	bool separate_transfer = detail::get_separate_transfer_queue_index (pd.queue_families) >= 0;

	bool present_queue =
		detail::get_present_queue_index (pd.phys_device, system_info.surface, pd.queue_families) >= 0;

	if (criteria.require_dedicated_compute_queue && !dedicated_compute) return Suitable::no;
	if (criteria.require_dedicated_transfer_queue && !dedicated_transfer) return Suitable::no;
	if (criteria.require_separate_compute_queue && !separate_compute) return Suitable::no;
	if (criteria.require_separate_transfer_queue && !separate_transfer) return Suitable::no;
	if (criteria.require_present && !present_queue && !criteria.defer_surface_initialization)
		return Suitable::no;

	auto required_extensions_supported =
		detail::check_device_extension_support (pd.phys_device, criteria.required_extensions);
	if (required_extensions_supported.size () != criteria.required_extensions.size ())
		return Suitable::no;

	auto desired_extensions_supported =
		detail::check_device_extension_support (pd.phys_device, criteria.desired_extensions);
	if (desired_extensions_supported.size () != criteria.desired_extensions.size ())
		suitable = Suitable::partial;


	bool swapChainAdequate = false;
	if (criteria.defer_surface_initialization) {
		swapChainAdequate = true;
	} else if (!system_info.headless) {
		std::vector<VkSurfaceFormatKHR> formats;
		std::vector<VkPresentModeKHR> present_modes;

		auto formats_ret = detail::get_vector<VkSurfaceFormatKHR> (formats,
			detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfaceFormatsKHR,
			pd.phys_device,
			system_info.surface);
		auto present_modes_ret = detail::get_vector<VkPresentModeKHR> (present_modes,
			detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfacePresentModesKHR,
			pd.phys_device,
			system_info.surface);

		if (formats_ret == VK_SUCCESS && present_modes_ret == VK_SUCCESS) {
			swapChainAdequate = !formats.empty () && !present_modes.empty ();
		}
	}
	if (criteria.require_present && !swapChainAdequate) return Suitable::no;

	if (pd.device_properties.deviceType != static_cast<VkPhysicalDeviceType> (criteria.preferred_type)) {
		if (criteria.allow_any_type)
			suitable = Suitable::partial;
		else
			return Suitable::no;
	}

	bool required_features_supported =
		detail::supports_features (pd.device_features, criteria.required_features);
	if (!required_features_supported) return Suitable::no;

	bool has_required_memory = false;
	bool has_preferred_memory = false;
	for (uint32_t i = 0; i < pd.mem_properties.memoryHeapCount; i++) {
1067 |
|
✗ |
if (pd.mem_properties.memoryHeaps[i].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) { // heap flags use VkMemoryHeapFlagBits, not VkMemoryPropertyFlagBits
1068 |
|
✗ |
if (pd.mem_properties.memoryHeaps[i].size > criteria.required_mem_size) { |
1069 |
|
✗ |
has_required_memory = true; |
1070 |
|
|
} |
1071 |
|
✗ |
if (pd.mem_properties.memoryHeaps[i].size > criteria.desired_mem_size) { |
1072 |
|
✗ |
has_preferred_memory = true; |
1073 |
|
|
} |
1074 |
|
|
} |
1075 |
|
|
} |
1076 |
|
✗ |
if (!has_required_memory) return Suitable::no; |
1077 |
|
✗ |
if (!has_preferred_memory) suitable = Suitable::partial; |
1078 |
|
|
|
1079 |
|
✗ |
return suitable; |
1080 |
|
✗ |
} |
1081 |
|
|
|
1082 |
|
|
PhysicalDeviceSelector::PhysicalDeviceSelector (Instance const& instance) { |
1083 |
|
✗ |
system_info.instance = instance.instance; |
1084 |
|
✗ |
system_info.headless = instance.headless; |
1085 |
|
✗ |
criteria.require_present = !instance.headless; |
1086 |
|
✗ |
criteria.required_version = instance.instance_version; |
1087 |
|
✗ |
criteria.desired_version = instance.instance_version; |
1088 |
|
✗ |
} |
1089 |
|
|
|
1090 |
|
|
detail::Result<PhysicalDevice> PhysicalDeviceSelector::select () const { |
1091 |
|
✗ |
if (!system_info.headless && !criteria.defer_surface_initialization) { |
1092 |
|
✗ |
if (system_info.surface == VK_NULL_HANDLE)
1093 |
|
✗ |
return detail::Result<PhysicalDevice>{ PhysicalDeviceError::no_surface_provided }; |
1094 |
|
|
} |
1095 |
|
|
|
1096 |
|
|
|
1097 |
|
✗ |
std::vector<VkPhysicalDevice> physical_devices; |
1098 |
|
|
|
1099 |
|
✗ |
auto physical_devices_ret = detail::get_vector<VkPhysicalDevice> ( |
1100 |
|
✗ |
physical_devices, detail::vulkan_functions ().fp_vkEnumeratePhysicalDevices, system_info.instance); |
1101 |
|
✗ |
if (physical_devices_ret != VK_SUCCESS) { |
1102 |
|
|
return detail::Result<PhysicalDevice>{ PhysicalDeviceError::failed_enumerate_physical_devices, |
1103 |
|
✗ |
physical_devices_ret }; |
1104 |
|
|
} |
1105 |
|
✗ |
if (physical_devices.size () == 0) { |
1106 |
|
✗ |
return detail::Result<PhysicalDevice>{ PhysicalDeviceError::no_physical_devices_found }; |
1107 |
|
|
} |
1108 |
|
|
|
1109 |
|
✗ |
std::vector<PhysicalDeviceDesc> phys_device_descriptions; |
1110 |
|
✗ |
for (auto& phys_device : physical_devices) { |
1111 |
|
✗ |
phys_device_descriptions.push_back (populate_device_details (phys_device)); |
1112 |
|
|
} |
1113 |
|
|
|
1114 |
|
✗ |
PhysicalDeviceDesc selected_device{}; |
1115 |
|
|
|
1116 |
|
✗ |
if (criteria.use_first_gpu_unconditionally) { |
1117 |
|
✗ |
selected_device = phys_device_descriptions.at (0); |
1118 |
|
|
} else { |
1119 |
|
✗ |
for (const auto& device : phys_device_descriptions) { |
1120 |
|
✗ |
auto suitable = is_device_suitable (device); |
1121 |
|
✗ |
if (suitable == Suitable::yes) { |
1122 |
|
✗ |
selected_device = device; |
1123 |
|
✗ |
break; |
1124 |
|
✗ |
} else if (suitable == Suitable::partial) { |
1125 |
|
✗ |
selected_device = device; |
1126 |
|
|
} |
1127 |
|
|
} |
1128 |
|
|
} |
1129 |
|
|
|
1130 |
|
✗ |
if (selected_device.phys_device == VK_NULL_HANDLE) { |
1131 |
|
✗ |
return detail::Result<PhysicalDevice>{ PhysicalDeviceError::no_suitable_device }; |
1132 |
|
|
} |
1133 |
|
✗ |
PhysicalDevice out_device{}; |
1134 |
|
✗ |
out_device.physical_device = selected_device.phys_device; |
1135 |
|
✗ |
out_device.surface = system_info.surface; |
1136 |
|
✗ |
out_device.features = criteria.required_features; |
1137 |
|
✗ |
out_device.properties = selected_device.device_properties; |
1138 |
|
✗ |
out_device.memory_properties = selected_device.mem_properties; |
1139 |
|
✗ |
out_device.queue_families = selected_device.queue_families; |
1140 |
|
✗ |
out_device.defer_surface_initialization = criteria.defer_surface_initialization; |
1141 |
|
|
|
1142 |
|
✗ |
out_device.extensions_to_enable.insert (out_device.extensions_to_enable.end (), |
1143 |
|
|
criteria.required_extensions.begin (), |
1144 |
|
|
criteria.required_extensions.end ()); |
1145 |
|
|
auto desired_extensions_supported = |
1146 |
|
✗ |
detail::check_device_extension_support (out_device.physical_device, criteria.desired_extensions); |
1147 |
|
✗ |
out_device.extensions_to_enable.insert (out_device.extensions_to_enable.end (), |
1148 |
|
|
desired_extensions_supported.begin (), |
1149 |
|
|
desired_extensions_supported.end ()); |
1150 |
|
✗ |
return out_device; |
1151 |
|
✗ |
} |
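// Illustrative usage sketch (editor's addition, not part of the library source): how the
// selector above is typically driven from application code. `instance` and `surface` are
// placeholders for a vkb::Instance and VkSurfaceKHR created earlier by the application.
static void example_select_physical_device (vkb::Instance const& instance, VkSurfaceKHR surface) {
    vkb::PhysicalDeviceSelector selector (instance);
    auto phys_device_ret = selector.set_surface (surface)
                               .set_minimum_version (1, 1)
                               .require_dedicated_transfer_queue ()
                               .add_required_extension (VK_KHR_SWAPCHAIN_EXTENSION_NAME)
                               .select ();
    if (!phys_device_ret) {
        // phys_device_ret.error () identifies the PhysicalDeviceError; handle or report it here
        return;
    }
    vkb::PhysicalDevice physical_device = phys_device_ret.value ();
    (void)physical_device; // hand off to DeviceBuilder (see the sketch further below)
}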
1152 |
|
|
|
1153 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::set_surface (VkSurfaceKHR surface) { |
1154 |
|
✗ |
system_info.surface = surface; |
1155 |
|
✗ |
system_info.headless = false; |
1156 |
|
✗ |
return *this; |
1157 |
|
|
} |
1158 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::prefer_gpu_device_type (PreferredDeviceType type) { |
1159 |
|
✗ |
criteria.preferred_type = type; |
1160 |
|
✗ |
return *this; |
1161 |
|
|
} |
1162 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::allow_any_gpu_device_type (bool allow_any_type) { |
1163 |
|
✗ |
criteria.allow_any_type = allow_any_type; |
1164 |
|
✗ |
return *this; |
1165 |
|
|
} |
1166 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::require_present (bool require) { |
1167 |
|
✗ |
criteria.require_present = require; |
1168 |
|
✗ |
return *this; |
1169 |
|
|
} |
1170 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_transfer_queue () { |
1171 |
|
✗ |
criteria.require_dedicated_transfer_queue = true; |
1172 |
|
✗ |
return *this; |
1173 |
|
|
} |
1174 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::require_dedicated_compute_queue () { |
1175 |
|
✗ |
criteria.require_dedicated_compute_queue = true; |
1176 |
|
✗ |
return *this; |
1177 |
|
|
} |
1178 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_transfer_queue () { |
1179 |
|
✗ |
criteria.require_separate_transfer_queue = true; |
1180 |
|
✗ |
return *this; |
1181 |
|
|
} |
1182 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::require_separate_compute_queue () { |
1183 |
|
✗ |
criteria.require_separate_compute_queue = true; |
1184 |
|
✗ |
return *this; |
1185 |
|
|
} |
1186 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::required_device_memory_size (VkDeviceSize size) { |
1187 |
|
✗ |
criteria.required_mem_size = size; |
1188 |
|
✗ |
return *this; |
1189 |
|
|
} |
1190 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::desired_device_memory_size (VkDeviceSize size) { |
1191 |
|
✗ |
criteria.desired_mem_size = size; |
1192 |
|
✗ |
return *this; |
1193 |
|
|
} |
1194 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extension (const char* extension) { |
1195 |
|
✗ |
criteria.required_extensions.push_back (extension); |
1196 |
|
✗ |
return *this; |
1197 |
|
|
} |
1198 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::add_required_extensions (std::vector<const char*> extensions) { |
1199 |
|
✗ |
criteria.required_extensions.insert ( |
1200 |
|
✗ |
criteria.required_extensions.end (), extensions.begin (), extensions.end ()); |
1201 |
|
✗ |
return *this; |
1202 |
|
|
} |
1203 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extension (const char* extension) { |
1204 |
|
✗ |
criteria.desired_extensions.push_back (extension); |
1205 |
|
✗ |
return *this; |
1206 |
|
|
} |
1207 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::add_desired_extensions (std::vector<const char*> extensions) { |
1208 |
|
✗ |
criteria.desired_extensions.insert ( |
1209 |
|
✗ |
criteria.desired_extensions.end (), extensions.begin (), extensions.end ()); |
1210 |
|
✗ |
return *this; |
1211 |
|
|
} |
1212 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::set_minimum_version (uint32_t major, uint32_t minor) { |
1213 |
|
✗ |
criteria.required_version = VK_MAKE_VERSION (major, minor, 0); |
1214 |
|
✗ |
return *this; |
1215 |
|
|
} |
1216 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::set_desired_version (uint32_t major, uint32_t minor) { |
1217 |
|
✗ |
criteria.desired_version = VK_MAKE_VERSION (major, minor, 0); |
1218 |
|
✗ |
return *this; |
1219 |
|
|
} |
1220 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::set_required_features (VkPhysicalDeviceFeatures features) { |
1221 |
|
✗ |
criteria.required_features = features; |
1222 |
|
✗ |
return *this; |
1223 |
|
|
} |
1224 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::defer_surface_initialization () { |
1225 |
|
✗ |
criteria.defer_surface_initialization = true; |
1226 |
|
✗ |
return *this; |
1227 |
|
|
} |
1228 |
|
|
PhysicalDeviceSelector& PhysicalDeviceSelector::select_first_device_unconditionally (bool unconditionally) { |
1229 |
|
✗ |
criteria.use_first_gpu_unconditionally = unconditionally; |
1230 |
|
✗ |
return *this; |
1231 |
|
|
} |
1232 |
|
|
|
1233 |
|
|
bool PhysicalDevice::has_dedicated_compute_queue () const { |
1234 |
|
✗ |
return detail::get_dedicated_compute_queue_index (queue_families) >= 0; |
1235 |
|
|
} |
1236 |
|
|
bool PhysicalDevice::has_separate_compute_queue () const { |
1237 |
|
✗ |
return detail::get_separate_compute_queue_index (queue_families) >= 0; |
1238 |
|
|
} |
1239 |
|
|
bool PhysicalDevice::has_dedicated_transfer_queue () const { |
1240 |
|
✗ |
return detail::get_dedicated_transfer_queue_index (queue_families) >= 0; |
1241 |
|
|
} |
1242 |
|
|
bool PhysicalDevice::has_separate_transfer_queue () const { |
1243 |
|
✗ |
return detail::get_separate_transfer_queue_index (queue_families) >= 0; |
1244 |
|
|
} |
1245 |
|
|
std::vector<VkQueueFamilyProperties> PhysicalDevice::get_queue_families () const { |
1246 |
|
✗ |
return queue_families; |
1247 |
|
|
} |
1248 |
|
|
|
1249 |
|
|
// ---- Queues ---- // |
1250 |
|
|
|
1251 |
|
|
detail::Result<uint32_t> Device::get_queue_index (QueueType type) const { |
1252 |
|
✗ |
int index = -1; |
1253 |
|
✗ |
switch (type) { |
1254 |
|
✗ |
case QueueType::present: |
1255 |
|
✗ |
index = detail::get_present_queue_index (physical_device.physical_device, surface, queue_families); |
1256 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::present_unavailable }; |
1257 |
|
✗ |
break; |
1258 |
|
✗ |
case QueueType::graphics: |
1259 |
|
✗ |
index = detail::get_graphics_queue_index (queue_families); |
1260 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::graphics_unavailable }; |
1261 |
|
✗ |
break; |
1262 |
|
✗ |
case QueueType::compute: |
1263 |
|
✗ |
index = detail::get_separate_compute_queue_index (queue_families); |
1264 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::compute_unavailable }; |
1265 |
|
✗ |
break; |
1266 |
|
✗ |
case QueueType::transfer: |
1267 |
|
✗ |
index = detail::get_separate_transfer_queue_index (queue_families); |
1268 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::transfer_unavailable }; |
1269 |
|
✗ |
break; |
1270 |
|
✗ |
default: |
1271 |
|
✗ |
return detail::Result<uint32_t>{ QueueError::invalid_queue_family_index }; |
1272 |
|
|
} |
1273 |
|
✗ |
return static_cast<uint32_t> (index); |
1274 |
|
|
} |
1275 |
|
|
detail::Result<uint32_t> Device::get_dedicated_queue_index (QueueType type) const { |
1276 |
|
✗ |
int index = -1; |
1277 |
|
✗ |
switch (type) { |
1278 |
|
✗ |
case QueueType::compute: |
1279 |
|
✗ |
index = detail::get_dedicated_compute_queue_index (queue_families); |
1280 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::compute_unavailable }; |
1281 |
|
✗ |
break; |
1282 |
|
✗ |
case QueueType::transfer: |
1283 |
|
✗ |
index = detail::get_dedicated_transfer_queue_index (queue_families); |
1284 |
|
✗ |
if (index < 0) return detail::Result<uint32_t>{ QueueError::transfer_unavailable }; |
1285 |
|
✗ |
break; |
1286 |
|
✗ |
default: |
1287 |
|
✗ |
return detail::Result<uint32_t>{ QueueError::invalid_queue_family_index }; |
1288 |
|
|
} |
1289 |
|
✗ |
return static_cast<uint32_t> (index); |
1290 |
|
|
} |
1291 |
|
|
namespace detail { |
1292 |
|
|
VkQueue get_queue (VkDevice device, uint32_t family) { |
1293 |
|
|
VkQueue out_queue; |
1294 |
|
✗ |
detail::vulkan_functions ().fp_vkGetDeviceQueue (device, family, 0, &out_queue); |
1295 |
|
✗ |
return out_queue; |
1296 |
|
|
} |
1297 |
|
|
} // namespace detail |
1298 |
|
|
detail::Result<VkQueue> Device::get_queue (QueueType type) const { |
1299 |
|
✗ |
auto index = get_queue_index (type); |
1300 |
|
✗ |
if (!index.has_value ()) return { index.error () }; |
1301 |
|
✗ |
return detail::get_queue (device, index.value ()); |
1302 |
|
✗ |
} |
1303 |
|
|
detail::Result<VkQueue> Device::get_dedicated_queue (QueueType type) const { |
1304 |
|
✗ |
auto index = get_dedicated_queue_index (type); |
1305 |
|
✗ |
if (!index.has_value ()) return { index.error () }; |
1306 |
|
✗ |
return detail::get_queue (device, index.value ()); |
1307 |
|
✗ |
} |
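// Illustrative sketch (editor's addition): fetching queues from a built vkb::Device. A
// graphics queue is treated as required, while a dedicated transfer queue may not exist on
// every device, so that lookup falls back to the graphics queue.
static void example_get_queues (vkb::Device const& device) {
    auto graphics_queue_ret = device.get_queue (vkb::QueueType::graphics);
    if (!graphics_queue_ret.has_value ()) {
        // graphics_queue_ret.error () explains why (e.g. QueueError::graphics_unavailable)
        return;
    }
    VkQueue graphics_queue = graphics_queue_ret.value ();

    auto transfer_queue_ret = device.get_dedicated_queue (vkb::QueueType::transfer);
    VkQueue transfer_queue =
        transfer_queue_ret.has_value () ? transfer_queue_ret.value () : graphics_queue;
    (void)transfer_queue;
}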
1308 |
|
|
|
1309 |
|
|
// ---- Device ---- // |
1310 |
|
|
|
1311 |
|
|
CustomQueueDescription::CustomQueueDescription (uint32_t index, uint32_t count, std::vector<float> priorities) |
1312 |
|
✗ |
: index (index), count (count), priorities (priorities) { |
1313 |
|
✗ |
assert (count == priorities.size ()); |
1314 |
|
✗ |
} |
1315 |
|
|
|
1316 |
|
|
void destroy_device (Device device) { |
1317 |
|
✗ |
detail::vulkan_functions ().fp_vkDestroyDevice (device.device, device.allocation_callbacks); |
1318 |
|
✗ |
} |
1319 |
|
|
|
1320 |
|
|
DeviceBuilder::DeviceBuilder (PhysicalDevice phys_device) { |
1321 |
|
✗ |
info.physical_device = phys_device; |
1322 |
|
✗ |
info.surface = phys_device.surface; |
1323 |
|
✗ |
info.queue_families = phys_device.queue_families; |
1324 |
|
✗ |
info.features = phys_device.features; |
1325 |
|
✗ |
info.extensions_to_enable = phys_device.extensions_to_enable; |
1326 |
|
✗ |
info.defer_surface_initialization = phys_device.defer_surface_initialization; |
1327 |
|
✗ |
} |
1328 |
|
|
|
1329 |
|
|
detail::Result<Device> DeviceBuilder::build () const { |
1330 |
|
|
|
1331 |
|
✗ |
std::vector<CustomQueueDescription> queue_descriptions; |
1332 |
|
✗ |
queue_descriptions.insert ( |
1333 |
|
✗ |
queue_descriptions.end (), info.queue_descriptions.begin (), info.queue_descriptions.end ()); |
1334 |
|
|
|
1335 |
|
✗ |
if (queue_descriptions.size () == 0) { |
1336 |
|
✗ |
for (uint32_t i = 0; i < info.queue_families.size (); i++) { |
1337 |
|
✗ |
queue_descriptions.push_back (CustomQueueDescription{ i, 1, std::vector<float>{ 1.0f } }); |
1338 |
|
|
} |
1339 |
|
|
} |
1340 |
|
|
|
1341 |
|
✗ |
std::vector<VkDeviceQueueCreateInfo> queueCreateInfos; |
1342 |
|
✗ |
for (auto& desc : queue_descriptions) { |
1343 |
|
✗ |
VkDeviceQueueCreateInfo queue_create_info = {}; |
1344 |
|
✗ |
queue_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO; |
1345 |
|
✗ |
queue_create_info.queueFamilyIndex = desc.index; |
1346 |
|
✗ |
queue_create_info.queueCount = desc.count; |
1347 |
|
✗ |
queue_create_info.pQueuePriorities = desc.priorities.data (); |
1348 |
|
✗ |
queueCreateInfos.push_back (queue_create_info); |
1349 |
|
|
} |
1350 |
|
|
|
1351 |
|
✗ |
std::vector<const char*> extensions = info.extensions_to_enable; |
1352 |
|
✗ |
if (info.surface != VK_NULL_HANDLE || info.defer_surface_initialization) |
1353 |
|
✗ |
extensions.push_back ({ VK_KHR_SWAPCHAIN_EXTENSION_NAME }); |
1354 |
|
|
|
1355 |
|
|
// VUID-VkDeviceCreateInfo-pNext-00373 - don't set pEnabledFeatures when a VkPhysicalDeviceFeatures2 is chained in pNext (see the sketch after this function)
1356 |
|
✗ |
bool has_phys_dev_features_2 = false; |
1357 |
|
✗ |
for (auto& pNext_struct : info.pNext_chain) { |
1358 |
|
✗ |
if (pNext_struct->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2) { |
1359 |
|
✗ |
has_phys_dev_features_2 = true; |
1360 |
|
|
} |
1361 |
|
|
} |
1362 |
|
|
|
1363 |
|
✗ |
VkDeviceCreateInfo device_create_info = {}; |
1364 |
|
✗ |
device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO; |
1365 |
|
✗ |
detail::setup_pNext_chain (device_create_info, info.pNext_chain); |
1366 |
|
✗ |
device_create_info.flags = info.flags; |
1367 |
|
✗ |
device_create_info.queueCreateInfoCount = static_cast<uint32_t> (queueCreateInfos.size ()); |
1368 |
|
✗ |
device_create_info.pQueueCreateInfos = queueCreateInfos.data (); |
1369 |
|
✗ |
device_create_info.enabledExtensionCount = static_cast<uint32_t> (extensions.size ()); |
1370 |
|
✗ |
device_create_info.ppEnabledExtensionNames = extensions.data (); |
1371 |
|
✗ |
if (!has_phys_dev_features_2) { |
1372 |
|
✗ |
device_create_info.pEnabledFeatures = &info.features; |
1373 |
|
|
} |
1374 |
|
|
|
1375 |
|
✗ |
Device device; |
1376 |
|
✗ |
VkResult res = detail::vulkan_functions ().fp_vkCreateDevice (info.physical_device.physical_device, |
1377 |
|
|
&device_create_info, |
1378 |
|
✗ |
info.allocation_callbacks, |
1379 |
|
|
&device.device); |
1380 |
|
✗ |
if (res != VK_SUCCESS) { |
1381 |
|
✗ |
return { DeviceError::failed_create_device, res }; |
1382 |
|
|
} |
1383 |
|
✗ |
device.physical_device = info.physical_device; |
1384 |
|
✗ |
device.surface = info.surface; |
1385 |
|
✗ |
device.queue_families = info.queue_families; |
1386 |
|
✗ |
device.allocation_callbacks = info.allocation_callbacks; |
1387 |
|
✗ |
return device; |
1388 |
|
✗ |
} |
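// Sketch of the VUID-VkDeviceCreateInfo-pNext-00373 rule referenced in build () above
// (editor's addition, plain Vulkan structs for illustration): when a VkPhysicalDeviceFeatures2
// is chained through pNext, the core features move into that struct and pEnabledFeatures must
// stay null - which is exactly what the has_phys_dev_features_2 check guards against.
static void example_features2_chain (VkPhysicalDeviceFeatures const& wanted_features,
    VkDeviceCreateInfo& device_create_info,
    VkPhysicalDeviceFeatures2& features2) {
    features2 = {};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.features = wanted_features;          // core features live here instead
    device_create_info.pNext = &features2;         // chained via pNext
    device_create_info.pEnabledFeatures = nullptr; // must be null per the VUID
}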
1389 |
|
|
DeviceBuilder& DeviceBuilder::custom_queue_setup (std::vector<CustomQueueDescription> queue_descriptions) { |
1390 |
|
✗ |
info.queue_descriptions = queue_descriptions; |
1391 |
|
✗ |
return *this; |
1392 |
|
|
} |
1393 |
|
|
DeviceBuilder& DeviceBuilder::set_allocation_callbacks (VkAllocationCallbacks* callbacks) { |
1394 |
|
✗ |
info.allocation_callbacks = callbacks; |
1395 |
|
✗ |
return *this; |
1396 |
|
|
} |
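// Illustrative sketch (editor's addition): building a logical device from a selected
// physical device. Without custom_queue_setup (), build () requests one queue from every
// queue family, as shown in the loop above.
static void example_build_device (vkb::PhysicalDevice const& physical_device) {
    vkb::DeviceBuilder device_builder (physical_device);
    auto device_ret = device_builder.build ();
    if (!device_ret) {
        // device_ret.error () / device_ret.vk_result () describe the failure
        return;
    }
    vkb::Device device = device_ret.value ();
    // ... use the device, then tear it down explicitly:
    vkb::destroy_device (device);
}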
1397 |
|
|
|
1398 |
|
|
// ---- Swapchain ---- // |
1399 |
|
|
|
1400 |
|
|
namespace detail { |
1401 |
|
|
struct SurfaceSupportDetails { |
1402 |
|
|
VkSurfaceCapabilitiesKHR capabilities; |
1403 |
|
|
std::vector<VkSurfaceFormatKHR> formats; |
1404 |
|
|
std::vector<VkPresentModeKHR> present_modes; |
1405 |
|
|
}; |
1406 |
|
|
|
1407 |
|
|
enum class SurfaceSupportError { |
1408 |
|
|
surface_handle_null, |
1409 |
|
|
failed_get_surface_capabilities, |
1410 |
|
|
failed_enumerate_surface_formats, |
1411 |
|
|
failed_enumerate_present_modes |
1412 |
|
|
}; |
1413 |
|
|
|
1414 |
|
|
struct SurfaceSupportErrorCategory : std::error_category { |
1415 |
|
|
const char* name () const noexcept override { return "vkb_surface_support"; }
1416 |
|
|
std::string message (int err) const override { |
1417 |
|
✗ |
switch (static_cast<SurfaceSupportError> (err)) { |
1418 |
|
✗ |
case SurfaceSupportError::surface_handle_null: |
1419 |
|
✗ |
return "surface_handle_null"; |
1420 |
|
✗ |
case SurfaceSupportError::failed_get_surface_capabilities: |
1421 |
|
✗ |
return "failed_get_surface_capabilities"; |
1422 |
|
✗ |
case SurfaceSupportError::failed_enumerate_surface_formats: |
1423 |
|
✗ |
return "failed_enumerate_surface_formats"; |
1424 |
|
✗ |
case SurfaceSupportError::failed_enumerate_present_modes: |
1425 |
|
✗ |
return "failed_enumerate_present_modes"; |
1426 |
|
✗ |
default: |
1427 |
|
✗ |
return ""; |
1428 |
|
|
} |
1429 |
|
|
} |
1430 |
|
|
}; |
1431 |
|
|
const SurfaceSupportErrorCategory surface_support_error_category; |
1432 |
|
|
|
1433 |
|
|
std::error_code make_error_code (SurfaceSupportError surface_support_error) { |
1434 |
|
✗ |
return { static_cast<int> (surface_support_error), detail::surface_support_error_category }; |
1435 |
|
|
} |
1436 |
|
|
|
1437 |
|
|
Result<SurfaceSupportDetails> query_surface_support_details (VkPhysicalDevice phys_device, VkSurfaceKHR surface) { |
1438 |
|
✗ |
if (surface == VK_NULL_HANDLE) |
1439 |
|
✗ |
return make_error_code (SurfaceSupportError::surface_handle_null); |
1440 |
|
|
|
1441 |
|
|
VkSurfaceCapabilitiesKHR capabilities; |
1442 |
|
✗ |
VkResult res = detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfaceCapabilitiesKHR ( |
1443 |
|
|
phys_device, surface, &capabilities); |
1444 |
|
✗ |
if (res != VK_SUCCESS) { |
1445 |
|
✗ |
return { make_error_code (SurfaceSupportError::failed_get_surface_capabilities), res }; |
1446 |
|
|
} |
1447 |
|
|
|
1448 |
|
✗ |
std::vector<VkSurfaceFormatKHR> formats; |
1449 |
|
✗ |
std::vector<VkPresentModeKHR> present_modes; |
1450 |
|
|
|
1451 |
|
✗ |
auto formats_ret = detail::get_vector<VkSurfaceFormatKHR> ( |
1452 |
|
✗ |
formats, detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfaceFormatsKHR, phys_device, surface); |
1453 |
|
✗ |
if (formats_ret != VK_SUCCESS) |
1454 |
|
✗ |
return { make_error_code (SurfaceSupportError::failed_enumerate_surface_formats), formats_ret }; |
1455 |
|
✗ |
auto present_modes_ret = detail::get_vector<VkPresentModeKHR> ( |
1456 |
|
✗ |
present_modes, detail::vulkan_functions ().fp_vkGetPhysicalDeviceSurfacePresentModesKHR, phys_device, surface); |
1457 |
|
✗ |
if (present_modes_ret != VK_SUCCESS) |
1458 |
|
✗ |
return { make_error_code (SurfaceSupportError::failed_enumerate_present_modes), present_modes_ret }; |
1459 |
|
|
|
1460 |
|
✗ |
return SurfaceSupportDetails{ capabilities, formats, present_modes }; |
1461 |
|
✗ |
} |
1462 |
|
|
|
1463 |
|
|
VkSurfaceFormatKHR find_surface_format (std::vector<VkSurfaceFormatKHR> const& available_formats, |
1464 |
|
|
std::vector<VkSurfaceFormatKHR> const& desired_formats) { |
1465 |
|
✗ |
for (auto const& desired_format : desired_formats) { |
1466 |
|
✗ |
for (auto const& available_format : available_formats) { |
1467 |
|
|
// finds the first format that is desired and available |
1468 |
|
✗ |
if (desired_format.format == available_format.format && |
1469 |
|
✗ |
desired_format.colorSpace == available_format.colorSpace) { |
1470 |
|
✗ |
return desired_format; |
1471 |
|
|
} |
1472 |
|
|
} |
1473 |
|
|
} |
1474 |
|
|
|
1475 |
|
|
// fall back to the first available format if none of the desired formats are available
1476 |
|
✗ |
return available_formats[0]; |
1477 |
|
|
} |
1478 |
|
|
|
1479 |
|
|
VkPresentModeKHR find_present_mode (std::vector<VkPresentModeKHR> const& available_present_modes,
1480 |
|
|
std::vector<VkPresentModeKHR> const& desired_present_modes) { |
1481 |
|
✗ |
for (auto const& desired_pm : desired_present_modes) { |
1482 |
|
✗ |
for (auto const& available_pm : available_present_modes) {
1483 |
|
|
// finds the first present mode that is desired and available |
1484 |
|
✗ |
if (desired_pm == available_pm) return desired_pm; |
1485 |
|
|
} |
1486 |
|
|
} |
1487 |
|
|
// FIFO is the only present mode required to be supported, so use it as the fallback
1488 |
|
✗ |
return VK_PRESENT_MODE_FIFO_KHR; |
1489 |
|
|
} |
1490 |
|
|
|
1491 |
|
|
template <typename T> T minimum (T a, T b) { return a < b ? a : b; } |
1492 |
|
|
template <typename T> T maximum (T a, T b) { return a > b ? a : b; } |
1493 |
|
|
|
1494 |
|
|
VkExtent2D find_extent ( |
1495 |
|
|
VkSurfaceCapabilitiesKHR const& capabilities, uint32_t desired_width, uint32_t desired_height) { |
1496 |
|
✗ |
if (capabilities.currentExtent.width != UINT32_MAX) { |
1497 |
|
✗ |
return capabilities.currentExtent; |
1498 |
|
|
} else { |
1499 |
|
✗ |
VkExtent2D actualExtent = { desired_width, desired_height }; |
1500 |
|
|
|
1501 |
|
✗ |
actualExtent.width = maximum (capabilities.minImageExtent.width, |
1502 |
|
✗ |
minimum (capabilities.maxImageExtent.width, actualExtent.width)); |
1503 |
|
✗ |
actualExtent.height = maximum (capabilities.minImageExtent.height, |
1504 |
|
✗ |
minimum (capabilities.maxImageExtent.height, actualExtent.height)); |
1505 |
|
|
|
1506 |
|
✗ |
return actualExtent; |
1507 |
|
|
} |
1508 |
|
|
} |
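// Worked example of the clamp above (editor's note): with minImageExtent 1x1,
// maxImageExtent 2048x2048 and a desired size of 3000x1500, the resulting extent is
// 2048x1500 - each dimension is clamped independently into [min, max]. When the surface
// reports a fixed currentExtent (width != UINT32_MAX), that extent is returned unchanged.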
1509 |
|
|
} // namespace detail |
1510 |
|
|
|
1511 |
|
|
void destroy_swapchain (Swapchain const& swapchain) { |
1512 |
|
✗ |
if (swapchain.device != VK_NULL_HANDLE && swapchain.swapchain != VK_NULL_HANDLE) { |
1513 |
|
✗ |
detail::vulkan_functions ().fp_vkDestroySwapchainKHR ( |
1514 |
|
✗ |
swapchain.device, swapchain.swapchain, swapchain.allocation_callbacks); |
1515 |
|
|
} |
1516 |
|
✗ |
} |
1517 |
|
|
|
1518 |
|
|
SwapchainBuilder::SwapchainBuilder (Device const& device) { |
1519 |
|
✗ |
info.device = device.device; |
1520 |
|
✗ |
info.physical_device = device.physical_device.physical_device; |
1521 |
|
✗ |
info.surface = device.surface; |
1522 |
|
✗ |
auto present = device.get_queue_index (QueueType::present); |
1523 |
|
✗ |
auto graphics = device.get_queue_index (QueueType::graphics); |
1524 |
|
|
// TODO: handle the error case when these queues are not available
1525 |
|
✗ |
info.graphics_queue_index = graphics.value ();
1526 |
|
✗ |
info.present_queue_index = present.value ();
1527 |
|
✗ |
} |
1528 |
|
|
SwapchainBuilder::SwapchainBuilder (Device const& device, VkSurfaceKHR const surface) { |
1529 |
|
✗ |
info.device = device.device; |
1530 |
|
✗ |
info.physical_device = device.physical_device.physical_device; |
1531 |
|
✗ |
info.surface = surface; |
1532 |
|
✗ |
Device temp_device = device; |
1533 |
|
✗ |
temp_device.surface = surface; |
1534 |
|
✗ |
auto present = temp_device.get_queue_index (QueueType::present); |
1535 |
|
✗ |
auto graphics = temp_device.get_queue_index (QueueType::graphics); |
1536 |
|
|
// TODO: handle the error case when these queues are not available
1537 |
|
✗ |
info.graphics_queue_index = graphics.value ();
1538 |
|
✗ |
info.present_queue_index = present.value ();
1539 |
|
✗ |
} |
1540 |
|
|
SwapchainBuilder::SwapchainBuilder (VkPhysicalDevice const physical_device, VkDevice const device, VkSurfaceKHR const surface, int32_t graphics_queue_index, int32_t present_queue_index) {
1541 |
|
✗ |
info.physical_device = physical_device; |
1542 |
|
✗ |
info.device = device; |
1543 |
|
✗ |
info.surface = surface; |
1544 |
|
✗ |
info.graphics_queue_index = static_cast<uint32_t>(graphics_queue_index); |
1545 |
|
✗ |
info.present_queue_index = static_cast<uint32_t>(present_queue_index); |
1546 |
|
✗ |
if (graphics_queue_index < 0 || present_queue_index < 0) { |
1547 |
|
|
auto queue_families = detail::get_vector_noerror<VkQueueFamilyProperties> ( |
1548 |
|
✗ |
detail::vulkan_functions().fp_vkGetPhysicalDeviceQueueFamilyProperties, physical_device); |
1549 |
|
✗ |
if (graphics_queue_index < 0) |
1550 |
|
✗ |
info.graphics_queue_index = static_cast<uint32_t>(detail::get_graphics_queue_index (queue_families)); |
1551 |
|
✗ |
if (present_queue_index < 0) |
1552 |
|
✗ |
info.present_queue_index = static_cast<uint32_t>(detail::get_present_queue_index (physical_device, surface, queue_families)); |
1553 |
|
✗ |
} |
1554 |
|
✗ |
} |
1555 |
|
|
detail::Result<Swapchain> SwapchainBuilder::build () const { |
1556 |
|
✗ |
if (info.surface == VK_NULL_HANDLE) { |
1557 |
|
✗ |
return detail::Error{ SwapchainError::surface_handle_not_provided }; |
1558 |
|
|
} |
1559 |
|
|
|
1560 |
|
✗ |
auto desired_formats = info.desired_formats; |
1561 |
|
✗ |
if (desired_formats.size () == 0) add_desired_formats (desired_formats); |
1562 |
|
✗ |
auto desired_present_modes = info.desired_present_modes; |
1563 |
|
✗ |
if (desired_present_modes.size () == 0) add_desired_present_modes (desired_present_modes); |
1564 |
|
|
|
1565 |
|
✗ |
auto surface_support_ret = detail::query_surface_support_details (info.physical_device, info.surface); |
1566 |
|
✗ |
if (!surface_support_ret.has_value ()) |
1567 |
|
|
return detail::Error{ SwapchainError::failed_query_surface_support_details, |
1568 |
|
✗ |
surface_support_ret.vk_result () }; |
1569 |
|
✗ |
auto surface_support = surface_support_ret.value (); |
1570 |
|
|
|
1571 |
|
✗ |
uint32_t image_count = surface_support.capabilities.minImageCount + 1; |
1572 |
|
✗ |
if (surface_support.capabilities.maxImageCount > 0 && image_count > surface_support.capabilities.maxImageCount) { |
1573 |
|
✗ |
image_count = surface_support.capabilities.maxImageCount; |
1574 |
|
|
} |
1575 |
|
✗ |
VkSurfaceFormatKHR surface_format = detail::find_surface_format (surface_support.formats, desired_formats); |
1576 |
|
|
|
1577 |
|
|
VkExtent2D extent = |
1578 |
|
✗ |
detail::find_extent (surface_support.capabilities, info.desired_width, info.desired_height); |
1579 |
|
|
|
1580 |
|
✗ |
uint32_t image_array_layers = info.array_layer_count; |
1581 |
|
✗ |
if (surface_support.capabilities.maxImageArrayLayers < info.array_layer_count) |
1582 |
|
✗ |
image_array_layers = surface_support.capabilities.maxImageArrayLayers; |
1583 |
|
✗ |
if (info.array_layer_count == 0) image_array_layers = 1; |
1584 |
|
|
|
1585 |
|
✗ |
uint32_t queue_family_indices[] = { info.graphics_queue_index, info.present_queue_index }; |
1586 |
|
|
|
1587 |
|
|
|
1588 |
|
|
VkPresentModeKHR present_mode = |
1589 |
|
✗ |
detail::find_present_mode (surface_support.present_modes, desired_present_modes); |
1590 |
|
|
|
1591 |
|
✗ |
VkSurfaceTransformFlagBitsKHR pre_transform = info.pre_transform; |
1592 |
|
✗ |
if (info.pre_transform == static_cast<VkSurfaceTransformFlagBitsKHR> (0)) |
1593 |
|
✗ |
pre_transform = surface_support.capabilities.currentTransform; |
1594 |
|
|
|
1595 |
|
✗ |
VkSwapchainCreateInfoKHR swapchain_create_info = {}; |
1596 |
|
✗ |
swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR; |
1597 |
|
✗ |
detail::setup_pNext_chain (swapchain_create_info, info.pNext_chain); |
1598 |
|
✗ |
swapchain_create_info.flags = info.create_flags; |
1599 |
|
✗ |
swapchain_create_info.surface = info.surface; |
1600 |
|
✗ |
swapchain_create_info.minImageCount = image_count; |
1601 |
|
✗ |
swapchain_create_info.imageFormat = surface_format.format; |
1602 |
|
✗ |
swapchain_create_info.imageColorSpace = surface_format.colorSpace; |
1603 |
|
✗ |
swapchain_create_info.imageExtent = extent; |
1604 |
|
✗ |
swapchain_create_info.imageArrayLayers = image_array_layers; |
1605 |
|
✗ |
swapchain_create_info.imageUsage = info.image_usage_flags; |
1606 |
|
|
|
1607 |
|
✗ |
if (info.graphics_queue_index != info.present_queue_index) { |
1608 |
|
✗ |
swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT; |
1609 |
|
✗ |
swapchain_create_info.queueFamilyIndexCount = 2; |
1610 |
|
✗ |
swapchain_create_info.pQueueFamilyIndices = queue_family_indices; |
1611 |
|
|
} else { |
1612 |
|
✗ |
swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE; |
1613 |
|
|
} |
1614 |
|
|
|
1615 |
|
✗ |
swapchain_create_info.preTransform = pre_transform; |
1616 |
|
✗ |
swapchain_create_info.compositeAlpha = info.composite_alpha; |
1617 |
|
✗ |
swapchain_create_info.presentMode = present_mode; |
1618 |
|
✗ |
swapchain_create_info.clipped = info.clipped; |
1619 |
|
✗ |
swapchain_create_info.oldSwapchain = info.old_swapchain; |
1620 |
|
✗ |
Swapchain swapchain{}; |
1621 |
|
✗ |
VkResult res = detail::vulkan_functions ().fp_vkCreateSwapchainKHR ( |
1622 |
|
✗ |
info.device, &swapchain_create_info, info.allocation_callbacks, &swapchain.swapchain); |
1623 |
|
✗ |
if (res != VK_SUCCESS) { |
1624 |
|
✗ |
return detail::Error{ SwapchainError::failed_create_swapchain, res }; |
1625 |
|
|
} |
1626 |
|
✗ |
swapchain.device = info.device; |
1627 |
|
✗ |
swapchain.image_format = surface_format.format; |
1628 |
|
✗ |
swapchain.extent = extent; |
1629 |
|
✗ |
auto images = swapchain.get_images (); |
1630 |
|
✗ |
if (!images) { |
1631 |
|
✗ |
return detail::Error{ SwapchainError::failed_get_swapchain_images }; |
1632 |
|
|
} |
1633 |
|
✗ |
swapchain.image_count = static_cast<uint32_t> (images.value ().size ()); |
1634 |
|
✗ |
swapchain.allocation_callbacks = info.allocation_callbacks; |
1635 |
|
✗ |
return swapchain; |
1636 |
|
✗ |
} |
1637 |
|
|
detail::Result<std::vector<VkImage>> Swapchain::get_images () { |
1638 |
|
✗ |
std::vector<VkImage> swapchain_images; |
1639 |
|
|
|
1640 |
|
✗ |
auto swapchain_images_ret = detail::get_vector<VkImage> ( |
1641 |
|
✗ |
swapchain_images, detail::vulkan_functions ().fp_vkGetSwapchainImagesKHR, device, swapchain); |
1642 |
|
✗ |
if (swapchain_images_ret != VK_SUCCESS) { |
1643 |
|
✗ |
return detail::Error{ SwapchainError::failed_get_swapchain_images, swapchain_images_ret }; |
1644 |
|
|
} |
1645 |
|
✗ |
return swapchain_images; |
1646 |
|
✗ |
} |
1647 |
|
|
detail::Result<std::vector<VkImageView>> Swapchain::get_image_views () { |
1648 |
|
|
|
1649 |
|
✗ |
auto swapchain_images_ret = get_images (); |
1650 |
|
✗ |
if (!swapchain_images_ret) return swapchain_images_ret.error (); |
1651 |
|
✗ |
auto swapchain_images = swapchain_images_ret.value (); |
1652 |
|
|
|
1653 |
|
✗ |
std::vector<VkImageView> views (swapchain_images.size ()); // parentheses: construct size() default elements, not a one-element initializer list
1654 |
|
|
|
1655 |
|
✗ |
for (size_t i = 0; i < swapchain_images.size (); i++) { |
1656 |
|
✗ |
VkImageViewCreateInfo createInfo = {}; |
1657 |
|
✗ |
createInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO; |
1658 |
|
✗ |
createInfo.image = swapchain_images[i]; |
1659 |
|
✗ |
createInfo.viewType = VK_IMAGE_VIEW_TYPE_2D; |
1660 |
|
✗ |
createInfo.format = image_format; |
1661 |
|
✗ |
createInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY; |
1662 |
|
✗ |
createInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY; |
1663 |
|
✗ |
createInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY; |
1664 |
|
✗ |
createInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY; |
1665 |
|
✗ |
createInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT; |
1666 |
|
✗ |
createInfo.subresourceRange.baseMipLevel = 0; |
1667 |
|
✗ |
createInfo.subresourceRange.levelCount = 1; |
1668 |
|
✗ |
createInfo.subresourceRange.baseArrayLayer = 0; |
1669 |
|
✗ |
createInfo.subresourceRange.layerCount = 1; |
1670 |
|
|
|
1671 |
|
✗ |
VkResult res = detail::vulkan_functions ().fp_vkCreateImageView ( |
1672 |
|
✗ |
device, &createInfo, allocation_callbacks, &views[i]); |
1673 |
|
✗ |
if (res != VK_SUCCESS) |
1674 |
|
✗ |
return detail::Error{ SwapchainError::failed_create_swapchain_image_views, res }; |
1675 |
|
|
} |
1676 |
|
✗ |
return views; |
1677 |
|
✗ |
} |
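// Illustrative sketch (editor's addition): creating a swapchain for a vkb::Device and
// fetching its image views. When no preferences are set, the desired format/present mode
// defaults defined further below are used. `width` and `height` are placeholders for the
// current framebuffer size supplied by the windowing layer.
static void example_build_swapchain (vkb::Device const& device, uint32_t width, uint32_t height) {
    vkb::SwapchainBuilder swapchain_builder (device);
    auto swapchain_ret = swapchain_builder.set_desired_extent (width, height)
                             .set_desired_present_mode (VK_PRESENT_MODE_MAILBOX_KHR)
                             .build ();
    if (!swapchain_ret) return; // swapchain_ret.error () / vk_result () describe the failure
    vkb::Swapchain swapchain = swapchain_ret.value ();

    auto image_views_ret = swapchain.get_image_views ();
    if (image_views_ret) {
        std::vector<VkImageView> image_views = image_views_ret.value ();
        // ... render, then clean up in reverse order:
        swapchain.destroy_image_views (image_views);
    }
    vkb::destroy_swapchain (swapchain);
}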
1678 |
|
|
void Swapchain::destroy_image_views (std::vector<VkImageView> const& image_views) { |
1679 |
|
✗ |
for (auto& image_view : image_views) { |
1680 |
|
✗ |
detail::vulkan_functions ().fp_vkDestroyImageView (device, image_view, allocation_callbacks); |
1681 |
|
|
} |
1682 |
|
✗ |
} |
1683 |
|
|
SwapchainBuilder& SwapchainBuilder::set_old_swapchain (VkSwapchainKHR old_swapchain) { |
1684 |
|
✗ |
info.old_swapchain = old_swapchain; |
1685 |
|
✗ |
return *this; |
1686 |
|
|
} |
1687 |
|
|
SwapchainBuilder& SwapchainBuilder::set_old_swapchain (Swapchain const& swapchain) { |
1688 |
|
✗ |
info.old_swapchain = swapchain.swapchain; |
1689 |
|
✗ |
return *this; |
1690 |
|
|
} |
1691 |
|
|
SwapchainBuilder& SwapchainBuilder::set_desired_extent (uint32_t width, uint32_t height) { |
1692 |
|
✗ |
info.desired_width = width; |
1693 |
|
✗ |
info.desired_height = height; |
1694 |
|
✗ |
return *this; |
1695 |
|
|
} |
1696 |
|
|
SwapchainBuilder& SwapchainBuilder::set_desired_format (VkSurfaceFormatKHR format) { |
1697 |
|
✗ |
info.desired_formats.insert (info.desired_formats.begin (), format); |
1698 |
|
✗ |
return *this; |
1699 |
|
|
} |
1700 |
|
|
SwapchainBuilder& SwapchainBuilder::add_fallback_format (VkSurfaceFormatKHR format) { |
1701 |
|
✗ |
info.desired_formats.push_back (format); |
1702 |
|
✗ |
return *this; |
1703 |
|
|
} |
1704 |
|
|
SwapchainBuilder& SwapchainBuilder::use_default_format_selection () { |
1705 |
|
✗ |
info.desired_formats.clear (); |
1706 |
|
✗ |
add_desired_formats (info.desired_formats); |
1707 |
|
✗ |
return *this; |
1708 |
|
|
} |
1709 |
|
|
|
1710 |
|
|
SwapchainBuilder& SwapchainBuilder::set_desired_present_mode (VkPresentModeKHR present_mode) { |
1711 |
|
✗ |
info.desired_present_modes.insert (info.desired_present_modes.begin (), present_mode); |
1712 |
|
✗ |
return *this; |
1713 |
|
|
} |
1714 |
|
|
SwapchainBuilder& SwapchainBuilder::add_fallback_present_mode (VkPresentModeKHR present_mode) { |
1715 |
|
✗ |
info.desired_present_modes.push_back (present_mode); |
1716 |
|
✗ |
return *this; |
1717 |
|
|
} |
1718 |
|
|
SwapchainBuilder& SwapchainBuilder::use_default_present_mode_selection () { |
1719 |
|
✗ |
info.desired_present_modes.clear (); |
1720 |
|
✗ |
add_desired_present_modes (info.desired_present_modes); |
1721 |
|
✗ |
return *this; |
1722 |
|
|
} |
1723 |
|
|
SwapchainBuilder& SwapchainBuilder::set_allocation_callbacks (VkAllocationCallbacks* callbacks) { |
1724 |
|
✗ |
info.allocation_callbacks = callbacks; |
1725 |
|
✗ |
return *this; |
1726 |
|
|
} |
1727 |
|
|
SwapchainBuilder& SwapchainBuilder::set_image_usage_flags (VkImageUsageFlags usage_flags) { |
1728 |
|
✗ |
info.image_usage_flags = usage_flags; |
1729 |
|
✗ |
return *this; |
1730 |
|
|
} |
1731 |
|
|
SwapchainBuilder& SwapchainBuilder::add_image_usage_flags (VkImageUsageFlags usage_flags) { |
1732 |
|
✗ |
info.image_usage_flags = info.image_usage_flags | usage_flags; |
1733 |
|
✗ |
return *this; |
1734 |
|
|
} |
1735 |
|
|
SwapchainBuilder& SwapchainBuilder::use_default_image_usage_flags () { |
1736 |
|
✗ |
info.image_usage_flags = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT; |
1737 |
|
✗ |
return *this; |
1738 |
|
|
} |
1739 |
|
|
SwapchainBuilder& SwapchainBuilder::set_image_array_layer_count (uint32_t array_layer_count) { |
1740 |
|
✗ |
info.array_layer_count = array_layer_count; |
1741 |
|
✗ |
return *this; |
1742 |
|
|
} |
1743 |
|
|
SwapchainBuilder& SwapchainBuilder::set_clipped (bool clipped) { |
1744 |
|
✗ |
info.clipped = clipped; |
1745 |
|
✗ |
return *this; |
1746 |
|
|
} |
1747 |
|
|
SwapchainBuilder& SwapchainBuilder::set_create_flags (VkSwapchainCreateFlagBitsKHR create_flags) { |
1748 |
|
✗ |
info.create_flags = create_flags; |
1749 |
|
✗ |
return *this; |
1750 |
|
|
} |
1751 |
|
|
SwapchainBuilder& SwapchainBuilder::set_pre_transform_flags (VkSurfaceTransformFlagBitsKHR pre_transform_flags) { |
1752 |
|
✗ |
info.pre_transform = pre_transform_flags; |
1753 |
|
✗ |
return *this; |
1754 |
|
|
} |
1755 |
|
|
SwapchainBuilder& SwapchainBuilder::set_composite_alpha_flags (VkCompositeAlphaFlagBitsKHR composite_alpha_flags) { |
1756 |
|
✗ |
info.composite_alpha = composite_alpha_flags; |
1757 |
|
✗ |
return *this; |
1758 |
|
|
} |
1759 |
|
|
|
1760 |
|
|
void SwapchainBuilder::add_desired_formats (std::vector<VkSurfaceFormatKHR>& formats) const { |
1761 |
|
✗ |
formats.push_back ({ VK_FORMAT_B8G8R8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); |
1762 |
|
✗ |
formats.push_back ({ VK_FORMAT_R8G8B8A8_SRGB, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR }); |
1763 |
|
✗ |
} |
1764 |
|
|
void SwapchainBuilder::add_desired_present_modes (std::vector<VkPresentModeKHR>& modes) const { |
1765 |
|
✗ |
modes.push_back (VK_PRESENT_MODE_MAILBOX_KHR); |
1766 |
|
✗ |
modes.push_back (VK_PRESENT_MODE_FIFO_KHR); |
1767 |
|
✗ |
} |
1768 |
|
|
} // namespace vkb |
1769 |
|
|
|