radv_device.c 263 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27
/*
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 *
 * based in part on anv driver which is:
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

28
#include "dirent.h"
29 30 31 32 33 34 35
#include <errno.h>
#include <fcntl.h>
#include <linux/audit.h>
#include <linux/bpf.h>
#include <linux/filter.h>
#include <linux/seccomp.h>
#include <linux/unistd.h>
36
#include <stdbool.h>
37 38
#include <stddef.h>
#include <stdio.h>
39
#include <string.h>
40 41
#include <sys/prctl.h>
#include <sys/wait.h>
42 43
#include <unistd.h>
#include <fcntl.h>
44

45
#include "radv_debug.h"
46
#include "radv_private.h"
47
#include "radv_shader.h"
48
#include "radv_cs.h"
49
#include "util/disk_cache.h"
50
#include "vk_util.h"
51
#include <xf86drm.h>
52
#include <amdgpu.h>
53
#include "drm-uapi/amdgpu_drm.h"
54
#include "winsys/amdgpu/radv_amdgpu_winsys_public.h"
55
#include "winsys/null/radv_null_winsys_public.h"
56 57 58
#include "ac_llvm_util.h"
#include "vk_format.h"
#include "sid.h"
59
#include "git_sha1.h"
60
#include "util/build_id.h"
61
#include "util/debug.h"
62
#include "util/mesa-sha1.h"
63
#include "util/timespec.h"
64
#include "util/u_atomic.h"
65
#include "compiler/glsl_types.h"
66
#include "util/xmlpool.h"
67

68 69 70 71 72 73 74 75 76 77
static struct radv_timeline_point *
radv_timeline_find_point_at_least_locked(struct radv_device *device,
                                         struct radv_timeline *timeline,
                                         uint64_t p);

static struct radv_timeline_point *
radv_timeline_add_point_locked(struct radv_device *device,
                               struct radv_timeline *timeline,
                               uint64_t p);

78 79 80
static void
radv_timeline_trigger_waiters_locked(struct radv_timeline *timeline,
                                     struct list_head *processing_list);
81

82 83 84 85
static
void radv_destroy_semaphore_part(struct radv_device *device,
                                 struct radv_semaphore_part *part);

86
/* Computes a deterministic VK_UUID_SIZE-byte cache UUID for the given GPU
 * family by hashing the radv/LLVM build identifiers, the family enum and the
 * host pointer size.  Returns 0 on success, -1 if a build identifier could
 * not be obtained (the UUID buffer is zeroed either way). */
static int
radv_device_get_cache_uuid(enum radeon_family family, void *uuid)
{
	struct mesa_sha1 ctx;
	unsigned char sha1[20];
	unsigned ptr_size = sizeof(void*);

	memset(uuid, 0, VK_UUID_SIZE);
	_mesa_sha1_init(&ctx);

	/* Hash the build ids of both radv and the LLVM backend so on-disk
	 * shader caches are invalidated whenever either component changes. */
	if (!disk_cache_get_function_identifier(radv_device_get_cache_uuid, &ctx) ||
	    !disk_cache_get_function_identifier(LLVMInitializeAMDGPUTargetInfo, &ctx))
		return -1;

	_mesa_sha1_update(&ctx, &family, sizeof(family));
	_mesa_sha1_update(&ctx, &ptr_size, sizeof(ptr_size));
	_mesa_sha1_final(&ctx, sha1);

	/* Only the first VK_UUID_SIZE bytes of the 20-byte SHA-1 are kept. */
	memcpy(uuid, sha1, VK_UUID_SIZE);
	return 0;
}

108 109 110 111 112 113
/* Fills `uuid` (VK_UUID_SIZE bytes) with the driver UUID shared by all AMD
 * Mesa drivers, as computed by common code in ac_gpu_info. */
static void
radv_get_driver_uuid(void *uuid)
{
	ac_compute_driver_uuid(uuid, VK_UUID_SIZE);
}

114
/* Fills `uuid` (VK_UUID_SIZE bytes) with a per-GPU device UUID derived from
 * `info` by common code in ac_gpu_info. */
static void
radv_get_device_uuid(struct radeon_info *info, void *uuid)
{
	ac_compute_device_uuid(info, uuid, VK_UUID_SIZE);
}

120 121 122 123 124 125 126 127 128 129 130 131
/* Returns the amount of VRAM the CPU can map directly (the BAR may be
 * smaller than total VRAM, hence the MIN2). */
static uint64_t
radv_get_visible_vram_size(struct radv_physical_device *device)
{
	return MIN2(device->rad_info.vram_size, device->rad_info.vram_vis_size);
}

/* Returns the amount of VRAM that is NOT CPU-visible (total minus visible);
 * 0 when the whole of VRAM is mappable. */
static uint64_t
radv_get_vram_size(struct radv_physical_device *device)
{
	return device->rad_info.vram_size - radv_get_visible_vram_size(device);
}

132 133 134
static void
radv_physical_device_init_mem_types(struct radv_physical_device *device)
{
135 136
	uint64_t visible_vram_size = radv_get_visible_vram_size(device);
	uint64_t vram_size = radv_get_vram_size(device);
137 138
	int vram_index = -1, visible_vram_index = -1, gart_index = -1;
	device->memory_properties.memoryHeapCount = 0;
139
	if (vram_size > 0) {
140 141
		vram_index = device->memory_properties.memoryHeapCount++;
		device->memory_properties.memoryHeaps[vram_index] = (VkMemoryHeap) {
142
			.size = vram_size,
143 144 145
			.flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
		};
	}
146 147 148 149 150 151 152 153 154

	if (device->rad_info.gart_size > 0) {
		gart_index = device->memory_properties.memoryHeapCount++;
		device->memory_properties.memoryHeaps[gart_index] = (VkMemoryHeap) {
			.size = device->rad_info.gart_size,
			.flags = 0,
		};
	}

155 156 157 158 159 160 161 162 163
	if (visible_vram_size) {
		visible_vram_index = device->memory_properties.memoryHeapCount++;
		device->memory_properties.memoryHeaps[visible_vram_index] = (VkMemoryHeap) {
			.size = visible_vram_size,
			.flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
		};
	}

	unsigned type_count = 0;
164 165 166

	if (device->rad_info.has_dedicated_vram) {
		if (vram_index >= 0) {
167 168
			device->memory_domains[type_count] = RADEON_DOMAIN_VRAM;
			device->memory_flags[type_count] = RADEON_FLAG_NO_CPU_ACCESS;
169 170 171 172 173 174 175
			device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
				.propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				.heapIndex = vram_index,
			};
		}
	} else {
		if (visible_vram_index >= 0) {
176 177
			device->memory_domains[type_count] = RADEON_DOMAIN_VRAM;
			device->memory_flags[type_count] = RADEON_FLAG_NO_CPU_ACCESS;
178 179 180 181 182
			device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
				.propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
				.heapIndex = visible_vram_index,
			};
		}
183
	}
184 185

	if (gart_index >= 0) {
186 187
		device->memory_domains[type_count] = RADEON_DOMAIN_GTT;
		device->memory_flags[type_count] = RADEON_FLAG_GTT_WC | RADEON_FLAG_CPU_ACCESS;
188 189
		device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
			.propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
190
			VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
191 192 193 194
			.heapIndex = gart_index,
		};
	}
	if (visible_vram_index >= 0) {
195 196
		device->memory_domains[type_count] = RADEON_DOMAIN_VRAM;
		device->memory_flags[type_count] = RADEON_FLAG_CPU_ACCESS;
197 198 199 200 201 202 203
		device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
			.propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
			VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
			VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
			.heapIndex = visible_vram_index,
		};
	}
204

205
	if (gart_index >= 0) {
206 207
		device->memory_domains[type_count] = RADEON_DOMAIN_GTT;
		device->memory_flags[type_count] = RADEON_FLAG_CPU_ACCESS;
208 209 210
		device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
			.propertyFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
			VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
211
			VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
212 213 214 215
			.heapIndex = gart_index,
		};
	}
	device->memory_properties.memoryTypeCount = type_count;
216 217 218 219 220 221 222 223 224 225 226 227 228

	if (device->rad_info.has_l2_uncached) {
		for (int i = 0; i < device->memory_properties.memoryTypeCount; i++) {
			VkMemoryType mem_type = device->memory_properties.memoryTypes[i];

			if ((mem_type.propertyFlags & (VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
						       VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)) ||
			    mem_type.propertyFlags == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {

				VkMemoryPropertyFlags property_flags = mem_type.propertyFlags |
					VK_MEMORY_PROPERTY_DEVICE_COHERENT_BIT_AMD |
					VK_MEMORY_PROPERTY_DEVICE_UNCACHED_BIT_AMD;

229 230
				device->memory_domains[type_count] = device->memory_domains[i];
				device->memory_flags[type_count] = device->memory_flags[i] | RADEON_FLAG_VA_UNCACHED;
231 232 233 234 235 236 237 238
				device->memory_properties.memoryTypes[type_count++] = (VkMemoryType) {
					.propertyFlags = property_flags,
					.heapIndex = mem_type.heapIndex,
				};
			}
		}
		device->memory_properties.memoryTypeCount = type_count;
	}
239 240
}

241 242 243
static VkResult
radv_physical_device_init(struct radv_physical_device *device,
			  struct radv_instance *instance,
244
			  drmDevicePtr drm_device)
245 246
{
	VkResult result;
247
	int fd = -1;
248
	int master_fd = -1;
249

250 251 252
	if (drm_device) {
		const char *path = drm_device->nodes[DRM_NODE_RENDER];
		drmVersionPtr version;
253

254 255 256 257
		fd = open(path, O_RDWR | O_CLOEXEC);
		if (fd < 0) {
			if (instance->debug_flags & RADV_DEBUG_STARTUP)
				radv_logi("Could not open device '%s'", path);
258

259 260
			return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
		}
261

262 263 264
		version = drmGetVersion(fd);
		if (!version) {
			close(fd);
265

266 267
			if (instance->debug_flags & RADV_DEBUG_STARTUP)
				radv_logi("Could not get the kernel driver version for device '%s'", path);
268

269 270 271 272 273 274 275 276 277 278 279 280 281
			return vk_errorf(instance, VK_ERROR_INCOMPATIBLE_DRIVER,
					 "failed to get version %s: %m", path);
		}

		if (strcmp(version->name, "amdgpu")) {
			drmFreeVersion(version);
			close(fd);

			if (instance->debug_flags & RADV_DEBUG_STARTUP)
				radv_logi("Device '%s' is not using the amdgpu kernel driver.", path);

			return VK_ERROR_INCOMPATIBLE_DRIVER;
		}
282
		drmFreeVersion(version);
283 284

		if (instance->debug_flags & RADV_DEBUG_STARTUP)
285
				radv_logi("Found compatible device '%s'.", path);
286
	}
287

288 289 290
	device->_loader_data.loaderMagic = ICD_LOADER_MAGIC;
	device->instance = instance;

291 292 293 294 295 296 297
	if (drm_device) {
		device->ws = radv_amdgpu_winsys_create(fd, instance->debug_flags,
						       instance->perftest_flags);
	} else {
		device->ws = radv_null_winsys_create();
	}

298
	if (!device->ws) {
299
		result = vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
300 301
		goto fail;
	}
302

303
	if (drm_device && instance->enabled_extensions.KHR_display) {
304 305 306 307 308 309 310 311 312 313 314 315 316 317 318 319
		master_fd = open(drm_device->nodes[DRM_NODE_PRIMARY], O_RDWR | O_CLOEXEC);
		if (master_fd >= 0) {
			uint32_t accel_working = 0;
			struct drm_amdgpu_info request = {
				.return_pointer = (uintptr_t)&accel_working,
				.return_size = sizeof(accel_working),
				.query = AMDGPU_INFO_ACCEL_WORKING
			};

			if (drmCommandWrite(master_fd, DRM_AMDGPU_INFO, &request, sizeof (struct drm_amdgpu_info)) < 0 || !accel_working) {
				close(master_fd);
				master_fd = -1;
			}
		}
	}

320
	device->master_fd = master_fd;
321
	device->local_fd = fd;
322 323
	device->ws->query_info(device->ws, &device->rad_info);

324 325
	device->use_aco = instance->perftest_flags & RADV_PERFTEST_ACO;

326 327 328
	snprintf(device->name, sizeof(device->name),
		 "AMD RADV%s %s (LLVM " MESA_LLVM_VERSION_STRING ")", device->use_aco ? "/ACO" : "",
		 device->rad_info.name);
329

330
	if (radv_device_get_cache_uuid(device->rad_info.family, device->cache_uuid)) {
331
		device->ws->destroy(device->ws);
332
		result = vk_errorf(instance, VK_ERROR_INITIALIZATION_FAILED,
333
				   "cannot generate UUID");
334 335
		goto fail;
	}
336

337
	/* These flags affect shader compilation. */
338
	uint64_t shader_env_flags = (device->use_aco ? 0x2 : 0);
339

Grazvydas Ignotas's avatar
Grazvydas Ignotas committed
340
	/* The gpu id is already embedded in the uuid so we just pass "radv"
341 342
	 * when creating the cache.
	 */
343 344
	char buf[VK_UUID_SIZE * 2 + 1];
	disk_cache_format_hex_id(buf, device->cache_uuid, VK_UUID_SIZE * 2);
345
	device->disk_cache = disk_cache_create(device->name, buf, shader_env_flags);
346

347
	if (device->rad_info.chip_class < GFX8)
348
		fprintf(stderr, "WARNING: radv is not a conformant vulkan implementation, testing use only.\n");
349

Józef Kucia's avatar
Józef Kucia committed
350
	radv_get_driver_uuid(&device->driver_uuid);
351
	radv_get_device_uuid(&device->rad_info, &device->device_uuid);
352

353
	device->out_of_order_rast_allowed = device->rad_info.has_out_of_order_rast &&
354
					    !(device->instance->debug_flags & RADV_DEBUG_NO_OUT_OF_ORDER);
355

356 357
	device->dcc_msaa_allowed =
		(device->instance->perftest_flags & RADV_PERFTEST_DCC_MSAA);
358

359 360
	device->use_shader_ballot = (device->use_aco && device->rad_info.chip_class >= GFX8) ||
				    (device->instance->perftest_flags & RADV_PERFTEST_SHADER_BALLOT);
361

362 363 364 365
	device->use_ngg = device->rad_info.chip_class >= GFX10 &&
			  device->rad_info.family != CHIP_NAVI14 &&
			  !(device->instance->debug_flags & RADV_DEBUG_NO_NGG);

366 367
	/* TODO: Implement NGG GS with ACO. */
	device->use_ngg_gs = device->use_ngg && !device->use_aco;
368 369
	device->use_ngg_streamout = false;

370 371
	/* Determine the number of threads per wave for all stages. */
	device->cs_wave_size = 64;
372
	device->ps_wave_size = 64;
373
	device->ge_wave_size = 64;
374 375 376 377

	if (device->rad_info.chip_class >= GFX10) {
		if (device->instance->perftest_flags & RADV_PERFTEST_CS_WAVE_32)
			device->cs_wave_size = 32;
378 379 380 381

		/* For pixel shaders, wave64 is recommanded. */
		if (device->instance->perftest_flags & RADV_PERFTEST_PS_WAVE_32)
			device->ps_wave_size = 32;
382 383 384

		if (device->instance->perftest_flags & RADV_PERFTEST_GE_WAVE_32)
			device->ge_wave_size = 32;
385 386
	}

387
	radv_physical_device_init_mem_types(device);
388
	radv_fill_device_extension_table(device, &device->supported_extensions);
389

390 391
	if (drm_device)
		device->bus_info = *drm_device->businfo.pci;
392 393 394 395 396 397 398 399

	if ((device->instance->debug_flags & RADV_DEBUG_INFO))
		ac_print_gpu_info(&device->rad_info);

	/* The WSI is structured as a layer on top of the driver, so this has
	 * to be the last part of initialization (at least until we get other
	 * semi-layers).
	 */
400 401 402
	result = radv_init_wsi(device);
	if (result != VK_SUCCESS) {
		device->ws->destroy(device->ws);
403
		vk_error(instance, result);
404 405 406
		goto fail;
	}

407 408 409 410
	return VK_SUCCESS;

fail:
	close(fd);
411 412
	if (master_fd != -1)
		close(master_fd);
413 414 415 416 417 418 419 420
	return result;
}

/* Tears down a physical device in reverse order of initialization: WSI
 * first (it layers on top of the winsys), then the winsys, the disk cache
 * and finally the file descriptors.  master_fd is optional (-1 when the
 * primary node was not kept). */
static void
radv_physical_device_finish(struct radv_physical_device *device)
{
	radv_finish_wsi(device);
	device->ws->destroy(device->ws);
	disk_cache_destroy(device->disk_cache);
	close(device->local_fd);
	if (device->master_fd != -1)
		close(device->master_fd);
}

/* Fallback VkAllocationCallbacks used when the application does not supply
 * its own allocator; these simply forward to the C runtime heap.
 *
 * NOTE(review): `align` is ignored and malloc()'s natural alignment is
 * relied upon — presumably sufficient for radv's internal allocations;
 * confirm no caller requests alignment beyond max_align_t. */
static void *
default_alloc_func(void *pUserData, size_t size, size_t align,
                   VkSystemAllocationScope allocationScope)
{
	return malloc(size);
}

/* Reallocation hook; `align` and the allocation scope are likewise unused. */
static void *
default_realloc_func(void *pUserData, void *pOriginal, size_t size,
                     size_t align, VkSystemAllocationScope allocationScope)
{
	return realloc(pOriginal, size);
}

/* Free hook matching the allocation functions above. */
static void
default_free_func(void *pUserData, void *pMemory)
{
	free(pMemory);
}

static const VkAllocationCallbacks default_alloc = {
	.pUserData = NULL,
	.pfnAllocation = default_alloc_func,
	.pfnReallocation = default_realloc_func,
	.pfnFree = default_free_func,
};

454
/* Option table for the RADV_DEBUG environment variable, parsed with
 * parse_debug_string() in radv_CreateInstance().  Must stay NULL-terminated;
 * radv_get_debug_option_name() indexes it by flag id. */
static const struct debug_control radv_debug_options[] = {
	{"nofastclears", RADV_DEBUG_NO_FAST_CLEARS},
	{"nodcc", RADV_DEBUG_NO_DCC},
	{"shaders", RADV_DEBUG_DUMP_SHADERS},
	{"nocache", RADV_DEBUG_NO_CACHE},
	{"shaderstats", RADV_DEBUG_DUMP_SHADER_STATS},
	{"nohiz", RADV_DEBUG_NO_HIZ},
	{"nocompute", RADV_DEBUG_NO_COMPUTE_QUEUE},
	{"allbos", RADV_DEBUG_ALL_BOS},
	{"noibs", RADV_DEBUG_NO_IBS},
	{"spirv", RADV_DEBUG_DUMP_SPIRV},
	{"vmfaults", RADV_DEBUG_VM_FAULTS},
	{"zerovram", RADV_DEBUG_ZERO_VRAM},
	{"syncshaders", RADV_DEBUG_SYNC_SHADERS},
	{"preoptir", RADV_DEBUG_PREOPTIR},
	{"nodynamicbounds", RADV_DEBUG_NO_DYNAMIC_BOUNDS},
	{"nooutoforder", RADV_DEBUG_NO_OUT_OF_ORDER},
	{"info", RADV_DEBUG_INFO},
	{"errors", RADV_DEBUG_ERRORS},
	{"startup", RADV_DEBUG_STARTUP},
	{"checkir", RADV_DEBUG_CHECKIR},
	{"nothreadllvm", RADV_DEBUG_NOTHREADLLVM},
	{"nobinning", RADV_DEBUG_NOBINNING},
	{"noloadstoreopt", RADV_DEBUG_NO_LOAD_STORE_OPT},
	{"nongg", RADV_DEBUG_NO_NGG},
	{"noshaderballot", RADV_DEBUG_NO_SHADER_BALLOT},
	{"allentrypoints", RADV_DEBUG_ALL_ENTRYPOINTS},
	{"metashaders", RADV_DEBUG_DUMP_META_SHADERS},
	{"nomemorycache", RADV_DEBUG_NO_MEMORY_CACHE},
	{NULL, 0}
};

486 487 488 489 490 491 492
/* Maps a RADV_DEBUG flag index back to its option string (used when logging
 * which debug flags are active).  `id` must be a valid index into
 * radv_debug_options, excluding the NULL terminator. */
const char *
radv_get_debug_option_name(int id)
{
	assert(id < ARRAY_SIZE(radv_debug_options) - 1);
	return radv_debug_options[id].string;
}

493
/* Option table for the RADV_PERFTEST environment variable (experimental
 * performance features).  Must stay NULL-terminated; indexed by
 * radv_get_perftest_option_name(). */
static const struct debug_control radv_perftest_options[] = {
	{"localbos", RADV_PERFTEST_LOCAL_BOS},
	{"dccmsaa", RADV_PERFTEST_DCC_MSAA},
	{"bolist", RADV_PERFTEST_BO_LIST},
	{"shader_ballot", RADV_PERFTEST_SHADER_BALLOT},
	{"tccompatcmask", RADV_PERFTEST_TC_COMPAT_CMASK},
	{"cswave32", RADV_PERFTEST_CS_WAVE_32},
	{"pswave32", RADV_PERFTEST_PS_WAVE_32},
	{"gewave32", RADV_PERFTEST_GE_WAVE_32},
	{"dfsm", RADV_PERFTEST_DFSM},
	{"aco", RADV_PERFTEST_ACO},
	{NULL, 0}
};

507 508 509
/* Maps a RADV_PERFTEST flag index back to its option string.  `id` must be
 * a valid index into radv_perftest_options, excluding the NULL terminator. */
const char *
radv_get_perftest_option_name(int id)
{
	assert(id < ARRAY_SIZE(radv_perftest_options) - 1);
	return radv_perftest_options[id].string;
}

514 515 516 517 518 519 520 521 522
/* Applies per-application workarounds keyed on the application name from
 * VkApplicationInfo.  Only mutates instance->debug_flags and
 * instance->perftest_flags; a NULL/absent name is a no-op. */
static void
radv_handle_per_app_options(struct radv_instance *instance,
			    const VkApplicationInfo *info)
{
	const char *name = info ? info->pApplicationName : NULL;

	if (!name)
		return;

	if (!strcmp(name, "DOOM_VFR")) {
		/* Work around a Doom VFR game bug */
		instance->debug_flags |= RADV_DEBUG_NO_DYNAMIC_BOUNDS;
	} else if (!strcmp(name, "MonsterHunterWorld.exe")) {
		/* Workaround for a WaW hazard when LLVM moves/merges
		 * load/store memory operations.
		 * See https://reviews.llvm.org/D61313
		 */
		if (LLVM_VERSION_MAJOR < 9)
			instance->debug_flags |= RADV_DEBUG_NO_LOAD_STORE_OPT;
	} else if (!strcmp(name, "Wolfenstein: Youngblood")) {
		if (!(instance->debug_flags & RADV_DEBUG_NO_SHADER_BALLOT) &&
		    !(instance->perftest_flags & RADV_PERFTEST_ACO)) {
			/* Force enable VK_AMD_shader_ballot because it looks
			 * safe and it gives a nice boost (+20% on Vega 56 at
			 * this time). It also prevents corruption on LLVM.
			 */
			instance->perftest_flags |= RADV_PERFTEST_SHADER_BALLOT;
		}
	} else if (!strcmp(name, "Fledge")) {
		/*
		 * Zero VRAM for "The Surge 2"
		 *
		 * This avoids a hang when rendering any level. Likely
		 * uninitialized data in an indirect draw.
		 */
		instance->debug_flags |= RADV_DEBUG_ZERO_VRAM;
	} else if (!strcmp(name, "No Man's Sky")) {
		/* Work around a NMS game bug */
		instance->debug_flags |= RADV_DEBUG_DISCARD_TO_DEMOTE;
	}
}

556 557 558 559 560 561 562 563 564
/* Looks up an instance extension by name; returns its index in
 * radv_instance_extensions or -1 when the name is unknown. */
static int radv_get_instance_extension_index(const char *name)
{
	unsigned idx = 0;

	while (idx < RADV_INSTANCE_EXTENSION_COUNT) {
		if (!strcmp(radv_instance_extensions[idx].extensionName, name))
			return idx;
		++idx;
	}
	return -1;
}

565 566
/* driconf XML describing the drirc options radv understands; parsed by
 * radv_init_dri_options() below. */
static const char radv_dri_options_xml[] =
DRI_CONF_BEGIN
	DRI_CONF_SECTION_PERFORMANCE
		DRI_CONF_ADAPTIVE_SYNC("true")
		DRI_CONF_VK_X11_OVERRIDE_MIN_IMAGE_COUNT(0)
		DRI_CONF_VK_X11_STRICT_IMAGE_COUNT("false")
	DRI_CONF_SECTION_END

	DRI_CONF_SECTION_DEBUG
		DRI_CONF_VK_WSI_FORCE_BGRA8_UNORM_FIRST("false")
	DRI_CONF_SECTION_END
DRI_CONF_END;

/* Parses the driconf option description above and loads any per-application
 * drirc overrides for "radv", matched against the engine name/version the
 * app reported at instance creation. */
static void  radv_init_dri_options(struct radv_instance *instance)
{
	driParseOptionInfo(&instance->available_dri_options, radv_dri_options_xml);
	driParseConfigFiles(&instance->dri_options,
	                    &instance->available_dri_options,
	                    0, "radv", NULL,
	                    instance->engineName,
	                    instance->engineVersion);
}
587

588 589 590 591 592 593
/*
 * vkCreateInstance entry point: allocates the instance, parses RADV_DEBUG /
 * RADV_PERFTEST / RADV_SECURE_COMPILE_THREADS, validates the requested
 * instance extensions, builds the dispatch tables and initializes debug
 * reporting, driconf and per-app workarounds.
 *
 * Fix vs. previous revision: both failure paths called
 * vk_error(instance, ...) AFTER vk_free2() had already freed `instance`,
 * passing freed memory to the error reporter.  The error is now reported
 * through the still-valid instance before it is freed.
 */
VkResult radv_CreateInstance(
	const VkInstanceCreateInfo*                 pCreateInfo,
	const VkAllocationCallbacks*                pAllocator,
	VkInstance*                                 pInstance)
{
	struct radv_instance *instance;
	VkResult result;

	assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);

	/* apiVersion == 0 must be treated as Vulkan 1.0 per the spec. */
	uint32_t client_version;
	if (pCreateInfo->pApplicationInfo &&
	    pCreateInfo->pApplicationInfo->apiVersion != 0) {
		client_version = pCreateInfo->pApplicationInfo->apiVersion;
	} else {
		client_version = VK_API_VERSION_1_0;
	}

	const char *engine_name = NULL;
	uint32_t engine_version = 0;
	if (pCreateInfo->pApplicationInfo) {
		engine_name = pCreateInfo->pApplicationInfo->pEngineName;
		engine_version = pCreateInfo->pApplicationInfo->engineVersion;
	}

	instance = vk_zalloc2(&default_alloc, pAllocator, sizeof(*instance), 8,
			      VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
	if (!instance)
		return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);

	instance->_loader_data.loaderMagic = ICD_LOADER_MAGIC;

	if (pAllocator)
		instance->alloc = *pAllocator;
	else
		instance->alloc = default_alloc;

	instance->apiVersion = client_version;
	/* -1 means "not enumerated yet"; see radv_EnumeratePhysicalDevices. */
	instance->physicalDeviceCount = -1;

	/* Get secure compile thread count. NOTE: We cap this at 32 */
#define MAX_SC_PROCS 32
	char *num_sc_threads = getenv("RADV_SECURE_COMPILE_THREADS");
	if (num_sc_threads)
		instance->num_sc_threads = MIN2(strtoul(num_sc_threads, NULL, 10), MAX_SC_PROCS);

	instance->debug_flags = parse_debug_string(getenv("RADV_DEBUG"),
						   radv_debug_options);

	/* Disable memory cache when secure compile is set */
	if (radv_device_use_secure_compile(instance))
		instance->debug_flags |= RADV_DEBUG_NO_MEMORY_CACHE;

	instance->perftest_flags = parse_debug_string(getenv("RADV_PERFTEST"),
						   radv_perftest_options);

	if (instance->perftest_flags & RADV_PERFTEST_ACO)
		fprintf(stderr, "WARNING: Experimental compiler backend enabled. Here be dragons! Incorrect rendering, GPU hangs and/or resets are likely\n");

	if (instance->debug_flags & RADV_DEBUG_STARTUP)
		radv_logi("Created an instance");

	for (uint32_t i = 0; i < pCreateInfo->enabledExtensionCount; i++) {
		const char *ext_name = pCreateInfo->ppEnabledExtensionNames[i];
		int index = radv_get_instance_extension_index(ext_name);

		if (index < 0 || !radv_supported_instance_extensions.extensions[index]) {
			/* Report through the still-valid instance, THEN free
			 * it (the old order used freed memory). */
			result = vk_error(instance, VK_ERROR_EXTENSION_NOT_PRESENT);
			vk_free2(&default_alloc, pAllocator, instance);
			return result;
		}

		instance->enabled_extensions.extensions[index] = true;
	}

	/* RADV_DEBUG=allentrypoints bypasses the extension-based filtering of
	 * advertised entrypoints below. */
	bool unchecked = instance->debug_flags & RADV_DEBUG_ALL_ENTRYPOINTS;

	for (unsigned i = 0; i < ARRAY_SIZE(instance->dispatch.entrypoints); i++) {
		/* Vulkan requires that entrypoints for extensions which have
		 * not been enabled must not be advertised.
		 */
		if (!unchecked &&
		    !radv_instance_entrypoint_is_enabled(i, instance->apiVersion,
							 &instance->enabled_extensions)) {
			instance->dispatch.entrypoints[i] = NULL;
		} else {
			instance->dispatch.entrypoints[i] =
				radv_instance_dispatch_table.entrypoints[i];
		}
	}

	for (unsigned i = 0; i < ARRAY_SIZE(instance->physical_device_dispatch.entrypoints); i++) {
		/* Vulkan requires that entrypoints for extensions which have
		 * not been enabled must not be advertised.
		 */
		if (!unchecked &&
		    !radv_physical_device_entrypoint_is_enabled(i, instance->apiVersion,
								&instance->enabled_extensions)) {
			instance->physical_device_dispatch.entrypoints[i] = NULL;
		} else {
			instance->physical_device_dispatch.entrypoints[i] =
				radv_physical_device_dispatch_table.entrypoints[i];
		}
	}

	for (unsigned i = 0; i < ARRAY_SIZE(instance->device_dispatch.entrypoints); i++) {
		/* Vulkan requires that entrypoints for extensions which have
		 * not been enabled must not be advertised.
		 */
		if (!unchecked &&
		    !radv_device_entrypoint_is_enabled(i, instance->apiVersion,
						       &instance->enabled_extensions, NULL)) {
			instance->device_dispatch.entrypoints[i] = NULL;
		} else {
			instance->device_dispatch.entrypoints[i] =
				radv_device_dispatch_table.entrypoints[i];
		}
	}

	result = vk_debug_report_instance_init(&instance->debug_report_callbacks);
	if (result != VK_SUCCESS) {
		/* Same ordering fix as above: report before freeing. */
		result = vk_error(instance, result);
		vk_free2(&default_alloc, pAllocator, instance);
		return result;
	}

	instance->engineName = vk_strdup(&instance->alloc, engine_name,
					 VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
	instance->engineVersion = engine_version;

	glsl_type_singleton_init_or_ref();

	VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));

	radv_init_dri_options(instance);
	radv_handle_per_app_options(instance, pCreateInfo->pApplicationInfo);

	*pInstance = radv_instance_to_handle(instance);

	return VK_SUCCESS;
}

/* vkDestroyInstance: finishes every enumerated physical device, then
 * releases instance-owned state (engine name, glsl types refcount, driconf
 * caches, debug-report state) and finally the instance itself. */
void radv_DestroyInstance(
	VkInstance                                  _instance,
	const VkAllocationCallbacks*                pAllocator)
{
	RADV_FROM_HANDLE(radv_instance, instance, _instance);

	/* Destroying a NULL handle is a no-op per the Vulkan spec. */
	if (!instance)
		return;

	for (int i = 0; i < instance->physicalDeviceCount; ++i) {
		radv_physical_device_finish(instance->physicalDevices + i);
	}

	vk_free(&instance->alloc, instance->engineName);

	VG(VALGRIND_DESTROY_MEMPOOL(instance));

	glsl_type_singleton_decref();

	driDestroyOptionCache(&instance->dri_options);
	driDestroyOptionInfo(&instance->available_dri_options);

	vk_debug_report_instance_destroy(&instance->debug_report_callbacks);

	vk_free(&instance->alloc, instance);
}

755 756
static VkResult
radv_enumerate_devices(struct radv_instance *instance)
757
{
758 759 760 761 762 763 764
	/* TODO: Check for more devices ? */
	drmDevicePtr devices[8];
	VkResult result = VK_ERROR_INCOMPATIBLE_DRIVER;
	int max_devices;

	instance->physicalDeviceCount = 0;

765 766 767 768 769 770 771 772 773 774 775 776 777
	if (getenv("RADV_FORCE_FAMILY")) {
		/* When RADV_FORCE_FAMILY is set, the driver creates a nul
		 * device that allows to test the compiler without having an
		 * AMDGPU instance.
		 */
		result = radv_physical_device_init(instance->physicalDevices +
			                           instance->physicalDeviceCount,
			                           instance, NULL);

		++instance->physicalDeviceCount;
		return VK_SUCCESS;
	}

778
	max_devices = drmGetDevices2(0, devices, ARRAY_SIZE(devices));
779 780 781 782

	if (instance->debug_flags & RADV_DEBUG_STARTUP)
		radv_logi("Found %d drm nodes", max_devices);

783
	if (max_devices < 1)
784
		return vk_error(instance, VK_ERROR_INCOMPATIBLE_DRIVER);
785 786 787 788

	for (unsigned i = 0; i < (unsigned)max_devices; i++) {
		if (devices[i]->available_nodes & 1 << DRM_NODE_RENDER &&
		    devices[i]->bustype == DRM_BUS_PCI &&
789
		    devices[i]->deviceinfo.pci->vendor_id == ATI_VENDOR_ID) {
790

791 792
			result = radv_physical_device_init(instance->physicalDevices +
			                                   instance->physicalDeviceCount,
793
			                                   instance,
794
			                                   devices[i]);
795 796 797
			if (result == VK_SUCCESS)
				++instance->physicalDeviceCount;
			else if (result != VK_ERROR_INCOMPATIBLE_DRIVER)
798
				break;
799 800
		}
	}
801 802
	drmFreeDevices(devices, max_devices);

803 804 805 806 807 808 809 810 811 812 813 814 815 816 817 818 819
	return result;
}

/* vkEnumeratePhysicalDevices: lazily enumerates devices on first call
 * (physicalDeviceCount < 0 means "not enumerated yet"), then implements the
 * standard count/fill two-call idiom, returning VK_INCOMPLETE when the
 * caller's array was too small. */
VkResult radv_EnumeratePhysicalDevices(
	VkInstance                                  _instance,
	uint32_t*                                   pPhysicalDeviceCount,
	VkPhysicalDevice*                           pPhysicalDevices)
{
	RADV_FROM_HANDLE(radv_instance, instance, _instance);
	VkResult result;

	if (instance->physicalDeviceCount < 0) {
		result = radv_enumerate_devices(instance);
		/* INCOMPATIBLE_DRIVER just means "no devices"; report 0. */
		if (result != VK_SUCCESS &&
		    result != VK_ERROR_INCOMPATIBLE_DRIVER)
			return result;
	}

	if (!pPhysicalDevices) {
		*pPhysicalDeviceCount = instance->physicalDeviceCount;
	} else {
		*pPhysicalDeviceCount = MIN2(*pPhysicalDeviceCount, instance->physicalDeviceCount);
		for (unsigned i = 0; i < *pPhysicalDeviceCount; ++i)
			pPhysicalDevices[i] = radv_physical_device_to_handle(instance->physicalDevices + i);
	}

	return *pPhysicalDeviceCount < instance->physicalDeviceCount ? VK_INCOMPLETE
	                                                             : VK_SUCCESS;
}

833 834 835 836 837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861
/* vkEnumeratePhysicalDeviceGroups: radv exposes each physical device as its
 * own single-member group.  Follows the standard count/fill two-call idiom
 * and returns VK_INCOMPLETE when the caller's array was too small. */
VkResult radv_EnumeratePhysicalDeviceGroups(
    VkInstance                                  _instance,
    uint32_t*                                   pPhysicalDeviceGroupCount,
    VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
{
	RADV_FROM_HANDLE(radv_instance, instance, _instance);

	/* Lazily enumerate on first use; "no devices" is not an error. */
	if (instance->physicalDeviceCount < 0) {
		VkResult result = radv_enumerate_devices(instance);
		if (result != VK_SUCCESS &&
		    result != VK_ERROR_INCOMPATIBLE_DRIVER)
			return result;
	}

	if (pPhysicalDeviceGroupProperties == NULL) {
		*pPhysicalDeviceGroupCount = instance->physicalDeviceCount;
		return VK_SUCCESS;
	}

	uint32_t count = MIN2(*pPhysicalDeviceGroupCount, instance->physicalDeviceCount);
	for (uint32_t i = 0; i < count; ++i) {
		VkPhysicalDeviceGroupProperties *props = &pPhysicalDeviceGroupProperties[i];

		props->physicalDeviceCount = 1;
		props->physicalDevices[0] =
			radv_physical_device_to_handle(instance->physicalDevices + i);
		props->subsetAllocation = false;
	}
	*pPhysicalDeviceGroupCount = count;

	return count < (uint32_t)instance->physicalDeviceCount ? VK_INCOMPLETE
	                                                       : VK_SUCCESS;
}

862 863 864 865
void radv_GetPhysicalDeviceFeatures(
	VkPhysicalDevice                            physicalDevice,
	VkPhysicalDeviceFeatures*                   pFeatures)
{
	RADV_FROM_HANDLE(radv_physical_device, pdevice, physicalDevice);
	memset(pFeatures, 0, sizeof(*pFeatures));

	/* Report the core Vulkan 1.0 feature set.  Most features are
	 * unconditionally supported on every chip RADV drives; the few
	 * hardware-dependent ones are derived from pdevice->rad_info. */
	*pFeatures = (VkPhysicalDeviceFeatures) {
		.robustBufferAccess                       = true,
		.fullDrawIndexUint32                      = true,
		.imageCubeArray                           = true,
		.independentBlend                         = true,
		.geometryShader                           = true,
		.tessellationShader                       = true,
		.sampleRateShading                        = true,
		.dualSrcBlend                             = true,
		.logicOp                                  = true,
		.multiDrawIndirect                        = true,
		.drawIndirectFirstInstance                = true,
		.depthClamp                               = true,
		.depthBiasClamp                           = true,
		.fillModeNonSolid                         = true,
		.depthBounds                              = true,
		.wideLines                                = true,
		.largePoints                              = true,
		.alphaToOne                               = true,
		.multiViewport                            = true,
		.samplerAnisotropy                        = true,
		/* ETC2 availability is queried per device family. */
		.textureCompressionETC2                   = radv_device_supports_etc(pdevice),
		.textureCompressionASTC_LDR               = false,
		.textureCompressionBC                     = true,
		.occlusionQueryPrecise                    = true,
		.pipelineStatisticsQuery                  = true,
		.vertexPipelineStoresAndAtomics           = true,
		.fragmentStoresAndAtomics                 = true,
		.shaderTessellationAndGeometryPointSize   = true,
		.shaderImageGatherExtended                = true,
		.shaderStorageImageExtendedFormats        = true,
		.shaderStorageImageMultisample            = true,
		.shaderUniformBufferArrayDynamicIndexing  = true,
		.shaderSampledImageArrayDynamicIndexing   = true,
		.shaderStorageBufferArrayDynamicIndexing  = true,
		.shaderStorageImageArrayDynamicIndexing   = true,
		.shaderStorageImageReadWithoutFormat      = true,
		.shaderStorageImageWriteWithoutFormat     = true,
		.shaderClipDistance                       = true,
		.shaderCullDistance                       = true,
		.shaderFloat64                            = true,
		.shaderInt64                              = true,
		/* Native 16-bit integer ops require GFX9 or newer. */
		.shaderInt16                              = pdevice->rad_info.chip_class >= GFX9,
		.sparseBinding                            = true,
		.variableMultisampleRate                  = true,
		.inheritedQueries                         = true,
	};
}

void radv_GetPhysicalDeviceFeatures2(
	VkPhysicalDevice                            physicalDevice,
	VkPhysicalDeviceFeatures2                  *pFeatures)
{
	RADV_FROM_HANDLE(radv_physical_device, pdevice, physicalDevice);
	vk_foreach_struct(ext, pFeatures->pNext) {
		switch (ext->sType) {
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VARIABLE_POINTERS_FEATURES: {
			VkPhysicalDeviceVariablePointersFeatures *features = (void *)ext;
			features->variablePointersStorageBuffer = true;
			features->variablePointers = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTIVIEW_FEATURES: {
			VkPhysicalDeviceMultiviewFeatures *features = (VkPhysicalDeviceMultiviewFeatures*)ext;
			features->multiview = true;
			features->multiviewGeometryShader = true;
			features->multiviewTessellationShader = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_DRAW_PARAMETERS_FEATURES: {
			VkPhysicalDeviceShaderDrawParametersFeatures *features =
			    (VkPhysicalDeviceShaderDrawParametersFeatures*)ext;
			features->shaderDrawParameters = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROTECTED_MEMORY_FEATURES: {
			VkPhysicalDeviceProtectedMemoryFeatures *features =
			    (VkPhysicalDeviceProtectedMemoryFeatures*)ext;
			features->protectedMemory = false;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_16BIT_STORAGE_FEATURES: {
			VkPhysicalDevice16BitStorageFeatures *features =
			    (VkPhysicalDevice16BitStorageFeatures*)ext;
			bool enable = !pdevice->use_aco || pdevice->rad_info.chip_class >= GFX8;
			features->storageBuffer16BitAccess = enable;
			features->uniformAndStorageBuffer16BitAccess = enable;
			features->storagePushConstant16 = enable;
			features->storageInputOutput16 = pdevice->rad_info.has_double_rate_fp16 && !pdevice->use_aco && LLVM_VERSION_MAJOR >= 9;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SAMPLER_YCBCR_CONVERSION_FEATURES: {
			VkPhysicalDeviceSamplerYcbcrConversionFeatures *features =
			    (VkPhysicalDeviceSamplerYcbcrConversionFeatures*)ext;
			features->samplerYcbcrConversion = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES: {
			VkPhysicalDeviceDescriptorIndexingFeatures *features =
				(VkPhysicalDeviceDescriptorIndexingFeatures*)ext;
			features->shaderInputAttachmentArrayDynamicIndexing = true;
			features->shaderUniformTexelBufferArrayDynamicIndexing = true;
			features->shaderStorageTexelBufferArrayDynamicIndexing = true;
			features->shaderUniformBufferArrayNonUniformIndexing = true;
			features->shaderSampledImageArrayNonUniformIndexing = true;
			features->shaderStorageBufferArrayNonUniformIndexing = true;
			features->shaderStorageImageArrayNonUniformIndexing = true;
			features->shaderInputAttachmentArrayNonUniformIndexing = true;
			features->shaderUniformTexelBufferArrayNonUniformIndexing = true;
			features->shaderStorageTexelBufferArrayNonUniformIndexing = true;
			features->descriptorBindingUniformBufferUpdateAfterBind = true;
			features->descriptorBindingSampledImageUpdateAfterBind = true;
			features->descriptorBindingStorageImageUpdateAfterBind = true;
			features->descriptorBindingStorageBufferUpdateAfterBind = true;
			features->descriptorBindingUniformTexelBufferUpdateAfterBind = true;
			features->descriptorBindingStorageTexelBufferUpdateAfterBind = true;
			features->descriptorBindingUpdateUnusedWhilePending = true;
			features->descriptorBindingPartiallyBound = true;
			features->descriptorBindingVariableDescriptorCount = true;
			features->runtimeDescriptorArray = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
			VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
				(VkPhysicalDeviceConditionalRenderingFeaturesEXT*)ext;
			features->conditionalRendering = true;
			features->inheritedConditionalRendering = false;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
			VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
				(VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
			features->vertexAttributeInstanceRateDivisor = true;
			features->vertexAttributeInstanceRateZeroDivisor = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
			VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
				(VkPhysicalDeviceTransformFeedbackFeaturesEXT*)ext;
			features->transformFeedback = true;
			features->geometryStreams = !pdevice->use_ngg_streamout;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES: {
			VkPhysicalDeviceScalarBlockLayoutFeatures *features =
				(VkPhysicalDeviceScalarBlockLayoutFeatures *)ext;
			features->scalarBlockLayout = pdevice->rad_info.chip_class >= GFX7;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MEMORY_PRIORITY_FEATURES_EXT: {
			VkPhysicalDeviceMemoryPriorityFeaturesEXT *features =
				(VkPhysicalDeviceMemoryPriorityFeaturesEXT *)ext;
			features->memoryPriority = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_EXT: {
			VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *features =
				(VkPhysicalDeviceBufferDeviceAddressFeaturesEXT *)ext;
			features->bufferDeviceAddress = true;
			features->bufferDeviceAddressCaptureReplay = false;
			features->bufferDeviceAddressMultiDevice = false;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES: {
			VkPhysicalDeviceBufferDeviceAddressFeatures *features =
				(VkPhysicalDeviceBufferDeviceAddressFeatures *)ext;
			features->bufferDeviceAddress = true;
			features->bufferDeviceAddressCaptureReplay = false;
			features->bufferDeviceAddressMultiDevice = false;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
			VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
				(VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
			features->depthClipEnable = true;
			break;
		}
		case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_QUERY_RESET_FEATURES: {
			VkPhysicalDeviceHostQueryResetFeatures *features =
				(VkPhysicalDeviceHostQueryResetFeatures *)ext;