anv_descriptor_set.c 34 KB
Newer Older
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

30 31
#include "util/mesa-sha1.h"

32 33 34 35 36 37 38 39 40
#include "anv_private.h"

/*
 * Descriptor set layouts.
 */

/* vkCreateDescriptorSetLayout: build the driver-side descriptor set layout.
 *
 * The layout struct, one binding-layout slot per binding number, and the
 * immutable-sampler pointer array are carved out of a single multialloc so
 * the whole thing can be freed in one shot once its refcount drops to zero.
 */
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   /* Pass 1: find the largest binding number (bindings may be sparse and in
    * any order) and count how many immutable sampler pointers we must store.
    */
   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;

   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer.  This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }

   /* Pass 2: walk bindings in increasing binding-number order and hand out
    * per-stage sampler/surface/image table indices plus buffer-view and
    * dynamic-offset slots.
    */
   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      /* Holes in the binding-number space were left as the -1 fill above. */
      if (binding == NULL)
         continue;

      /* Empty bindings contribute nothing to the layout. */
      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      /* Sampler-bearing types consume sampler table slots in each stage that
       * uses the binding.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      /* Buffer types get a buffer-view slot, then fall through to also take
       * a per-stage surface (binding table) slot like the image types.
       */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      /* Dynamic buffers additionally consume dynamic-offset slots. */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      /* Writable images/texel buffers also consume per-stage image slots. */
      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      /* Hand out a slice of the samplers array and resolve the handles now;
       * this replaces the temporary binding pointer stored above.
       */
      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
203 204
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
205 206 207 208
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

209 210 211
   if (!set_layout)
      return;

212
   anv_descriptor_set_layout_unref(device, set_layout);
213 214
}

215 216 217 218 219 220 221 222 223
static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}

/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */

/* vkCreatePipelineLayout: collect the referenced descriptor set layouts,
 * assign each set its starting slot in the flat dynamic-offset array, and
 * hash everything that affects compilation into layout->sha1 so the layout
 * can be used as a fingerprint.
 */
VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      /* The pipeline layout keeps each set layout alive; the matching unref
       * is in anv_DestroyPipelineLayout.
       */
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      /* Dynamic offsets from all sets share one flat array; record where
       * this set's slice starts and mark every stage that can see a dynamic
       * buffer through a surface slot.
       */
      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   /* Hash the inputs in a fixed order: each set layout with its
    * dynamic-offset start, then the set count and per-stage flags.
    */
   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
290 291
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
292 293 294 295
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

296 297 298
   if (!pipeline_layout)
      return;

299 300 301
   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

302
   vk_free2(&device->alloc, pAllocator, pipeline_layout);
303 304 305
}

/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state. The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout. Case 1) is easy enough,
 * and the free lists let us recycle blocks for case 2).
 */

/* Terminator for the descriptor pool's set free list.  pool->free_list and
 * pool_free_list_entry::next hold byte offsets into pool->data; 1 marks the
 * end of the chain.
 */
#define EMPTY 1

/* vkCreateDescriptorPool: size the pool from the worst-case contents.
 *
 * Host memory for sets, descriptors and buffer views lives in one trailing
 * allocation (pool->data); buffer-view surface state comes from a dedicated
 * state stream.
 */
VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         /* Buffer descriptors need a buffer view on top of the descriptor
          * itself, which is counted below.
          */
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   /* pool->size/next implement a bump allocator over pool->data; freed sets
    * go on free_list (see anv_descriptor_set_create/destroy).
    */
   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
369 370
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
371
{
372 373 374
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

375 376 377
   if (!pool)
      return;

378
   anv_state_stream_finish(&pool->surface_state_stream);
379
   vk_free2(&device->alloc, pAllocator, pool);
380 381 382
}

/* vkResetDescriptorPool: implicitly free every set allocated from the pool.
 * (flags is currently unused.)
 */
VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   /* Rewind the bump allocator and drop the set free list. */
   pool->next = 0;
   pool->free_list = EMPTY;
   /* Throw away all buffer-view surface state by recreating the stream. */
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

/* Header written over a freed descriptor-set block inside pool->data,
 * linking it into pool->free_list (see anv_descriptor_set_destroy).
 */
struct pool_free_list_entry {
   uint32_t next;   /* pool->data byte offset of the next entry, or EMPTY */
   uint32_t size;   /* size in bytes of this free block */
};

405 406
size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
407 408 409 410 411 412 413
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

414 415 416 417 418 419 420 421 422 423 424 425 426
size_t
anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout *binding)
{
   if (!binding->immutable_samplers)
      return binding->array_size;

   uint32_t total_plane_count = 0;
   for (uint32_t i = 0; i < binding->array_size; i++)
      total_plane_count += binding->immutable_samplers[i]->n_planes;

   return total_plane_count;
}

/* Node for the recycled buffer-view surface states; the node is written
 * into the freed state's own CPU mapping (state.map).
 */
struct surface_state_free_list_entry {
   void *next;               /* next free entry, or NULL */
   struct anv_state state;   /* the surface state being recycled */
};

/* Allocate and initialize a descriptor set with @layout from @pool.
 *
 * Host memory comes from the pool's bump allocator when the tail has room,
 * otherwise from a first-fit scan of the pool's free list.  Per the spec we
 * report VK_ERROR_FRAGMENTED_POOL only when a free list exists but nothing
 * fits, and VK_ERROR_OUT_OF_POOL_MEMORY otherwise.
 */
VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      /* Fast path: bump-allocate from the unused tail of the pool. */
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      /* First-fit scan of the free list; unlink the winning entry. */
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }

   /* The set keeps its layout alive; released in anv_descriptor_set_destroy. */
   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   /* Buffer views are laid out immediately after the descriptor array. */
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      /* Prefer a recycled 64-byte state from the pool's free list over a
       * fresh stream allocation.
       */
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}

/* Return @set's resources to @pool: the layout reference, the buffer-view
 * surface states, and the host memory block.  A block that ends exactly at
 * pool->next is handed back to the bump allocator; anything else goes on
 * the pool's free list.
 */
void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      /* The free-list node is written into the state's own CPU mapping. */
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}

/* vkAllocateDescriptorSets: allocate one set per requested layout.  On the
 * first failure all sets created so far (pDescriptorSets[0..i)) are freed
 * and the error is returned.
 */
VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   /* Partial failure: release the i sets that did succeed. */
   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   /* Return each set to its pool.  NULL handles in the array are legal and
    * are simply skipped.
    */
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t s = 0; s < count; s++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[s]);

      if (set != NULL)
         anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}

/* Write one image and/or sampler descriptor into @set at (binding, element).
 *
 * Which of @info's imageView/sampler members are consumed depends on @type.
 * If the binding carries immutable samplers, the immutable sampler always
 * wins over whatever the caller passed.  @devinfo is not used in this
 * function's visible body.
 */
void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

/* Write one texel-buffer descriptor into @set at (binding, element): locate
 * the slot in the set's flat descriptor array and store the buffer view.
 */
void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *layout =
      &set->layout->binding[binding];
   const uint32_t index = layout->descriptor_index + element;

   assert(type == layout->type);

   set->descriptors[index] = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}

/* Write one buffer descriptor into @set at (binding, element).
 *
 * Dynamic uniform/storage buffers only record buffer/offset/range here —
 * no surface state is created in this function.  All other buffer types get
 * a filled-out anv_buffer_view plus surface state.  @alloc_stream is
 * non-NULL when writing through a push command, in which case the surface
 * state is carved from the command buffer's stream instead of the one the
 * descriptor pool allocated.
 */
void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->bo = buffer->bo;
      bview->offset = buffer->offset + offset;
      bview->range = anv_buffer_get_range(buffer, offset, range);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer. Otherwise it will
       * be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets. */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->offset, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}

725
void anv_UpdateDescriptorSets(
726
    VkDevice                                    _device,
727
    uint32_t                                    descriptorWriteCount,
728
    const VkWriteDescriptorSet*                 pDescriptorWrites,
729
    uint32_t                                    descriptorCopyCount,
730 731
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
732 733
   ANV_FROM_HANDLE(anv_device, device, _device);

734
   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
735
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
736
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);
737

738 739 740 741 742
      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
743
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
744
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
745 746
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
747 748 749
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
750 751 752 753 754
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
755 756 757 758
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

759 760 761 762 763
            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
764
         }
765 766 767 768 769 770
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
771 772 773
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
774 775
            assert(buffer);

776 777
            anv_descriptor_set_write_buffer(set,
                                            device,
778
                                            NULL,
779 780 781 782 783 784
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
785
         }
786
         break;
787 788 789 790 791 792

      default:
         break;
      }
   }

793
   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
794
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
Józef Kucia's avatar
Józef Kucia committed
795
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
796 797 798 799 800 801 802 803 804 805 806 807 808 809 810 811
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
812 813
   }
}

/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   const struct anv_descriptor_set_layout *layout = set->layout;

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &layout->binding[entry->binding];
      struct anv_descriptor *desc = &set->descriptors[bind_layout->descriptor_index];
      desc += entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
845 846
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868 869 870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}

VkResult anv_CreateDescriptorUpdateTemplate(
894
    VkDevice                                    _device,
895
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
896
    const VkAllocationCallbacks*                pAllocator,
897
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
898 899 900 901 902 903 904 905 906 907 908
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

909 910
   template->bind_point = pCreateInfo->pipelineBindPoint;

911
   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
912 913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929 930 931 932 933 934
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
936
    VkDevice                                    _device,
937
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
938 939 940 941 942 943 944 945 946
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
948 949
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
950
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
951 952 953 954 955 956 957 958 959
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}