anv_private.h 113 KB
Newer Older
Kristian Høgsberg's avatar
Kristian Høgsberg committed
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

Emil Velikov's avatar
Emil Velikov committed
24 25
#ifndef ANV_PRIVATE_H
#define ANV_PRIVATE_H
Kristian Høgsberg's avatar
Kristian Høgsberg committed
26 27 28 29 30 31

#include <stdlib.h>
#include <stdio.h>
#include <stdbool.h>
#include <pthread.h>
#include <assert.h>
32
#include <stdint.h>
Kristian Høgsberg's avatar
Kristian Høgsberg committed
33 34
#include <i915_drm.h>

35 36 37 38
#ifdef HAVE_VALGRIND
#include <valgrind.h>
#include <memcheck.h>
#define VG(x) x
39
#define __gen_validate_value(x) VALGRIND_CHECK_MEM_IS_DEFINED(&(x), sizeof(x))
40 41 42 43
#else
#define VG(x)
#endif

44
#include "common/gen_clflush.h"
45
#include "dev/gen_device_info.h"
46
#include "blorp/blorp.h"
47
#include "compiler/brw_compiler.h"
Kristian Høgsberg's avatar
Kristian Høgsberg committed
48
#include "util/macros.h"
49
#include "util/list.h"
50
#include "util/u_atomic.h"
51
#include "util/u_vector.h"
52
#include "vk_alloc.h"
53
#include "vk_debug_report.h"
Kristian Høgsberg's avatar
Kristian Høgsberg committed
54

55 56 57 58 59 60 61
/* Pre-declarations needed for WSI entrypoints */
struct wl_surface;
struct wl_display;
typedef struct xcb_connection_t xcb_connection_t;
typedef uint32_t xcb_visualid_t;
typedef uint32_t xcb_window_t;

62 63 64
struct anv_buffer;
struct anv_buffer_view;
struct anv_image_view;
65
struct anv_instance;
66

67
struct gen_l3_config;
68

Kristian Høgsberg's avatar
Kristian Høgsberg committed
69
#include <vulkan/vulkan.h>
70
#include <vulkan/vulkan_intel.h>
Jason Ekstrand's avatar
Jason Ekstrand committed
71
#include <vulkan/vk_icd.h>
72
#include <vulkan/vk_android_native_buffer.h>
Kristian Høgsberg's avatar
Kristian Høgsberg committed
73

74
#include "anv_entrypoints.h"
75
#include "anv_extensions.h"
Jason Ekstrand's avatar
Jason Ekstrand committed
76
#include "isl/isl.h"
Kristian Høgsberg's avatar
Kristian Høgsberg committed
77

78
#include "common/gen_debug.h"
79
#include "common/intel_log.h"
80 81
#include "wsi_common.h"

82 83 84 85 86 87 88 89 90 91 92 93 94 95 96
/* Allowing different clear colors requires us to perform a depth resolve at
 * the end of certain render passes. This is because while slow clears store
 * the clear color in the HiZ buffer, fast clears (without a resolve) don't.
 * See the PRMs for examples describing when additional resolves would be
 * necessary. To enable fast clears without requiring extra resolves, we set
 * the clear value to a globally-defined one. We could allow different values
 * if the user doesn't expect coherent data during or after a render passes
 * (VK_ATTACHMENT_STORE_OP_DONT_CARE), but such users (aside from the CTS)
 * don't seem to exist yet. In almost all Vulkan applications tested thus far,
 * 1.0f seems to be the only value used. The only application that doesn't set
 * this value does so through the usage of an seemingly uninitialized clear
 * value.
 */
#define ANV_HZ_FC_VAL 1.0f

97
/* Hardware/driver limits advertised to the Vulkan application. */
#define MAX_VBS         28
#define MAX_SETS         8
#define MAX_RTS          8
#define MAX_VIEWPORTS   16
#define MAX_SCISSORS    16
#define MAX_PUSH_CONSTANTS_SIZE 128
#define MAX_DYNAMIC_BUFFERS 16
#define MAX_IMAGES 8
#define MAX_PUSH_DESCRIPTORS 32 /* Minimum requirement */

/* Internal vertex-buffer slots placed just past the user-visible range. */
#define ANV_SVGS_VB_INDEX    MAX_VBS
#define ANV_DRAWID_VB_INDEX (MAX_VBS + 1)

/* Ask the compiler to type-check printf-style format arguments. */
#define anv_printflike(a, b) __attribute__((__format__(__printf__, a, b)))

/* Round v down to the nearest multiple of a.  Unlike the align_* helpers
 * below, a does not have to be a power of two.
 */
static inline uint32_t
align_down_npot_u32(uint32_t v, uint32_t a)
{
   return (v / a) * a;
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
/* Round v up to the next multiple of a; a must be a nonzero power of two. */
static inline uint32_t
align_u32(uint32_t v, uint32_t a)
{
   const uint32_t mask = a - 1;
   assert(a != 0 && (a & mask) == 0);
   return (v + mask) & ~mask;
}

/* Round v up to the next multiple of a; a must be a nonzero power of two. */
static inline uint64_t
align_u64(uint64_t v, uint64_t a)
{
   const uint64_t mask = a - 1;
   assert(a != 0 && (a & mask) == 0);
   return (v + mask) & ~mask;
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
/* Round v up to the next multiple of a; a must be a positive power of two. */
static inline int32_t
align_i32(int32_t v, int32_t a)
{
   const int32_t mask = a - 1;
   assert(a != 0 && (a & mask) == 0);
   return (v + mask) & ~mask;
}

Chad Versace's avatar
Chad Versace committed
/** Alignment must be a power of 2. */
static inline bool
anv_is_aligned(uintmax_t n, uintmax_t a)
{
   const uintmax_t mask = a - 1;
   assert(a == (a & -a));
   return (n & mask) == 0;
}

Chad Versace's avatar
Chad Versace committed
/* Size of mip level `levels` below a base dimension of n: n >> levels,
 * clamped to at least 1.  A zero base dimension stays zero.
 */
static inline uint32_t
anv_minify(uint32_t n, uint32_t levels)
{
   if (n == 0)
      return 0;

   const uint32_t shifted = n >> levels;
   return shifted > 1 ? shifted : 1;
}

/* Clamp f to [min, max]; min must be strictly less than max.  NaN passes
 * through unchanged since neither comparison holds for it.
 */
static inline float
anv_clamp_f(float f, float min, float max)
{
   assert(min < max);

   if (f < min)
      return min;
   if (f > max)
      return max;
   return f;
}

Chad Versace's avatar
Chad Versace committed
/* Clear the bits of clear_mask from *inout_mask.  Returns true iff any of
 * those bits were previously set.
 */
static inline bool
anv_clear_mask(uint32_t *inout_mask, uint32_t clear_mask)
{
   const bool any_set = (*inout_mask & clear_mask) != 0;
   if (any_set)
      *inout_mask &= ~clear_mask;
   return any_set;
}

180 181 182 183 184 185 186 187 188 189 190 191 192
/* Reinterpret a VkClearColorValue as an isl_color_value by copying the four
 * 32-bit channel words verbatim; no format conversion is performed.
 */
static inline union isl_color_value
vk_to_isl_color(VkClearColorValue color)
{
   return (union isl_color_value) {
      .u32 = {
         color.uint32[0],
         color.uint32[1],
         color.uint32[2],
         color.uint32[3],
      },
   };
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
193 194 195 196 197
/* Iterate `b` over the indices of the set bits in `dword`, lowest bit first.
 * `dword` is copied into a local, so the argument is evaluated only once.
 */
#define for_each_bit(b, dword)                          \
   for (uint32_t __dword = (dword);                     \
        (b) = __builtin_ffs(__dword) - 1, __dword;      \
        __dword &= ~(1 << (b)))

/* memcpy `count` elements from src to dest, statically asserting that the
 * source and destination element sizes match.
 */
#define typed_memcpy(dest, src, count) ({ \
   STATIC_ASSERT(sizeof(*src) == sizeof(*dest)); \
   memcpy((dest), (src), (count) * sizeof(*(src))); \
})

/* Mapping from anv object to VkDebugReportObjectTypeEXT. New types need
 * to be added here in order to utilize mapping in debug/error/perf macros.
 */
#define REPORT_OBJECT_TYPE(o)                                                      \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_instance*),              \
   VK_DEBUG_REPORT_OBJECT_TYPE_INSTANCE_EXT,                                       \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_physical_device*),       \
   VK_DEBUG_REPORT_OBJECT_TYPE_PHYSICAL_DEVICE_EXT,                                \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_device*),                \
   VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,                                         \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), const struct anv_device*),          \
   VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_EXT,                                         \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_queue*),                 \
   VK_DEBUG_REPORT_OBJECT_TYPE_QUEUE_EXT,                                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_semaphore*),             \
   VK_DEBUG_REPORT_OBJECT_TYPE_SEMAPHORE_EXT,                                      \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_cmd_buffer*),            \
   VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_BUFFER_EXT,                                 \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_fence*),                 \
   VK_DEBUG_REPORT_OBJECT_TYPE_FENCE_EXT,                                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_device_memory*),         \
   VK_DEBUG_REPORT_OBJECT_TYPE_DEVICE_MEMORY_EXT,                                  \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_buffer*),                \
   VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_EXT,                                         \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_image*),                 \
   VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,                                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), const struct anv_image*),           \
   VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_EXT,                                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_event*),                 \
   VK_DEBUG_REPORT_OBJECT_TYPE_EVENT_EXT,                                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_query_pool*),            \
   VK_DEBUG_REPORT_OBJECT_TYPE_QUERY_POOL_EXT,                                     \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_buffer_view*),           \
   VK_DEBUG_REPORT_OBJECT_TYPE_BUFFER_VIEW_EXT,                                    \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_image_view*),            \
   VK_DEBUG_REPORT_OBJECT_TYPE_IMAGE_VIEW_EXT,                                     \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_shader_module*),         \
   VK_DEBUG_REPORT_OBJECT_TYPE_SHADER_MODULE_EXT,                                  \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_pipeline_cache*),        \
   VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_CACHE_EXT,                                 \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_pipeline_layout*),       \
   VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_LAYOUT_EXT,                                \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_render_pass*),           \
   VK_DEBUG_REPORT_OBJECT_TYPE_RENDER_PASS_EXT,                                    \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_pipeline*),              \
   VK_DEBUG_REPORT_OBJECT_TYPE_PIPELINE_EXT,                                       \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_descriptor_set_layout*), \
   VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_LAYOUT_EXT,                          \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_sampler*),               \
   VK_DEBUG_REPORT_OBJECT_TYPE_SAMPLER_EXT,                                        \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_descriptor_pool*),       \
   VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_POOL_EXT,                                \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_descriptor_set*),        \
   VK_DEBUG_REPORT_OBJECT_TYPE_DESCRIPTOR_SET_EXT,                                 \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_framebuffer*),           \
   VK_DEBUG_REPORT_OBJECT_TYPE_FRAMEBUFFER_EXT,                                    \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_cmd_pool*),              \
   VK_DEBUG_REPORT_OBJECT_TYPE_COMMAND_POOL_EXT,                                   \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct anv_surface*),               \
   VK_DEBUG_REPORT_OBJECT_TYPE_SURFACE_KHR_EXT,                                    \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct wsi_swapchain*),             \
   VK_DEBUG_REPORT_OBJECT_TYPE_SWAPCHAIN_KHR_EXT,                                  \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), struct vk_debug_callback*),         \
   VK_DEBUG_REPORT_OBJECT_TYPE_DEBUG_REPORT_CALLBACK_EXT_EXT,                      \
   __builtin_choose_expr (                                                         \
   __builtin_types_compatible_p (__typeof (o), void*),                             \
   VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,                                        \
   /* The void expression results in a compile-time error                          \
      when assigning the result to something.  */                                  \
   (void)0)))))))))))))))))))))))))))))))

Kristian Høgsberg's avatar
Kristian Høgsberg committed
304 305 306 307 308
/* Whenever we generate an error, pass it through this function. Useful for
 * debugging, where we can break on it. Only call at error site, not when
 * propagating errors. Might be useful to plug in a stack trace here.
 */

309 310 311
VkResult __vk_errorf(struct anv_instance *instance, const void *object,
                     VkDebugReportObjectTypeEXT type, VkResult error,
                     const char *file, int line, const char *format, ...);
312

Kristian Høgsberg's avatar
Kristian Høgsberg committed
313
#ifdef DEBUG
314 315
#define vk_error(error) __vk_errorf(NULL, NULL,\
                                    VK_DEBUG_REPORT_OBJECT_TYPE_UNKNOWN_EXT,\
316
                                    error, __FILE__, __LINE__, NULL)
317 318
#define vk_errorf(instance, obj, error, format, ...)\
    __vk_errorf(instance, obj, REPORT_OBJECT_TYPE(obj), error,\
319
                __FILE__, __LINE__, format, ## __VA_ARGS__)
320 321
#else
#define vk_error(error) error
322
#define vk_errorf(instance, obj, error, format, ...) error
Kristian Høgsberg's avatar
Kristian Høgsberg committed
323 324
#endif

325 326 327 328 329 330 331 332 333 334 335 336 337 338 339 340
/**
 * Warn on ignored extension structs.
 *
 * The Vulkan spec requires us to ignore unsupported or unknown structs in
 * a pNext chain.  In debug mode, emitting warnings for ignored structs may
 * help us discover structs that we should not have ignored.
 *
 * From the Vulkan 1.0.38 spec:
 *
 *    Any component of the implementation (the loader, any enabled layers,
 *    and drivers) must skip over, without processing (other than reading the
 *    sType and pNext members) any chained structures with sType values not
 *    defined by extensions supported by that component.
 */
#define anv_debug_ignored_stype(sType) \
   intel_logd("%s: ignored VkStructureType %u\n", __func__, (sType))
342

343 344 345 346
void __anv_perf_warn(struct anv_instance *instance, const void *object,
                     VkDebugReportObjectTypeEXT type, const char *file,
                     int line, const char *format, ...)
   anv_printflike(6, 7);
347 348
void anv_loge(const char *format, ...) anv_printflike(1, 2);
void anv_loge_v(const char *format, va_list va);
349 350 351 352

/**
 * Print a FINISHME message, including its source location.  The static
 * `reported` flag makes each call site warn at most once per process.
 */
#define anv_finishme(format, ...) \
   do { \
      static bool reported = false; \
      if (!reported) { \
         intel_logw("%s:%d: FINISHME: " format, __FILE__, __LINE__, \
                    ##__VA_ARGS__); \
         reported = true; \
      } \
   } while (0)
362

363 364 365
/**
 * Print a perf warning message.  Set INTEL_DEBUG=perf to see these.
 * Warns at most once per call site, like anv_finishme.
 */
#define anv_perf_warn(instance, obj, format, ...) \
   do { \
      static bool reported = false; \
      if (!reported && unlikely(INTEL_DEBUG & DEBUG_PERF)) { \
         __anv_perf_warn(instance, obj, REPORT_OBJECT_TYPE(obj), __FILE__, __LINE__,\
                         format, ##__VA_ARGS__); \
         reported = true; \
      } \
   } while (0)

376 377 378 379
/* A non-fatal assert.  Useful for debugging.  Logs the failed condition in
 * debug builds instead of aborting; compiles away entirely otherwise.
 */
#ifdef DEBUG
#define anv_assert(x) ({ \
   if (unlikely(!(x))) \
      intel_loge("%s:%d ASSERT: %s", __FILE__, __LINE__, #x); \
})
#else
#define anv_assert(x)
#endif

386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431
/* A multi-pointer allocator
 *
 * When copying data structures from the user (such as a render pass), it's
 * common to need to allocate data for a bunch of different things.  Instead
 * of doing several allocations and having to handle all of the error checking
 * that entails, it can be easier to do a single allocation.  This struct
 * helps facilitate that.  The intended usage looks like this:
 *
 *    ANV_MULTIALLOC(ma)
 *    anv_multialloc_add(&ma, &main_ptr, 1);
 *    anv_multialloc_add(&ma, &substruct1, substruct1Count);
 *    anv_multialloc_add(&ma, &substruct2, substruct2Count);
 *
 *    if (!anv_multialloc_alloc(&ma, pAllocator, VK_ALLOCATION_SCOPE_FOO))
 *       return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);
 */
struct anv_multialloc {
    size_t size;        /* Total bytes reserved so far */
    size_t align;       /* Largest alignment requested so far */

    uint32_t ptr_count; /* Number of entries used in ptrs[] */
    void **ptrs[8];     /* Out-pointers to fix up once the block is allocated */
};

#define ANV_MULTIALLOC_INIT \
   ((struct anv_multialloc) { 0, })

#define ANV_MULTIALLOC(_name) \
   struct anv_multialloc _name = ANV_MULTIALLOC_INIT

/* Reserve `size` bytes at alignment `align` and remember `ptr` so that
 * anv_multialloc_alloc() can later point it into the single allocation.
 * Until then, the eventual offset is stashed in *ptr itself.
 */
__attribute__((always_inline))
static inline void
_anv_multialloc_add(struct anv_multialloc *ma,
                    void **ptr, size_t size, size_t align)
{
   size_t offset = align_u64(ma->size, align);
   ma->size = offset + size;
   ma->align = MAX2(ma->align, align);

   /* Store the offset in the pointer. */
   *ptr = (void *)(uintptr_t)offset;

   assert(ma->ptr_count < ARRAY_SIZE(ma->ptrs));
   ma->ptrs[ma->ptr_count++] = ptr;
}

432 433 434
/* Reserve _size bytes with the natural alignment of the pointee of *_ptr. */
#define anv_multialloc_add_size(_ma, _ptr, _size) \
   _anv_multialloc_add((_ma), (void **)(_ptr), (_size), __alignof__(**(_ptr)))

/* Reserve space for an array of _count elements of the pointee type. */
#define anv_multialloc_add(_ma, _ptr, _count) \
   anv_multialloc_add_size(_ma, _ptr, (_count) * sizeof(**(_ptr)));
437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483

/* Perform the single allocation sized and aligned by the preceding
 * anv_multialloc_add() calls, then rewrite each registered pointer from its
 * stashed offset into a real address inside the allocation.  Returns NULL on
 * allocation failure, leaving the registered pointers holding offsets.
 */
__attribute__((always_inline))
static inline void *
anv_multialloc_alloc(struct anv_multialloc *ma,
                     const VkAllocationCallbacks *alloc,
                     VkSystemAllocationScope scope)
{
   void *ptr = vk_alloc(alloc, ma->size, ma->align, scope);
   if (!ptr)
      return NULL;

   /* Fill out each of the pointers with their final value.
    *
    *   for (uint32_t i = 0; i < ma->ptr_count; i++)
    *      *ma->ptrs[i] = ptr + (uintptr_t)*ma->ptrs[i];
    *
    * Unfortunately, even though ma->ptr_count is basically guaranteed to be a
    * constant, GCC is incapable of figuring this out and unrolling the loop
    * so we have to give it a little help.
    */
   STATIC_ASSERT(ARRAY_SIZE(ma->ptrs) == 8);
#define _ANV_MULTIALLOC_UPDATE_POINTER(_i) \
   if ((_i) < ma->ptr_count) \
      *ma->ptrs[_i] = ptr + (uintptr_t)*ma->ptrs[_i]
   _ANV_MULTIALLOC_UPDATE_POINTER(0);
   _ANV_MULTIALLOC_UPDATE_POINTER(1);
   _ANV_MULTIALLOC_UPDATE_POINTER(2);
   _ANV_MULTIALLOC_UPDATE_POINTER(3);
   _ANV_MULTIALLOC_UPDATE_POINTER(4);
   _ANV_MULTIALLOC_UPDATE_POINTER(5);
   _ANV_MULTIALLOC_UPDATE_POINTER(6);
   _ANV_MULTIALLOC_UPDATE_POINTER(7);
#undef _ANV_MULTIALLOC_UPDATE_POINTER

   return ptr;
}

/* Like anv_multialloc_alloc, but allocates from `alloc` when one was
 * provided and falls back to `parent_alloc` otherwise.
 */
__attribute__((always_inline))
static inline void *
anv_multialloc_alloc2(struct anv_multialloc *ma,
                      const VkAllocationCallbacks *parent_alloc,
                      const VkAllocationCallbacks *alloc,
                      VkSystemAllocationScope scope)
{
   const VkAllocationCallbacks *chosen =
      alloc != NULL ? alloc : parent_alloc;
   return anv_multialloc_alloc(ma, chosen, scope);
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
484
struct anv_bo {
485
   uint32_t gem_handle;
486 487 488 489 490

   /* Index into the current validation list.  This is used by the
    * validation list building alrogithm to track which buffers are already
    * in the validation list so that we can ensure uniqueness.
    */
Kristian Høgsberg's avatar
Kristian Høgsberg committed
491
   uint32_t index;
492 493 494 495 496

   /* Last known offset.  This value is provided by the kernel when we
    * execbuf and is used as the presumed offset for the next bunch of
    * relocations.
    */
Kristian Høgsberg's avatar
Kristian Høgsberg committed
497
   uint64_t offset;
498

Kristian Høgsberg's avatar
Kristian Høgsberg committed
499 500
   uint64_t size;
   void *map;
501

502 503
   /** Flags to pass to the kernel through drm_i915_exec_object2::flags */
   uint32_t flags;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
504 505
};

506 507 508 509 510
/* Initialize a BO for a freshly-created GEM handle: unmapped, unindexed,
 * with an unknown (-1) presumed offset and no execbuf flags.
 */
static inline void
anv_bo_init(struct anv_bo *bo, uint32_t gem_handle, uint64_t size)
{
   bo->gem_handle = gem_handle;
   bo->index = 0;
   bo->offset = -1;
   bo->size = size;
   bo->map = NULL;
   bo->flags = 0;
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
517 518 519 520 521 522
/* Represents a lock-free linked list of "free" things.  This is used by
 * both the block pool and the state pools.  Unfortunately, in order to
 * solve the ABA problem, we can't use a single uint32_t head.
 */
union anv_free_list {
   struct {
523
      int32_t offset;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
524 525 526 527 528 529 530 531 532

      /* A simple count that is incremented every time the head changes. */
      uint32_t count;
   };
   uint64_t u64;
};

#define ANV_FREE_LIST_EMPTY ((union anv_free_list) { { 1, 0 } })

533 534 535 536 537 538 539 540 541 542
/* A {next, end} allocation cursor packed into a single 64-bit word so both
 * halves can be read or written together as one value.
 */
struct anv_block_state {
   union {
      struct {
         uint32_t next; /* Offset of the next free byte */
         uint32_t end;  /* End of the currently usable range */
      };
      uint64_t u64;
   };
};

Kristian Høgsberg's avatar
Kristian Høgsberg committed
543 544 545
struct anv_block_pool {
   struct anv_device *device;

546 547
   uint64_t bo_flags;

Kristian Høgsberg's avatar
Kristian Høgsberg committed
548
   struct anv_bo bo;
549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564

   /* The offset from the start of the bo to the "center" of the block
    * pool.  Pointers to allocated blocks are given by
    * bo.map + center_bo_offset + offsets.
    */
   uint32_t center_bo_offset;

   /* Current memory map of the block pool.  This pointer may or may not
    * point to the actual beginning of the block pool memory.  If
    * anv_block_pool_alloc_back has ever been called, then this pointer
    * will point to the "center" position of the buffer and all offsets
    * (negative or positive) given out by the block pool alloc functions
    * will be valid relative to this pointer.
    *
    * In particular, map == bo.map + center_offset
    */
Kristian Høgsberg's avatar
Kristian Høgsberg committed
565 566 567 568 569 570 571
   void *map;
   int fd;

   /**
    * Array of mmaps and gem handles owned by the block pool, reclaimed when
    * the block pool is destroyed.
    */
572
   struct u_vector mmap_cleanups;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
573

574
   struct anv_block_state state;
575 576

   struct anv_block_state back_state;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
577 578
};

579 580
/* Block pools are backed by a fixed-size 1GB memfd */
#define BLOCK_POOL_MEMFD_SIZE (1ul << 30)

/* The center of the block pool is also the middle of the memfd.  This may
 * change in the future if we decide differently for some reason.
 */
#define BLOCK_POOL_MEMFD_CENTER (BLOCK_POOL_MEMFD_SIZE / 2)

587 588 589
static inline uint32_t
anv_block_pool_size(struct anv_block_pool *pool)
{
590
   return pool->state.end + pool->back_state.end;
591 592
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
593
struct anv_state {
594
   int32_t offset;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
595 596 597 598
   uint32_t alloc_size;
   void *map;
};

599 600
#define ANV_STATE_NULL ((struct anv_state) { .alloc_size = 0 })

Kristian Høgsberg's avatar
Kristian Høgsberg committed
601 602 603 604 605 606
/* One power-of-two size bucket inside a state pool: a free list of returned
 * states plus an allocation cursor into the backing block pool.
 */
struct anv_fixed_size_state_pool {
   union anv_free_list free_list;
   struct anv_block_state block;
};

#define ANV_MIN_STATE_SIZE_LOG2 6
#define ANV_MAX_STATE_SIZE_LOG2 20

#define ANV_STATE_BUCKETS (ANV_MAX_STATE_SIZE_LOG2 - ANV_MIN_STATE_SIZE_LOG2 + 1)
Kristian Høgsberg's avatar
Kristian Høgsberg committed
610 611

struct anv_state_pool {
612
   struct anv_block_pool block_pool;
613 614 615 616

   /* The size of blocks which will be allocated from the block pool */
   uint32_t block_size;

617 618 619
   /** Free list for "back" allocations */
   union anv_free_list back_alloc_free_list;

Kristian Høgsberg's avatar
Kristian Høgsberg committed
620 621 622
   struct anv_fixed_size_state_pool buckets[ANV_STATE_BUCKETS];
};

623 624
struct anv_state_stream_block;

Kristian Høgsberg's avatar
Kristian Høgsberg committed
625
struct anv_state_stream {
626 627 628 629
   struct anv_state_pool *state_pool;

   /* The size of blocks to allocate from the state pool */
   uint32_t block_size;
630

631 632
   /* Current block we're allocating from */
   struct anv_state block;
633

634
   /* Offset into the current block at which to allocate the next state */
Kristian Høgsberg's avatar
Kristian Høgsberg committed
635
   uint32_t next;
636 637 638

   /* List of all blocks allocated from this pool */
   struct anv_state_stream_block *block_list;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
639 640
};

641 642 643
/* The block_pool functions exported for testing only.  The block pool should
 * only be used via a state pool (see below).
 */
644
VkResult anv_block_pool_init(struct anv_block_pool *pool,
645
                             struct anv_device *device,
646 647
                             uint32_t initial_size,
                             uint64_t bo_flags);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
648
void anv_block_pool_finish(struct anv_block_pool *pool);
649 650 651 652
int32_t anv_block_pool_alloc(struct anv_block_pool *pool,
                             uint32_t block_size);
int32_t anv_block_pool_alloc_back(struct anv_block_pool *pool,
                                  uint32_t block_size);
653 654 655

VkResult anv_state_pool_init(struct anv_state_pool *pool,
                             struct anv_device *device,
656 657
                             uint32_t block_size,
                             uint64_t bo_flags);
658
void anv_state_pool_finish(struct anv_state_pool *pool);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
659
struct anv_state anv_state_pool_alloc(struct anv_state_pool *pool,
660
                                      uint32_t state_size, uint32_t alignment);
661
struct anv_state anv_state_pool_alloc_back(struct anv_state_pool *pool);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
662 663
void anv_state_pool_free(struct anv_state_pool *pool, struct anv_state state);
void anv_state_stream_init(struct anv_state_stream *stream,
664 665
                           struct anv_state_pool *state_pool,
                           uint32_t block_size);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
666 667 668 669
void anv_state_stream_finish(struct anv_state_stream *stream);
struct anv_state anv_state_stream_alloc(struct anv_state_stream *stream,
                                        uint32_t size, uint32_t alignment);

Jason Ekstrand's avatar
Jason Ekstrand committed
670 671 672 673 674 675 676
/**
 * Implements a pool of re-usable BOs.  The interface is identical to that
 * of block_pool except that each block is its own BO.
 */
struct anv_bo_pool {
   struct anv_device *device;

   /* EXEC_OBJECT_* flags applied to every BO allocated from this pool */
   uint64_t bo_flags;

   /* 16 free lists of recycled BOs; presumably bucketed by size — confirm
    * against anv_bo_pool_alloc's implementation.
    */
   void *free_list[16];
};

void anv_bo_pool_init(struct anv_bo_pool *pool, struct anv_device *device,
                      uint64_t bo_flags);
void anv_bo_pool_finish(struct anv_bo_pool *pool);
VkResult anv_bo_pool_alloc(struct anv_bo_pool *pool, struct anv_bo *bo,
                           uint32_t size);
void anv_bo_pool_free(struct anv_bo_pool *pool, const struct anv_bo *bo);

689 690 691 692 693
/* One lazily-created scratch BO; 'exists' records whether 'bo' is valid. */
struct anv_scratch_bo {
   bool exists;
   struct anv_bo bo;
};

struct anv_scratch_pool {
   /* Indexed by Per-Thread Scratch Space number (the hardware value) and stage */
   struct anv_scratch_bo bos[16][MESA_SHADER_STAGES];
};

void anv_scratch_pool_init(struct anv_device *device,
                           struct anv_scratch_pool *pool);
void anv_scratch_pool_finish(struct anv_device *device,
                             struct anv_scratch_pool *pool);
struct anv_bo *anv_scratch_pool_alloc(struct anv_device *device,
                                      struct anv_scratch_pool *pool,
                                      gl_shader_stage stage,
                                      unsigned per_thread_scratch);
707

Jason Ekstrand's avatar
Jason Ekstrand committed
708 709 710 711 712 713 714 715 716 717 718 719 720
/** Implements a BO cache that ensures a 1-1 mapping of GEM BOs to anv_bos */
struct anv_bo_cache {
   /* Maps GEM handles to anv_bo objects; guarded by 'mutex' (NOTE(review):
    * locking discipline assumed — confirm in the implementation).
    */
   struct hash_table *bo_map;
   pthread_mutex_t mutex;
};

VkResult anv_bo_cache_init(struct anv_bo_cache *cache);
void anv_bo_cache_finish(struct anv_bo_cache *cache);
VkResult anv_bo_cache_alloc(struct anv_device *device,
                            struct anv_bo_cache *cache,
                            uint64_t size, struct anv_bo **bo);
/* Import a BO from a dma-buf/prime file descriptor. */
VkResult anv_bo_cache_import(struct anv_device *device,
                             struct anv_bo_cache *cache,
                             int fd, struct anv_bo **bo);
/* Export a cached BO as a file descriptor in *fd_out. */
VkResult anv_bo_cache_export(struct anv_device *device,
                             struct anv_bo_cache *cache,
                             struct anv_bo *bo_in, int *fd_out);
void anv_bo_cache_release(struct anv_device *device,
                          struct anv_bo_cache *cache,
                          struct anv_bo *bo);

729 730 731 732 733 734 735 736 737
/* One entry of VkPhysicalDeviceMemoryProperties::memoryTypes plus
 * driver-private data.
 */
struct anv_memory_type {
   /* Standard bits passed on to the client */
   VkMemoryPropertyFlags   propertyFlags;
   uint32_t                heapIndex;

   /* Driver-internal book-keeping */
   VkBufferUsageFlags      valid_buffer_usage;
};

738 739 740 741 742 743 744 745 746
/* One entry of VkPhysicalDeviceMemoryProperties::memoryHeaps plus
 * driver-private data.
 */
struct anv_memory_heap {
   /* Standard bits passed on to the client */
   VkDeviceSize      size;
   VkMemoryHeapFlags flags;

   /* Driver-internal book-keeping */
   bool              supports_48bit_addresses;
};

Kristian Høgsberg's avatar
Kristian Høgsberg committed
747
struct anv_physical_device {
    VK_LOADER_DATA                              _loader_data;

    struct anv_instance *                       instance;
    uint32_t                                    chipset_id;
    bool                                        no_hw;
    /* DRM device node path (NOTE(review): assumed from the name — confirm) */
    char                                        path[20];
    const char *                                name;
    struct gen_device_info                      info;
    /** Amount of "GPU memory" we want to advertise
     *
     * Clearly, this value is bogus since Intel is a UMA architecture.  On
     * gen7 platforms, we are limited by GTT size unless we want to implement
     * fine-grained tracking and GTT splitting.  On Broadwell and above we are
     * practically unlimited.  However, we will never report more than 3/4 of
     * the total system ram to try and avoid running out of RAM.
     */
    bool                                        supports_48bit_addresses;
    struct brw_compiler *                       compiler;
    struct isl_device                           isl_dev;
    int                                         cmd_parser_version;
    /* Kernel feature flags, probed at device creation */
    bool                                        has_exec_async;
    bool                                        has_exec_capture;
    bool                                        has_exec_fence;
    bool                                        has_syncobj;
    bool                                        has_syncobj_wait;
    bool                                        has_context_priority;

    struct anv_device_extension_table           supported_extensions;

    uint32_t                                    eu_total;
    uint32_t                                    subslice_total;

    /* Memory types/heaps advertised through vkGetPhysicalDeviceMemoryProperties */
    struct {
      uint32_t                                  type_count;
      struct anv_memory_type                    types[VK_MAX_MEMORY_TYPES];
      uint32_t                                  heap_count;
      struct anv_memory_heap                    heaps[VK_MAX_MEMORY_HEAPS];
    } memory;

    uint8_t                                     pipeline_cache_uuid[VK_UUID_SIZE];
    uint8_t                                     driver_uuid[VK_UUID_SIZE];
    uint8_t                                     device_uuid[VK_UUID_SIZE];

    struct wsi_device                       wsi_device;
    int                                         local_fd;
};
794

Kristian Høgsberg's avatar
Kristian Høgsberg committed
795
struct anv_instance {
    VK_LOADER_DATA                              _loader_data;

    VkAllocationCallbacks                       alloc;

    uint32_t                                    apiVersion;
    struct anv_instance_extension_table         enabled_extensions;
    struct anv_dispatch_table                   dispatch;

    /* This driver exposes a single physical device per instance. */
    int                                         physicalDeviceCount;
    struct anv_physical_device                  physicalDevice;

    struct vk_debug_report_instance             debug_report_callbacks;
};

810 811
/* Window-system-integration setup/teardown for a physical device. */
VkResult anv_init_wsi(struct anv_physical_device *physical_device);
void anv_finish_wsi(struct anv_physical_device *physical_device);

uint32_t anv_physical_device_api_version(struct anv_physical_device *dev);
bool anv_physical_device_extension_supported(struct anv_physical_device *dev,
                                             const char *name);

817
struct anv_queue {
    VK_LOADER_DATA                              _loader_data;

    struct anv_device *                         device;

    struct anv_state_pool *                     pool;
};

825 826 827
struct anv_pipeline_cache {
   struct anv_device *                          device;
   /* NOTE(review): presumably guards 'cache' — confirm in anv_pipeline_cache.c */
   pthread_mutex_t                              mutex;

   struct hash_table *                          cache;
};

struct anv_pipeline_bind_map;

void anv_pipeline_cache_init(struct anv_pipeline_cache *cache,
                             struct anv_device *device,
                             bool cache_enabled);
void anv_pipeline_cache_finish(struct anv_pipeline_cache *cache);

/* Look up a previously uploaded kernel by its opaque key. */
struct anv_shader_bin *
anv_pipeline_cache_search(struct anv_pipeline_cache *cache,
                          const void *key, uint32_t key_size);
struct anv_shader_bin *
anv_pipeline_cache_upload_kernel(struct anv_pipeline_cache *cache,
                                 const void *key_data, uint32_t key_size,
                                 const void *kernel_data, uint32_t kernel_size,
                                 const struct brw_stage_prog_data *prog_data,
                                 uint32_t prog_data_size,
                                 const struct anv_pipeline_bind_map *bind_map);
849

Kristian Høgsberg's avatar
Kristian Høgsberg committed
850
struct anv_device {
    VK_LOADER_DATA                              _loader_data;

    VkAllocationCallbacks                       alloc;

    struct anv_instance *                       instance;
    uint32_t                                    chipset_id;
    bool                                        no_hw;
    struct gen_device_info                      info;
    struct isl_device                           isl_dev;
    /* Hardware context id and DRM fd used for all GEM calls */
    int                                         context_id;
    int                                         fd;
    bool                                        can_chain_batches;
    bool                                        robust_buffer_access;
    struct anv_device_extension_table           enabled_extensions;
    struct anv_dispatch_table                   dispatch;

    struct anv_bo_pool                          batch_bo_pool;

    struct anv_bo_cache                         bo_cache;

    struct anv_state_pool                       dynamic_state_pool;
    struct anv_state_pool                       instruction_state_pool;
    struct anv_state_pool                       surface_state_pool;

    struct anv_bo                               workaround_bo;
    struct anv_bo                               trivial_batch_bo;

    struct anv_pipeline_cache                   blorp_shader_cache;
    struct blorp_context                        blorp;

    struct anv_state                            border_colors;

    struct anv_queue                            queue;

    struct anv_scratch_pool                     scratch_pool;

    uint32_t                                    default_mocs;

    pthread_mutex_t                             mutex;
    pthread_cond_t                              queue_submit;
    /* Set when the device has been lost (e.g. after a GPU hang); see
     * anv_device_query_status.
     */
    bool                                        lost;
};

894 895 896 897 898 899
/* Flush a CPU-written state so the GPU observes it.  On LLC platforms CPU
 * and GPU caches are coherent, so the flush is skipped.
 */
static inline void
anv_state_flush(struct anv_device *device, struct anv_state state)
{
   if (device->info.has_llc)
      return;

   gen_flush_range(state.map, state.alloc_size);
}

903 904
void anv_device_init_blorp(struct anv_device *device);
void anv_device_finish_blorp(struct anv_device *device);

/* Submit an execbuffer; execbuf_bos lists the BOs referenced by it. */
VkResult anv_device_execbuf(struct anv_device *device,
                            struct drm_i915_gem_execbuffer2 *execbuf,
                            struct anv_bo **execbuf_bos);
VkResult anv_device_query_status(struct anv_device *device);
VkResult anv_device_bo_busy(struct anv_device *device, struct anv_bo *bo);
/* Wait for a BO to become idle; timeout semantics follow the i915 wait
 * ioctl (NOTE(review): relative vs. absolute timeout — confirm in anv_gem.c).
 */
VkResult anv_device_wait(struct anv_device *device, struct anv_bo *bo,
                         int64_t timeout);
913

Kristian Høgsberg's avatar
Kristian Høgsberg committed
914
/* Thin wrappers around the i915 GEM and DRM syncobj ioctls. */
void* anv_gem_mmap(struct anv_device *device,
                   uint32_t gem_handle, uint64_t offset, uint64_t size, uint32_t flags);
void anv_gem_munmap(void *p, uint64_t size);
uint32_t anv_gem_create(struct anv_device *device, uint64_t size);
void anv_gem_close(struct anv_device *device, uint32_t gem_handle);
uint32_t anv_gem_userptr(struct anv_device *device, void *mem, size_t size);
int anv_gem_busy(struct anv_device *device, uint32_t gem_handle);
int anv_gem_wait(struct anv_device *device, uint32_t gem_handle, int64_t *timeout_ns);
int anv_gem_execbuffer(struct anv_device *device,
                       struct drm_i915_gem_execbuffer2 *execbuf);
int anv_gem_set_tiling(struct anv_device *device, uint32_t gem_handle,
                       uint32_t stride, uint32_t tiling);
int anv_gem_create_context(struct anv_device *device);
bool anv_gem_has_context_priority(int fd);
int anv_gem_destroy_context(struct anv_device *device, int context);
int anv_gem_set_context_param(int fd, int context, uint32_t param,
                              uint64_t value);
int anv_gem_get_context_param(int fd, int context, uint32_t param,
                              uint64_t *value);
int anv_gem_get_param(int fd, uint32_t param);
int anv_gem_get_tiling(struct anv_device *device, uint32_t gem_handle);
bool anv_gem_get_bit6_swizzle(int fd, uint32_t tiling);
int anv_gem_get_aperture(int fd, uint64_t *size);
bool anv_gem_supports_48b_addresses(int fd);
int anv_gem_gpu_get_reset_stats(struct anv_device *device,
                                uint32_t *active, uint32_t *pending);
/* prime/dma-buf handle <-> fd conversion */
int anv_gem_handle_to_fd(struct anv_device *device, uint32_t gem_handle);
uint32_t anv_gem_fd_to_handle(struct anv_device *device, int fd);
int anv_gem_set_caching(struct anv_device *device, uint32_t gem_handle, uint32_t caching);
int anv_gem_set_domain(struct anv_device *device, uint32_t gem_handle,
                       uint32_t read_domains, uint32_t write_domain);
int anv_gem_sync_file_merge(struct anv_device *device, int fd1, int fd2);
/* DRM syncobj helpers, used for VkFence/VkSemaphore when the kernel
 * supports them (see has_syncobj / has_syncobj_wait above).
 */
uint32_t anv_gem_syncobj_create(struct anv_device *device, uint32_t flags);
void anv_gem_syncobj_destroy(struct anv_device *device, uint32_t handle);
int anv_gem_syncobj_handle_to_fd(struct anv_device *device, uint32_t handle);
uint32_t anv_gem_syncobj_fd_to_handle(struct anv_device *device, int fd);
int anv_gem_syncobj_export_sync_file(struct anv_device *device,
                                     uint32_t handle);
int anv_gem_syncobj_import_sync_file(struct anv_device *device,
                                     uint32_t handle, int fd);
void anv_gem_syncobj_reset(struct anv_device *device, uint32_t handle);
bool anv_gem_supports_syncobj_wait(int fd);
int anv_gem_syncobj_wait(struct anv_device *device,
                         uint32_t *handles, uint32_t num_handles,
                         int64_t abs_timeout_ns, bool wait_all);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
959 960 961 962

VkResult anv_bo_init_new(struct anv_bo *bo, struct anv_device *device, uint64_t size);

/* Growable array of kernel relocation entries plus the target BO of each. */
struct anv_reloc_list {
   uint32_t                                     num_relocs;
   uint32_t                                     array_length;
   struct drm_i915_gem_relocation_entry *       relocs;
   /* reloc_bos[i] is the BO that relocs[i] points at */
   struct anv_bo **                             reloc_bos;
};

VkResult anv_reloc_list_init(struct anv_reloc_list *list,
                             const VkAllocationCallbacks *alloc);
void anv_reloc_list_finish(struct anv_reloc_list *list,
                           const VkAllocationCallbacks *alloc);

VkResult anv_reloc_list_add(struct anv_reloc_list *list,
                            const VkAllocationCallbacks *alloc,
                            uint32_t offset, struct anv_bo *target_bo,
                            uint32_t delta);

979
/* One BO's worth of batch buffer, owned by a command buffer. */
struct anv_batch_bo {
   /* Link in the anv_cmd_buffer.owned_batch_bos list */
   struct list_head                             link;

   struct anv_bo                                bo;

   /* Bytes actually consumed in this batch BO */
   uint32_t                                     length;

   struct anv_reloc_list                        relocs;
};

Kristian Høgsberg's avatar
Kristian Høgsberg committed
991
struct anv_batch {
   const VkAllocationCallbacks *                alloc;

   /* [start, end) is the writable range; 'next' is the current write
    * cursor (see _anv_combine_address's bounds assert below).
    */
   void *                                       start;
   void *                                       end;
   void *                                       next;

   struct anv_reloc_list *                      relocs;

   /* This callback is called (with the associated user data) in the event
    * that the batch runs out of space.
    */
   VkResult (*extend_cb)(struct anv_batch *, void *);
   void *                                       user_data;

   /**
    * Current error status of the command buffer. Used to track inconsistent
    * or incomplete command buffer states that are the consequence of run-time
    * errors such as out of memory scenarios. We want to track this in the
    * batch because the command buffer object is not visible to some parts
    * of the driver.
    */
   VkResult                                     status;
};

void *anv_batch_emit_dwords(struct anv_batch *batch, int num_dwords);
void anv_batch_emit_batch(struct anv_batch *batch, struct anv_batch *other);
uint64_t anv_batch_emit_reloc(struct anv_batch *batch,
                              void *location, struct anv_bo *bo, uint32_t offset);
VkResult anv_device_submit_simple_batch(struct anv_device *device,
                                        struct anv_batch *batch);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
1022

1023 1024 1025 1026 1027 1028 1029 1030 1031 1032 1033 1034 1035 1036 1037
/* Record an error on the batch.  Only the first error is kept so the
 * original failure cause is preserved; the batch's current status is
 * returned either way.
 */
static inline VkResult
anv_batch_set_error(struct anv_batch *batch, VkResult error)
{
   assert(error != VK_SUCCESS);
   if (batch->status != VK_SUCCESS)
      return batch->status;

   batch->status = error;
   return error;
}

static inline bool
anv_batch_has_error(struct anv_batch *batch)
{
   return batch->status != VK_SUCCESS;
}

Kristian Høgsberg's avatar
Kristian Høgsberg committed
1038 1039 1040 1041 1042 1043
struct anv_address {
   struct anv_bo *bo;
   uint32_t offset;
};

static inline uint64_t
1044 1045
_anv_combine_address(struct anv_batch *batch, void *location,
                     const struct anv_address address, uint32_t delta)
1046
{
Kristian Høgsberg's avatar
Kristian Høgsberg committed
1047
   if (address.bo == NULL) {
1048
      return address.offset + delta;
Kristian Høgsberg's avatar
Kristian Høgsberg committed
1049
   } else {
1050
      assert(batch->start <= location && location < batch->end);
Kristian Høgsberg's avatar
Kristian Høgsberg committed
1051 1052 1053 1054

      return anv_batch_emit_reloc(batch, location, address.bo, address.offset + delta);
   }
}
1055

1056 1057 1058 1059
/* Hooks that route genxml's generated pack functions through anv's
 * address/relocation machinery.
 */
#define __gen_address_type struct anv_address
#define __gen_user_data struct anv_batch
#define __gen_combine_address _anv_combine_address

/* Wrapper macros needed to work around preprocessor argument issues.  In
 * particular, arguments don't get pre-evaluated if they are concatenated.
 * This means that, if you pass GENX(3DSTATE_PS) into the emit macro, the
 * GENX macro won't get evaluated if the emit macro contains "cmd ## foo".
 * We can work around this easily enough with these helpers.
 */
#define __anv_cmd_length(cmd) cmd ## _length
#define __anv_cmd_length_bias(cmd) cmd ## _length_bias
#define __anv_cmd_header(cmd) cmd ## _header
#define __anv_cmd_pack(cmd) cmd ## _pack
#define __anv_reg_num(reg) reg ## _num

/* Pack a genxml template into plain memory at 'dst'.  The NULL batch means
 * no relocations can be emitted while packing.
 */
#define anv_pack_struct(dst, struc, ...) do {                              \
      struct struc __template = {                                          \
         __VA_ARGS__                                                       \
      };                                                                   \
      __anv_cmd_pack(struc)(NULL, dst, &__template);                       \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(dst, __anv_cmd_length(struc) * 4)); \
   } while (0)
1079

1080 1081 1082 1083 1084 1085 1086 1087 1088 1089 1090
/* Emit an n-dword command built from the given template fields.  This is a
 * statement expression whose value is the destination pointer, or NULL if
 * the batch was out of space (in which case nothing is packed).
 */
#define anv_batch_emitn(batch, n, cmd, ...) ({             \
      void *__dst = anv_batch_emit_dwords(batch, n);       \
      if (__dst) {                                         \
         struct cmd __template = {                         \
            __anv_cmd_header(cmd),                         \
           .DWordLength = n - __anv_cmd_length_bias(cmd),  \
            __VA_ARGS__                                    \
         };                                                \
         __anv_cmd_pack(cmd)(batch, __dst, &__template);   \
      }                                                    \
      __dst;                                               \
   })

1093 1094 1095 1096
/* OR together two pre-packed dword arrays of equal length and emit the
 * result.  Silently emits nothing if the batch is out of space.
 */
#define anv_batch_emit_merge(batch, dwords0, dwords1)                   \
   do {                                                                 \
      uint32_t *dw;                                                     \
                                                                        \
      STATIC_ASSERT(ARRAY_SIZE(dwords0) == ARRAY_SIZE(dwords1));        \
      dw = anv_batch_emit_dwords((batch), ARRAY_SIZE(dwords0));         \
      if (!dw)                                                          \
         break;                                                         \
      for (uint32_t i = 0; i < ARRAY_SIZE(dwords0); i++)                \
         dw[i] = (dwords0)[i] | (dwords1)[i];                           \
      VG(VALGRIND_CHECK_MEM_IS_DEFINED(dw, ARRAY_SIZE(dwords0) * 4));\
   } while (0)

1106
/* Emit one command: expands to a for-statement that runs its body exactly
 * once with 'name' as the template struct for the caller to fill in; the
 * command is packed into the batch when the body completes.  If the batch
 * is out of space (_dst == NULL) the body is skipped entirely.
 */
#define anv_batch_emit(batch, cmd, name)                            \
   for (struct cmd name = { __anv_cmd_header(cmd) },                    \
        *_dst = anv_batch_emit_dwords(batch, __anv_cmd_length(cmd));    \
        __builtin_expect(_dst != NULL, 1);                              \
        ({ __anv_cmd_pack(cmd)(batch, _dst, &name);                     \
           VG(VALGRIND_CHECK_MEM_IS_DEFINED(_dst, __anv_cmd_length(cmd) * 4)); \
           _dst = NULL;                                                 \
         }))

1115 1116 1117 1118 1119
/* Per-generation default Memory Object Control State (MOCS) settings. */
#define GEN7_MOCS (struct GEN7_MEMORY_OBJECT_CONTROL_STATE) {  \
   .GraphicsDataTypeGFDT                        = 0,           \
   .LLCCacheabilityControlLLCCC                 = 0,           \
   .L3CacheabilityControlL3CC                   = 1,           \
}

#define GEN75_MOCS (struct GEN75_MEMORY_OBJECT_CONTROL_STATE) {  \
   .LLCeLLCCacheabilityControlLLCCC             = 0,           \
   .L3CacheabilityControlL3CC                   = 1,           \
}

#define GEN8_MOCS (struct GEN8_MEMORY_OBJECT_CONTROL_STATE) {  \
      .MemoryTypeLLCeLLCCacheabilityControl = WB,              \
      .TargetCache = L3DefertoPATforLLCeLLCselection,          \
      .AgeforQUADLRU = 0                                       \
   }

/* Skylake: MOCS is now an index into an array of 62 different caching
 * configurations programmed by the kernel.
 */

#define GEN9_MOCS (struct GEN9_MEMORY_OBJECT_CONTROL_STATE) {  \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */              \
      .IndextoMOCSTables                           = 2         \
   }

#define GEN9_MOCS_PTE {                                 \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */       \
      .IndextoMOCSTables                           = 1  \
   }

/* Cannonlake MOCS defines are duplicates of Skylake MOCS defines. */
#define GEN10_MOCS (struct GEN10_MEMORY_OBJECT_CONTROL_STATE) {  \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */              \
      .IndextoMOCSTables                           = 2         \
   }

#define GEN10_MOCS_PTE {                                 \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */       \
      .IndextoMOCSTables                           = 1  \
   }

/* Ice Lake MOCS defines are duplicates of Skylake MOCS defines. */
#define GEN11_MOCS (struct GEN11_MEMORY_OBJECT_CONTROL_STATE) {  \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */              \
      .IndextoMOCSTables                           = 2         \
   }

#define GEN11_MOCS_PTE {                                 \
      /* TC=LLC/eLLC, LeCC=WB, LRUM=3, L3CC=WB */       \
      .IndextoMOCSTables                           = 1  \
   }

Kristian Høgsberg's avatar
Kristian Høgsberg committed
1168
/* Backing object for a VkDeviceMemory allocation. */
struct anv_device_memory {
   struct anv_bo *                              bo;
   struct anv_memory_type *                     type;
   /* CPU mapping from vkMapMemory; NULL/0 when unmapped (NOTE(review):
    * unmapped representation assumed — confirm in anv_device.c).
    */
   VkDeviceSize                                 map_size;
   void *                                       map;
};

1175 1176 1177 1178 1179 1180 1181 1182 1183 1184
/**
 * Header for Vertex URB Entry (VUE)
 */
struct anv_vue_header {
   uint32_t Reserved;
   uint32_t RTAIndex; /* RenderTargetArrayIndex */
   uint32_t ViewportIndex;
   float PointWidth;
};

1185
/* Layout of a single binding within a descriptor set. */
struct anv_descriptor_set_binding_layout {
#ifndef NDEBUG
   /* The type of the descriptors in this binding */
   VkDescriptorType type;
#endif

   /* Number of array elements in this binding */
   uint16_t array_size;

   /* Index into the flattend descriptor set */
   uint16_t descriptor_index;

   /* Index into the dynamic state array for a dynamic buffer */
   int16_t dynamic_offset_index;

   /* Index into the descriptor set buffer views */
   int16_t buffer_index;

   /* Per-stage binding/sampler/image table indices (NOTE(review): -1
    * presumably means "unused in this stage" — confirm against
    * anv_descriptor_set.c).
    */
   struct {
      /* Index into the binding table for the associated surface */
      int16_t surface_index;

      /* Index into the sampler table for the associated sampler */
      int16_t sampler_index;

      /* Index into the image table for the associated image */
      int16_t image_index;
   } stage[MESA_SHADER_STAGES];

   /* Immutable samplers (or NULL if no immutable samplers) */
   struct anv_sampler **immutable_samplers;
};

struct anv_descriptor_set_layout {
   /* Descriptor set layouts can be destroyed at almost any time */
   uint32_t ref_cnt;

   /* Number of bindings in this descriptor set */
   uint16_t binding_count;

   /* Total size of the descriptor set with room for all array entries */
   uint16_t size;

   /* Shader stages affected by this descriptor set */
   uint16_t shader_stages;

   /* Number of buffers in this descriptor set */
   uint16_t buffer_count;

   /* Number of dynamic offsets used by this descriptor set */
   uint16_t dynamic_offset_count;

   /* Bindings in this descriptor set (trailing variable-length array) */
   struct anv_descriptor_set_binding_layout binding[0];
};

1241 1242 1243 1244 1245 1246 1247 1248 1249 1250 1251 1252 1253 1254 1255 1256
/* Take a reference on a descriptor set layout. */
static inline void
anv_descriptor_set_layout_ref(struct anv_descriptor_set_layout *layout)
{
   assert(layout && layout->ref_cnt >= 1);
   p_atomic_inc(&layout->ref_cnt);
}

/* Drop a reference; frees the layout when the count reaches zero. */
static inline void
anv_descriptor_set_layout_unref(struct anv_device *device,
                                struct anv_descriptor_set_layout *layout)
{
   assert(layout && layout->ref_cnt >= 1);
   if (p_atomic_dec_zero(&layout->ref_cnt))
      vk_free(&device->alloc, layout);
}

1257
struct anv_descriptor {
1258
   VkDescriptorType type;
1259

Chad Versace's avatar
Chad Versace committed
1260
   union {
1261
      struct {
1262
         VkImageLayout layout;
1263
         struct anv_image_view *image_view;
1264 1265
         struct anv_sampler *sampler;
      };
Chad Versace's avatar
Chad Versace committed
1266

1267 1268 1269 1270 1271 1272
      struct {
         struct anv_buffer *buffer;
         uint64_t offset;
         uint64_t range;
      };

1273
      struct anv_buffer_view *buffer_view;
1274
   };