Clone of mesa.

anv_descriptor_set.c 35KB

/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>
#include <string.h>
#include <unistd.h>
#include <fcntl.h>

#include "util/mesa-sha1.h"

#include "anv_private.h"

/*
 * Descriptor set layouts.
 */
void anv_GetDescriptorSetLayoutSupport(
    VkDevice                                    device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    VkDescriptorSetLayoutSupport*               pSupport)
{
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };

   for (uint32_t b = 0; b < pCreateInfo->bindingCount; b++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[b];

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
         /* There is no real limit on samplers */
         break;

      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         if (binding->pImmutableSamplers) {
            for (uint32_t i = 0; i < binding->descriptorCount; i++) {
               ANV_FROM_HANDLE(anv_sampler, sampler,
                               binding->pImmutableSamplers[i]);
               anv_foreach_stage(s, binding->stageFlags)
                  surface_count[s] += sampler->n_planes;
            }
         }
         break;

      default:
         anv_foreach_stage(s, binding->stageFlags)
            surface_count[s] += binding->descriptorCount;
         break;
      }
   }

   bool supported = true;
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      /* Our maximum binding table size is 250 and we need to reserve 8 for
       * render targets.  240 is a nice round number.
       */
      if (surface_count[s] >= 240)
         supported = false;
   }

   pSupport->supported = supported;
}
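
/* Illustrative usage sketch (not part of the driver code): an application
 * reaches this entry point through the core Vulkan 1.1 call, e.g.:
 *
 *    VkDescriptorSetLayoutSupport support = {
 *       .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_SUPPORT,
 *    };
 *    vkGetDescriptorSetLayoutSupport(device, &create_info, &support);
 *
 * where create_info is assumed to be a filled-in
 * VkDescriptorSetLayoutCreateInfo.  A layout that would need 240 or more
 * surfaces in any one stage reports support.supported == false, per the
 * budget described above.
 */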
VkResult anv_CreateDescriptorSetLayout(
    VkDevice                                    _device,
    const VkDescriptorSetLayoutCreateInfo*      pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorSetLayout*                      pSetLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO);

   uint32_t max_binding = 0;
   uint32_t immutable_sampler_count = 0;
   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      max_binding = MAX2(max_binding, pCreateInfo->pBindings[j].binding);
      if (pCreateInfo->pBindings[j].pImmutableSamplers)
         immutable_sampler_count += pCreateInfo->pBindings[j].descriptorCount;
   }

   struct anv_descriptor_set_layout *set_layout;
   struct anv_descriptor_set_binding_layout *bindings;
   struct anv_sampler **samplers;
   /* We need to allocate descriptor set layouts off the device allocator
    * with DEVICE scope because they are reference counted and may not be
    * destroyed when vkDestroyDescriptorSetLayout is called.
    */
   ANV_MULTIALLOC(ma);
   anv_multialloc_add(&ma, &set_layout, 1);
   anv_multialloc_add(&ma, &bindings, max_binding + 1);
   anv_multialloc_add(&ma, &samplers, immutable_sampler_count);

   if (!anv_multialloc_alloc(&ma, &device->alloc,
                             VK_SYSTEM_ALLOCATION_SCOPE_DEVICE))
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   memset(set_layout, 0, sizeof(*set_layout));
   set_layout->ref_cnt = 1;
   set_layout->binding_count = max_binding + 1;

   for (uint32_t b = 0; b <= max_binding; b++) {
      /* Initialize all binding_layout entries to -1 */
      memset(&set_layout->binding[b], -1, sizeof(set_layout->binding[b]));

      set_layout->binding[b].array_size = 0;
      set_layout->binding[b].immutable_samplers = NULL;
   }

   /* Initialize all samplers to 0 */
   memset(samplers, 0, immutable_sampler_count * sizeof(*samplers));

   uint32_t sampler_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t surface_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t image_count[MESA_SHADER_STAGES] = { 0, };
   uint32_t buffer_count = 0;
   uint32_t dynamic_offset_count = 0;

   for (uint32_t j = 0; j < pCreateInfo->bindingCount; j++) {
      const VkDescriptorSetLayoutBinding *binding = &pCreateInfo->pBindings[j];
      uint32_t b = binding->binding;
      /* We temporarily store the pointer to the binding in the
       * immutable_samplers pointer. This provides us with a quick-and-dirty
       * way to sort the bindings by binding number.
       */
      set_layout->binding[b].immutable_samplers = (void *)binding;
   }
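
   /* Illustrative example (not from the original source): if the app supplies
    * bindings in API order {3, 0, 2}, the loop above stashes each binding
    * pointer at binding[3], binding[0] and binding[2].  The loop below then
    * walks b = 0..max_binding and so visits the bindings in ascending
    * binding number; binding[1] stays NULL and is skipped.
    */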
   for (uint32_t b = 0; b <= max_binding; b++) {
      const VkDescriptorSetLayoutBinding *binding =
         (void *)set_layout->binding[b].immutable_samplers;

      if (binding == NULL)
         continue;

      if (binding->descriptorCount == 0)
         continue;

#ifndef NDEBUG
      set_layout->binding[b].type = binding->descriptorType;
#endif
      set_layout->binding[b].array_size = binding->descriptorCount;
      set_layout->binding[b].descriptor_index = set_layout->size;
      set_layout->size += binding->descriptorCount;

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].sampler_index = sampler_count[s];
            sampler_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].buffer_index = buffer_count;
         buffer_count += binding->descriptorCount;
         /* fall through */
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].surface_index = surface_count[s];
            surface_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         set_layout->binding[b].dynamic_offset_index = dynamic_offset_count;
         dynamic_offset_count += binding->descriptorCount;
         break;
      default:
         break;
      }

      switch (binding->descriptorType) {
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         anv_foreach_stage(s, binding->stageFlags) {
            set_layout->binding[b].stage[s].image_index = image_count[s];
            image_count[s] += binding->descriptorCount;
         }
         break;
      default:
         break;
      }

      if (binding->pImmutableSamplers) {
         set_layout->binding[b].immutable_samplers = samplers;
         samplers += binding->descriptorCount;

         for (uint32_t i = 0; i < binding->descriptorCount; i++)
            set_layout->binding[b].immutable_samplers[i] =
               anv_sampler_from_handle(binding->pImmutableSamplers[i]);
      } else {
         set_layout->binding[b].immutable_samplers = NULL;
      }

      set_layout->shader_stages |= binding->stageFlags;
   }

   set_layout->buffer_count = buffer_count;
   set_layout->dynamic_offset_count = dynamic_offset_count;

   *pSetLayout = anv_descriptor_set_layout_to_handle(set_layout);

   return VK_SUCCESS;
}
void anv_DestroyDescriptorSetLayout(
    VkDevice                                    _device,
    VkDescriptorSetLayout                       _set_layout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout, _set_layout);

   if (!set_layout)
      return;

   anv_descriptor_set_layout_unref(device, set_layout);
}

static void
sha1_update_descriptor_set_layout(struct mesa_sha1 *ctx,
                                  const struct anv_descriptor_set_layout *layout)
{
   size_t size = sizeof(*layout) +
                 sizeof(layout->binding[0]) * layout->binding_count;
   _mesa_sha1_update(ctx, layout, size);
}
/*
 * Pipeline layouts.  These have nothing to do with the pipeline.  They are
 * just multiple descriptor set layouts pasted together.
 */
VkResult anv_CreatePipelineLayout(
    VkDevice                                    _device,
    const VkPipelineLayoutCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkPipelineLayout*                           pPipelineLayout)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_pipeline_layout *layout;

   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO);

   layout = vk_alloc2(&device->alloc, pAllocator, sizeof(*layout), 8,
                      VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (layout == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   layout->num_sets = pCreateInfo->setLayoutCount;

   unsigned dynamic_offset_count = 0;

   memset(layout->stage, 0, sizeof(layout->stage));
   for (uint32_t set = 0; set < pCreateInfo->setLayoutCount; set++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, set_layout,
                      pCreateInfo->pSetLayouts[set]);
      layout->set[set].layout = set_layout;
      anv_descriptor_set_layout_ref(set_layout);

      layout->set[set].dynamic_offset_start = dynamic_offset_count;
      for (uint32_t b = 0; b < set_layout->binding_count; b++) {
         if (set_layout->binding[b].dynamic_offset_index < 0)
            continue;

         dynamic_offset_count += set_layout->binding[b].array_size;
         for (gl_shader_stage s = 0; s < MESA_SHADER_STAGES; s++) {
            if (set_layout->binding[b].stage[s].surface_index >= 0)
               layout->stage[s].has_dynamic_offsets = true;
         }
      }
   }

   struct mesa_sha1 ctx;
   _mesa_sha1_init(&ctx);
   for (unsigned s = 0; s < layout->num_sets; s++) {
      sha1_update_descriptor_set_layout(&ctx, layout->set[s].layout);
      _mesa_sha1_update(&ctx, &layout->set[s].dynamic_offset_start,
                        sizeof(layout->set[s].dynamic_offset_start));
   }
   _mesa_sha1_update(&ctx, &layout->num_sets, sizeof(layout->num_sets));
   for (unsigned s = 0; s < MESA_SHADER_STAGES; s++) {
      _mesa_sha1_update(&ctx, &layout->stage[s].has_dynamic_offsets,
                        sizeof(layout->stage[s].has_dynamic_offsets));
   }
   _mesa_sha1_final(&ctx, layout->sha1);

   *pPipelineLayout = anv_pipeline_layout_to_handle(layout);

   return VK_SUCCESS;
}

void anv_DestroyPipelineLayout(
    VkDevice                                    _device,
    VkPipelineLayout                            _pipelineLayout,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_pipeline_layout, pipeline_layout, _pipelineLayout);

   if (!pipeline_layout)
      return;

   for (uint32_t i = 0; i < pipeline_layout->num_sets; i++)
      anv_descriptor_set_layout_unref(device, pipeline_layout->set[i].layout);

   vk_free2(&device->alloc, pAllocator, pipeline_layout);
}
/*
 * Descriptor pools.
 *
 * These are implemented using a big pool of memory and a free-list for the
 * host memory allocations and a state_stream and a free list for the buffer
 * view surface state.  The spec allows us to fail to allocate due to
 * fragmentation in all cases but two: 1) after pool reset, allocating up
 * until the pool size with no freeing must succeed and 2) allocating and
 * freeing only descriptor sets with the same layout.  Case 1) is easy enough,
 * and the free lists let us recycle blocks for case 2).
 */
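
/* Illustrative note (not from the original source): case 2) works because a
 * freed set's memory becomes a pool_free_list_entry recording the set's
 * size; a later allocation against the same layout computes exactly the
 * same size and can take that block straight off the free list.
 */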
#define EMPTY 1

VkResult anv_CreateDescriptorPool(
    VkDevice                                    _device,
    const VkDescriptorPoolCreateInfo*           pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorPool*                           pDescriptorPool)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_pool *pool;

   uint32_t descriptor_count = 0;
   uint32_t buffer_count = 0;
   for (uint32_t i = 0; i < pCreateInfo->poolSizeCount; i++) {
      switch (pCreateInfo->pPoolSizes[i].type) {
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         buffer_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         /* fall through: buffer descriptors also count as descriptors */
      default:
         descriptor_count += pCreateInfo->pPoolSizes[i].descriptorCount;
         break;
      }
   }

   const size_t pool_size =
      pCreateInfo->maxSets * sizeof(struct anv_descriptor_set) +
      descriptor_count * sizeof(struct anv_descriptor) +
      buffer_count * sizeof(struct anv_buffer_view);
   const size_t total_size = sizeof(*pool) + pool_size;

   pool = vk_alloc2(&device->alloc, pAllocator, total_size, 8,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!pool)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   pool->size = pool_size;
   pool->next = 0;
   pool->free_list = EMPTY;

   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   *pDescriptorPool = anv_descriptor_pool_to_handle(pool);

   return VK_SUCCESS;
}
void anv_DestroyDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            _pool,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, _pool);

   if (!pool)
      return;

   anv_state_stream_finish(&pool->surface_state_stream);
   vk_free2(&device->alloc, pAllocator, pool);
}

VkResult anv_ResetDescriptorPool(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    VkDescriptorPoolResetFlags                  flags)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   pool->next = 0;
   pool->free_list = EMPTY;
   anv_state_stream_finish(&pool->surface_state_stream);
   anv_state_stream_init(&pool->surface_state_stream,
                         &device->surface_state_pool, 4096);
   pool->surface_state_free_list = NULL;

   return VK_SUCCESS;
}

struct pool_free_list_entry {
   uint32_t next;
   uint32_t size;
};

size_t
anv_descriptor_set_layout_size(const struct anv_descriptor_set_layout *layout)
{
   return
      sizeof(struct anv_descriptor_set) +
      layout->size * sizeof(struct anv_descriptor) +
      layout->buffer_count * sizeof(struct anv_buffer_view);
}

size_t
anv_descriptor_set_binding_layout_get_hw_size(const struct anv_descriptor_set_binding_layout *binding)
{
   if (!binding->immutable_samplers)
      return binding->array_size;

   uint32_t total_plane_count = 0;
   for (uint32_t i = 0; i < binding->array_size; i++)
      total_plane_count += binding->immutable_samplers[i]->n_planes;

   return total_plane_count;
}
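
/* Illustrative example (not from the original source): a binding holding 3
 * immutable samplers for a 2-plane YCbCr format has an API-visible
 * array_size of 3 but a HW size of 6, since each plane needs its own
 * binding table slot.
 */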
struct surface_state_free_list_entry {
   void *next;
   struct anv_state state;
};
VkResult
anv_descriptor_set_create(struct anv_device *device,
                          struct anv_descriptor_pool *pool,
                          struct anv_descriptor_set_layout *layout,
                          struct anv_descriptor_set **out_set)
{
   struct anv_descriptor_set *set;
   const size_t size = anv_descriptor_set_layout_size(layout);

   set = NULL;
   if (size <= pool->size - pool->next) {
      set = (struct anv_descriptor_set *) (pool->data + pool->next);
      pool->next += size;
   } else {
      struct pool_free_list_entry *entry;
      uint32_t *link = &pool->free_list;
      for (uint32_t f = pool->free_list; f != EMPTY; f = entry->next) {
         entry = (struct pool_free_list_entry *) (pool->data + f);
         if (size <= entry->size) {
            *link = entry->next;
            set = (struct anv_descriptor_set *) entry;
            break;
         }
         link = &entry->next;
      }
   }

   if (set == NULL) {
      if (pool->free_list != EMPTY) {
         return vk_error(VK_ERROR_FRAGMENTED_POOL);
      } else {
         return vk_error(VK_ERROR_OUT_OF_POOL_MEMORY);
      }
   }

   set->layout = layout;
   anv_descriptor_set_layout_ref(layout);

   set->size = size;
   set->buffer_views =
      (struct anv_buffer_view *) &set->descriptors[layout->size];
   set->buffer_count = layout->buffer_count;

   /* By defining the descriptors to be zero now, we can later verify that
    * a descriptor has not been populated with user data.
    */
   memset(set->descriptors, 0, sizeof(struct anv_descriptor) * layout->size);

   /* Go through and fill out immutable samplers if we have any */
   struct anv_descriptor *desc = set->descriptors;
   for (uint32_t b = 0; b < layout->binding_count; b++) {
      if (layout->binding[b].immutable_samplers) {
         for (uint32_t i = 0; i < layout->binding[b].array_size; i++) {
            /* The type will get changed to COMBINED_IMAGE_SAMPLER in
             * UpdateDescriptorSets if needed.  However, if the descriptor
             * set has an immutable sampler, UpdateDescriptorSets may never
             * touch it, so we need to make sure it's 100% valid now.
             */
            desc[i] = (struct anv_descriptor) {
               .type = VK_DESCRIPTOR_TYPE_SAMPLER,
               .sampler = layout->binding[b].immutable_samplers[i],
            };
         }
      }
      desc += layout->binding[b].array_size;
   }

   /* Allocate surface state for the buffer views. */
   for (uint32_t b = 0; b < layout->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         pool->surface_state_free_list;
      struct anv_state state;

      if (entry) {
         state = entry->state;
         pool->surface_state_free_list = entry->next;
         assert(state.alloc_size == 64);
      } else {
         state = anv_state_stream_alloc(&pool->surface_state_stream, 64, 64);
      }

      set->buffer_views[b].surface_state = state;
   }

   *out_set = set;

   return VK_SUCCESS;
}
void
anv_descriptor_set_destroy(struct anv_device *device,
                           struct anv_descriptor_pool *pool,
                           struct anv_descriptor_set *set)
{
   anv_descriptor_set_layout_unref(device, set->layout);

   /* Put the buffer view surface state back on the free list. */
   for (uint32_t b = 0; b < set->buffer_count; b++) {
      struct surface_state_free_list_entry *entry =
         set->buffer_views[b].surface_state.map;
      entry->next = pool->surface_state_free_list;
      entry->state = set->buffer_views[b].surface_state;
      pool->surface_state_free_list = entry;
   }

   /* Put the descriptor set allocation back on the free list. */
   const uint32_t index = (char *) set - pool->data;
   if (index + set->size == pool->next) {
      pool->next = index;
   } else {
      struct pool_free_list_entry *entry = (struct pool_free_list_entry *) set;
      entry->next = pool->free_list;
      entry->size = set->size;
      pool->free_list = (char *) entry - pool->data;
   }
}
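
/* Illustrative note (not from the original source): if the set being freed
 * was the most recent allocation (index + set->size == pool->next), the
 * bump pointer simply rewinds; otherwise the block is threaded onto the
 * free list so a later same-size allocation can reuse it.
 */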
VkResult anv_AllocateDescriptorSets(
    VkDevice                                    _device,
    const VkDescriptorSetAllocateInfo*          pAllocateInfo,
    VkDescriptorSet*                            pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, pAllocateInfo->descriptorPool);

   VkResult result = VK_SUCCESS;
   struct anv_descriptor_set *set;
   uint32_t i;

   for (i = 0; i < pAllocateInfo->descriptorSetCount; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set_layout, layout,
                      pAllocateInfo->pSetLayouts[i]);

      result = anv_descriptor_set_create(device, pool, layout, &set);
      if (result != VK_SUCCESS)
         break;

      pDescriptorSets[i] = anv_descriptor_set_to_handle(set);
   }

   if (result != VK_SUCCESS)
      anv_FreeDescriptorSets(_device, pAllocateInfo->descriptorPool,
                             i, pDescriptorSets);

   return result;
}

VkResult anv_FreeDescriptorSets(
    VkDevice                                    _device,
    VkDescriptorPool                            descriptorPool,
    uint32_t                                    count,
    const VkDescriptorSet*                      pDescriptorSets)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_pool, pool, descriptorPool);

   for (uint32_t i = 0; i < count; i++) {
      ANV_FROM_HANDLE(anv_descriptor_set, set, pDescriptorSets[i]);

      if (!set)
         continue;

      anv_descriptor_set_destroy(device, pool, set);
   }

   return VK_SUCCESS;
}
void
anv_descriptor_set_write_image_view(struct anv_descriptor_set *set,
                                    const struct gen_device_info * const devinfo,
                                    const VkDescriptorImageInfo * const info,
                                    VkDescriptorType type,
                                    uint32_t binding,
                                    uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];
   struct anv_image_view *image_view = NULL;
   struct anv_sampler *sampler = NULL;

   assert(type == bind_layout->type);

   switch (type) {
   case VK_DESCRIPTOR_TYPE_SAMPLER:
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      image_view = anv_image_view_from_handle(info->imageView);
      sampler = anv_sampler_from_handle(info->sampler);
      break;

   case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
   case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
   case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
      image_view = anv_image_view_from_handle(info->imageView);
      break;

   default:
      unreachable("invalid descriptor type");
   }

   /* If this descriptor has an immutable sampler, we don't want to stomp on
    * it.
    */
   sampler = bind_layout->immutable_samplers ?
             bind_layout->immutable_samplers[element] :
             sampler;

   *desc = (struct anv_descriptor) {
      .type = type,
      .layout = info->imageLayout,
      .image_view = image_view,
      .sampler = sampler,
   };
}

void
anv_descriptor_set_write_buffer_view(struct anv_descriptor_set *set,
                                     VkDescriptorType type,
                                     struct anv_buffer_view *buffer_view,
                                     uint32_t binding,
                                     uint32_t element)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   *desc = (struct anv_descriptor) {
      .type = type,
      .buffer_view = buffer_view,
   };
}
void
anv_descriptor_set_write_buffer(struct anv_descriptor_set *set,
                                struct anv_device *device,
                                struct anv_state_stream *alloc_stream,
                                VkDescriptorType type,
                                struct anv_buffer *buffer,
                                uint32_t binding,
                                uint32_t element,
                                VkDeviceSize offset,
                                VkDeviceSize range)
{
   const struct anv_descriptor_set_binding_layout *bind_layout =
      &set->layout->binding[binding];
   struct anv_descriptor *desc =
      &set->descriptors[bind_layout->descriptor_index + element];

   assert(type == bind_layout->type);

   if (type == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC ||
       type == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC) {
      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer = buffer,
         .offset = offset,
         .range = range,
      };
   } else {
      struct anv_buffer_view *bview =
         &set->buffer_views[bind_layout->buffer_index + element];

      bview->format = anv_isl_format_for_descriptor_type(type);
      bview->range = anv_buffer_get_range(buffer, offset, range);
      bview->address = anv_address_add(buffer->address, offset);

      /* If we're writing descriptors through a push command, we need to
       * allocate the surface state from the command buffer. Otherwise it
       * will be allocated by the descriptor pool when calling
       * vkAllocateDescriptorSets.
       */
      if (alloc_stream)
         bview->surface_state = anv_state_stream_alloc(alloc_stream, 64, 64);

      anv_fill_buffer_surface_state(device, bview->surface_state,
                                    bview->format,
                                    bview->address, bview->range, 1);

      *desc = (struct anv_descriptor) {
         .type = type,
         .buffer_view = bview,
      };
   }
}
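
/* Note (not from the original source): dynamic buffer descriptors keep the
 * raw buffer/offset/range instead of baking a buffer view here, since the
 * effective offset is only known once the dynamic offsets are supplied at
 * descriptor-set bind time.
 */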
void anv_UpdateDescriptorSets(
    VkDevice                                    _device,
    uint32_t                                    descriptorWriteCount,
    const VkWriteDescriptorSet*                 pDescriptorWrites,
    uint32_t                                    descriptorCopyCount,
    const VkCopyDescriptorSet*                  pDescriptorCopies)
{
   ANV_FROM_HANDLE(anv_device, device, _device);

   for (uint32_t i = 0; i < descriptorWriteCount; i++) {
      const VkWriteDescriptorSet *write = &pDescriptorWrites[i];
      ANV_FROM_HANDLE(anv_descriptor_set, set, write->dstSet);

      switch (write->descriptorType) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            anv_descriptor_set_write_image_view(set, &device->info,
                                                write->pImageInfo + j,
                                                write->descriptorType,
                                                write->dstBinding,
                                                write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            ANV_FROM_HANDLE(anv_buffer_view, bview,
                            write->pTexelBufferView[j]);

            anv_descriptor_set_write_buffer_view(set,
                                                 write->descriptorType,
                                                 bview,
                                                 write->dstBinding,
                                                 write->dstArrayElement + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < write->descriptorCount; j++) {
            assert(write->pBufferInfo[j].buffer);
            ANV_FROM_HANDLE(anv_buffer, buffer, write->pBufferInfo[j].buffer);
            assert(buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            NULL,
                                            write->descriptorType,
                                            buffer,
                                            write->dstBinding,
                                            write->dstArrayElement + j,
                                            write->pBufferInfo[j].offset,
                                            write->pBufferInfo[j].range);
         }
         break;

      default:
         break;
      }
   }

   for (uint32_t i = 0; i < descriptorCopyCount; i++) {
      const VkCopyDescriptorSet *copy = &pDescriptorCopies[i];
      ANV_FROM_HANDLE(anv_descriptor_set, src, copy->srcSet);
      ANV_FROM_HANDLE(anv_descriptor_set, dst, copy->dstSet);

      const struct anv_descriptor_set_binding_layout *src_layout =
         &src->layout->binding[copy->srcBinding];
      struct anv_descriptor *src_desc =
         &src->descriptors[src_layout->descriptor_index];
      src_desc += copy->srcArrayElement;

      const struct anv_descriptor_set_binding_layout *dst_layout =
         &dst->layout->binding[copy->dstBinding];
      struct anv_descriptor *dst_desc =
         &dst->descriptors[dst_layout->descriptor_index];
      dst_desc += copy->dstArrayElement;

      for (uint32_t j = 0; j < copy->descriptorCount; j++)
         dst_desc[j] = src_desc[j];
   }
}
/*
 * Descriptor update templates.
 */

void
anv_descriptor_set_write_template(struct anv_descriptor_set *set,
                                  struct anv_device *device,
                                  struct anv_state_stream *alloc_stream,
                                  const struct anv_descriptor_update_template *template,
                                  const void *data)
{
   const struct anv_descriptor_set_layout *layout = set->layout;

   for (uint32_t i = 0; i < template->entry_count; i++) {
      const struct anv_descriptor_template_entry *entry =
         &template->entries[i];
      const struct anv_descriptor_set_binding_layout *bind_layout =
         &layout->binding[entry->binding];
      struct anv_descriptor *desc = &set->descriptors[bind_layout->descriptor_index];
      desc += entry->array_element;

      switch (entry->type) {
      case VK_DESCRIPTOR_TYPE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER:
      case VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE:
      case VK_DESCRIPTOR_TYPE_STORAGE_IMAGE:
      case VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorImageInfo *info =
               data + entry->offset + j * entry->stride;
            anv_descriptor_set_write_image_view(set, &device->info,
                                                info, entry->type,
                                                entry->binding,
                                                entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkBufferView *_bview =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer_view, bview, *_bview);

            anv_descriptor_set_write_buffer_view(set,
                                                 entry->type,
                                                 bview,
                                                 entry->binding,
                                                 entry->array_element + j);
         }
         break;

      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER:
      case VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC:
      case VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC:
         for (uint32_t j = 0; j < entry->array_count; j++) {
            const VkDescriptorBufferInfo *info =
               data + entry->offset + j * entry->stride;
            ANV_FROM_HANDLE(anv_buffer, buffer, info->buffer);

            anv_descriptor_set_write_buffer(set,
                                            device,
                                            alloc_stream,
                                            entry->type,
                                            buffer,
                                            entry->binding,
                                            entry->array_element + j,
                                            info->offset, info->range);
         }
         break;

      default:
         break;
      }
   }
}
VkResult anv_CreateDescriptorUpdateTemplate(
    VkDevice                                    _device,
    const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkDescriptorUpdateTemplate*                 pDescriptorUpdateTemplate)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   struct anv_descriptor_update_template *template;

   size_t size = sizeof(*template) +
      pCreateInfo->descriptorUpdateEntryCount * sizeof(template->entries[0]);
   template = vk_alloc2(&device->alloc, pAllocator, size, 8,
                        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (template == NULL)
      return vk_error(VK_ERROR_OUT_OF_HOST_MEMORY);

   template->bind_point = pCreateInfo->pipelineBindPoint;

   if (pCreateInfo->templateType == VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET)
      template->set = pCreateInfo->set;

   template->entry_count = pCreateInfo->descriptorUpdateEntryCount;
   for (uint32_t i = 0; i < template->entry_count; i++) {
      const VkDescriptorUpdateTemplateEntryKHR *pEntry =
         &pCreateInfo->pDescriptorUpdateEntries[i];

      template->entries[i] = (struct anv_descriptor_template_entry) {
         .type = pEntry->descriptorType,
         .binding = pEntry->dstBinding,
         .array_element = pEntry->dstArrayElement,
         .array_count = pEntry->descriptorCount,
         .offset = pEntry->offset,
         .stride = pEntry->stride,
      };
   }

   *pDescriptorUpdateTemplate =
      anv_descriptor_update_template_to_handle(template);

   return VK_SUCCESS;
}

void anv_DestroyDescriptorUpdateTemplate(
    VkDevice                                    _device,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const VkAllocationCallbacks*                pAllocator)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   vk_free2(&device->alloc, pAllocator, template);
}

void anv_UpdateDescriptorSetWithTemplate(
    VkDevice                                    _device,
    VkDescriptorSet                             descriptorSet,
    VkDescriptorUpdateTemplate                  descriptorUpdateTemplate,
    const void*                                 pData)
{
   ANV_FROM_HANDLE(anv_device, device, _device);
   ANV_FROM_HANDLE(anv_descriptor_set, set, descriptorSet);
   ANV_FROM_HANDLE(anv_descriptor_update_template, template,
                   descriptorUpdateTemplate);

   anv_descriptor_set_write_template(set, device, NULL, template, pData);
}
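
/* Illustrative usage sketch (not part of the driver code), assuming a set
 * layout whose binding 0 is a single uniform buffer:
 *
 *    const VkDescriptorUpdateTemplateEntry entry = {
 *       .dstBinding = 0,
 *       .dstArrayElement = 0,
 *       .descriptorCount = 1,
 *       .descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,
 *       .offset = 0,
 *       .stride = sizeof(VkDescriptorBufferInfo),
 *    };
 *
 * After creating a template of type
 * VK_DESCRIPTOR_UPDATE_TEMPLATE_TYPE_DESCRIPTOR_SET from that entry, the
 * app updates the set with a bare data pointer instead of an array of
 * VkWriteDescriptorSet structs:
 *
 *    VkDescriptorBufferInfo info = { buffer, 0, VK_WHOLE_SIZE };
 *    vkUpdateDescriptorSetWithTemplate(device, set, template, &info);
 *
 * pData is then walked using the offset/stride recorded in each entry,
 * exactly as anv_descriptor_set_write_template does above.
 */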