
radv: add radv_meta_save() helper

And merge radv_meta_save_novertex() with
radv_meta_save_graphics_reset_vport_scissor_novertex().

Signed-off-by: Samuel Pitoiset <samuel.pitoiset@gmail.com>
Reviewed-by: Bas Nieuwenhuizen <bas@basnieuwenhuizen.nl>
commit ba3dc3519d by Samuel Pitoiset (tags/17.3-branchpoint)
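
Caller-side note (not part of the commit): every call site changes the same way, so the diffs below are mechanical. A minimal caller sketch, assuming a hypothetical meta operation; radv_meta_restore() already exists and is unchanged here:

/* Hypothetical caller, illustrating the pattern used by the call sites below. */
static void
example_meta_op(struct radv_cmd_buffer *cmd_buffer)
{
        struct radv_meta_saved_state saved_state;

        /* radv_meta_save() now also resets the viewport/scissor dynamic
         * state, which the old helper did for its callers. */
        radv_meta_save(&saved_state, cmd_buffer,
                       RADV_META_SAVE_GRAPHICS_PIPELINE |
                       RADV_META_SAVE_CONSTANTS);

        /* ... emit the meta rendering commands here ... */

        radv_meta_restore(&saved_state, cmd_buffer);
}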

src/amd/vulkan/radv_meta.c (+12, -21)

@@ -30,10 +30,9 @@
 #include <pwd.h>
 #include <sys/stat.h>
 
-static void
-radv_meta_save_novertex(struct radv_meta_saved_state *state,
-                        const struct radv_cmd_buffer *cmd_buffer,
-                        uint32_t flags)
+void
+radv_meta_save(struct radv_meta_saved_state *state,
+               struct radv_cmd_buffer *cmd_buffer, uint32_t flags)
 {
         state->flags = flags;
 
@@ -51,6 +50,15 @@ radv_meta_save_novertex(struct radv_meta_saved_state *state,
                 typed_memcpy(state->scissor.scissors,
                              cmd_buffer->state.dynamic.scissor.scissors,
                              MAX_SCISSORS);
+
+                /* The most common meta operations all want to have the
+                 * viewport reset and any scissors disabled. The rest of the
+                 * dynamic state should have no effect.
+                 */
+                cmd_buffer->state.dynamic.viewport.count = 0;
+                cmd_buffer->state.dynamic.scissor.count = 0;
+                cmd_buffer->state.dirty |= 1 << VK_DYNAMIC_STATE_VIEWPORT |
+                                           1 << VK_DYNAMIC_STATE_SCISSOR;
         }
 
         if (state->flags & RADV_META_SAVE_DESCRIPTORS) {
@@ -419,23 +427,6 @@ radv_device_finish_meta(struct radv_device *device)
         radv_pipeline_cache_finish(&device->meta_state.cache);
 }
 
-/*
- * The most common meta operations all want to have the viewport
- * reset and any scissors disabled. The rest of the dynamic state
- * should have no effect.
- */
-void
-radv_meta_save_graphics_reset_vport_scissor_novertex(struct radv_meta_saved_state *saved_state,
-                                                     struct radv_cmd_buffer *cmd_buffer,
-                                                     uint32_t flags)
-{
-        radv_meta_save_novertex(saved_state, cmd_buffer, flags);
-        cmd_buffer->state.dynamic.viewport.count = 0;
-        cmd_buffer->state.dynamic.scissor.count = 0;
-        cmd_buffer->state.dirty |= 1 << VK_DYNAMIC_STATE_VIEWPORT |
-                                   1 << VK_DYNAMIC_STATE_SCISSOR;
-}
-
 nir_ssa_def *radv_meta_gen_rect_vertices_comp2(nir_builder *vs_b, nir_ssa_def *comp2)
 {


src/amd/vulkan/radv_meta.h (+3, -4)

@@ -95,6 +95,9 @@ void radv_device_finish_meta_resolve_compute_state(struct radv_device *device);
 VkResult radv_device_init_meta_resolve_fragment_state(struct radv_device *device);
 void radv_device_finish_meta_resolve_fragment_state(struct radv_device *device);
 
+void radv_meta_save(struct radv_meta_saved_state *saved_state,
+                    struct radv_cmd_buffer *cmd_buffer, uint32_t flags);
+
 void radv_meta_restore(const struct radv_meta_saved_state *state,
                        struct radv_cmd_buffer *cmd_buffer);
 
@@ -182,10 +185,6 @@ void radv_fast_clear_flush_image_inplace(struct radv_cmd_buffer *cmd_buffer,
                                          struct radv_image *image,
                                          const VkImageSubresourceRange *subresourceRange);
 
-void radv_meta_save_graphics_reset_vport_scissor_novertex(struct radv_meta_saved_state *saved_state,
-                                                           struct radv_cmd_buffer *cmd_buffer,
-                                                           uint32_t flags);
-
 void radv_meta_resolve_compute_image(struct radv_cmd_buffer *cmd_buffer,
                                      struct radv_image *src_image,
                                      VkImageLayout src_image_layout,

src/amd/vulkan/radv_meta_blit.c (+4, -4)

@@ -510,10 +510,10 @@ void radv_CmdBlitImage(
         assert(src_image->info.samples == 1);
         assert(dest_image->info.samples == 1);
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS |
-                                                             RADV_META_SAVE_DESCRIPTORS);
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS |
+                       RADV_META_SAVE_DESCRIPTORS);
 
         for (unsigned r = 0; r < regionCount; r++) {
                 const VkImageSubresourceLayers *src_res = &pRegions[r].srcSubresource;

src/amd/vulkan/radv_meta_clear.c (+12, -16)

@@ -1145,10 +1145,9 @@ radv_cmd_buffer_clear_subpass(struct radv_cmd_buffer *cmd_buffer)
         if (!radv_subpass_needs_clear(cmd_buffer))
                 return;
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS);
-
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS);
 
         for (uint32_t i = 0; i < cmd_state->subpass->color_count; ++i) {
                 uint32_t a = cmd_state->subpass->color_attachments[i].attachment;
@@ -1390,10 +1389,9 @@ void radv_CmdClearColorImage(
         if (cs)
                 radv_meta_save_compute(&saved_state.compute, cmd_buffer, 16);
         else
-                radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer,
-                                                                     RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                                     RADV_META_SAVE_CONSTANTS);
-
+                radv_meta_save(&saved_state.gfx, cmd_buffer,
+                               RADV_META_SAVE_GRAPHICS_PIPELINE |
+                               RADV_META_SAVE_CONSTANTS);
 
         radv_cmd_clear_image(cmd_buffer, image, imageLayout,
                              (const VkClearValue *) pColor,
@@ -1417,10 +1415,9 @@ void radv_CmdClearDepthStencilImage(
         RADV_FROM_HANDLE(radv_image, image, image_h);
         struct radv_meta_saved_state saved_state;
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS);
-
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS);
 
         radv_cmd_clear_image(cmd_buffer, image, imageLayout,
                              (const VkClearValue *) pDepthStencil,
@@ -1444,10 +1441,9 @@ void radv_CmdClearAttachments(
         if (!cmd_buffer->state.subpass)
                 return;
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS);
-
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS);
 
         /* FINISHME: We can do better than this dumb loop. It thrashes too much
          * state.

src/amd/vulkan/radv_meta_copy.c (+8, -9)

@@ -123,10 +123,10 @@ meta_copy_buffer_to_image(struct radv_cmd_buffer *cmd_buffer,
         if (cs)
                 radv_meta_save_compute(&saved_state.compute, cmd_buffer, 12);
         else
-                radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer,
-                                                                     RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                                     RADV_META_SAVE_CONSTANTS |
-                                                                     RADV_META_SAVE_DESCRIPTORS);
+                radv_meta_save(&saved_state.gfx, cmd_buffer,
+                               RADV_META_SAVE_GRAPHICS_PIPELINE |
+                               RADV_META_SAVE_CONSTANTS |
+                               RADV_META_SAVE_DESCRIPTORS);
 
         for (unsigned r = 0; r < regionCount; r++) {

@@ -345,11 +345,10 @@ meta_copy_image(struct radv_cmd_buffer *cmd_buffer,
         if (cs)
                 radv_meta_save_compute(&saved_state.compute, cmd_buffer, 16);
         else
-                radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state.gfx, cmd_buffer,
-                                                                     RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                                     RADV_META_SAVE_CONSTANTS |
-                                                                     RADV_META_SAVE_DESCRIPTORS);
-
+                radv_meta_save(&saved_state.gfx, cmd_buffer,
+                               RADV_META_SAVE_GRAPHICS_PIPELINE |
+                               RADV_META_SAVE_CONSTANTS |
+                               RADV_META_SAVE_DESCRIPTORS);
 
         for (unsigned r = 0; r < regionCount; r++) {
                 assert(pRegions[r].srcSubresource.aspectMask ==

src/amd/vulkan/radv_meta_decompress.c (+3, -3)

@@ -315,9 +315,9 @@ static void radv_process_depth_image_inplace(struct radv_cmd_buffer *cmd_buffer,
         if (!image->surface.htile_size)
                 return;
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_PASS);
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_PASS);
 
         switch (op) {
         case DEPTH_DECOMPRESS:

src/amd/vulkan/radv_meta_fast_clear.c (+3, -3)

@@ -343,9 +343,9 @@ radv_fast_clear_flush_image_inplace(struct radv_cmd_buffer *cmd_buffer,

         assert(cmd_buffer->queue_family_index == RADV_QUEUE_GENERAL);
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_PASS);
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_PASS);
 
         if (image->fmask.size > 0) {
                 pipeline = cmd_buffer->device->meta_state.fast_clear_flush.fmask_decompress_pipeline;

src/amd/vulkan/radv_meta_resolve.c (+4, -4)

@@ -365,8 +365,8 @@ void radv_CmdResolveImage(
                 return;
         }
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE);
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE);
 
         assert(src_image->info.samples > 1);
         if (src_image->info.samples <= 1) {
@@ -574,8 +574,8 @@ radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer)
                 return;
         }
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE);
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE);
 
         for (uint32_t i = 0; i < subpass->color_count; ++i) {
                 VkAttachmentReference src_att = subpass->color_attachments[i];

src/amd/vulkan/radv_meta_resolve_fs.c (+9, -9)

@@ -468,10 +468,11 @@ void radv_meta_resolve_fragment_image(struct radv_cmd_buffer *cmd_buffer,
         }
 
         rp = device->meta_state.resolve_fragment.rc[samples_log2].render_pass[fs_key];
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS |
-                                                             RADV_META_SAVE_DESCRIPTORS);
+
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS |
+                       RADV_META_SAVE_DESCRIPTORS);
 
         for (uint32_t r = 0; r < region_count; ++r) {
                 const VkImageResolve *region = &regions[r];
@@ -598,11 +599,10 @@ radv_cmd_buffer_resolve_subpass_fs(struct radv_cmd_buffer *cmd_buffer)
         if (!subpass->has_resolve)
                 return;
 
-        radv_meta_save_graphics_reset_vport_scissor_novertex(&saved_state, cmd_buffer,
-                                                             RADV_META_SAVE_GRAPHICS_PIPELINE |
-                                                             RADV_META_SAVE_CONSTANTS |
-                                                             RADV_META_SAVE_DESCRIPTORS);
-
+        radv_meta_save(&saved_state, cmd_buffer,
+                       RADV_META_SAVE_GRAPHICS_PIPELINE |
+                       RADV_META_SAVE_CONSTANTS |
+                       RADV_META_SAVE_DESCRIPTORS);
 
         for (uint32_t i = 0; i < subpass->color_count; ++i) {
                 VkAttachmentReference src_att = subpass->color_attachments[i];
