I made some comments on v1 of this patch which appear to have gone unanswered. The two I think are important are:

1) Naming. Should it be plane_format or format_plane? I think I mildly prefer the latter but could be convinced.

2) I don't think anv_get_isl_format needs to be renamed. It already takes an aspect and does planar-like things and you aren't adding any parameters here. (A rough sketch of what I mean is below, after the quoted patch.)

On Tue, Oct 3, 2017 at 9:29 AM, Lionel Landwerlin < lionel.g.landwer...@intel.com> wrote:
> A given Vulkan format can now be decomposed into a set of planes. We > now use 'struct anv_format_plane' to represent the format of those > planes. > > Signed-off-by: Lionel Landwerlin <lionel.g.landwer...@intel.com> > --- > src/intel/vulkan/anv_blorp.c | 31 +-- > src/intel/vulkan/anv_formats.c | 505 ++++++++++++++++++++---------- > --------- > src/intel/vulkan/anv_image.c | 27 ++- > src/intel/vulkan/anv_private.h | 54 ++++- > src/intel/vulkan/genX_pipeline.c | 14 +- > 5 files changed, 351 insertions(+), 280 deletions(-) > > diff --git a/src/intel/vulkan/anv_blorp.c b/src/intel/vulkan/anv_blorp.c > index 8dead1d87a8..72f482625aa 100644 > --- a/src/intel/vulkan/anv_blorp.c > +++ b/src/intel/vulkan/anv_blorp.c > @@ -323,8 +323,9 @@ copy_buffer_to_image(struct anv_cmd_buffer *cmd_buffer, > } > > const enum isl_format buffer_format = > - anv_get_isl_format(&cmd_buffer->device->info, > anv_image->vk_format, > - aspect, VK_IMAGE_TILING_LINEAR); > + anv_get_isl_plane_format(&cmd_buffer->device->info, > + anv_image->vk_format, > + aspect, VK_IMAGE_TILING_LINEAR); > > const VkExtent3D bufferImageExtent = { > .width = pRegions[r].bufferRowLength ? > @@ -459,12 +460,12 @@ void anv_CmdBlitImage( > get_blorp_surf_for_anv_image(dst_image, dst_res->aspectMask, > dst_image->aux_usage, &dst); > > - struct anv_format src_format = > - anv_get_format(&cmd_buffer->device->info, src_image->vk_format, > - src_res->aspectMask, src_image->tiling); > - struct anv_format dst_format = > - anv_get_format(&cmd_buffer->device->info, dst_image->vk_format, > - dst_res->aspectMask, dst_image->tiling); > + struct anv_format_plane src_format = > + anv_get_plane_format(&cmd_buffer->device->info, > src_image->vk_format, > + src_res->aspectMask, src_image->tiling); > + struct anv_format_plane dst_format = > + anv_get_plane_format(&cmd_buffer->device->info, > dst_image->vk_format, > + dst_res->aspectMask, dst_image->tiling); > > unsigned dst_start, dst_end; > if (dst_image->type == VK_IMAGE_TYPE_3D) { > @@ -758,9 +759,9 @@ void anv_CmdClearColorImage( > > assert(pRanges[r].aspectMask == VK_IMAGE_ASPECT_COLOR_BIT); > > - struct anv_format src_format = > - anv_get_format(&cmd_buffer->device->info, image->vk_format, > - VK_IMAGE_ASPECT_COLOR_BIT, image->tiling); > + struct anv_format_plane src_format = > + anv_get_plane_format(&cmd_buffer->device->info, > image->vk_format, > + VK_IMAGE_ASPECT_COLOR_BIT, image->tiling); > > unsigned base_layer = pRanges[r].baseArrayLayer; > unsigned layer_count = anv_get_layerCount(image, &pRanges[r]); > @@ -974,10 +975,10 @@ clear_depth_stencil_attachment(struct > anv_cmd_buffer *cmd_buffer, > > enum isl_format depth_format = ISL_FORMAT_UNSUPPORTED; > if (clear_depth) { > - depth_format = anv_get_isl_format(&cmd_buffer->device->info, > - pass_att->format, > - VK_IMAGE_ASPECT_DEPTH_BIT, > - VK_IMAGE_TILING_OPTIMAL); > + depth_format = anv_get_isl_plane_format(&cmd_buffer->device->info, > + pass_att->format, > + VK_IMAGE_ASPECT_DEPTH_BIT, > + VK_IMAGE_TILING_OPTIMAL); > } > > uint32_t binding_table; > diff --git a/src/intel/vulkan/anv_formats.c b/src/intel/vulkan/anv_ > formats.c > index 71824256b25..8dd87133c0c 100644 > --- 
a/src/intel/vulkan/anv_formats.c > +++ b/src/intel/vulkan/anv_formats.c > @@ -44,14 +44,33 @@ > #define BGRA _ISL_SWIZZLE(BLUE, GREEN, RED, ALPHA) > #define RGB1 _ISL_SWIZZLE(RED, GREEN, BLUE, ONE) > > -#define swiz_fmt(__vk_fmt, __hw_fmt, __swizzle) \ > - [__vk_fmt] = { \ > - .isl_format = __hw_fmt, \ > - .swizzle = __swizzle, \ > +#define _fmt(__hw_fmt, __swizzle) \ > + { .isl_format = __hw_fmt, \ > + .swizzle = __swizzle } > + > +#define swiz_fmt1(__vk_fmt, __hw_fmt, __swizzle) \ > + [vk_enum_offset(__vk_fmt)] = { \ > + .planes = { \ > + { .isl_format = __hw_fmt, .swizzle = __swizzle }, \ > + }, \ > + .n_planes = 1, \ > } > > -#define fmt(__vk_fmt, __hw_fmt) \ > - swiz_fmt(__vk_fmt, __hw_fmt, RGBA) > +#define fmt1(__vk_fmt, __hw_fmt) \ > + swiz_fmt1(__vk_fmt, __hw_fmt, RGBA) > + > +#define ds_fmt(__vk_fmt, __depth_fmt, __stencil_fmt) \ > + [vk_enum_offset(__vk_fmt)] = { \ > + .planes = { \ > + { .isl_format = __depth_fmt, \ > + .swizzle = RGBA, \ > + }, \ > + { .isl_format = __stencil_fmt, \ > + .swizzle = RGBA, \ > + }, \ > + }, \ > + .n_planes = 2, \ > + } > > /* HINT: For array formats, the ISL name should match the VK name. For > * packed formats, they should have the channels in reverse order from > each > @@ -59,196 +78,199 @@ > * bspec) names are in LSB -> MSB order while VK formats are MSB -> LSB. > */ > static const struct anv_format main_formats[] = { > - fmt(VK_FORMAT_UNDEFINED, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_R4G4_UNORM_PACK8, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_R4G4B4A4_UNORM_PACK16, ISL_FORMAT_A4B4G4R4_UNORM), > - swiz_fmt(VK_FORMAT_B4G4R4A4_UNORM_PACK16, > ISL_FORMAT_A4B4G4R4_UNORM, BGRA), > - fmt(VK_FORMAT_R5G6B5_UNORM_PACK16, ISL_FORMAT_B5G6R5_UNORM), > - swiz_fmt(VK_FORMAT_B5G6R5_UNORM_PACK16, ISL_FORMAT_B5G6R5_UNORM, > BGRA), > - fmt(VK_FORMAT_R5G5B5A1_UNORM_PACK16, ISL_FORMAT_A1B5G5R5_UNORM), > - fmt(VK_FORMAT_B5G5R5A1_UNORM_PACK16, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_A1R5G5B5_UNORM_PACK16, ISL_FORMAT_B5G5R5A1_UNORM), > - fmt(VK_FORMAT_R8_UNORM, ISL_FORMAT_R8_UNORM), > - fmt(VK_FORMAT_R8_SNORM, ISL_FORMAT_R8_SNORM), > - fmt(VK_FORMAT_R8_USCALED, ISL_FORMAT_R8_USCALED), > - fmt(VK_FORMAT_R8_SSCALED, ISL_FORMAT_R8_SSCALED), > - fmt(VK_FORMAT_R8_UINT, ISL_FORMAT_R8_UINT), > - fmt(VK_FORMAT_R8_SINT, ISL_FORMAT_R8_SINT), > - swiz_fmt(VK_FORMAT_R8_SRGB, ISL_FORMAT_L8_UNORM_SRGB, > + fmt1(VK_FORMAT_UNDEFINED, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_R4G4_UNORM_PACK8, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_R4G4B4A4_UNORM_PACK16, ISL_FORMAT_A4B4G4R4_UNORM), > + swiz_fmt1(VK_FORMAT_B4G4R4A4_UNORM_PACK16, > ISL_FORMAT_A4B4G4R4_UNORM, BGRA), > + fmt1(VK_FORMAT_R5G6B5_UNORM_PACK16, ISL_FORMAT_B5G6R5_UNORM), > + swiz_fmt1(VK_FORMAT_B5G6R5_UNORM_PACK16, ISL_FORMAT_B5G6R5_UNORM, > BGRA), > + fmt1(VK_FORMAT_R5G5B5A1_UNORM_PACK16, ISL_FORMAT_A1B5G5R5_UNORM), > + fmt1(VK_FORMAT_B5G5R5A1_UNORM_PACK16, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_A1R5G5B5_UNORM_PACK16, ISL_FORMAT_B5G5R5A1_UNORM), > + fmt1(VK_FORMAT_R8_UNORM, ISL_FORMAT_R8_UNORM), > + fmt1(VK_FORMAT_R8_SNORM, ISL_FORMAT_R8_SNORM), > + fmt1(VK_FORMAT_R8_USCALED, ISL_FORMAT_R8_USCALED), > + fmt1(VK_FORMAT_R8_SSCALED, ISL_FORMAT_R8_SSCALED), > + fmt1(VK_FORMAT_R8_UINT, ISL_FORMAT_R8_UINT), > + fmt1(VK_FORMAT_R8_SINT, ISL_FORMAT_R8_SINT), > + swiz_fmt1(VK_FORMAT_R8_SRGB, ISL_FORMAT_L8_UNORM_SRGB, > _ISL_SWIZZLE(RED, ZERO, ZERO, > ONE)), > - fmt(VK_FORMAT_R8G8_UNORM, ISL_FORMAT_R8G8_UNORM), > - fmt(VK_FORMAT_R8G8_SNORM, ISL_FORMAT_R8G8_SNORM), > - fmt(VK_FORMAT_R8G8_USCALED, 
ISL_FORMAT_R8G8_USCALED), > - fmt(VK_FORMAT_R8G8_SSCALED, ISL_FORMAT_R8G8_SSCALED), > - fmt(VK_FORMAT_R8G8_UINT, ISL_FORMAT_R8G8_UINT), > - fmt(VK_FORMAT_R8G8_SINT, ISL_FORMAT_R8G8_SINT), > - fmt(VK_FORMAT_R8G8_SRGB, ISL_FORMAT_UNSUPPORTED), /* > L8A8_UNORM_SRGB */ > - fmt(VK_FORMAT_R8G8B8_UNORM, ISL_FORMAT_R8G8B8_UNORM), > - fmt(VK_FORMAT_R8G8B8_SNORM, ISL_FORMAT_R8G8B8_SNORM), > - fmt(VK_FORMAT_R8G8B8_USCALED, ISL_FORMAT_R8G8B8_USCALED), > - fmt(VK_FORMAT_R8G8B8_SSCALED, ISL_FORMAT_R8G8B8_SSCALED), > - fmt(VK_FORMAT_R8G8B8_UINT, ISL_FORMAT_R8G8B8_UINT), > - fmt(VK_FORMAT_R8G8B8_SINT, ISL_FORMAT_R8G8B8_SINT), > - fmt(VK_FORMAT_R8G8B8_SRGB, ISL_FORMAT_R8G8B8_UNORM_SRGB), > - fmt(VK_FORMAT_R8G8B8A8_UNORM, ISL_FORMAT_R8G8B8A8_UNORM), > - fmt(VK_FORMAT_R8G8B8A8_SNORM, ISL_FORMAT_R8G8B8A8_SNORM), > - fmt(VK_FORMAT_R8G8B8A8_USCALED, ISL_FORMAT_R8G8B8A8_USCALED), > - fmt(VK_FORMAT_R8G8B8A8_SSCALED, ISL_FORMAT_R8G8B8A8_SSCALED), > - fmt(VK_FORMAT_R8G8B8A8_UINT, ISL_FORMAT_R8G8B8A8_UINT), > - fmt(VK_FORMAT_R8G8B8A8_SINT, ISL_FORMAT_R8G8B8A8_SINT), > - fmt(VK_FORMAT_R8G8B8A8_SRGB, ISL_FORMAT_R8G8B8A8_UNORM_ > SRGB), > - fmt(VK_FORMAT_A8B8G8R8_UNORM_PACK32, ISL_FORMAT_R8G8B8A8_UNORM), > - fmt(VK_FORMAT_A8B8G8R8_SNORM_PACK32, ISL_FORMAT_R8G8B8A8_SNORM), > - fmt(VK_FORMAT_A8B8G8R8_USCALED_PACK32, ISL_FORMAT_R8G8B8A8_USCALED), > - fmt(VK_FORMAT_A8B8G8R8_SSCALED_PACK32, ISL_FORMAT_R8G8B8A8_SSCALED), > - fmt(VK_FORMAT_A8B8G8R8_UINT_PACK32, ISL_FORMAT_R8G8B8A8_UINT), > - fmt(VK_FORMAT_A8B8G8R8_SINT_PACK32, ISL_FORMAT_R8G8B8A8_SINT), > - fmt(VK_FORMAT_A8B8G8R8_SRGB_PACK32, ISL_FORMAT_R8G8B8A8_UNORM_ > SRGB), > - fmt(VK_FORMAT_A2R10G10B10_UNORM_PACK32, ISL_FORMAT_B10G10R10A2_UNORM), > - fmt(VK_FORMAT_A2R10G10B10_SNORM_PACK32, ISL_FORMAT_B10G10R10A2_SNORM), > - fmt(VK_FORMAT_A2R10G10B10_USCALED_PACK32, ISL_FORMAT_B10G10R10A2_ > USCALED), > - fmt(VK_FORMAT_A2R10G10B10_SSCALED_PACK32, ISL_FORMAT_B10G10R10A2_ > SSCALED), > - fmt(VK_FORMAT_A2R10G10B10_UINT_PACK32, ISL_FORMAT_B10G10R10A2_UINT), > - fmt(VK_FORMAT_A2R10G10B10_SINT_PACK32, ISL_FORMAT_B10G10R10A2_SINT), > - fmt(VK_FORMAT_A2B10G10R10_UNORM_PACK32, ISL_FORMAT_R10G10B10A2_UNORM), > - fmt(VK_FORMAT_A2B10G10R10_SNORM_PACK32, ISL_FORMAT_R10G10B10A2_SNORM), > - fmt(VK_FORMAT_A2B10G10R10_USCALED_PACK32, ISL_FORMAT_R10G10B10A2_ > USCALED), > - fmt(VK_FORMAT_A2B10G10R10_SSCALED_PACK32, ISL_FORMAT_R10G10B10A2_ > SSCALED), > - fmt(VK_FORMAT_A2B10G10R10_UINT_PACK32, ISL_FORMAT_R10G10B10A2_UINT), > - fmt(VK_FORMAT_A2B10G10R10_SINT_PACK32, ISL_FORMAT_R10G10B10A2_SINT), > - fmt(VK_FORMAT_R16_UNORM, ISL_FORMAT_R16_UNORM), > - fmt(VK_FORMAT_R16_SNORM, ISL_FORMAT_R16_SNORM), > - fmt(VK_FORMAT_R16_USCALED, ISL_FORMAT_R16_USCALED), > - fmt(VK_FORMAT_R16_SSCALED, ISL_FORMAT_R16_SSCALED), > - fmt(VK_FORMAT_R16_UINT, ISL_FORMAT_R16_UINT), > - fmt(VK_FORMAT_R16_SINT, ISL_FORMAT_R16_SINT), > - fmt(VK_FORMAT_R16_SFLOAT, ISL_FORMAT_R16_FLOAT), > - fmt(VK_FORMAT_R16G16_UNORM, ISL_FORMAT_R16G16_UNORM), > - fmt(VK_FORMAT_R16G16_SNORM, ISL_FORMAT_R16G16_SNORM), > - fmt(VK_FORMAT_R16G16_USCALED, ISL_FORMAT_R16G16_USCALED), > - fmt(VK_FORMAT_R16G16_SSCALED, ISL_FORMAT_R16G16_SSCALED), > - fmt(VK_FORMAT_R16G16_UINT, ISL_FORMAT_R16G16_UINT), > - fmt(VK_FORMAT_R16G16_SINT, ISL_FORMAT_R16G16_SINT), > - fmt(VK_FORMAT_R16G16_SFLOAT, ISL_FORMAT_R16G16_FLOAT), > - fmt(VK_FORMAT_R16G16B16_UNORM, ISL_FORMAT_R16G16B16_UNORM), > - fmt(VK_FORMAT_R16G16B16_SNORM, ISL_FORMAT_R16G16B16_SNORM), > - fmt(VK_FORMAT_R16G16B16_USCALED, ISL_FORMAT_R16G16B16_USCALED), > - fmt(VK_FORMAT_R16G16B16_SSCALED, 
ISL_FORMAT_R16G16B16_SSCALED), > - fmt(VK_FORMAT_R16G16B16_UINT, ISL_FORMAT_R16G16B16_UINT), > - fmt(VK_FORMAT_R16G16B16_SINT, ISL_FORMAT_R16G16B16_SINT), > - fmt(VK_FORMAT_R16G16B16_SFLOAT, ISL_FORMAT_R16G16B16_FLOAT), > - fmt(VK_FORMAT_R16G16B16A16_UNORM, ISL_FORMAT_R16G16B16A16_UNORM), > - fmt(VK_FORMAT_R16G16B16A16_SNORM, ISL_FORMAT_R16G16B16A16_SNORM), > - fmt(VK_FORMAT_R16G16B16A16_USCALED, ISL_FORMAT_R16G16B16A16_ > USCALED), > - fmt(VK_FORMAT_R16G16B16A16_SSCALED, ISL_FORMAT_R16G16B16A16_ > SSCALED), > - fmt(VK_FORMAT_R16G16B16A16_UINT, ISL_FORMAT_R16G16B16A16_UINT), > - fmt(VK_FORMAT_R16G16B16A16_SINT, ISL_FORMAT_R16G16B16A16_SINT), > - fmt(VK_FORMAT_R16G16B16A16_SFLOAT, ISL_FORMAT_R16G16B16A16_FLOAT), > - fmt(VK_FORMAT_R32_UINT, ISL_FORMAT_R32_UINT), > - fmt(VK_FORMAT_R32_SINT, ISL_FORMAT_R32_SINT), > - fmt(VK_FORMAT_R32_SFLOAT, ISL_FORMAT_R32_FLOAT), > - fmt(VK_FORMAT_R32G32_UINT, ISL_FORMAT_R32G32_UINT), > - fmt(VK_FORMAT_R32G32_SINT, ISL_FORMAT_R32G32_SINT), > - fmt(VK_FORMAT_R32G32_SFLOAT, ISL_FORMAT_R32G32_FLOAT), > - fmt(VK_FORMAT_R32G32B32_UINT, ISL_FORMAT_R32G32B32_UINT), > - fmt(VK_FORMAT_R32G32B32_SINT, ISL_FORMAT_R32G32B32_SINT), > - fmt(VK_FORMAT_R32G32B32_SFLOAT, ISL_FORMAT_R32G32B32_FLOAT), > - fmt(VK_FORMAT_R32G32B32A32_UINT, ISL_FORMAT_R32G32B32A32_UINT), > - fmt(VK_FORMAT_R32G32B32A32_SINT, ISL_FORMAT_R32G32B32A32_SINT), > - fmt(VK_FORMAT_R32G32B32A32_SFLOAT, ISL_FORMAT_R32G32B32A32_FLOAT), > - fmt(VK_FORMAT_R64_UINT, ISL_FORMAT_R64_PASSTHRU), > - fmt(VK_FORMAT_R64_SINT, ISL_FORMAT_R64_PASSTHRU), > - fmt(VK_FORMAT_R64_SFLOAT, ISL_FORMAT_R64_PASSTHRU), > - fmt(VK_FORMAT_R64G64_UINT, ISL_FORMAT_R64G64_PASSTHRU), > - fmt(VK_FORMAT_R64G64_SINT, ISL_FORMAT_R64G64_PASSTHRU), > - fmt(VK_FORMAT_R64G64_SFLOAT, ISL_FORMAT_R64G64_PASSTHRU), > - fmt(VK_FORMAT_R64G64B64_UINT, ISL_FORMAT_R64G64B64_PASSTHRU), > - fmt(VK_FORMAT_R64G64B64_SINT, ISL_FORMAT_R64G64B64_PASSTHRU), > - fmt(VK_FORMAT_R64G64B64_SFLOAT, ISL_FORMAT_R64G64B64_PASSTHRU), > - fmt(VK_FORMAT_R64G64B64A64_UINT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > - fmt(VK_FORMAT_R64G64B64A64_SINT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > - fmt(VK_FORMAT_R64G64B64A64_SFLOAT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > - fmt(VK_FORMAT_B10G11R11_UFLOAT_PACK32, ISL_FORMAT_R11G11B10_FLOAT), > - fmt(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, ISL_FORMAT_R9G9B9E5_SHAREDEXP), > - > - fmt(VK_FORMAT_D16_UNORM, ISL_FORMAT_R16_UNORM), > - fmt(VK_FORMAT_X8_D24_UNORM_PACK32, ISL_FORMAT_R24_UNORM_X8_ > TYPELESS), > - fmt(VK_FORMAT_D32_SFLOAT, ISL_FORMAT_R32_FLOAT), > - fmt(VK_FORMAT_S8_UINT, ISL_FORMAT_R8_UINT), > - fmt(VK_FORMAT_D16_UNORM_S8_UINT, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_D24_UNORM_S8_UINT, ISL_FORMAT_R24_UNORM_X8_ > TYPELESS), > - fmt(VK_FORMAT_D32_SFLOAT_S8_UINT, ISL_FORMAT_R32_FLOAT), > - > - swiz_fmt(VK_FORMAT_BC1_RGB_UNORM_BLOCK, ISL_FORMAT_BC1_UNORM, > RGB1), > - swiz_fmt(VK_FORMAT_BC1_RGB_SRGB_BLOCK, > ISL_FORMAT_BC1_UNORM_SRGB, RGB1), > - fmt(VK_FORMAT_BC1_RGBA_UNORM_BLOCK, ISL_FORMAT_BC1_UNORM), > - fmt(VK_FORMAT_BC1_RGBA_SRGB_BLOCK, ISL_FORMAT_BC1_UNORM_SRGB), > - fmt(VK_FORMAT_BC2_UNORM_BLOCK, ISL_FORMAT_BC2_UNORM), > - fmt(VK_FORMAT_BC2_SRGB_BLOCK, ISL_FORMAT_BC2_UNORM_SRGB), > - fmt(VK_FORMAT_BC3_UNORM_BLOCK, ISL_FORMAT_BC3_UNORM), > - fmt(VK_FORMAT_BC3_SRGB_BLOCK, ISL_FORMAT_BC3_UNORM_SRGB), > - fmt(VK_FORMAT_BC4_UNORM_BLOCK, ISL_FORMAT_BC4_UNORM), > - fmt(VK_FORMAT_BC4_SNORM_BLOCK, ISL_FORMAT_BC4_SNORM), > - fmt(VK_FORMAT_BC5_UNORM_BLOCK, ISL_FORMAT_BC5_UNORM), > - fmt(VK_FORMAT_BC5_SNORM_BLOCK, ISL_FORMAT_BC5_SNORM), > 
- fmt(VK_FORMAT_BC6H_UFLOAT_BLOCK, ISL_FORMAT_BC6H_UF16), > - fmt(VK_FORMAT_BC6H_SFLOAT_BLOCK, ISL_FORMAT_BC6H_SF16), > - fmt(VK_FORMAT_BC7_UNORM_BLOCK, ISL_FORMAT_BC7_UNORM), > - fmt(VK_FORMAT_BC7_SRGB_BLOCK, ISL_FORMAT_BC7_UNORM_SRGB), > - fmt(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8), > - fmt(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, ISL_FORMAT_ETC2_SRGB8), > - fmt(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8_PTA), > - fmt(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, ISL_FORMAT_ETC2_SRGB8_PTA), > - fmt(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, ISL_FORMAT_ETC2_EAC_RGBA8), > - fmt(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, ISL_FORMAT_ETC2_EAC_SRGB8_A8), > - fmt(VK_FORMAT_EAC_R11_UNORM_BLOCK, ISL_FORMAT_EAC_R11), > - fmt(VK_FORMAT_EAC_R11_SNORM_BLOCK, ISL_FORMAT_EAC_SIGNED_R11), > - fmt(VK_FORMAT_EAC_R11G11_UNORM_BLOCK, ISL_FORMAT_EAC_RG11), > - fmt(VK_FORMAT_EAC_R11G11_SNORM_BLOCK, ISL_FORMAT_EAC_SIGNED_RG11), > - fmt(VK_FORMAT_ASTC_4x4_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_4X4_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X4_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_5x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X5_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_6x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X5_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_6x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X6_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_8x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X5_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_8x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X6_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_8x8_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X8_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_10x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X5_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_10x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X6_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_10x8_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X8_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_10x10_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X10_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_12x10_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X10_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_12x12_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X12_ > U8SRGB), > - fmt(VK_FORMAT_ASTC_4x4_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_4X4_ > FLT16), > - fmt(VK_FORMAT_ASTC_5x4_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X4_ > FLT16), > - fmt(VK_FORMAT_ASTC_5x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X5_ > FLT16), > - fmt(VK_FORMAT_ASTC_6x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X5_ > FLT16), > - fmt(VK_FORMAT_ASTC_6x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X6_ > FLT16), > - fmt(VK_FORMAT_ASTC_8x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X5_ > FLT16), > - fmt(VK_FORMAT_ASTC_8x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X6_ > FLT16), > - fmt(VK_FORMAT_ASTC_8x8_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X8_ > FLT16), > - fmt(VK_FORMAT_ASTC_10x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X5_ > FLT16), > - fmt(VK_FORMAT_ASTC_10x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X6_ > FLT16), > - fmt(VK_FORMAT_ASTC_10x8_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X8_ > FLT16), > - fmt(VK_FORMAT_ASTC_10x10_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X10_ > FLT16), > - fmt(VK_FORMAT_ASTC_12x10_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X10_ > FLT16), > - fmt(VK_FORMAT_ASTC_12x12_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X12_ > FLT16), > - fmt(VK_FORMAT_B8G8R8_UNORM, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_SNORM, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_USCALED, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_SSCALED, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_UINT, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_SINT, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8_SRGB, ISL_FORMAT_UNSUPPORTED), > - 
fmt(VK_FORMAT_B8G8R8A8_UNORM, ISL_FORMAT_B8G8R8A8_UNORM), > - fmt(VK_FORMAT_B8G8R8A8_SNORM, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8A8_USCALED, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8A8_SSCALED, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8A8_UINT, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8A8_SINT, ISL_FORMAT_UNSUPPORTED), > - fmt(VK_FORMAT_B8G8R8A8_SRGB, ISL_FORMAT_B8G8R8A8_UNORM_ > SRGB), > + fmt1(VK_FORMAT_R8G8_UNORM, ISL_FORMAT_R8G8_UNORM), > + fmt1(VK_FORMAT_R8G8_SNORM, ISL_FORMAT_R8G8_SNORM), > + fmt1(VK_FORMAT_R8G8_USCALED, ISL_FORMAT_R8G8_USCALED), > + fmt1(VK_FORMAT_R8G8_SSCALED, ISL_FORMAT_R8G8_SSCALED), > + fmt1(VK_FORMAT_R8G8_UINT, ISL_FORMAT_R8G8_UINT), > + fmt1(VK_FORMAT_R8G8_SINT, ISL_FORMAT_R8G8_SINT), > + fmt1(VK_FORMAT_R8G8_SRGB, ISL_FORMAT_UNSUPPORTED), /* > L8A8_UNORM_SRGB */ > + fmt1(VK_FORMAT_R8G8B8_UNORM, ISL_FORMAT_R8G8B8_UNORM), > + fmt1(VK_FORMAT_R8G8B8_SNORM, ISL_FORMAT_R8G8B8_SNORM), > + fmt1(VK_FORMAT_R8G8B8_USCALED, ISL_FORMAT_R8G8B8_USCALED), > + fmt1(VK_FORMAT_R8G8B8_SSCALED, ISL_FORMAT_R8G8B8_SSCALED), > + fmt1(VK_FORMAT_R8G8B8_UINT, ISL_FORMAT_R8G8B8_UINT), > + fmt1(VK_FORMAT_R8G8B8_SINT, ISL_FORMAT_R8G8B8_SINT), > + fmt1(VK_FORMAT_R8G8B8_SRGB, ISL_FORMAT_R8G8B8_UNORM_SRGB), > + fmt1(VK_FORMAT_R8G8B8A8_UNORM, ISL_FORMAT_R8G8B8A8_UNORM), > + fmt1(VK_FORMAT_R8G8B8A8_SNORM, ISL_FORMAT_R8G8B8A8_SNORM), > + fmt1(VK_FORMAT_R8G8B8A8_USCALED, ISL_FORMAT_R8G8B8A8_USCALED), > + fmt1(VK_FORMAT_R8G8B8A8_SSCALED, ISL_FORMAT_R8G8B8A8_SSCALED), > + fmt1(VK_FORMAT_R8G8B8A8_UINT, ISL_FORMAT_R8G8B8A8_UINT), > + fmt1(VK_FORMAT_R8G8B8A8_SINT, ISL_FORMAT_R8G8B8A8_SINT), > + fmt1(VK_FORMAT_R8G8B8A8_SRGB, ISL_FORMAT_R8G8B8A8_UNORM_ > SRGB), > + fmt1(VK_FORMAT_A8B8G8R8_UNORM_PACK32, ISL_FORMAT_R8G8B8A8_UNORM), > + fmt1(VK_FORMAT_A8B8G8R8_SNORM_PACK32, ISL_FORMAT_R8G8B8A8_SNORM), > + fmt1(VK_FORMAT_A8B8G8R8_USCALED_PACK32, ISL_FORMAT_R8G8B8A8_USCALED), > + fmt1(VK_FORMAT_A8B8G8R8_SSCALED_PACK32, ISL_FORMAT_R8G8B8A8_SSCALED), > + fmt1(VK_FORMAT_A8B8G8R8_UINT_PACK32, ISL_FORMAT_R8G8B8A8_UINT), > + fmt1(VK_FORMAT_A8B8G8R8_SINT_PACK32, ISL_FORMAT_R8G8B8A8_SINT), > + fmt1(VK_FORMAT_A8B8G8R8_SRGB_PACK32, ISL_FORMAT_R8G8B8A8_UNORM_ > SRGB), > + fmt1(VK_FORMAT_A2R10G10B10_UNORM_PACK32, > ISL_FORMAT_B10G10R10A2_UNORM), > + fmt1(VK_FORMAT_A2R10G10B10_SNORM_PACK32, > ISL_FORMAT_B10G10R10A2_SNORM), > + fmt1(VK_FORMAT_A2R10G10B10_USCALED_PACK32, ISL_FORMAT_B10G10R10A2_ > USCALED), > + fmt1(VK_FORMAT_A2R10G10B10_SSCALED_PACK32, ISL_FORMAT_B10G10R10A2_ > SSCALED), > + fmt1(VK_FORMAT_A2R10G10B10_UINT_PACK32, ISL_FORMAT_B10G10R10A2_UINT), > + fmt1(VK_FORMAT_A2R10G10B10_SINT_PACK32, ISL_FORMAT_B10G10R10A2_SINT), > + fmt1(VK_FORMAT_A2B10G10R10_UNORM_PACK32, > ISL_FORMAT_R10G10B10A2_UNORM), > + fmt1(VK_FORMAT_A2B10G10R10_SNORM_PACK32, > ISL_FORMAT_R10G10B10A2_SNORM), > + fmt1(VK_FORMAT_A2B10G10R10_USCALED_PACK32, ISL_FORMAT_R10G10B10A2_ > USCALED), > + fmt1(VK_FORMAT_A2B10G10R10_SSCALED_PACK32, ISL_FORMAT_R10G10B10A2_ > SSCALED), > + fmt1(VK_FORMAT_A2B10G10R10_UINT_PACK32, ISL_FORMAT_R10G10B10A2_UINT), > + fmt1(VK_FORMAT_A2B10G10R10_SINT_PACK32, ISL_FORMAT_R10G10B10A2_SINT), > + fmt1(VK_FORMAT_R16_UNORM, ISL_FORMAT_R16_UNORM), > + fmt1(VK_FORMAT_R16_SNORM, ISL_FORMAT_R16_SNORM), > + fmt1(VK_FORMAT_R16_USCALED, ISL_FORMAT_R16_USCALED), > + fmt1(VK_FORMAT_R16_SSCALED, ISL_FORMAT_R16_SSCALED), > + fmt1(VK_FORMAT_R16_UINT, ISL_FORMAT_R16_UINT), > + fmt1(VK_FORMAT_R16_SINT, ISL_FORMAT_R16_SINT), > + fmt1(VK_FORMAT_R16_SFLOAT, ISL_FORMAT_R16_FLOAT), > + 
fmt1(VK_FORMAT_R16G16_UNORM, ISL_FORMAT_R16G16_UNORM), > + fmt1(VK_FORMAT_R16G16_SNORM, ISL_FORMAT_R16G16_SNORM), > + fmt1(VK_FORMAT_R16G16_USCALED, ISL_FORMAT_R16G16_USCALED), > + fmt1(VK_FORMAT_R16G16_SSCALED, ISL_FORMAT_R16G16_SSCALED), > + fmt1(VK_FORMAT_R16G16_UINT, ISL_FORMAT_R16G16_UINT), > + fmt1(VK_FORMAT_R16G16_SINT, ISL_FORMAT_R16G16_SINT), > + fmt1(VK_FORMAT_R16G16_SFLOAT, ISL_FORMAT_R16G16_FLOAT), > + fmt1(VK_FORMAT_R16G16B16_UNORM, ISL_FORMAT_R16G16B16_UNORM), > + fmt1(VK_FORMAT_R16G16B16_SNORM, ISL_FORMAT_R16G16B16_SNORM), > + fmt1(VK_FORMAT_R16G16B16_USCALED, ISL_FORMAT_R16G16B16_USCALED), > + fmt1(VK_FORMAT_R16G16B16_SSCALED, ISL_FORMAT_R16G16B16_SSCALED), > + fmt1(VK_FORMAT_R16G16B16_UINT, ISL_FORMAT_R16G16B16_UINT), > + fmt1(VK_FORMAT_R16G16B16_SINT, ISL_FORMAT_R16G16B16_SINT), > + fmt1(VK_FORMAT_R16G16B16_SFLOAT, ISL_FORMAT_R16G16B16_FLOAT), > + fmt1(VK_FORMAT_R16G16B16A16_UNORM, ISL_FORMAT_R16G16B16A16_UNORM) > , > + fmt1(VK_FORMAT_R16G16B16A16_SNORM, ISL_FORMAT_R16G16B16A16_SNORM) > , > + fmt1(VK_FORMAT_R16G16B16A16_USCALED, ISL_FORMAT_R16G16B16A16_ > USCALED), > + fmt1(VK_FORMAT_R16G16B16A16_SSCALED, ISL_FORMAT_R16G16B16A16_ > SSCALED), > + fmt1(VK_FORMAT_R16G16B16A16_UINT, ISL_FORMAT_R16G16B16A16_UINT), > + fmt1(VK_FORMAT_R16G16B16A16_SINT, ISL_FORMAT_R16G16B16A16_SINT), > + fmt1(VK_FORMAT_R16G16B16A16_SFLOAT, ISL_FORMAT_R16G16B16A16_FLOAT) > , > + fmt1(VK_FORMAT_R32_UINT, ISL_FORMAT_R32_UINT), > + fmt1(VK_FORMAT_R32_SINT, ISL_FORMAT_R32_SINT), > + fmt1(VK_FORMAT_R32_SFLOAT, ISL_FORMAT_R32_FLOAT), > + fmt1(VK_FORMAT_R32G32_UINT, ISL_FORMAT_R32G32_UINT), > + fmt1(VK_FORMAT_R32G32_SINT, ISL_FORMAT_R32G32_SINT), > + fmt1(VK_FORMAT_R32G32_SFLOAT, ISL_FORMAT_R32G32_FLOAT), > + fmt1(VK_FORMAT_R32G32B32_UINT, ISL_FORMAT_R32G32B32_UINT), > + fmt1(VK_FORMAT_R32G32B32_SINT, ISL_FORMAT_R32G32B32_SINT), > + fmt1(VK_FORMAT_R32G32B32_SFLOAT, ISL_FORMAT_R32G32B32_FLOAT), > + fmt1(VK_FORMAT_R32G32B32A32_UINT, ISL_FORMAT_R32G32B32A32_UINT), > + fmt1(VK_FORMAT_R32G32B32A32_SINT, ISL_FORMAT_R32G32B32A32_SINT), > + fmt1(VK_FORMAT_R32G32B32A32_SFLOAT, ISL_FORMAT_R32G32B32A32_FLOAT) > , > + fmt1(VK_FORMAT_R64_UINT, ISL_FORMAT_R64_PASSTHRU), > + fmt1(VK_FORMAT_R64_SINT, ISL_FORMAT_R64_PASSTHRU), > + fmt1(VK_FORMAT_R64_SFLOAT, ISL_FORMAT_R64_PASSTHRU), > + fmt1(VK_FORMAT_R64G64_UINT, ISL_FORMAT_R64G64_PASSTHRU), > + fmt1(VK_FORMAT_R64G64_SINT, ISL_FORMAT_R64G64_PASSTHRU), > + fmt1(VK_FORMAT_R64G64_SFLOAT, ISL_FORMAT_R64G64_PASSTHRU), > + fmt1(VK_FORMAT_R64G64B64_UINT, ISL_FORMAT_R64G64B64_PASSTHRU) > , > + fmt1(VK_FORMAT_R64G64B64_SINT, ISL_FORMAT_R64G64B64_PASSTHRU) > , > + fmt1(VK_FORMAT_R64G64B64_SFLOAT, ISL_FORMAT_R64G64B64_PASSTHRU) > , > + fmt1(VK_FORMAT_R64G64B64A64_UINT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > + fmt1(VK_FORMAT_R64G64B64A64_SINT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > + fmt1(VK_FORMAT_R64G64B64A64_SFLOAT, ISL_FORMAT_R64G64B64A64_ > PASSTHRU), > + fmt1(VK_FORMAT_B10G11R11_UFLOAT_PACK32, ISL_FORMAT_R11G11B10_FLOAT), > + fmt1(VK_FORMAT_E5B9G9R9_UFLOAT_PACK32, ISL_FORMAT_R9G9B9E5_SHAREDEXP) > , > + > + fmt1(VK_FORMAT_D16_UNORM, ISL_FORMAT_R16_UNORM), > + fmt1(VK_FORMAT_X8_D24_UNORM_PACK32, ISL_FORMAT_R24_UNORM_X8_ > TYPELESS), > + fmt1(VK_FORMAT_D32_SFLOAT, ISL_FORMAT_R32_FLOAT), > + fmt1(VK_FORMAT_S8_UINT, ISL_FORMAT_R8_UINT), > + fmt1(VK_FORMAT_D16_UNORM_S8_UINT, ISL_FORMAT_UNSUPPORTED), > + ds_fmt(VK_FORMAT_D24_UNORM_S8_UINT, ISL_FORMAT_R24_UNORM_X8_TYPELESS, > ISL_FORMAT_R8_UINT), > + ds_fmt(VK_FORMAT_D32_SFLOAT_S8_UINT, ISL_FORMAT_R32_FLOAT, > ISL_FORMAT_R8_UINT), 
> + > + swiz_fmt1(VK_FORMAT_BC1_RGB_UNORM_BLOCK, ISL_FORMAT_BC1_UNORM, > RGB1), > + swiz_fmt1(VK_FORMAT_BC1_RGB_SRGB_BLOCK, > ISL_FORMAT_BC1_UNORM_SRGB, RGB1), > + fmt1(VK_FORMAT_BC1_RGBA_UNORM_BLOCK, ISL_FORMAT_BC1_UNORM), > + fmt1(VK_FORMAT_BC1_RGBA_SRGB_BLOCK, ISL_FORMAT_BC1_UNORM_SRGB), > + fmt1(VK_FORMAT_BC2_UNORM_BLOCK, ISL_FORMAT_BC2_UNORM), > + fmt1(VK_FORMAT_BC2_SRGB_BLOCK, ISL_FORMAT_BC2_UNORM_SRGB), > + fmt1(VK_FORMAT_BC3_UNORM_BLOCK, ISL_FORMAT_BC3_UNORM), > + fmt1(VK_FORMAT_BC3_SRGB_BLOCK, ISL_FORMAT_BC3_UNORM_SRGB), > + fmt1(VK_FORMAT_BC4_UNORM_BLOCK, ISL_FORMAT_BC4_UNORM), > + fmt1(VK_FORMAT_BC4_SNORM_BLOCK, ISL_FORMAT_BC4_SNORM), > + fmt1(VK_FORMAT_BC5_UNORM_BLOCK, ISL_FORMAT_BC5_UNORM), > + fmt1(VK_FORMAT_BC5_SNORM_BLOCK, ISL_FORMAT_BC5_SNORM), > + fmt1(VK_FORMAT_BC6H_UFLOAT_BLOCK, ISL_FORMAT_BC6H_UF16), > + fmt1(VK_FORMAT_BC6H_SFLOAT_BLOCK, ISL_FORMAT_BC6H_SF16), > + fmt1(VK_FORMAT_BC7_UNORM_BLOCK, ISL_FORMAT_BC7_UNORM), > + fmt1(VK_FORMAT_BC7_SRGB_BLOCK, ISL_FORMAT_BC7_UNORM_SRGB), > + fmt1(VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8), > + fmt1(VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK, ISL_FORMAT_ETC2_SRGB8), > + fmt1(VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK, ISL_FORMAT_ETC2_RGB8_PTA), > + fmt1(VK_FORMAT_ETC2_R8G8B8A1_SRGB_BLOCK, ISL_FORMAT_ETC2_SRGB8_PTA), > + fmt1(VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK, ISL_FORMAT_ETC2_EAC_RGBA8), > + fmt1(VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK, > ISL_FORMAT_ETC2_EAC_SRGB8_A8), > + fmt1(VK_FORMAT_EAC_R11_UNORM_BLOCK, ISL_FORMAT_EAC_R11), > + fmt1(VK_FORMAT_EAC_R11_SNORM_BLOCK, ISL_FORMAT_EAC_SIGNED_R11), > + fmt1(VK_FORMAT_EAC_R11G11_UNORM_BLOCK, ISL_FORMAT_EAC_RG11), > + fmt1(VK_FORMAT_EAC_R11G11_SNORM_BLOCK, ISL_FORMAT_EAC_SIGNED_RG11), > + fmt1(VK_FORMAT_ASTC_4x4_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_4X4_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_5x4_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X4_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_5x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X5_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_6x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X5_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_6x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X6_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_8x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X5_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_8x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X6_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_8x8_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X8_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_10x5_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X5_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_10x6_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X6_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_10x8_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X8_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_10x10_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X10_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_12x10_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X10_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_12x12_SRGB_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X12_ > U8SRGB), > + fmt1(VK_FORMAT_ASTC_4x4_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_4X4_ > FLT16), > + fmt1(VK_FORMAT_ASTC_5x4_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X4_ > FLT16), > + fmt1(VK_FORMAT_ASTC_5x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_5X5_ > FLT16), > + fmt1(VK_FORMAT_ASTC_6x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X5_ > FLT16), > + fmt1(VK_FORMAT_ASTC_6x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_6X6_ > FLT16), > + fmt1(VK_FORMAT_ASTC_8x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X5_ > FLT16), > + fmt1(VK_FORMAT_ASTC_8x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X6_ > FLT16), > + fmt1(VK_FORMAT_ASTC_8x8_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_8X8_ > FLT16), > + fmt1(VK_FORMAT_ASTC_10x5_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X5_ > FLT16), > + 
fmt1(VK_FORMAT_ASTC_10x6_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X6_ > FLT16), > + fmt1(VK_FORMAT_ASTC_10x8_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X8_ > FLT16), > + fmt1(VK_FORMAT_ASTC_10x10_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_10X10_ > FLT16), > + fmt1(VK_FORMAT_ASTC_12x10_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X10_ > FLT16), > + fmt1(VK_FORMAT_ASTC_12x12_UNORM_BLOCK, ISL_FORMAT_ASTC_LDR_2D_12X12_ > FLT16), > + fmt1(VK_FORMAT_B8G8R8_UNORM, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_SNORM, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_USCALED, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_SSCALED, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_UINT, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_SINT, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8_SRGB, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_UNORM, ISL_FORMAT_B8G8R8A8_UNORM), > + fmt1(VK_FORMAT_B8G8R8A8_SNORM, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_USCALED, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_SSCALED, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_UINT, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_SINT, ISL_FORMAT_UNSUPPORTED), > + fmt1(VK_FORMAT_B8G8R8A8_SRGB, ISL_FORMAT_B8G8R8A8_UNORM_ > SRGB), > }; > > +#undef _fmt > +#undef swiz_fmt1 > +#undef fmt1 > #undef fmt > > static const struct { > @@ -258,53 +280,55 @@ static const struct { > [0] = { .formats = main_formats, .n_formats = > ARRAY_SIZE(main_formats), }, > }; > > -static struct anv_format > -format_extract(VkFormat vk_format) > +const struct anv_format * > +anv_get_format(VkFormat vk_format) > { > uint32_t enum_offset = vk_enum_offset(vk_format); > uint32_t ext_number = vk_enum_extension(vk_format); > > if (ext_number >= ARRAY_SIZE(anv_formats) || > enum_offset >= anv_formats[ext_number].n_formats) > - return (struct anv_format) { .isl_format = ISL_FORMAT_UNSUPPORTED }; > + return NULL; > > - return anv_formats[ext_number].formats[enum_offset]; > -} > + const struct anv_format *format = > + &anv_formats[ext_number].formats[enum_offset]; > + if (format->planes[0].isl_format == ISL_FORMAT_UNSUPPORTED) > + return NULL; > > -static bool > -format_supported(VkFormat vk_format) > -{ > - return format_extract(vk_format).isl_format != ISL_FORMAT_UNSUPPORTED; > + return format; > } > > /** > * Exactly one bit must be set in \a aspect. 
> */ > -struct anv_format > -anv_get_format(const struct gen_device_info *devinfo, VkFormat vk_format, > - VkImageAspectFlags aspect, VkImageTiling tiling) > +struct anv_format_plane > +anv_get_plane_format(const struct gen_device_info *devinfo, VkFormat > vk_format, > + VkImageAspectFlags aspect, VkImageTiling tiling) > { > - struct anv_format format = format_extract(vk_format); > + const struct anv_format *format = anv_get_format(vk_format); > + struct anv_format_plane plane_format = { > + .isl_format = ISL_FORMAT_UNSUPPORTED, > + }; > > - if (format.isl_format == ISL_FORMAT_UNSUPPORTED) > - return format; > + if (format == NULL) > + return plane_format; > > - if (aspect == VK_IMAGE_ASPECT_STENCIL_BIT) { > - assert(vk_format_aspects(vk_format) & VK_IMAGE_ASPECT_STENCIL_BIT); > - format.isl_format = ISL_FORMAT_R8_UINT; > - return format; > - } > + uint32_t plane = anv_image_aspect_to_plane(vk_format_aspects(vk_format), > aspect); > + plane_format = format->planes[plane]; > + if (plane_format.isl_format == ISL_FORMAT_UNSUPPORTED) > + return plane_format; > > - if (aspect & VK_IMAGE_ASPECT_DEPTH_BIT) { > - assert(vk_format_aspects(vk_format) & VK_IMAGE_ASPECT_DEPTH_BIT); > - return format; > + if (aspect & (VK_IMAGE_ASPECT_DEPTH_BIT | > VK_IMAGE_ASPECT_STENCIL_BIT)) { > + assert(vk_format_aspects(vk_format) & > + (VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT)); > + return plane_format; > } > > assert(aspect == VK_IMAGE_ASPECT_COLOR_BIT); > assert(vk_format_aspects(vk_format) == VK_IMAGE_ASPECT_COLOR_BIT); > > const struct isl_format_layout *isl_layout = > - isl_format_get_layout(format.isl_format); > + isl_format_get_layout(plane_format.isl_format); > > if (tiling == VK_IMAGE_TILING_OPTIMAL && > !util_is_power_of_two(isl_layout->bpb)) { > @@ -313,13 +337,14 @@ anv_get_format(const struct gen_device_info > *devinfo, VkFormat vk_format, > * this by switching them over to RGBX or RGBA formats under the > * hood. > */ > - enum isl_format rgbx = isl_format_rgb_to_rgbx(format.isl_format); > + enum isl_format rgbx = isl_format_rgb_to_rgbx(plane_ > format.isl_format); > if (rgbx != ISL_FORMAT_UNSUPPORTED && > isl_format_supports_rendering(devinfo, rgbx)) { > - format.isl_format = rgbx; > + plane_format.isl_format = rgbx; > } else { > - format.isl_format = isl_format_rgb_to_rgba(format.isl_format); > - format.swizzle = ISL_SWIZZLE(RED, GREEN, BLUE, ONE); > + plane_format.isl_format = > + isl_format_rgb_to_rgba(plane_format.isl_format); > + plane_format.swizzle = ISL_SWIZZLE(RED, GREEN, BLUE, ONE); > } > } > > @@ -327,20 +352,18 @@ anv_get_format(const struct gen_device_info > *devinfo, VkFormat vk_format, > * back to a format with a more complex swizzle. 
> */ > if (vk_format == VK_FORMAT_B4G4R4A4_UNORM_PACK16 && devinfo->gen < 8) > { > - return (struct anv_format) { > - .isl_format = ISL_FORMAT_B4G4R4A4_UNORM, > - .swizzle = ISL_SWIZZLE(GREEN, RED, ALPHA, BLUE), > - }; > + plane_format.isl_format = ISL_FORMAT_B4G4R4A4_UNORM; > + plane_format.swizzle = ISL_SWIZZLE(GREEN, RED, ALPHA, BLUE); > } > > - return format; > + return plane_format; > } > > // Format capabilities > > static VkFormatFeatureFlags > get_image_format_properties(const struct gen_device_info *devinfo, > - enum isl_format base, struct anv_format > format) > + enum isl_format base, struct anv_format_plane > format) > { > if (format.isl_format == ISL_FORMAT_UNSUPPORTED) > return 0; > @@ -410,19 +433,20 @@ get_buffer_format_properties(const struct > gen_device_info *devinfo, > > static void > anv_physical_device_get_format_properties(struct anv_physical_device > *physical_device, > - VkFormat format, > + VkFormat vk_format, > VkFormatProperties > *out_properties) > { > int gen = physical_device->info.gen * 10; > if (physical_device->info.is_haswell) > gen += 5; > > + const struct anv_format *format = anv_get_format(vk_format); > VkFormatFeatureFlags linear = 0, tiled = 0, buffer = 0; > - if (!format_supported(format)) { > + if (format == NULL) { > /* Nothing to do here */ > - } else if (vk_format_is_depth_or_stencil(format)) { > + } else if (vk_format_is_depth_or_stencil(vk_format)) { > tiled |= VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT; > - if (vk_format_aspects(format) == VK_IMAGE_ASPECT_DEPTH_BIT || > + if (vk_format_aspects(vk_format) == VK_IMAGE_ASPECT_DEPTH_BIT || > physical_device->info.gen >= 8) > tiled |= VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT; > > @@ -431,13 +455,13 @@ anv_physical_device_get_format_properties(struct > anv_physical_device *physical_d > VK_FORMAT_FEATURE_TRANSFER_SRC_BIT_KHR | > VK_FORMAT_FEATURE_TRANSFER_DST_BIT_KHR; > } else { > - struct anv_format linear_fmt, tiled_fmt; > - linear_fmt = anv_get_format(&physical_device->info, format, > - VK_IMAGE_ASPECT_COLOR_BIT, > - VK_IMAGE_TILING_LINEAR); > - tiled_fmt = anv_get_format(&physical_device->info, format, > - VK_IMAGE_ASPECT_COLOR_BIT, > - VK_IMAGE_TILING_OPTIMAL); > + struct anv_format_plane linear_fmt, tiled_fmt; > + linear_fmt = anv_get_plane_format(&physical_device->info, > vk_format, > + VK_IMAGE_ASPECT_COLOR_BIT, > + VK_IMAGE_TILING_LINEAR); > + tiled_fmt = anv_get_plane_format(&physical_device->info, vk_format, > + VK_IMAGE_ASPECT_COLOR_BIT, > + VK_IMAGE_TILING_OPTIMAL); > > linear = get_image_format_properties(&physical_device->info, > linear_fmt.isl_format, > linear_fmt); > @@ -515,8 +539,9 @@ anv_get_image_format_properties( > uint32_t maxMipLevels; > uint32_t maxArraySize; > VkSampleCountFlags sampleCounts = VK_SAMPLE_COUNT_1_BIT; > + const struct anv_format *format = anv_get_format(info->format); > > - if (!format_supported(info->format)) > + if (format == NULL) > goto unsupported; > > anv_physical_device_get_format_properties(physical_device, > info->format, > @@ -570,7 +595,7 @@ anv_get_image_format_properties( > * * This field cannot be ASTC format if the Surface Type is > SURFTYPE_1D. 
> */ > if (info->type == VK_IMAGE_TYPE_1D && > - isl_format_is_compressed(format_extract(info->format).isl_format)) > { > + isl_format_is_compressed(format->planes[0].isl_format)) { > goto unsupported; > } > > diff --git a/src/intel/vulkan/anv_image.c b/src/intel/vulkan/anv_image.c > index 7561b9b52b4..262be68575a 100644 > --- a/src/intel/vulkan/anv_image.c > +++ b/src/intel/vulkan/anv_image.c > @@ -123,8 +123,8 @@ all_formats_ccs_e_compatible(const struct > gen_device_info *devinfo, > const struct VkImageCreateInfo *vk_info) > { > enum isl_format format = > - anv_get_isl_format(devinfo, vk_info->format, > - VK_IMAGE_ASPECT_COLOR_BIT, vk_info->tiling); > + anv_get_isl_plane_format(devinfo, vk_info->format, > + VK_IMAGE_ASPECT_COLOR_BIT, > vk_info->tiling); > > if (!isl_format_supports_ccs_e(devinfo, format)) > return false; > @@ -140,8 +140,8 @@ all_formats_ccs_e_compatible(const struct > gen_device_info *devinfo, > > for (uint32_t i = 0; i < fmt_list->viewFormatCount; i++) { > enum isl_format view_format = > - anv_get_isl_format(devinfo, fmt_list->pViewFormats[i], > - VK_IMAGE_ASPECT_COLOR_BIT, vk_info->tiling); > + anv_get_isl_plane_format(devinfo, fmt_list->pViewFormats[i], > + VK_IMAGE_ASPECT_COLOR_BIT, > vk_info->tiling); > > if (!isl_formats_are_ccs_e_compatible(devinfo, format, > view_format)) > return false; > @@ -264,8 +264,9 @@ make_surface(const struct anv_device *dev, > image->extent = anv_sanitize_image_extent(vk_info->imageType, > vk_info->extent); > > - enum isl_format format = anv_get_isl_format(&dev->info, > vk_info->format, > - aspect, vk_info->tiling); > + enum isl_format format = > + anv_get_isl_plane_format(&dev->info, vk_info->format, > + aspect, vk_info->tiling); > assert(format != ISL_FORMAT_UNSUPPORTED); > > /* If an image is created as BLOCK_TEXEL_VIEW_COMPATIBLE, then we need > to > @@ -972,8 +973,12 @@ anv_CreateImageView(VkDevice _device, > iview->aspect_mask = pCreateInfo->subresourceRange.aspectMask; > iview->vk_format = pCreateInfo->format; > > - struct anv_format format = anv_get_format(&device->info, > pCreateInfo->format, > - range->aspectMask, > image->tiling); > + struct anv_format_plane format = > + anv_get_plane_format(&device->info, pCreateInfo->format, > + range->aspectMask == > (VK_IMAGE_ASPECT_DEPTH_BIT | > + > VK_IMAGE_ASPECT_STENCIL_BIT) ? > + VK_IMAGE_ASPECT_DEPTH_BIT : range->aspectMask, > + image->tiling); > > iview->isl = (struct isl_view) { > .format = format.isl_format, > @@ -1121,9 +1126,9 @@ anv_CreateBufferView(VkDevice _device, > > /* TODO: Handle the format swizzle? 
*/ > > - view->format = anv_get_isl_format(&device->info, pCreateInfo->format, > - VK_IMAGE_ASPECT_COLOR_BIT, > - VK_IMAGE_TILING_LINEAR); > + view->format = anv_get_isl_plane_format(&device->info, > pCreateInfo->format, > + VK_IMAGE_ASPECT_COLOR_BIT, > + VK_IMAGE_TILING_LINEAR); > const uint32_t format_bs = isl_format_get_layout(view->format)->bpb / > 8; > view->bo = buffer->bo; > view->offset = buffer->offset + pCreateInfo->offset; > diff --git a/src/intel/vulkan/anv_private.h b/src/intel/vulkan/anv_ > private.h > index 3ba623a37fd..40811b94065 100644 > --- a/src/intel/vulkan/anv_private.h > +++ b/src/intel/vulkan/anv_private.h > @@ -2176,20 +2176,60 @@ anv_pipeline_compile_cs(struct anv_pipeline > *pipeline, > const char *entrypoint, > const VkSpecializationInfo *spec_info); > > -struct anv_format { > +struct anv_format_plane { > enum isl_format isl_format:16; > struct isl_swizzle swizzle; > }; > > -struct anv_format > -anv_get_format(const struct gen_device_info *devinfo, VkFormat format, > - VkImageAspectFlags aspect, VkImageTiling tiling); > + > +struct anv_format { > + struct anv_format_plane planes[3]; > + uint8_t n_planes; > +}; > + > +static inline uint32_t > +anv_image_aspect_to_plane(VkImageAspectFlags image_aspects, > + VkImageAspectFlags aspect_mask) > +{ > + switch (aspect_mask) { > + case VK_IMAGE_ASPECT_COLOR_BIT: > + case VK_IMAGE_ASPECT_DEPTH_BIT: > + case VK_IMAGE_ASPECT_PLANE_0_BIT_KHR: > + return 0; > + case VK_IMAGE_ASPECT_STENCIL_BIT: > + if ((image_aspects & VK_IMAGE_ASPECT_DEPTH_BIT) == 0) > + return 0; > + /* Fall-through */ > + case VK_IMAGE_ASPECT_PLANE_1_BIT_KHR: > + return 1; > + case VK_IMAGE_ASPECT_PLANE_2_BIT_KHR: > + return 2; > + default: > + unreachable("invalid image aspect"); > + } > +} > + > +const struct anv_format * > +anv_get_format(VkFormat format); > + > +static inline uint32_t > +anv_get_format_planes(VkFormat vk_format) > +{ > + const struct anv_format *format = anv_get_format(vk_format); > + > + return format != NULL ? 
format->n_planes : 0; > +} > + > +struct anv_format_plane > +anv_get_plane_format(const struct gen_device_info *devinfo, VkFormat > vk_format, > + VkImageAspectFlags aspect, VkImageTiling tiling); > > static inline enum isl_format > -anv_get_isl_format(const struct gen_device_info *devinfo, VkFormat > vk_format, > - VkImageAspectFlags aspect, VkImageTiling tiling) > +anv_get_isl_plane_format(const struct gen_device_info *devinfo, > + VkFormat vk_format, VkImageAspectFlags aspect, > + VkImageTiling tiling) > { > - return anv_get_format(devinfo, vk_format, aspect, tiling).isl_format; > + return anv_get_plane_format(devinfo, vk_format, aspect, > tiling).isl_format; > } > > static inline struct isl_swizzle > diff --git a/src/intel/vulkan/genX_pipeline.c b/src/intel/vulkan/genX_ > pipeline.c > index c2fa9c0ff7f..f1413b20254 100644 > --- a/src/intel/vulkan/genX_pipeline.c > +++ b/src/intel/vulkan/genX_pipeline.c > @@ -120,10 +120,10 @@ emit_vertex_input(struct anv_pipeline *pipeline, > for (uint32_t i = 0; i < info->vertexAttributeDescriptionCount; i++) { > const VkVertexInputAttributeDescription *desc = > &info->pVertexAttributeDescriptions[i]; > - enum isl_format format = anv_get_isl_format(&pipeline-> > device->info, > - desc->format, > - > VK_IMAGE_ASPECT_COLOR_BIT, > - VK_IMAGE_TILING_LINEAR); > + enum isl_format format = > + anv_get_isl_plane_format(&pipeline->device->info, desc->format, > + VK_IMAGE_ASPECT_COLOR_BIT, > + VK_IMAGE_TILING_LINEAR); > > assert(desc->binding < MAX_VBS); > > @@ -505,9 +505,9 @@ emit_rs_state(struct anv_pipeline *pipeline, > assert(vk_format_is_depth_or_stencil(vk_format)); > if (vk_format_aspects(vk_format) & VK_IMAGE_ASPECT_DEPTH_BIT) { > enum isl_format isl_format = > - anv_get_isl_format(&pipeline->device->info, vk_format, > - VK_IMAGE_ASPECT_DEPTH_BIT, > - VK_IMAGE_TILING_OPTIMAL); > + anv_get_isl_plane_format(&pipeline->device->info, vk_format, > + VK_IMAGE_ASPECT_DEPTH_BIT, > + VK_IMAGE_TILING_OPTIMAL); > sf.DepthBufferSurfaceFormat = > isl_format_get_depth_format(isl_format, false); > } > -- > 2.14.2 > > _______________________________________________ > mesa-dev mailing list > mesa-dev@lists.freedesktop.org > https://lists.freedesktop.org/mailman/listinfo/mesa-dev >
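To make point 2 concrete, here is a rough, untested sketch of what I have in mind, reusing the anv_get_plane_format() helper this patch introduces: the inline wrapper keeps its current name because its signature doesn't change and the aspect argument already selects the plane.

   /* Sketch only: same body as the patch's anv_get_isl_plane_format(),
    * just without the rename.  The aspect parameter picks the plane, so
    * callers never need to know about planes at all.
    */
   static inline enum isl_format
   anv_get_isl_format(const struct gen_device_info *devinfo, VkFormat vk_format,
                      VkImageAspectFlags aspect, VkImageTiling tiling)
   {
      return anv_get_plane_format(devinfo, vk_format, aspect, tiling).isl_format;
   }

With that, callers such as emit_vertex_input(), anv_CreateBufferView() and the blorp paths wouldn't need to be touched by this patch at all.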