projection – D3D12: how to enable perspective correction when interpolating UV coordinates?


Good morning,
I'm currently running into a UV interpolation issue with D3D12 that looks like this:

[screenshot showing the incorrect UV interpolation]

Is there something that needs to be set when initialising the pipeline so that the GPU does the correct interpolation, or do I have to do it myself?
I've seen several examples of textured 3D objects that looked fine, and the associated code didn't seem to do anything special to compute the correct UVs manually, so I assume I simply forgot to set a flag or something?
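
For what it's worth, my understanding is that HLSL interpolates pixel shader inputs perspective-correctly by default (the implicit linear modifier) and that only noperspective turns that off, so I don't think an interpolation modifier is what I'm missing. For illustration only, these are not lines from my code:

linear        float2 uvs : Uv_Coords; // the default: perspective-correct interpolation
noperspective float2 uvs : Uv_Coords; // would explicitly disable perspective correction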

EDIT: here is some more information (shaders are trimmed to the core):

// Input data format:

D3D12_INPUT_ELEMENT_DESC mesh_layout[NUM_ELEMENTS_IN_MESH_LAYOUT] = {
    {"Vertex_Position",  0, DXGI_FORMAT_R32G32B32_FLOAT,    0, 0,                            D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,   0},
    {"Normal",           0, DXGI_FORMAT_R32G32B32_FLOAT,    0, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,   0},
    {"U_Axis",           0, DXGI_FORMAT_R32G32B32_FLOAT,    0, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,   0},
    {"V_Axis",           0, DXGI_FORMAT_R32G32B32_FLOAT,    0, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,   0},
    {"Uv_Coords",        0, DXGI_FORMAT_R32G32_FLOAT,       0, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_VERTEX_DATA,   0},
    
    {"Position",         0, DXGI_FORMAT_R32G32B32_FLOAT,    1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Scale",            0, DXGI_FORMAT_R32G32B32_FLOAT,    1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Orientation",      0, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Colour",           0, DXGI_FORMAT_R32G32B32A32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Shadow_Map_Position", 0, DXGI_FORMAT_R32G32B32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Shadow_Map_X_Axis",   0, DXGI_FORMAT_R32G32B32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Shadow_Map_Y_Axis",   0, DXGI_FORMAT_R32G32B32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Shadow_Map_Z_Axis",   0, DXGI_FORMAT_R32G32B32_FLOAT, 1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Shadow_Map_Uvs",   0, DXGI_FORMAT_R32G32_FLOAT,       1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
    {"Material_Index",   0, DXGI_FORMAT_R32_UINT,           1, D3D12_APPEND_ALIGNED_ELEMENT, D3D12_INPUT_CLASSIFICATION_PER_INSTANCE_DATA, 0},
};
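
// For completeness: the layout above is plugged into the graphics PSO in the usual way.
// This is a paraphrased sketch, not my exact code (pso_desc is just a placeholder name);
// as far as I can tell there is no perspective-correction setting at this level anyway.
//
//     D3D12_GRAPHICS_PIPELINE_STATE_DESC pso_desc = {};
//     pso_desc.InputLayout = {mesh_layout, NUM_ELEMENTS_IN_MESH_LAYOUT};
//     // ... shaders, root signature, rasterizer/blend/depth state as usual.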

/*
The vertex buffer looks like this (only positions and UVs are shown here, as that's what interests us):

{ // First triangle.
    { 1, -1, 0}, {1, 0},
    {-1,  1, 0}, {0, 1},
    {-1, -1, 0}, {0, 0},
},
{ // Second triangle.
    { 1, -1, 0}, {1, 0},
    { 1,  1, 0}, {1, 1},
    {-1,  1, 0}, {0, 1},
}
*/
// Vertex shader:

ConstantBuffer<Renderer_Info> renderer_info : register(b0, space2);
StructuredBuffer<Material> materials : register(t0, space2);


struct To_Pixel_Shader
{
    v4  position              : SV_Position;
    v4  colour                : Colour;
    v3  origin                : Origin;
    v3  position_3d           : Position_3d;
    v3  normal                : Normal;
    v2  uvs                   : Uv_Coords;
    v3  camera_view_direction : Camera_View_Direction;
    u32 material_index        : Material_Index;
    f32 scale                 : Uniform_Scale;
};

To_Pixel_Shader main(
    v3  vertex_position : Vertex_Position,
    v3  normal          : Normal,
    v3  u_axis          : U_Axis,
    v3  v_axis          : V_Axis,
    v2  uvs             : Uv_Coords,
    
    v3  position            : Position,
    v3  scale               : Scale,
    v4  orientation         : Orientation,
    v4  colour              : Colour,
    v3  shadow_map_position : Shadow_Map_Position,
    v3  shadow_map_x_axis   : Shadow_Map_X_Axis,
    v3  shadow_map_y_axis   : Shadow_Map_Y_Axis,
    v3  shadow_map_z_axis   : Shadow_Map_Z_Axis,
    v2  shadow_map_uvs      : Shadow_Map_Uvs,
    u32 material_index      : Material_Index
)
{
    To_Pixel_Shader result;
    
    v2  rt_dimensions = {renderer_info.rt_width, renderer_info.rt_height};
    f32 rt_ratio = rt_dimensions.y / rt_dimensions.x;
    
    // Calculate the vertex world position. START
    v4 orientation_inverse = quaternion_inverse_no_norm(orientation); // @ This could be provided in the mesh instance along with the orientation.
    
    vertex_position *= scale;
    vertex_position  = rotate_point(orientation, vertex_position, orientation_inverse);
    vertex_position += position;
    
    result.position_3d = vertex_position;
    // Calculate the vertex world position. END
    
    // Calculate the vertex screen position. START
    v3 eye_to_vertex = vertex_position - renderer_info.camera_eye_position;
    result.camera_view_direction = eye_to_vertex; // This is normalised in the pixel shader.
    
    f32 vertex_dot_camera_z = -dot(eye_to_vertex, renderer_info.camera_z_direction);
    f32 distance_to_vertex = distance(renderer_info.camera_eye_position, vertex_position);
    
    f32 triangle_ratio = renderer_info.camera_eye_offset / vertex_dot_camera_z;
    
    f32 MAX_DEPTH_DISTANCE = 1000.0f;
    f32 NEAR_CLIP_PLANE    = 1;
    
    v3 vertex_position_on_screen = {
        dot(eye_to_vertex, renderer_info.camera_x_direction),
        dot(eye_to_vertex, renderer_info.camera_y_direction),
        (distance_to_vertex - NEAR_CLIP_PLANE) / MAX_DEPTH_DISTANCE
    };
    
    vertex_position_on_screen.xy *= triangle_ratio;
    vertex_position_on_screen.x  *= rt_ratio;
    
    ///////////////////////////////////////////////////////////////////////////////////////////
    // NOTE: @ for now we don't take vertices lying behind the near clip plane into account!!!
    ///////////////////////////////////////////////////////////////////////////////////////////
    
    result.screen_depth = vertex_position_on_screen.z;
    // Calculate the vertex screen position. END
    
    result.origin         = position;
    result.position       = v4(vertex_position_on_screen.xyz, 1);
    result.normal         = rotate_point(orientation, normal, orientation_inverse);
    result.uvs            = uvs;
    result.material_index = material_index;
    result.colour         = colour;
    result.scale          = scale.x;
    
    return result;
}
// Pixel shader:

Texture2D<v4> colour_atlas : register(t0, space3);
StructuredBuffer<Atlas_Texture> colour_textures : register(t1, space3);
ConstantBuffer<Renderer_Info> renderer_info : register(b0, space3);
StructuredBuffer<Material> materials : register(t2, space3);
Texture2D<f32> shadow_map_atlas : register(t3, space3);
sampler bilinear_sampler : register(s0);
sampler point_sampler    : register(s1);


v4 main(
    v4  position              : SV_Position,
    v4  colour                : Colour,
    v3  origin                : Origin,
    v3  position_3d           : Position_3d,
    v3  normal                : Normal,
    v2  uvs                   : Uv_Coords,
    v3  camera_view_direction : Camera_View_Direction,
    u32 material_index        : Material_Index,
    f32 scale                 : Uniform_Scale
) : SV_Target
{
    u32 real_material_index = material_index & 0x00ffFFff; // The top 8 bits are reserved for flags.

    Material material = materials[real_material_index];
    colour *= material.colour;
    
    if(material.flags & TEXTURED)
    {
        Atlas_Texture t = colour_textures[material.texture_index];
        v2 sample_uvs = uvs * t.uv_ratio + t.uv_offset_in_atlas;
        colour *= colour_atlas.Sample(bilinear_sampler, sample_uvs);
    }
    
    return linear_to_srgb(colour);
}
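
For comparison, the textured samples I have looked at all seem to produce SV_Position by multiplying with a projection matrix, so the w component ends up holding the view-space depth rather than 1, and the hardware does the divide. Roughly like this (my paraphrase; view_projection stands for whatever matrix those samples bind, it is not something from my code):

result.position = mul(view_projection, v4(vertex_position, 1)); // w = view-space depth, no manual divide

My shader instead divides by depth itself (the triangle_ratio factor) and then writes w = 1, and I am not sure whether that difference is what breaks the UV interpolation.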
