
I currently have a problem with texturing a mesh that is being generated with the marching tetrahedra algorithm. The code lives in an .fx HLSL file. The best way I can describe the problem is that the texture appears to move with the camera's position. For example, if the camera moves to the left or right, the texture also moves left or right; the same happens when panning the camera. (Isosurface texture problem)

It's a strange effect and quite hard to describe, so I've put together some images. The texture's AddressU and AddressV are set to Clamp here so that the problem is easier to show.

http://i.imgur.com/JbyVZ.png

http://i.imgur.com/nDkB1.png

As you can see, moving the camera to the right also moves the texture to the right.

I'm probably missing something completely obvious, such as multiplying by a particular matrix (which I have tried). Any help would be greatly appreciated.

Here is my pixel shader code.

float4 DiffusePS(SurfaceVertex IN) : SV_Target 

{

float4 AmbientColor = float4(0.2, 0.2, 0.2, 1); 
float AmbientIntensity = 0.2; 

float4 Kd = 0.5; 
float4 diffuseLight = 0.5; 

float4 Pos = GetWorldSpacePos(IN.Pos); 
float3 N = normalize(IN.N); 
float3 L1 = normalize(LightPos1 - Pos.xyz); 
float3 L2 = normalize(LightPos2 - Pos.xyz); 
float3 L3 = normalize(LightPos3 - Pos.xyz); 

float NdotL1 = max(0, dot(N, L1)); 
float NdotL2 = max(0, dot(N, L2)); 
float NdotL3 = max(0, dot(N, L3)); 

float3 I = normalize(Pos.xyz); 
float3 V = normalize(-Pos.xyz); 

float4 vDiff = diffuseLight * Kd * NdotL1; 
float4 vDiff2 = diffuseLight * Kd * NdotL2; 
float4 vDiff3 = diffuseLight * Kd * NdotL3; 

float3 Color = vDiff + vDiff2 + vDiff3; 
float4 derp = rockTexture.Sample(RockSampler, IN.tex.xy); 

return lerp(derp, float4(Color, 1), 0.5); 
}

Thanks for any help.

Edit:

#define MAX_METABALLS 400 
#define IOR    2.5 

#define PI 3.1415 

Buffer<float4> SampleDataBuffer; 

struct SampleData 
{ 
    float4 Pos : SV_Position; 
    float4 Field : TEXCOORD0; // Gradient in .xyz, value in .w 
}; 

struct SurfaceVertex 
{ 
    float4 Pos : SV_Position; 
    float3 N : NORMAL; 
    float2 tex : TEXCOORD; 
}; 

cbuffer constants 
{ 
    float R0Constant = ((1.0 - (1.0/IOR)) * (1.0 - (1.0/IOR)))/((1.0 + (1.0/IOR)) * (1.0 + (1.0/IOR))); 
    float R0Inv = 1.0 - ((1.0 - (1.0/IOR)) * (1.0 - (1.0/IOR)))/((1.0 + (1.0/IOR)) * (1.0 + (1.0/IOR))); 
}; 


cbuffer cb0 : register(b0) 
{ 
    row_major float4x4 ProjInv; 
    row_major float3x3 ViewIT; 
    row_major float4x4 WorldViewProj; 
    row_major float4x4 World; 

    uint NumMetaballs; 
    float4 Metaballs[MAX_METABALLS]; // .xyz -> metaball center, .w -> metaball squared radius 

    float3 ViewportOrg; 
    float3 ViewportSizeInv; 

    float3 LightPos1;  // view-space light position 1 
    float3 LightPos2;  // view-space light position 2 
    float3 LightPos3;  // view-space light position 3 


}; 

Texture2D rockTexture; 

SamplerState RockSampler 
{ 
    Filter = MIN_MAG_MIP_LINEAR; 
    AddressU = Wrap; 
    AddressV = Wrap; 
}; 



float4 GetWorldSpacePos(float4 WindowPos) 
{ 
    float4 ClipPos; 
    ClipPos.x = (2 * ((WindowPos.x - ViewportOrg.x) * ViewportSizeInv.x) - 1); 
    ClipPos.y = (-2 * ((WindowPos.y - ViewportOrg.y) * ViewportSizeInv.y) + 1); 
    ClipPos.z = ((WindowPos.z - ViewportOrg.z) * ViewportSizeInv.z); 
    ClipPos.w = 1; 

    float4 Pos; 
    Pos = mul(ClipPos, ProjInv); // backtransform clipspace position to get viewspace position 
    Pos.xyz /= Pos.w;    // re-normalize 


    return Pos; 
} 


// Metaball function 
// Returns metaball function value in .w and its gradient in .xyz 
float4 Metaball(float3 Pos, float3 Center, float RadiusSq) 
{ 
    float4 o; 

    float3 d = Pos - Center; 
    float DistSq = dot(d, d); 
    float InvDistSq = 1/DistSq; 

    o.xyz = -2 * RadiusSq * InvDistSq * InvDistSq * d; 
    o.w = RadiusSq * InvDistSq; 

    return o; 
} 


SamplerState TriLinearSampler 
{ 
    Filter = MIN_MAG_MIP_LINEAR; 
    AddressU = WRAP; 
    AddressV = WRAP; 
}; 


// Vertex shader calculates field contributions at each grid vertex 
SampleData SampleFieldVS(float3 Pos : POSITION) 
{ 
    SampleData o; 

    float3 WorldPos = mul(float4(Pos, 1), World).xyz; 

    // Sum up contributions from all metaballs 

    o.Field = 0; 

    for (uint i = 0; i<NumMetaballs; i++) 
    { 
     //o.Field += WorldPos.y; 
     o.Field += Metaball(WorldPos, Metaballs[i].xyz, Metaballs[i].w); 


    } 
    // Transform position and normals 


    o.Pos = mul(float4(Pos.xyz, 1), WorldViewProj); 
    o.Field.xyz = -normalize(mul(o.Field.xyz, ViewIT)); // we want normals in view space 


    // Generate in-out flags 

    return o; 
} 



SampleData PassThroughVS(SampleData IN) 
{ 
    SampleData OUT; 
    OUT = IN; 
    return OUT; 
} 

// Estimate where isosurface intersects grid edge with endpoints v0, v1 
SurfaceVertex CalcIntersection(SampleData v0, SampleData v1) 
{ 
    SurfaceVertex o; 

    // We're taking special care to generate bit-exact results regardless of traversal (v0,v1) or (v1, v0) 

    float t = (2.0 - (v0.Field.w + v1.Field.w))/(v1.Field.w - v0.Field.w); 

    o.Pos = 0.5 * (t * (v1.Pos - v0.Pos) + (v1.Pos + v0.Pos)); 
    o.N = 0.5 * (t * (v1.Field.xyz - v0.Field.xyz) + (v1.Field.xyz + v0.Field.xyz));  

    float4 worldPos = mul(World, o.Pos); 
    o.tex = worldPos.xy; 

    return o; 
} 

// This struct stores vertex indices of up to 4 edges from the input tetrahedron. The GS code below 
// uses these indices to index into the input vertex set for interpolation along those edges. 
// It basically encodes topology for the output triangle strip (of up to 2 triangles). 
struct TetrahedronIndices 
{ 
    uint4 e0; 
    uint4 e1; 
}; 

[MaxVertexCount(4)] 
void TessellateTetrahedraGS(lineadj SampleData In[4], inout TriangleStream<SurfaceVertex> Stream) 
{ 
    // construct index for this tetrahedron 
    uint index = (uint(In[0].Field.w > 1) << 3) | (uint(In[1].Field.w > 1) << 2) | (uint(In[2].Field.w > 1) << 1) | uint(In[3].Field.w > 1); 


    // don't bother if all vertices out or all vertices in 
    if (index > 0 && index < 15) 
    { 
     uint4 e0 = EdgeTableGS[index].e0; 
     uint4 e1 = EdgeTableGS[index].e1; 

     // Emit a triangle 
     Stream.Append(CalcIntersection(In[e0.x], In[e0.y])); 
     Stream.Append(CalcIntersection(In[e0.z], In[e0.w])); 
     Stream.Append(CalcIntersection(In[e1.x], In[e1.y])); 

     // Emit additional triangle, if necessary 
     if (e1.z != 0) { 
      Stream.Append(CalcIntersection(In[e1.z], In[e1.w])); 
     } 

    } 
} 

TextureCube EnvMap; 

float FresnelApprox(float3 I, float3 N) 
{ 
    return R0Constant + R0Inv * pow(1.0 - dot(I, N), 5.0); 
} 

float4 ShadeSurfacePS(SurfaceVertex IN) : SV_Target 
{ 
    float4 Pos = GetWorldSpacePos(IN.Pos); 

    float3 N = normalize(IN.N); 
    float3 L1 = normalize(LightPos1 - Pos.xyz); 
    float3 L2 = normalize(LightPos2 - Pos.xyz); 
    float3 L3 = normalize(LightPos3 - Pos.xyz); 
    float3 I = normalize(Pos.xyz); 

    float3 R = reflect(I, N); 

    float4 Reflected = EnvMap.Sample(TriLinearSampler, mul(ViewIT, R)); 

    float NdotL1 = max(0, dot(N, L1)); 
    float NdotL2 = max(0, dot(N, L2)); 
    float NdotL3 = max(0, dot(N, L3)); 

    float3 Color = NdotL1 * float3(1, 1, 1) + pow(max(dot(R, L1), 0), 32) 
        + NdotL2 * float3(0.65, 0.6, 0.45) + pow(max(dot(R, L2), 0), 32) 
        + NdotL3 * float3(0.7, 0.7, 0.8) + pow(max(dot(R, L3), 0), 32); 

    return lerp(EnvMap.Sample(TriLinearSampler, mul(ViewIT, R)), float4(Color, 1), FresnelApprox(I, N) * 0.05); 

} 

float4 SimplePS(SurfaceVertex IN, uniform float4 color) : SV_Target 
{ 
    return color; 
} 

float4 DiffusePS(SurfaceVertex IN) : SV_Target 
{ 

    float4 AmbientColor = float4(0.2, 0.2, 0.2, 1); 
    float AmbientIntensity = 0.2; 

    float4 Kd = 0.5; 
    float4 diffuseLight = 0.5; 

    float4 Pos = GetWorldSpacePos(IN.Pos); 
    float3 N = normalize(IN.N); 
    float3 L1 = normalize(LightPos1 - Pos.xyz); 
    float3 L2 = normalize(LightPos2 - Pos.xyz); 
    float3 L3 = normalize(LightPos3 - Pos.xyz); 

    float NdotL1 = max(0, dot(N, L1)); 
    float NdotL2 = max(0, dot(N, L2)); 
    float NdotL3 = max(0, dot(N, L3)); 

    float3 I = normalize(Pos.xyz); 
    float3 V = normalize(-Pos.xyz); 

    float4 vDiff = diffuseLight * Kd * NdotL1; 
    float4 vDiff2 = diffuseLight * Kd * NdotL2; 
    float4 vDiff3 = diffuseLight * Kd * NdotL3; 

    float3 Color = vDiff + vDiff2 + vDiff3; 
    float4 derp = rockTexture.Sample(RockSampler, IN.tex.xy); 

    return lerp(derp ,float4(Color, 1), 0.5); 
    //return lerp(NoiseTexture.Sample(NoiseSampler, IN.tex), float4(Color, 1), FresnelApprox(V, N) * 0.05); 

    //return saturate(vDiff+vDiff2+vDiff3 + AmbientColor * AmbientIntensity); 


} 

DepthStencilState EnableDepthDSS 
{ 
    DepthEnable = true; 
    DepthWriteMask = 1; 
}; 

RasterizerState WireFrameRS 
{ 
    MultiSampleEnable = True; 
    CullMode = None; 
    FillMode = WireFrame; 
}; 

RasterizerState SolidRS 
{ 
    MultiSampleEnable = True; 
    CullMode = None; 
    FillMode = Solid; 
}; 


technique10 MarchingTetrahedraWireFrame 
{ 
    pass P0 
    { 
     SetRasterizerState(WireFrameRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, SimplePS(float4(0.7, 0.7, 0.7, 1)))); 
    } 
} 

// Tessellate isosurface in a single pass 
technique10 MarchingTetrahedraSinglePassGS 
{ 
    pass P0 
    { 
     SetRasterizerState(SolidRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, DiffusePS())); 
    } 
} 

// Tessellate isosurface in two passes, streaming out VS results in-between 
GeometryShader StreamOutGS = ConstructGSWithSO(CompileShader(vs_4_0, PassThroughVS()), "SV_Position.xyzw; TEXCOORD0.xyzw"); 

technique10 MarchingTetrahedraMultiPassGS 
{ 
    pass P0 
    { 
     SetVertexShader(CompileShader(vs_4_0, SampleFieldVS())); 
     SetGeometryShader(StreamOutGS); 
     SetPixelShader(NULL); 
    } 

    pass P1 
    { 

     SetRasterizerState(SolidRS); 
     SetDepthStencilState(EnableDepthDSS, 0); 

     SetVertexShader(CompileShader (vs_4_0, PassThroughVS())); 
     SetGeometryShader(CompileShader(gs_4_0, TessellateTetrahedraGS())); 
     SetPixelShader(CompileShader(ps_4_0, DiffusePS())); 
    } 
} 

Please show the rest of the .fx file – CarlJohnson 2012-04-17 23:20:45


I've added the rest of the code. – programmersblock 2012-04-18 15:37:55


I see you're texturing it with some kind of flat square texture. Texturing it with a box-projected texture or a volume texture works quite well for this; a procedural formula can also look good if you can come up with one that's interesting and varied enough, although that is a bit harder to write and takes a whole day. – 2013-12-04 00:04:19
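
For reference, here is a minimal sketch of the box-projection (triplanar) idea mentioned in the comment above, written against the question's rockTexture and RockSampler. The parameters worldPos, worldNormal and texScale are assumptions (a world-space position and normal reaching the pixel shader, and a hypothetical tiling factor); they are not part of the original shader.

float4 TriplanarSample(float3 worldPos, float3 worldNormal, float texScale) 
{ 
    // Blend weights from the surface normal: each axis-aligned projection 
    // contributes most where the surface faces that axis. 
    float3 blend = abs(normalize(worldNormal)); 
    blend /= (blend.x + blend.y + blend.z); 

    // Project the world-space position onto the three axis-aligned planes 
    // and sample the same texture three times. 
    float4 xProj = rockTexture.Sample(RockSampler, worldPos.yz * texScale); 
    float4 yProj = rockTexture.Sample(RockSampler, worldPos.xz * texScale); 
    float4 zProj = rockTexture.Sample(RockSampler, worldPos.xy * texScale); 

    return xProj * blend.x + yProj * blend.y + zProj * blend.z; 
} 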

Answers


The texture coordinate is currently generated from the vertex position multiplied by the World matrix, in the rest of the .fx file:

CalcIntersection() 
.... 
    float4 worldPos = mul(World, o.Pos); 
    o.tex = worldPos.xy; 
.... 

So for the camera pan to change it, one of those inputs must be changing. My guess is that it's the World matrix (i.e. the camera pan is actually implemented by moving the object), so try switching to a texture-coordinate generation method that doesn't depend on the World matrix. For example:

CalcIntersection() 
... 
    o.tex = o.Pos.xy; 
... 


Edit: since that didn't work, it must be the position itself that is being modified by the camera pan, and looking in SampleFieldVS() there is one line that appears to do exactly that:

o.Pos = mul(float4(Pos.xyz, 1), WorldViewProj); 

So what you need to do is save the position before it gets modified. You have to store it in SampleData, so add this at the end of the SampleData struct:

float2 tex1 : TEXCOORD1; 

Then, at the end of SampleFieldVS(), add the line that saves it:

o.tex1 = mul(float4(Pos.xyz, 0), World).xy; 

Finally, delete 'float4 worldPos = mul(World, o.Pos); o.tex = worldPos.xy;' at the end of CalcIntersection() and replace it with:

o.tex = 0.5 * (t * (v1.tex1 - v0.tex1) + (v1.tex1 + v0.tex1)); 
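
To make those scattered fragments easier to follow, here are the three edits from this answer collected in one place (this simply restates the answer's own code in context, it is not a different fix):

// 1) Add the extra field to the end of the SampleData struct so the 
//    pre-projection coordinate survives through to the geometry shader: 
struct SampleData 
{ 
    float4 Pos : SV_Position; 
    float4 Field : TEXCOORD0; // Gradient in .xyz, value in .w 
    float2 tex1 : TEXCOORD1;  // saved world-space texture coordinate 
}; 

// 2) At the end of SampleFieldVS(), save the coordinate before o.Pos is 
//    transformed by WorldViewProj: 
o.tex1 = mul(float4(Pos.xyz, 0), World).xy; 

// 3) In CalcIntersection(), interpolate the saved coordinate along the edge 
//    instead of deriving it from the projected position: 
o.tex = 0.5 * (t * (v1.tex1 - v0.tex1) + (v1.tex1 + v0.tex1)); 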

Thanks, but unfortunately that didn't fix it. I should also add that rotating the camera view left or right also moves the texture's position. It's as if the texture is always centred on the screen, if that makes sense. I'm not passing any texture coordinates from the .cpp file; I generate them purely in the shader. Is that correct? – programmersblock 2012-04-19 00:01:05


Okay, I've edited the answer – CarlJohnson 2012-04-19 08:26:05


Thank you very much, that has fixed the single-pass version. However, the multi-pass version is still not correct; is that because of StreamOutGS? – programmersblock 2012-04-19 21:08:11
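
That seems likely: the stream-out declaration for StreamOutGS only lists SV_Position.xyzw and TEXCOORD0.xyzw, so the TEXCOORD1 field added by this fix would be dropped between the two passes. A possible adjustment is sketched below, untested, assuming the stream-out signature simply needs to list the extra semantic from SampleData:

// Hypothetical multi-pass fix: also stream out the saved texture coordinate. 
GeometryShader StreamOutGS = ConstructGSWithSO(CompileShader(vs_4_0, PassThroughVS()), 
    "SV_Position.xyzw; TEXCOORD0.xyzw; TEXCOORD1.xy"); 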