forked from shader-slang/slang
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathshaders.slang
485 lines (445 loc) · 17.1 KB
/
shaders.slang
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
// shaders.slang
//
// This example builds on the simplistic shaders presented in the
// "Hello, World" example by adding support for (intentionally
// simplistic) surface material and light shading.
//
// The code here is not meant to exemplify state-of-the-art material
// and lighting techniques, but rather to show how a shader
// library can be developed in a modular fashion without reliance
// on the C preprocessor or on manual parameter-binding decorations.
//
// We are going to define a simple model for surface material shading.
//
// The first building block in our model will be the representation of
// the geometry attributes of a surface as fed into the material.
//
struct SurfaceGeometry
{
// World-space position of the point being shaded.
float3 position;
// World-space surface normal (the fragment shader normalizes the
// interpolated normal before filling this in).
float3 normal;
// TODO: tangent vectors would be the natural next thing to add here,
// and would be required for anisotropic materials. However, the
// simplistic model loading code we are currently using doesn't
// produce tangents...
//
// float3 tangentU;
// float3 tangentV;
// We store a single UV parameterization in these geometry attributes.
// A more complex renderer might need support for multiple UV sets,
// and indeed it might choose to use interfaces and generics to capture
// the different requirements that different materials impose on
// the available surface attributes. We won't go to that kind of
// trouble for such a simple example.
//
float2 uv;
};
//
// Next, we want to define the fundamental concept of a reflectance
// function, so that we can use it as a building block for other
// parts of the system. This is a case where we are trying to
// show how a proper physically-based renderer (PBR) might
// decompose the problem using Slang, even though our simple
// example is *not* physically based.
//
interface IBRDF
{
// Evaluate the reflectance function for outgoing direction `wo`,
// incoming (light) direction `wi`, and surface normal `N`,
// returning a per-channel reflectance value.
//
// Technically, a BRDF is only a function of the incident
// (`wi`) and exitant (`wo`) directions, but for simplicity
// we are passing in the surface normal (`N`) as well.
//
float3 evaluate(float3 wo, float3 wi, float3 N);
};
//
// We can now define various implementations of the `IBRDF` interface
// that represent different reflectance functions we want to support.
// For now we keep things simple by defining about the simplest
// reflectance function we can think of: the Blinn-Phong reflectance
// model:
//
struct BlinnPhong : IBRDF
{
    // Diffuse and specular reflectance coefficients, plus a
    // specular exponent (the rough analogue of "roughness" in
    // more modern physically-based models).
    //
    float3 kd;
    float3 ks;
    float specularity;

    // The single requirement of the `IBRDF` interface, implemented
    // with the textbook Blinn-Phong formulation.
    //
    // Note: this "BRDF" folds the N-dot-L term into the evaluation
    // of the reflectance function, in case that enables useful
    // algebraic simplifications.
    //
    float3 evaluate(float3 V, float3 L, float3 N)
    {
        float3 halfVector = normalize(V + L);
        float diffuseTerm = saturate(dot(N, L));
        float specularTerm = pow(saturate(dot(N, halfVector)), specularity);
        return kd * diffuseTerm + ks * specularTerm;
    }
};
//
// It is important to note that a reflectance function is *not*
// a "material." In most cases, a material will have spatially-varying
// properties so that it cannot be summarized as a single `IBRDF`
// instance.
//
// Thus a "material" is a value that can produce a BRDF for any point
// on a surface (e.g., by sampling texture maps, etc.).
//
interface IMaterial
{
// Different concrete material implementations might yield BRDF
// values with different types. E.g., one material might yield
// reflectance functions using `BlinnPhong` while another uses
// a much more complicated/accurate representation.
//
// We encapsulate the choice of BRDF parameters/evaluation in
// our material interface with an "associated type." In the
// simplest terms, think of this as an interface requirement
// that is a type, instead of a method.
//
// (If you are C++-minded, you might think of this as akin to
// how every container provides an `iterator` type, but different
// containers may have different types of iterators.)
//
associatedtype BRDF : IBRDF;
// For our simple example program, it is enough for a material to
// be able to return a BRDF given a point on the surface.
//
// A more complex implementation of material shading might also
// have the material return updated surface geometry to reflect
// the result of normal mapping, occlusion mapping, etc., or
// return an opacity/coverage value for partially transparent
// surfaces.
//
BRDF prepare(SurfaceGeometry geometry);
};
// We will now define a trivial first implementation of the material
// interface, which uses our Blinn-Phong BRDF with uniform values
// for its parameters.
//
// Note that this implementation is being provided *after* the
// shader parameter `gMaterial` is declared, so that there is no
// assumption in the shader code that `gMaterial` will be plugged
// in using an instance of `SimpleMaterial`
//
//
struct SimpleMaterial : IMaterial
{
    // Uniform material parameters. When `SimpleMaterial` is plugged in
    // for `TMaterial`, `gMaterial` becomes a
    // `ParameterBlock<SimpleMaterial>`, and these fields are allocated
    // to a constant buffer that is part of that parameter block.
    //
    // TODO: A future version of this example will include texture
    // parameters here, to show that they are declared just like
    // simple uniforms.
    //
    float3 diffuseColor;
    float3 specularColor;
    float specularity;

    // Satisfy the `IMaterial` associated-type requirement with a
    // simple `typedef`; a nested `struct` type could also satisfy
    // an associated type requirement.
    //
    // A future version of the Slang compiler may allow the "right"
    // associated type definition to be inferred from the signature
    // of the `prepare()` method below.
    //
    typedef BlinnPhong BRDF;

    // Package the uniform parameters up as a ready-to-evaluate
    // Blinn-Phong reflectance function.
    //
    BlinnPhong prepare(SurfaceGeometry geometry)
    {
        BlinnPhong result;
        result.specularity = specularity;
        result.ks = specularColor;
        result.kd = diffuseColor;
        return result;
    }
};
//
// Note that no other code in this file statically
// references the `SimpleMaterial` type, and instead
// it is up to the application to "plug in" this type,
// or another `IMaterial` implementation for the
// `TMaterial` parameter.
//
// A light, or an entire lighting *environment* is an object
// that can illuminate a surface using some BRDF implemented
// with our abstractions above.
//
interface ILightEnv
{
// The `illuminate` method is intended to integrate incoming
// illumination from this light (environment) incident at the
// surface point given by `g` (which has the reflectance function
// `brdf`) and reflected into the outgoing direction `wo`.
//
float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo);
//
// Note that the `illuminate()` method is allowed as an interface
// requirement in Slang even though it is a generic. Contrast that
// with C++, where a `template` method cannot be `virtual`.
};
// Given the `ILightEnv` interface, we can write almost textbook
// definitions of directional and point lights.
struct DirectionalLight : ILightEnv
{
    // Direction used as the incoming light direction `wi` for every
    // surface point, and a per-channel intensity scale.
    float3 direction;
    float3 intensity;

    // A directional light illuminates every point from the same
    // direction, so integration reduces to one BRDF evaluation.
    float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
    {
        float3 reflected = brdf.evaluate(wo, direction, g.normal);
        return intensity * reflected;
    }
};
struct PointLight : ILightEnv
{
    // World-space light position and per-channel intensity.
    float3 position;
    float3 intensity;

    // A point light illuminates from a single location, with its
    // contribution falling off with the square of the distance.
    float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
    {
        float3 toLight = position - g.position;
        float dist = length(toLight);
        float3 wi = normalize(toLight);
        // Inverse-square falloff. NOTE(review): this divides by zero
        // if the shaded point coincides with the light position.
        float3 attenuatedIntensity = intensity / (dist * dist);
        return attenuatedIntensity * brdf.evaluate(wo, wi, g.normal);
    }
};
// In most cases, a shader entry point will only be specialized for a single
// material, but interesting rendering almost always needs multiple lights.
// For that reason we will next define types to represent *composite* lighting
// environment with multiple lights.
//
// A naive approach might be to have a single undifferentiated list of lights
// where any type of light may appear at any index, but this would lose all
// of the benefits of static specialization: we would have to perform dynamic
// branching to determine what kind of light is stored at each index.
//
// Instead, we will start with a type for *homogeneous* arrays of lights:
//
struct LightArray<L : ILightEnv, let N : int> : ILightEnv
{
    // The `LightArray` type has two generic parameters:
    //
    // - `L` is a type parameter: the type of light stored in the array
    // - `N` is a generic *value* parameter: the maximum number of lights
    //
    // Slang's support for generic value parameters is currently
    // experimental, and the syntax might change.
    //
    int count;      // number of entries of `lights` actually in use
    L lights[N];    // fixed-capacity storage for the lights

    // Integrate illumination by naively summing the contribution of
    // every active light. A more advanced renderer might instead
    // apply sampling techniques to pick a subset of lights.
    //
    float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
    {
        float3 total = float3(0);
        int lightIndex = 0;
        while (lightIndex < count)
        {
            total += lights[lightIndex].illuminate(g, brdf, wo);
            lightIndex++;
        }
        return total;
    }
};
// `LightArray` can handle multiple lights as long as they have the
// same type, but we need a way to have a scene with multiple lights
// of different types *without* losing static specialization.
//
// The `LightPair<T,U>` type supports this in about the simplest way
// possible, by aggregating a light (environment) of type `T` and
// one of type `U`. Those light environments might themselves be
// `LightArray`s or `LightPair`s, so that arbitrarily complex
// environments can be created from just these two composite types.
//
// This is probably a good place to insert a reminder that Slang's
// generics are *not* C++ templates, so that the error messages
// produced when working with these types are in general reasonable,
// and this is *not* any form of "template metaprogramming."
//
// That said, we expect that future versions of Slang will make
// defining composite types like this a bit less cumbersome.
//
struct LightPair<T : ILightEnv, U : ILightEnv> : ILightEnv
{
    // The two sub-environments being aggregated; either may itself be
    // a `LightArray`, another `LightPair`, etc.
    T first;
    U second;

    // The pair's illumination is simply the sum of its two halves.
    float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
    {
        float3 fromFirst = first.illuminate(g, brdf, wo);
        float3 fromSecond = second.illuminate(g, brdf, wo);
        return fromFirst + fromSecond;
    }
};
// As a final (degenerate) case, we will define a light
// environment with *no* lights, which contributes no illumination.
//
struct EmptyLightEnv : ILightEnv
{
    // With no lights there is nothing to integrate, so the
    // contribution is identically zero.
    float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
    {
        return float3(0);
    }
};
// The code above constitutes the "shader library" for our
// application, while the code below this point is the
// implementation of a simple forward rendering pass
// using that library.
//
// While the shader library has used many of Slang's advanced
// mechanisms, the vertex and fragment shaders will be
// much more modest, and hopefully easier to follow.
// We will start with a `struct` for per-view parameters that
// will be allocated into a `ParameterBlock`.
//
// As written, this isn't very different from using an HLSL
// `cbuffer` declaration, but importantly this code will
// continue to work if we add one or more resources (e.g.,
// an environment map texture) to the `PerView` type.
//
struct PerView
{
// Combined view-projection transform mapping world space to clip space.
float4x4 viewProjection;
// World-space camera position, used to compute the view vector for shading.
float3 eyePosition;
};
ParameterBlock<PerView> gViewParams;
// Declaring a block for per-model parameter data is
// similarly simple.
//
struct PerModel
{
// Object-to-world transform applied to vertex positions.
float4x4 modelTransform;
// Inverse-transpose of `modelTransform`, used so that normals
// transform correctly even under non-uniform scaling.
float4x4 inverseTransposeModelTransform;
};
ParameterBlock<PerModel> gModelParams;
// We want our shader to work with any kind of lighting environment
// - that is, any type that implements `ILightEnv`. Furthermore,
// we want the parameters of that lighting environment to be passed
// as parameter block - `ParameterBlock<L>` for some type `L`.
//
// We handle this by defining a global generic type parameter for
// our shader, and constraining it to implement `ILightEnv`...
//
type_param TLightEnv : ILightEnv;
//
// ... and then defining a parameter block that uses that type
// parameter as the "element type" of the block:
//
ParameterBlock<TLightEnv> gLightEnv;
// Our handling of the material parameter for our shader is quite
// similar to the case for the lighting environment: a global generic
// type parameter constrained to `IMaterial`, plus a parameter block
// that uses it as the element type.
//
type_param TMaterial : IMaterial;
ParameterBlock<TMaterial> gMaterial;
// Our vertex shader entry point is only marginally more
// complicated than the Hello World example. We will
// start by declaring the various "connector" `struct`s.
//
struct AssembledVertex
{
// Raw per-vertex attributes fetched from the input vertex streams.
float3 position : POSITION;
float3 normal : NORMAL;
float2 uv : UV;
};
struct CoarseVertex
{
// Values interpolated across the triangle and consumed by the
// fragment shader: world-space position and normal, plus the UV.
float3 worldPosition;
float3 worldNormal;
float2 uv;
};
struct VertexStageOutput
{
// Attributes passed through to the fragment shader.
CoarseVertex coarseVertex : CoarseVertex;
// Clip-space position consumed by the rasterizer.
float4 sv_position : SV_Position;
};
// Perhaps the most interesting new feature of the entry
// point declarations is that we use a `[shader(...)]`
// attribute (as introduced in HLSL Shader Model 6.x)
// in order to tag our entry points.
//
// This attribute informs the Slang compiler which
// functions are intended to be compiled as shader
// entry points (and what stage they target), so that
// the programmer no longer needs to specify the
// entry point name/stage through the API (or on
// the command line when using `slangc`).
//
// While HLSL added this feature only in newer versions,
// the Slang compiler supports this attribute across
// *all* targets, so that it is okay to use whether you
// want DXBC, DXIL, or SPIR-V output.
//
// Transform the assembled vertex into world and clip space, producing
// the values that the rasterizer and fragment shader consume.
[shader("vertex")]
VertexStageOutput vertexMain(
    AssembledVertex assembledVertex)
{
    // Positions are transformed as points (w = 1), normals as
    // directions (w = 0), using the per-model matrices.
    float4 localPosition = float4(assembledVertex.position, 1.0);
    float4 localNormal = float4(assembledVertex.normal, 0.0);

    float3 worldPosition = mul(gModelParams.modelTransform, localPosition).xyz;

    VertexStageOutput output;
    output.coarseVertex.worldPosition = worldPosition;
    output.coarseVertex.worldNormal =
        mul(gModelParams.inverseTransposeModelTransform, localNormal).xyz;
    output.coarseVertex.uv = assembledVertex.uv;
    output.sv_position = mul(gViewParams.viewProjection, float4(worldPosition, 1.0));
    return output;
}
// Our fragment shader is almost trivial, with the most interesting
// thing being how it uses the `TMaterial` type parameter (through the
// value stored in the `gMaterial` parameter block) to dispatch to
// the correct implementation of the `prepare()` method
// in the `IMaterial` interface.
//
// The `gMaterial` parameter block declaration thus serves not only
// to group certain shader parameters for efficient CPU-to-GPU
// communication, but also to select the code that will execute
// in specialized versions of the `fragmentMain` entry point.
//
[shader("fragment")]
float4 fragmentMain(
    CoarseVertex coarseVertex : CoarseVertex) : SV_Target
{
    // Rebuild the local surface geometry from the interpolated vertex
    // attributes; the interpolated normal must be re-normalized
    // before it is used for shading.
    //
    SurfaceGeometry surface;
    surface.position = coarseVertex.worldPosition;
    surface.normal = normalize(coarseVertex.worldNormal);
    surface.uv = coarseVertex.uv;

    // View vector from the surface point toward the eye.
    float3 viewDirection = normalize(gViewParams.eyePosition - surface.position);

    // Run the material's "pattern generation" logic (e.g., sampling
    // and blending texture layers) to produce a BRDF suitable for
    // evaluation under illumination from different light sources.
    //
    // Note that the type here is `TMaterial.BRDF` — the `BRDF` type
    // *associated* with the (unknown) `TMaterial` type. When
    // `TMaterial` is later substituted with a concrete type (e.g.,
    // `SimpleMaterial`), this resolves to a concrete type too (e.g.,
    // `SimpleMaterial.BRDF`, an alias for `BlinnPhong`).
    //
    TMaterial.BRDF reflectance = gMaterial.prepare(surface);

    // Because the lighting environment is wrapped up as a single
    // (composite) object, integrating incident light is as simple as
    // calling its `illuminate()` method. This shader is thus
    // abstracted from how the renderer structures that integration —
    // somewhat like an `illuminance` loop in RenderMan Shading
    // Language.
    //
    float3 radiance = gLightEnv.illuminate(surface, reflectance, viewDirection);

    return float4(radiance, 1);
}