hlsl source #1
Source code
//! Author: Mateusz "DevSH" Kielan
//! LICENSE: MPL v 2.0
//! This code is formed by HLSL headers of Nabla

// one day I'll use BOOST PREPROCESSOR

// basics
#define NBL_EVAL(...) __VA_ARGS__
#define NBL_CONCAT_IMPL2(X,Y) X ## Y
#define NBL_CONCAT_IMPL(X,Y) NBL_CONCAT_IMPL2(X,Y)
#define NBL_CONCATENATE(X,Y) NBL_CONCAT_IMPL(NBL_EVAL(X) , NBL_EVAL(Y))

//
#define NBL_ARG_125(a0,a1,a2,a3,a4,a5,a6,a7,a8,a9,a10,a11,a12,a13,a14,a15,a16,a17,a18,a19,a20,a21,a22,a23,a24,a25,a26,a27,a28,a29,a30,a31,a32,a33,a34,a35,a36,a37,a38,a39,a40,a41,a42,a43,a44,a45,a46,a47,a48,a49,a50,a51,a52,a53,a54,a55,a56,a57,a58,a59,a60,a61,a62,a63,a64,a65,a66,a67,a68,a69,a70,a71,a72,a73,a74,a75,a76,a77,a78,a79,a80,a81,a82,a83,a84,a85,a86,a87,a88,a89,a90,a91,a92,a93,a94,a95,a96,a97,a98,a99,a100,a101,a102,a103,a104,a105,a106,a107,a108,a109,a110,a111,a112,a113,a114,a115,a116,a117,a118,a119,a120,a121,a122,a123,a124,a125, ... ) a125
#define NBL_VA_ARGS_COUNT( ... ) NBL_EVAL(NBL_ARG_125(__VA_ARGS__,125,124,123,122,121,120,119,118,117,116,115,114,113,112,111,110,109,108,107,106,105,104,103,102,101,100,99,98,97,96,95,94,93,92,91,90,89,88,87,86,85,84,83,82,81,80,79,78,77,76,75,74,73,72,71,70,69,68,67,66,65,64,63,62,61,60,59,58,57,56,55,54,53,52,51,50,49,48,47,46,45,44,43,42,41,40,39,38,37,36,35,34,33,32,31,30,29,28,27,26,25,24,23,22,21,20,19,18,17,16,15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0))
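// Illustrative example of the counting trick above (not part of the original
// headers): NBL_VA_ARGS_COUNT appends a descending 125..0 list after the user
// arguments, so NBL_ARG_125 always selects the argument count:
//   NBL_VA_ARGS_COUNT(a,b,c)          // -> 3
//   NBL_CONCATENATE(NBL_FOREACH_, 1)  // -> NBL_FOREACH_1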
//
#define NBL_FOREACH_1_0(WHAT)
#define NBL_FOREACH_1_1(WHAT, X) NBL_EVAL(WHAT(X))
#define NBL_FOREACH_1_2(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_1(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_3(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_2(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_4(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_3(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_5(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_4(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_6(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_5(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_7(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_6(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_8(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_7(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_9(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_8(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_10(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_9(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_11(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_10(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_12(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_11(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_13(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_12(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_14(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_13(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_15(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_14(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_16(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_15(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_17(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_16(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_18(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_17(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_19(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_18(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_20(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_19(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_21(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_20(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_22(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_21(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_23(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_22(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_24(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_23(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_25(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_24(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_26(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_25(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_27(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_26(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_28(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_27(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_29(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_28(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_30(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_29(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_31(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_30(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_32(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_31(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_33(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_32(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_34(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_33(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_35(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_34(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_36(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_35(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_37(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_36(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_38(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_37(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_39(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_38(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_40(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_39(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_41(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_40(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_42(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_41(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_43(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_42(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_44(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_43(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_45(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_44(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_46(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_45(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_47(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_46(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_48(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_47(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_49(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_48(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_50(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_49(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_51(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_50(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_52(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_51(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_53(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_52(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_54(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_53(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_55(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_54(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_56(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_55(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_57(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_56(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_58(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_57(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_59(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_58(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_60(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_59(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_61(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_60(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_62(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_61(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_63(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_62(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_64(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_63(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_65(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_64(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_66(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_65(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_67(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_66(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_68(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_67(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_69(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_68(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_70(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_69(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_71(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_70(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_72(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_71(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_73(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_72(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_74(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_73(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_75(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_74(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_76(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_75(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_77(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_76(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_78(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_77(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_79(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_78(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_80(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_79(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_81(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_80(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_82(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_81(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_83(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_82(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_84(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_83(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_85(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_84(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_86(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_85(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_87(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_86(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_88(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_87(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_89(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_88(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_90(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_89(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_91(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_90(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_92(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_91(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_93(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_92(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_94(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_93(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_95(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_94(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_96(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_95(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_97(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_96(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_98(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_97(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_99(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_98(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_100(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_99(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_101(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_100(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_102(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_101(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_103(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_102(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_104(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_103(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_105(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_104(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_106(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_105(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_107(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_106(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_108(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_107(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_109(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_108(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_110(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_109(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_111(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_110(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_112(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_111(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_113(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_112(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_114(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_113(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_115(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_114(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_116(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_115(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_117(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_116(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_118(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_117(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_119(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_118(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_120(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_119(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_121(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_120(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_122(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_121(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_123(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_122(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_124(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_123(WHAT, __VA_ARGS__))
#define NBL_FOREACH_1_125(WHAT, X, ...) NBL_EVAL(WHAT(X)NBL_FOREACH_1_124(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_0(WHAT)
#define NBL_FOREACH_2_2(WHAT, X, Y) NBL_EVAL(WHAT(X,Y))
#define NBL_FOREACH_2_4(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_2(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_6(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_4(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_8(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_6(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_10(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_8(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_12(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_10(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_14(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_12(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_16(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_14(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_18(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_16(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_20(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_18(WHAT, __VA_ARGS__))
#define NBL_FOREACH_2_22(WHAT, X, Y, ...) NBL_EVAL(WHAT(X,Y)NBL_FOREACH_2_20(WHAT, __VA_ARGS__))
// TODO: generate more
#define NBL_FOREACH_3_0(WHAT)
#define NBL_FOREACH_3_3(WHAT, X, Y, Z) NBL_EVAL(WHAT(X,Y,Z))
#define NBL_FOREACH_3_6(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_3(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_9(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_6(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_12(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_9(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_15(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_12(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_18(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_15(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_21(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_18(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_24(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_21(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_27(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_24(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_30(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_27(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_33(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_30(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_36(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_33(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_39(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_36(WHAT, __VA_ARGS__))
#define NBL_FOREACH_3_42(WHAT, X, Y, Z, ...) NBL_EVAL(WHAT(X,Y,Z)NBL_FOREACH_3_39(WHAT, __VA_ARGS__))
// TODO: generate more
#define NBL_FOREACH_4_0(WHAT)
#define NBL_FOREACH_4_4(WHAT, X, Y, Z, W) NBL_EVAL(WHAT(X,Y,Z,W))
#define NBL_FOREACH_4_8(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_4(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_12(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_8(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_16(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_12(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_20(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_16(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_24(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_20(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_28(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_24(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_32(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_28(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_36(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_32(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_40(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_36(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_44(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_40(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_48(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_44(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_52(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_48(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_56(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_52(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_60(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_56(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_64(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_60(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_68(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_64(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_72(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_68(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_76(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_72(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_80(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_76(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_84(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_80(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_88(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_84(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_92(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_88(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_96(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_92(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_100(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_96(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_104(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_100(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_108(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_104(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_112(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_108(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_116(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_112(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_120(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_116(WHAT, __VA_ARGS__))
#define NBL_FOREACH_4_124(WHAT, X, Y, Z, W, ...) NBL_EVAL(WHAT(X,Y,Z,W)NBL_FOREACH_4_120(WHAT, __VA_ARGS__))
//
#define NBL_FOREACH_N(WHAT,N, ... ) NBL_EVAL(NBL_CONCATENATE(NBL_CONCATENATE(NBL_FOREACH_,N),NBL_CONCATENATE(_,NBL_VA_ARGS_COUNT(__VA_ARGS__))))(WHAT, __VA_ARGS__)
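// Illustrative example (NBL_DECLARE is a hypothetical helper, not part of these
// headers): NBL_FOREACH_N concatenates NBL_FOREACH_<N>_<NBL_VA_ARGS_COUNT(...)>
// and applies WHAT to each consecutive N-tuple of the argument list:
//   #define NBL_DECLARE(T,N) T N;
//   NBL_FOREACH_N(NBL_DECLARE,2, float,x, uint32_t,y)
//   // -> NBL_FOREACH_2_4(NBL_DECLARE, float,x, uint32_t,y) -> float x; uint32_t y;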
/// mmm some c++11 ageisms
template<bool B, class T = void> struct enable_if {};
template<class T> struct enable_if<true,T> { using type=T; };

template<typename T, T _value> struct integral_constant
{
    using type = T;
    static const uint32_t value = _value;
};

template<typename T, typename U> struct is_same;
template<typename T> struct is_same<T,T> : integral_constant<bool,true> {};
template<typename T, typename U> struct is_same : integral_constant<bool,false> {};

template<typename T> struct is_array;
template<typename T, uint32_t count> struct is_array<T[count]> : integral_constant<bool,true> {};
template<typename T> struct is_array : integral_constant<bool,false> {};

template<typename X, typename Y> struct consteval_bitwise_or : integral_constant<typename X::type,X::value|Y::value> {};
template<typename X, typename Y> struct consteval_bitwise_lsh : integral_constant<typename X::type,X::value<<Y::value> {};
template<typename X, typename Y> struct consteval_bitwise_min : integral_constant<typename X::type,Y::value<X::value ? X::value:Y::value> {};

namespace impl
{
template<uint64_t X>
struct consteval_log2
{
    static const uint32_t value = (X&0x1ull) ? 0u:(
        (X&0x2ull) ? 1u:(
        (X&0x4ull) ? 2u:(
        (X&0x8ull) ? 3u:(
        (X&0x10ull) ? 4u:(
        (X&0x20ull) ? 5u:(
        (X&0x40ull) ? 6u:(
        (X&0x80ull) ? 7u:(
            8u // TODO REST
        ))))))));
};
}
template<typename X> struct consteval_log2 : integral_constant<uint32_t,impl::consteval_log2<X::value>::value> {};

// working around lack of `alignof()` in HLSL
template<typename T> struct _alignof;

template<uint32_t base_alignment, uint64_t offset> struct resolve_alignment;
template<uint32_t base_alignment> struct resolve_alignment<base_alignment,0x0ull> : integral_constant<uint32_t,base_alignment> { };
template<uint32_t base_alignment, uint64_t offset> struct resolve_alignment : consteval_bitwise_lsh<
    integral_constant<uint32_t,0x1u>,
    consteval_log2<
        consteval_bitwise_or<
            integral_constant<uint32_t,base_alignment>,
            integral_constant<uint32_t,offset>
        >
    >
> { };

RWByteAddressBuffer metabuffer[];

// void pointer type
template<uint32_t _alignment>
struct ptr_void
{
    static const uint32_t alignment = _alignment;

    uint64_t value;
};
// forward declare reference type
template<typename T, uint32_t alignment=_alignof<T>::value> struct ref;
// generic pointer type
template<typename T, uint32_t alignment=_alignof<T>::value>
struct ptr : ptr_void<alignment>
{
    using pointee = T;
    using ref_t = ref<T,alignment>;

    // this is ref<T,_alignof<T>> on purpose, we can't rely on alignment anymore with a dynamic index
    ref<T> operator[](const uint64_t index)
    {
        return ref<T>::construct(ptr_void<_alignof<T>::value>(ptr_void<alignment>::value+sizeof(T)*index));
    }
};
// alignment for pointers
template<typename T> struct _alignof<ptr<T> > : integral_constant<uint32_t,8> {};

// this extra is only needed because `*operator`,`operator=` and `&operator` are not supported
template<typename ptr_t>
typename ptr_t::ref_t deref(ptr_t p)
{
    return ptr_t::ref_t::construct(p);
}

//
template<typename U, typename T, typename e=typename enable_if<is_same<ptr<typename U::pointee,U::alignment>,U>::value>::type>
U _reinterpret_cast(ptr<T,U::alignment> orig)
{
    return U(orig);
}
template<typename U, typename e=typename enable_if<is_same<ptr_void<U::alignment>,U>::value||is_same<ptr<typename U::pointee,U::alignment>,U>::value>::type>
U _reinterpret_cast(uint64_t orig)
{
    return U(orig);
}

// to change alignments forcefully
template<uint32_t new_alignment, typename T, uint32_t old_alignment>
ptr<T,new_alignment> align_cast(ptr<T,old_alignment> arg)
{
    return ptr<T,new_alignment>(arg);
}

// this is tricky because RawBufferLoad is broken on multiple levels, when it stops being so we can stop specializing
template<typename T, uint32_t alignment>
struct bda
{
    static void load(out T v, ptr_void<alignment> addr);
    static void store(ptr_void<alignment> addr, const T v);
};
// specialization for array types
template<typename T, uint32_t alignment, uint32_t count>
struct bda<T[count],alignment>
{
    static void load(out T v[count], ptr_void<alignment> addr)
    {
        [[unroll]]
        for (uint32_t i=0; i<count; i++)
            bda<T,_alignof<T>::value>::load(v[i],ptr_void<alignment>(addr.value+sizeof(T)*i));
    }
    static void store(ptr_void<alignment> addr, const T v[count])
    {
        [[unroll]]
        for (uint32_t i=0; i<count; i++)
            bda<T,_alignof<T>::value>::store(ptr_void<alignment>(addr.value+sizeof(T)*i),v[i]);
    }
};
// specialization for pointer types
template<typename T, uint32_t alignment>
struct bda<ptr<T>,alignment>
{
    static void load(out ptr<T> v, ptr_void<alignment> addr)
    {
        uint32_t hi = uint32_t(addr.value>>32ull);
        uint32_t lo = uint32_t(addr.value);
        v = ptr<T>(metabuffer[NonUniformResourceIndex(hi)].Load<uint64_t>(lo));
    }
    static void store(ptr_void<alignment> addr, ptr<T> v)
    {
        bda<uint64_t,alignment>::store(addr,v.value);
    }
};
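// Illustrative note on the addressing scheme used by the loads/stores above (an
// editor's reading, not from the original headers): the 64-bit ptr_void::value
// packs a descriptor index into `metabuffer` in the high 32 bits and a byte
// offset in the low 32 bits, e.g. value==(2ull<<32)|64ull addresses byte 64 of
// metabuffer[2]. Likewise resolve_alignment<8,12>::value==4, since 8|12==12 and
// the lowest set bit of 12 is 1u<<2.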
// generic reference type
template<typename T, uint32_t alignment>
struct ref_base
{
    using ptr_t = ptr<T,alignment>;

    void load(out T retval)
    {
        bda<T,alignment>::load(retval,addr);
    }
    // have to do this weird template in case T is an array
    template<typename U=T>
    U load()
    {
        T retval;
        bda<T,alignment>::load(retval,addr);
        return retval;
    }
    void store(T v)
    {
        bda<T,alignment>::store(addr,v);
    }
    // TODO: other operators that modify self and are atomic

    ptr_void<alignment> addr;
};
// deal with array types
template<typename T, uint32_t alignment, uint32_t count>
struct ref<T[count],alignment> : ref_base<T[count],alignment>
{
    static ref<T[count],alignment> construct(ptr_void<alignment> addr)
    {
        ref<T[count],alignment> retval;
        retval.addr = addr;
        return retval;
    }

    ref<T[count],alignment> store(T v[count])
    {
        ref_base<T[count],alignment>::store(v);
        return ref<T[count],alignment>(this);
    }

    // this is ref<T,_alignof<T>> on purpose, we can't rely on alignment anymore with a dynamic index
    ref<T> operator[](const uint64_t index)
    {
        return deref(ptr<T>(ref_base<T[count],alignment>::addr.value+sizeof(T)*index));
    }
};
// deal with pointers
template<typename T, uint32_t alignment>
struct ref<ptr<T>,alignment> : ref_base<ptr<T>,alignment>
{
    static ref<ptr<T>,alignment> construct(ptr_void<alignment> addr)
    {
        ref<ptr<T>,alignment> retval;
        retval.addr = addr;
        return retval;
    }

    ref<ptr<T>,alignment> store(ptr<T> v)
    {
        ref_base<ptr<T>,alignment>::store(v);
        return ref<ptr<T>,alignment>(this);
    }
};

// helpers
template<typename T, uint32_t base_alignment, uint64_t _offset>
struct member_ref_t : ref<T,resolve_alignment<base_alignment,_offset>::value>
{
    static const uint64_t offset = _offset;
    static const uint32_t alignment = resolve_alignment<base_alignment,offset>::value;
    using base_t = ref<T,alignment>;

    void initialize(uint64_t addr)
    {
        this = member_ref_t<T,base_alignment,offset>(base_t::construct(ptr_void<alignment>(addr+offset)));
    }
};

//
template<typename U, typename T, typename e=typename enable_if<is_same<ref<typename U::ptr_t::pointee,U::alignment>,U>::value>::type>
U _reinterpret_cast(ref<T,U::alignment> orig)
{
    return U(orig.addr);
}

// to change alignments forcefully
template<uint32_t new_alignment, typename T, uint32_t old_alignment>
ref<T,new_alignment> align_cast(ptr<T,old_alignment> arg)
{
    return ref<T,new_alignment>(arg);
}

// this extra is only needed because `*operator`,`operator=` and `&operator` are not supported
template<typename ref_t>
typename ref_t::ptr_t addrof(ref_t r)
{
    return _reinterpret_cast<ref_t::ptr_t>(r.addr);
}

// look at the horrible things DXC's BDA impl makes me do
#define NBL_HLSL_BDA_IMPL0(OFFSET,TYPE,NAME,ARR_SZ) TYPE NAME ARR_SZ;
#define NBL_HLSL_BDA_IMPL1(OFFSET,TYPE,NAME,ARR_SZ) reference.NAME.load(retval.NAME);
#define NBL_HLSL_BDA_IMPL2(OFFSET,TYPE,NAME,ARR_SZ) reference.NAME.store(v.NAME);
#define NBL_HLSL_BDA_IMPL3(OFFSET,TYPE,NAME,ARR_SZ) retval.NAME.initialize(addr.value);
#define NBL_HLSL_BDA_IMPL4(OFFSET,TYPE,NAME,ARR_SZ) member_ref_t<TYPE ARR_SZ,alignment,OFFSET> NAME;
#define NBL_HLSL_BDA_IMPL(TYPENAME,ALIGNMENT,...)
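// Illustrative expansion (inputs taken from the LineStyle struct declared
// below, invocations themselves are not part of these headers): each
// NBL_HLSL_BDA_IMPLn helper consumes one (OFFSET,TYPE,NAME,ARR_SZ) quadruple:
//   NBL_HLSL_BDA_IMPL0(8,float,phaseShift,)  // -> float phaseShift ;
//   NBL_HLSL_BDA_IMPL4(12,uint32_t,normalizedStipple,[1])
//   // -> member_ref_t<uint32_t [1],alignment,12> normalizedStipple;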
#define NBL_HLSL_STRUCT(TYPENAME,ALIGNMENT,...) struct TYPENAME; \
\
template<> \
struct _alignof<TYPENAME> : integral_constant<uint32_t,ALIGNMENT> {}; \
\
struct TYPENAME \
{ \
    NBL_FOREACH_N(NBL_HLSL_BDA_IMPL0,4,__VA_ARGS__) \
}; \
\
template<uint32_t alignment> \
struct bda<TYPENAME,alignment> \
{ \
    static void load(out TYPENAME retval, ptr_void<alignment> addr) \
    { \
        ref<TYPENAME,alignment> reference = ref<TYPENAME,alignment>::construct(addr); \
        NBL_FOREACH_N(NBL_HLSL_BDA_IMPL1,4,__VA_ARGS__) \
    } \
\
    static void store(ptr_void<alignment> addr, const TYPENAME v) \
    { \
        ref<TYPENAME,alignment> reference = ref<TYPENAME,alignment>::construct(addr); \
        NBL_FOREACH_N(NBL_HLSL_BDA_IMPL2,4,__VA_ARGS__) \
    } \
}; \
\
template<uint32_t alignment> \
struct ref<TYPENAME,alignment> : ref_base<TYPENAME,alignment> \
{ \
    static ref<TYPENAME,alignment> construct(ptr_void<alignment> addr) \
    { \
        ref<TYPENAME,alignment> retval; \
        retval.addr = addr; \
        NBL_FOREACH_N(NBL_HLSL_BDA_IMPL3,4,__VA_ARGS__) \
        return retval; \
    } \
\
    ref<TYPENAME,alignment> store(TYPENAME v) \
    { \
        ref_base<TYPENAME,alignment>::store(v); \
        return ref<TYPENAME,alignment>(this); \
    } \
\
    NBL_FOREACH_N(NBL_HLSL_BDA_IMPL4,4,__VA_ARGS__) \
}

//! pregenerated specializations
#define NBL_HLSL_BDA(TYPENAME,ALIGNMENT,...) template<> struct _alignof<TYPENAME> : integral_constant<uint32_t,ALIGNMENT> {}; \
\
template<uint32_t alignment> \
struct bda<TYPENAME,alignment> \
{ \
    static void load(out TYPENAME v, ptr_void<alignment> addr) \
    { \
        uint32_t hi = uint32_t(addr.value>>32ull); \
        uint32_t lo = uint32_t(addr.value); \
        v = metabuffer[NonUniformResourceIndex(hi)].Load<TYPENAME>(lo); \
    } \
    static void store(ptr_void<alignment> addr, const TYPENAME v) \
    { \
        uint32_t hi = uint32_t(addr.value>>32ull); \
        uint32_t lo = uint32_t(addr.value); \
        metabuffer[NonUniformResourceIndex(hi)].Store<TYPENAME>(lo,v); \
    } \
}; \
\
template<uint32_t alignment> \
struct ref<TYPENAME,alignment> : ref_base<TYPENAME,alignment> \
{ \
    static ref<TYPENAME,alignment> construct(ptr_void<alignment> addr) \
    { \
        ref<TYPENAME,alignment> retval; \
        retval.addr = addr; \
        return retval; \
    } \
    ref<TYPENAME,alignment> store(TYPENAME v) \
    { \
        ref_base<TYPENAME,alignment>::store(v); \
        return ref<TYPENAME,alignment>(this); \
    } \
\
    template<typename U=TYPENAME> \
    U atomicAdd(U v) \
    { \
        U old; \
        uint32_t hi = uint32_t(ref_base<TYPENAME,alignment>::addr.value>>32ull); \
        uint32_t lo = uint32_t(ref_base<TYPENAME,alignment>::addr.value); \
        metabuffer[NonUniformResourceIndex(hi)].InterlockedAdd(lo,v,old); \
        return old; \
    } \
}

NBL_HLSL_BDA(float,4);
NBL_HLSL_BDA(float2,4);
NBL_HLSL_BDA(float3,4);
NBL_HLSL_BDA(float4,4);
NBL_HLSL_BDA(double,8);
NBL_HLSL_BDA(uint32_t,4);
NBL_HLSL_BDA(uint64_t,8);

//! userspace code
NBL_HLSL_STRUCT(LineStyle,8u,
    0,ptr<LineStyle>,prev,,
    8,float,phaseShift,,
    12,uint32_t,normalizedStipple,[1]
);

struct PSInput
{
    float4 position : SV_Position;
    uint pLineStyle : COLOR0;
};

float4 main(PSInput input) : SV_TARGET
{
    ptr<LineStyle> pLineStyle = _reinterpret_cast<ptr<LineStyle> >(input.pLineStyle);

    LineStyle tmp = pLineStyle[45].load();
    tmp = deref(pLineStyle).load();
    float phase = deref(pLineStyle).phaseShift.load();
    member_ref_t<uint32_t[1],8,12> pA = deref(pLineStyle).normalizedStipple;
    // the following cause us to SIGSEGV
    member_ref_t<uint32_t[1],8,12> pB = pLineStyle[0].normalizedStipple;
    ref<uint32_t> r = deref(pLineStyle).normalizedStipple[0];
    r.atomicAdd(3);
    uint32_t shtuff = deref(pLineStyle).normalizedStipple[1].load();
    pLineStyle[45].store(tmp);
    return float(tmp.phaseShift).xxxx;
}