zig source #1
Source code
const std = @import("std"); const assert = std.debug.assert; pub fn Layer(comptime InputType: type, comptime input_size: usize, comptime layer_size_: usize) type { return struct { const SelfType = @This(); pub const layer_size: usize = layer_size_; weights: [layer_size_][input_size]InputType = undefined, biases: [input_size]InputType = undefined, pub fn evaluate(self: *const SelfType, inputs: [*]InputType, outputs: [*]InputType) void { comptime var neuron_index: usize = 0; inline while (neuron_index < layer_size_) : (neuron_index += 1) { var input_index: usize = 0; var neuron_result: InputType = 0; while (input_index < input_size) : (input_index += 1) { neuron_result += self.weights[neuron_index][input_index] * inputs[input_index] + self.biases[input_index]; } outputs[neuron_index] = neuron_result; } } }; } pub fn Network(comptime InputType: type, comptime layer_list: anytype) type { return struct { const SelfType = @This(); layers: @TypeOf(layer_list) = layer_list, pub fn feedForward(self: *const SelfType, inputs: [*]InputType, outputs: [*]InputType) void { comptime assert(self.layers.len > 0); var layer_inputs = inputs; comptime var layer_index: usize = 0; inline while (layer_index < self.layers.len - 1) : (layer_index += 1) { var output: [self.layers[layer_index].layer_size]InputType = undefined; self.layers[layer_index].evaluate(layer_inputs, &output); layer_inputs = &output; } self.layers[layer_index].evaluate(layer_inputs, outputs); } }; } pub fn main() anyerror!void { var inputs = [2]i16{ 2, 3 }; const layer1 = Layer(i16, 2, 2){ .weights = [2][2]i16{ [2]i16{ 2, 4 }, [2]i16{ 3, 4 }, }, .biases = [2]i16{ 10, 50 }, }; const layer2 = Layer(i16, 2, 1){ .weights = [1][2]i16{ [2]i16{ 3, 5 }, }, .biases = [2]i16{1, 2}, }; var outputs: [1]i16 = undefined; const network = Network(i16, .{ layer1, layer2 }){}; network.feedForward(&inputs, &outputs); assert(outputs[0] == 621); }