Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 11 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# CMake build directories
build/
_codeql_build_dir/
_codeql_detected_source_root

# Compiled binaries
*.o
*.a
*.so
*.out
neural_demo
15 changes: 15 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
cmake_minimum_required(VERSION 3.14)
project(NatureRealityEngine CXX)

# Build everything as C++17; REQUIRED makes configuration fail rather than
# silently fall back to an older standard.
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Neural network library
add_library(nre_neural
    engine/neural/NeuralNetwork.cpp
)
# PUBLIC include of the repo root so that consumers (and the library itself)
# can #include "engine/neural/NeuralNetwork.h" relative to the project root.
target_include_directories(nre_neural PUBLIC ${CMAKE_CURRENT_SOURCE_DIR})

# Neural network demo
add_executable(neural_demo examples/neural_demo/main.cpp)
target_link_libraries(neural_demo PRIVATE nre_neural)
83 changes: 83 additions & 0 deletions engine/neural/NeuralNetwork.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
#include "NeuralNetwork.h"

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <random>
#include <stdexcept>

namespace NRE {

// Construct the network, resolving the seed immediately.
// A seed of 0 requests a non-deterministic seed from std::random_device.
// Delegates to SetSeed() so the seed-resolution logic lives in exactly one
// place (the original duplicated it); the InitialiseWeights() call inside
// SetSeed() is a no-op here because no layers exist yet.
NeuralNetwork::NeuralNetwork(uint32_t seed) {
    SetSeed(seed);
}

// Set (or reset) the RNG seed and reinitialise all existing layer weights.
// A seed of 0 requests a non-deterministic seed from std::random_device.
// NOTE(review): std::random_device is permitted to be deterministic on some
// platforms (historically MinGW) — confirm if real entropy is required there.
void NeuralNetwork::SetSeed(uint32_t seed) {
    if (seed == 0) {
        std::random_device rd;
        seed_ = rd();
    } else {
        seed_ = seed;
    }
    // No separate rng_.seed(seed_) here: InitialiseWeights() reseeds rng_
    // from seed_ itself (the original seeded twice, redundantly).
    InitialiseWeights();
}

// Return the resolved seed currently in use (when constructed/reset with 0,
// this is the value drawn from std::random_device, not 0 itself).
uint32_t NeuralNetwork::GetSeed() const {
    return seed_;
}

// Append a fully-connected layer mapping inputSize -> outputSize activations.
// Weights use He initialisation (stddev = sqrt(2 / fan_in)) drawn from the
// seeded RNG stream, so a given seed plus a given AddLayer() call order
// yields a reproducible network. Biases start at zero.
// Throws std::invalid_argument if either dimension is not positive.
void NeuralNetwork::AddLayer(int inputSize, int outputSize) {
    if (inputSize <= 0 || outputSize <= 0) {
        throw std::invalid_argument("Layer dimensions must be positive");
    }
    // Compute the element count in size_t: the original multiplied two ints,
    // which can overflow before widening for very large layers.
    const std::size_t weightCount =
        static_cast<std::size_t>(outputSize) * static_cast<std::size_t>(inputSize);
    layers_.push_back({inputSize, outputSize,
                       std::vector<float>(weightCount),
                       std::vector<float>(static_cast<std::size_t>(outputSize), 0.0f)});
    // He initialisation for this layer using the seeded RNG.
    const float stddev = std::sqrt(2.0f / static_cast<float>(inputSize));
    std::normal_distribution<float> dist(0.0f, stddev);
    Layer& layer = layers_.back();
    for (float& w : layer.weights) {
        w = dist(rng_);
    }
}

// Run a forward pass through every layer and return the final activations.
// Each layer computes ReLU(W * x + b) with row-major weights.
// NOTE(review): the last layer is ReLU-activated too, so outputs are always
// non-negative — confirm that is intended for the output layer.
// Throws std::runtime_error if the activation width does not match a layer.
std::vector<float> NeuralNetwork::Forward(const std::vector<float>& input) const {
    std::vector<float> current = input;
    for (const Layer& layer : layers_) {
        if (layer.inputSize != static_cast<int>(current.size())) {
            throw std::runtime_error("Input size does not match layer dimensions");
        }
        std::vector<float> next(layer.outputSize, 0.0f);
        const float* row = layer.weights.data();  // start of row 0
        for (int out = 0; out < layer.outputSize; ++out, row += layer.inputSize) {
            float acc = layer.biases[out];
            for (int in = 0; in < layer.inputSize; ++in) {
                acc += row[in] * current[in];
            }
            next[out] = (acc > 0.0f) ? acc : 0.0f;  // ReLU
        }
        current = std::move(next);
    }
    return current;
}

// Re-derive every layer's weights from seed_, exactly as if each layer had
// been added, in order, to a freshly seeded network. Biases are zeroed.
void NeuralNetwork::InitialiseWeights() {
    // Restart the stream so the result matches fresh construction + AddLayer.
    rng_.seed(seed_);
    for (Layer& layer : layers_) {
        // He initialisation: stddev = sqrt(2 / fan_in).
        const float fanIn = static_cast<float>(layer.inputSize);
        std::normal_distribution<float> gaussian(0.0f, std::sqrt(2.0f / fanIn));
        for (float& weight : layer.weights) {
            weight = gaussian(rng_);
        }
        for (float& bias : layer.biases) {
            bias = 0.0f;
        }
    }
}

} // namespace NRE
43 changes: 43 additions & 0 deletions engine/neural/NeuralNetwork.h
Original file line number Diff line number Diff line change
@@ -0,0 +1,43 @@
#pragma once

#include <cstdint>
#include <random>
#include <vector>

namespace NRE {

// A fully-connected feedforward neural network with seeded weight initialization
// for reproducible results in procedural generation.
class NeuralNetwork {
public:
    // Construct with an optional seed (default 0 = use std::random_device).
    // NOTE(review): std::normal_distribution's output sequence is
    // implementation-defined, so "reproducible" means per standard-library
    // implementation, not necessarily across compilers — confirm whether
    // cross-platform determinism is required.
    explicit NeuralNetwork(uint32_t seed = 0);

    // Set (or reset) the RNG seed and reinitialise all weights.
    void SetSeed(uint32_t seed);

    // Return the seed currently in use (the resolved value when 0 was given).
    uint32_t GetSeed() const;

    // Append a fully-connected layer (inputSize x outputSize weights + bias).
    // Throws std::invalid_argument if either dimension is not positive.
    void AddLayer(int inputSize, int outputSize);

    // Run a forward pass and return the output activations.
    // Every layer (including the last) applies ReLU.
    // Throws std::runtime_error on an input-size mismatch.
    std::vector<float> Forward(const std::vector<float>& input) const;

private:
    // Reseed rng_ from seed_ and regenerate all layer weights/biases.
    void InitialiseWeights();

    // One fully-connected layer: y = ReLU(W x + b).
    struct Layer {
        int inputSize;               // fan-in
        int outputSize;              // fan-out
        std::vector<float> weights;  // row-major [outputSize][inputSize]
        std::vector<float> biases;   // [outputSize]
    };

    uint32_t seed_;              // resolved seed driving rng_
    std::mt19937 rng_;           // deterministic stream derived from seed_
    std::vector<Layer> layers_;  // layers in forward-pass order
};

} // namespace NRE
46 changes: 46 additions & 0 deletions examples/neural_demo/main.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
#include <cstdint>
#include <iostream>
#include <vector>

#include "../../engine/neural/NeuralNetwork.h"

// Demonstrates the seeded neural network for reproducible procedural
// generation: the same seed always produces identical outputs, and a
// different seed produces different ones.
// Returns 0 on success and 1 if either property fails, so the demo can also
// serve as a smoke test (the original always returned 0, even on failure).
int main() {
    const uint32_t seed = 42;

    // Build the demo topology: 4 inputs -> 8 hidden -> 4 outputs.
    auto buildNetwork = [](uint32_t s) {
        NRE::NeuralNetwork nn(s);
        nn.AddLayer(4, 8);
        nn.AddLayer(8, 4);
        return nn;
    };

    // Print activations as "[a, b, c]" followed by a newline.
    auto printVector = [](const std::vector<float>& values) {
        std::cout << "[";
        for (size_t i = 0; i < values.size(); ++i) {
            std::cout << values[i];
            if (i + 1 < values.size()) std::cout << ", ";
        }
        std::cout << "]\n";
    };

    NRE::NeuralNetwork nn = buildNetwork(seed);
    const std::vector<float> input = {0.5f, 0.3f, 0.8f, 0.1f};

    std::cout << "Seed: " << nn.GetSeed() << "\n";
    std::cout << "Forward pass output:\n ";
    const auto output = nn.Forward(input);
    printVector(output);

    // Reproducibility: rebuilding with the same seed must give identical output.
    const auto output2 = buildNetwork(seed).Forward(input);
    const bool reproducible = (output == output2);
    std::cout << "Reproducible with same seed: " << (reproducible ? "YES" : "NO") << "\n";

    // A different seed should produce different weights, hence different output.
    const auto output3 = buildNetwork(seed + 1).Forward(input);
    const bool distinct = (output != output3);
    std::cout << "Different seed produces different output: "
              << (distinct ? "YES" : "NO") << "\n";

    return (reproducible && distinct) ? 0 : 1;
}