From 96631c14c9bee9b6893b120bf98241e6b0274c1e Mon Sep 17 00:00:00 2001
From: anon <anon@anon.anon>
Date: Tue, 10 Dec 2024 20:39:01 +0100
Subject: [PATCH] Added 'C_C++/xor_nn.cpp'

---
 C_C++/xor_nn.cpp | 98 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 98 insertions(+)
 create mode 100644 C_C++/xor_nn.cpp

diff --git a/C_C++/xor_nn.cpp b/C_C++/xor_nn.cpp
new file mode 100644
index 0000000..a98775a
--- /dev/null
+++ b/C_C++/xor_nn.cpp
@@ -0,0 +1,98 @@
+// @BAKE g++ $@ -fpermissive -Wextra -Wpedantic -o $*.out
+#include <stdlib.h>
+#include <stdio.h>
+#include <math.h>
+#include <time.h>
+#include <vector>
+#include <algorithm>
+#include <numeric>
+
// Number of passes over the XOR truth table; constexpr so it is a
// guaranteed compile-time constant (usable as an array bound, etc.).
constexpr int TRAINING_SESSIONS = 1000;
+
+using namespace std;
+
// Weight matrix for a minimal XOR network: INPUT_SIZE inputs
// (two data inputs plus one constant bias) feeding NUMBER_OF_LAYERS
// units. Weights are initialised to uniform random values in
// [-1, 1] on construction and dumped to stdout on destruction.
typedef struct nn {
	static const int INPUT_COUNT       = 2;              // x1, x2
	static const int B                 = 1;              // constant bias input
	static const int NUMBER_OF_LAYERS  = 2;
	static const int INPUT_SIZE        = INPUT_COUNT + B;

	double weights[INPUT_SIZE][NUMBER_OF_LAYERS];

	nn() {
		this->random_fill();
	}

	~nn() {
		this->print();
	}

	// Fill every weight with a uniform random double in [-1.0, 1.0].
	void random_fill() {
		// Seed the PRNG once per process, not on every call:
		// re-seeding with time(NULL) makes repeated calls within
		// the same second regenerate identical "random" weights.
		static bool seeded = false;
		if (!seeded) {
			srand((unsigned int)time(NULL));
			seeded = true;
		}
		for (int i = 0; i < INPUT_SIZE; i++) {
			for (int h = 0; h < NUMBER_OF_LAYERS; h++) {
				weights[i][h] = ((double)rand() / RAND_MAX) * 2.0 - 1.0;
			}
		}
	}

	// Print the weight matrix to stdout, one layer per row.
	// const: inspection must not modify the network.
	void print() const {
		for (int j = 0; j < NUMBER_OF_LAYERS; j++) {
			for (int i = 0; i < INPUT_SIZE; i++) {
				printf("%.4f    ", weights[i][j]);
			}
			printf("\n");
		}
	}
} neural_network_t;
+
+typedef struct {
+	int inputs[neural_network_t::INPUT_SIZE];
+	int output;
+} data_t;
+
+
+data_t training_data[] = {
+	(data_t){{0, 0, neural_network_t::B}, 0},
+	(data_t){{1, 0, neural_network_t::B}, 1},
+	(data_t){{0, 1, neural_network_t::B}, 1},
+	(data_t){{1, 1, neural_network_t::B}, 0},
+};
+
// Derivative of the logistic sigmoid expressed in terms of its
// output: sigma'(x) = y * (1 - y), where y = sigma(x).
// Takes a double: sigmoid outputs lie in the open interval (0, 1),
// so the original int parameter truncated every real activation to 0
// and the derivative was always 0 (or 0 at y == 1), killing learning.
// Existing int callers still work via implicit conversion.
double sigmoid_derivative(double y) {
	return y * (1.0 - y);
}
+
// Logistic sigmoid: sigma(x) = 1 / (1 + e^(-x)), mapping R -> (0, 1).
// The original computed x / (1 + e^x), which is not a sigmoid
// (sigma(0) came out 0 instead of 0.5, and it is not bounded to
// (0, 1)); it also took an int, truncating real-valued pre-activations.
// Existing int callers still work via implicit conversion.
double sigmoid_activation(double x) {
	return 1.0 / (1.0 + exp(-x));
}
+
+signed main() {
+	neural_network_t my_neural_network;
+	int training_sessions_counter;
+	double sumerr = 0;
+	for (training_sessions_counter = 0;
+	     training_sessions_counter < TRAINING_SESSIONS;
+	     training_sessions_counter++) {
+		for (const auto &data : training_data) {
+				int y[neural_network_t::INPUT_COUNT];
+				for (int h = 0; h < neural_network_t::INPUT_COUNT; h++) {
+					//double r = std::accumulate(myList.begin(),
+					//                           myList.end(),
+					//                           0,
+					//                           [](int sum, int element) {
+					//                           	return sum + (element * 2);
+					//                           }
+					//);
+				}
+		}
+	}
+
+
+	printf("Number of steps: %d\n"
+	       "??: %lf\n",
+	       training_sessions_counter, sumerr
+	);
+
+	return 0;
+}