commit 85d28150be7da311d5393d473e99ad044e71f63a
Author: HypoxiE
Date:   Sun Aug 17 15:18:07 2025 +0700

    first commit

diff --git a/__pycache__/generate.cpython-313.pyc b/__pycache__/generate.cpython-313.pyc
new file mode 100644
index 0000000..bb0c231
Binary files /dev/null and b/__pycache__/generate.cpython-313.pyc differ
diff --git a/__pycache__/neuro_defs.cpython-313.pyc b/__pycache__/neuro_defs.cpython-313.pyc
new file mode 100644
index 0000000..2c90ed3
Binary files /dev/null and b/__pycache__/neuro_defs.cpython-313.pyc differ
diff --git a/__pycache__/visual.cpython-313.pyc b/__pycache__/visual.cpython-313.pyc
new file mode 100644
index 0000000..5dccb2d
Binary files /dev/null and b/__pycache__/visual.cpython-313.pyc differ
diff --git a/generate.py b/generate.py
new file mode 100644
index 0000000..ea67684
--- /dev/null
+++ b/generate.py
@@ -0,0 +1,36 @@
+import random
+class Dot:
+    def __init__(self, x: float, y: float) -> None:
+        self.x = x
+        self.y = y
+        self.classification = float(((x**2 + y**2)**0.5) >= 0.5)  # class 1 if the point lies outside the circle of radius 0.5
+
+    def get_tup(self) -> tuple:
+        return (self.x, self.y, self.classification)
+
+    def __str__(self) -> str:
+        return f"({self.x}, {self.y})"
+
+    def __repr__(self) -> str:
+        return f"({self.x}, {self.y}, {self.classification})"
+
+class Dataset:
+    def __init__(self, train: list[Dot], test: list[Dot]) -> None:
+        self.train = train
+        self.test = test
+
+    def __str__(self) -> str:
+        return f"Train: {str(self.train)}\nTest: {str(self.test)}"
+
+    def __repr__(self) -> str:
+        return f"Train: {self.train}\nTest: {self.test}"
+
+def generate_data() -> Dot:
+    return Dot(random.uniform(-1.0, 1.0), random.uniform(-1.0, 1.0))
+
+def generate_dataset(N=1000) -> Dataset:
+    return Dataset([generate_data() for i in range(N//5*4)], [generate_data() for i in range(N//5)])  # 80/20 train/test split
+
+if __name__ == "__main__":
+    data = generate_dataset(10)
+    print(data)
\ No newline at end of file
diff --git a/main.py b/main.py
new file mode 100644
index 0000000..007425d
--- /dev/null
+++ b/main.py
@@ -0,0 +1,19 @@
+
+import generate
+import visual
+import neuro_defs
+
+
+dataset = generate.generate_dataset(10_000)
+
+
+# create and train the network
+nn = neuro_defs.SimpleNN()
+nn.train(dataset.train, epochs=10)
+
+# check on new points
+for dot in dataset.test[:10]:
+    print(nn.forward(dot.x, dot.y), repr(dot))
+
+# visual.plot_dataset(dataset)
+# visual.plt_show()
\ No newline at end of file
diff --git a/neuro_defs.py b/neuro_defs.py
new file mode 100644
index 0000000..a1f9274
--- /dev/null
+++ b/neuro_defs.py
@@ -0,0 +1,49 @@
+import math
+import random
+
+def sigmoid(x):
+    return 1 / (1 + math.exp(-x))
+
+def sigmoid_derivative(x):
+    s = sigmoid(x)
+    return s * (1 - s)
+
+class SimpleNN:
+    def __init__(self):
+        # initialize the weights with random values
+        self.w1 = random.uniform(-1, 1)
+        self.w2 = random.uniform(-1, 1)
+        self.b = random.uniform(-1, 1)  # bias
+        self.w_out = random.uniform(-1, 1)
+        self.b_out = random.uniform(-1, 1)
+        self.lr = 0.001  # learning rate
+
+    def forward(self, x1, x2):
+        # forward pass
+        self.z1 = self.w1 * x1 + self.w2 * x2 + self.b
+        self.a1 = sigmoid(self.z1)  # hidden layer activation
+        self.z2 = self.w_out * self.a1 + self.b_out
+        self.a2 = sigmoid(self.z2)  # network output
+        return self.a2
+
+    def backward(self, x1, x2, y):
+        # compute the error
+        error = self.a2 - y  # dL/da2
+
+        # gradients; d_hidden must use w_out before it is updated
+        d_out = error * sigmoid_derivative(self.z2)
+        d_hidden = d_out * self.w_out * sigmoid_derivative(self.z1)
+
+        # update the output layer, then the hidden layer
+        self.w_out -= self.lr * d_out * self.a1
+        self.b_out -= self.lr * d_out
+        self.w1 -= self.lr * d_hidden * x1
+        self.w2 -= self.lr * d_hidden * x2
+        self.b -= self.lr * d_hidden
+
+    def train(self, data, epochs=1000):
+        for _ in range(epochs):
+            for x1, x2, y in [i.get_tup() for i in data]:
+                self.forward(x1, x2)
+                self.backward(x1, x2, y)
+
diff --git a/visual.py b/visual.py
new file mode 100644
index 0000000..5a0b75f
--- /dev/null
+++ b/visual.py
@@ -0,0 +1,41 @@
+import matplotlib.pyplot as plt
+import matplotlib.colors as mcolors
+import numpy as np
+
+
+def plot_dataset(dataset):
+    x0 = [dot.x for dot in dataset.train if not dot.classification]
+    y0 = [dot.y for dot in dataset.train if not dot.classification]
+    x1 = [dot.x for dot in dataset.train if dot.classification]
+    y1 = [dot.y for dot in dataset.train if dot.classification]
+
+    plt.scatter(x0, y0, color='green', label='Class 0')
+    plt.scatter(x1, y1, color='red', label='Class 1')
+
+def plot_decision_surface(network, resolution=0.02):
+    x_min, x_max = -1, 1
+    y_min, y_max = -1, 1
+    xx, yy = np.meshgrid(np.arange(x_min, x_max, resolution),
+                         np.arange(y_min, y_max, resolution))
+
+    # run the grid through the network (SimpleNN exposes forward(x1, x2), not predict)
+    Z = np.array([network.forward(x, y) for x, y in zip(xx.ravel(), yy.ravel())])
+    Z = Z.reshape(xx.shape)
+
+    # shade the background by the predicted probability
+    plt.contourf(xx, yy, Z, levels=50, cmap='RdYlGn', alpha=0.3)
+
+def plot_all(dataset, network):
+    plt.figure(figsize=(6, 6))
+    plot_decision_surface(network)
+    plot_dataset(dataset)
+    plt.xlim(-1, 1)
+    plt.ylim(-1, 1)
+    plt.legend()
+
+def plt_show():
+    plt.show()
+
+
+if __name__ == "__main__":
+    pass
\ No newline at end of file
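
Usage note (not part of the commit): main.py only prints raw network outputs for ten test points and leaves the visual.py calls commented out. Below is a minimal sketch of how the committed modules could be wired together end to end; the demo file name, the 0.5 decision threshold, and the accuracy calculation are assumptions, not something the commit defines.

# demo.py -- hedged sketch, assumes generate.py, neuro_defs.py and visual.py as committed above
import generate
import neuro_defs
import visual

dataset = generate.generate_dataset(10_000)

nn = neuro_defs.SimpleNN()
nn.train(dataset.train, epochs=10)

# Accuracy on the held-out 20%, thresholding the sigmoid output at 0.5
# (the threshold is an assumption; main.py only prints raw outputs).
correct = sum(
    (nn.forward(dot.x, dot.y) >= 0.5) == bool(dot.classification)
    for dot in dataset.test
)
print(f"test accuracy: {correct / len(dataset.test):.3f}")

# Draw the training points over the learned decision surface, then show the figure.
visual.plot_all(dataset, nn)
visual.plt_show()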
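
A second hedged sketch: a finite-difference check of the gradient that SimpleNN.backward applies to w1. The squared-error loss L = 0.5 * (a2 - y)^2 is an assumption inferred from the error term (a2 - y) in backward(); the commit never names its loss explicitly.

# grad_check.py -- hedged verification sketch, not part of the commit
import neuro_defs

def loss(nn, x1, x2, y):
    # assumed loss: 0.5 * (a2 - y)^2, whose derivative w.r.t. a2 is (a2 - y)
    return 0.5 * (nn.forward(x1, x2) - y) ** 2

nn = neuro_defs.SimpleNN()
x1, x2, y = 0.3, -0.7, 1.0
eps = 1e-6

# Numerical dL/dw1 by central differences.
nn.w1 += eps
loss_plus = loss(nn, x1, x2, y)
nn.w1 -= 2 * eps
loss_minus = loss(nn, x1, x2, y)
nn.w1 += eps  # restore w1
numeric = (loss_plus - loss_minus) / (2 * eps)

# Analytic dL/dw1: the same chain of factors backward() uses for w1.
nn.forward(x1, x2)
d_out = (nn.a2 - y) * neuro_defs.sigmoid_derivative(nn.z2)
analytic = d_out * nn.w_out * neuro_defs.sigmoid_derivative(nn.z1) * x1

print(f"numeric {numeric:.8f}  analytic {analytic:.8f}")  # should agree to ~6 decimals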