diff --git a/classes.py b/classes.py
new file mode 100644
index 0000000..e6d2cef
--- /dev/null
+++ b/classes.py
@@ -0,0 +1,18 @@
+import random
+
+class DataSet:
+    def __init__(self, N=1000) -> None:
+        self.train = []
+        self.train_answs = []
+        self.test = []
+        self.test_answs = []
+
+        for i in range(N//5*4):
+            x = random.uniform(-1000, 1000)
+            self.train.append(x)
+            self.train_answs.append(x+1)
+
+        for i in range(N//5):
+            x = random.uniform(-1000, 1000)
+            self.test.append(x)
+            self.test_answs.append(x+1)
\ No newline at end of file
diff --git a/generate.py b/generate.py
index ea67684..ce89053 100644
--- a/generate.py
+++ b/generate.py
@@ -1,36 +1,4 @@
-import random
-class Dot:
-    def __init__(self, x: float, y: float) -> None:
-        self.x = x
-        self.y = y
-        self.classification = float(((x**2 + y**2)**0.5)>=0.5)
+import classes
 
-    def get_tup(self) -> tuple:
-        return (self.x, self.y, self.classification)
-
-    def __str__(self) -> str:
-        return f"({self.x}, {self.y})"
-
-    def __repr__(self) -> str:
-        return f"({self.x}, {self.y}, {self.classification})"
-
-class Dataset:
-    def __init__(self, train: list[Dot], test: list[Dot]) -> None:
-        self.train = train
-        self.test = test
-
-    def __str__(self) -> str:
-        return f"Train: {str(self.train)}\nTest: {str(self.test)}"
-
-    def __repr__(self) -> str:
-        return f"Train: {self.train}\nTest: {self.test}"
-
-def generate_data() -> Dot:
-    return Dot(random.uniform(-1.0, 1.0), random.uniform(-1.0, 1.0))
-
-def generate_dataset(N = 1000) -> Dataset:
-    return Dataset([generate_data() for i in range(N//5*4)], [generate_data() for i in range(N//5)])
-
-if __name__ == "__main__":
-    data = generate_dataset(10)
-    print(data)
\ No newline at end of file
+def generate_dataset(N=1000):
+    return classes.DataSet(N)
\ No newline at end of file
diff --git a/main.py b/main.py
index 007425d..0158a9f 100644
--- a/main.py
+++ b/main.py
@@ -4,16 +4,16 @@
 import visual
 import neuro_defs
 
-dataset = generate.generate_dataset(10_000)
+dataset = generate.generate_dataset(1000)
 
 # Создаём и обучаем сеть
 nn = neuro_defs.SimpleNN()
-nn.train(dataset.train, epochs=10)
+nn.train(dataset.train, dataset.train_answs, epochs=100)
 
 
 # Проверяем на новой точке
 for dot in dataset.test[:10]:
-    print(nn.forward(dot.x, dot.y), dot.__repr__())
+    print(nn.forward(dot), dot)
 
 # visual.plot_dataset(dataset)
 # visual.plt_show()
\ No newline at end of file
diff --git a/neuro_defs.py b/neuro_defs.py
index a1f9274..6159aef 100644
--- a/neuro_defs.py
+++ b/neuro_defs.py
@@ -2,7 +2,12 @@ import math
 import random
 
 def sigmoid(x):
-    return 1 / (1 + math.exp(-x))
+    if x >= 0:
+        z = math.exp(-x)
+        return 1 / (1 + z)
+    else:
+        z = math.exp(x)
+        return z / (1 + z)
 
 def sigmoid_derivative(x):
     s = sigmoid(x)
@@ -12,21 +17,20 @@ class SimpleNN:
     def __init__(self):
         # инициализация весов случайными числами
         self.w1 = random.uniform(-1, 1)
-        self.w2 = random.uniform(-1, 1)
         self.b = random.uniform(-1, 1) # смещение
         self.w_out = random.uniform(-1, 1)
         self.b_out = random.uniform(-1, 1)
         self.lr = 0.001 # скорость обучения
 
-    def forward(self, x1, x2):
+    def forward(self, x1):
         # прямой проход
-        self.z1 = self.w1 * x1 + self.w2 * x2 + self.b
+        self.z1 = self.w1 * x1 + self.b
         self.a1 = sigmoid(self.z1) # активация скрытого слоя
         self.z2 = self.w_out * self.a1 + self.b_out
         self.a2 = sigmoid(self.z2) # выход сети
         return self.a2
 
-    def backward(self, x1, x2, y):
+    def backward(self, x1, y):
         # вычисляем ошибку
         error = self.a2 - y # dL/da2
 
@@ -38,12 +42,11 @@ class SimpleNN:
         # производные для скрытого слоя
         d_hidden = d_out * self.w_out * sigmoid_derivative(self.z1)
         self.w1 -= self.lr * d_hidden * x1
-        self.w2 -= self.lr * d_hidden * x2
         self.b -= self.lr * d_hidden
 
-    def train(self, data, epochs=1000):
+    def train(self, dataset, answs, epochs=1000):
         for _ in range(epochs):
-            for x1, x2, y in [i.get_tup() for i in data]:
-                self.forward(x1, x2)
-                self.backward(x1, x2, y)
+            for i in range(len(dataset)):
+                self.forward(dataset[i])
+                self.backward(dataset[i], answs[i])
 