import numpy as np
# ReLU activation function
def relu(x):
    """Rectified linear unit: elementwise max(0, x).

    Uses np.maximum instead of the builtin max so it works for both
    scalars and numpy arrays (max(0, x) raises ValueError on arrays).
    """
    return np.maximum(0, x)
# tanh activation function
def tanh(x):
    """Hyperbolic tangent activation, delegating to numpy."""
    result = np.tanh(x)
    return result
# neuron class
class Neuron:
    """A single neuron: activation(weights · inputs + bias)."""

    def __init__(self, weights, bias, activation):
        # Parameters of the neuron and its activation callable.
        self.weights = weights
        self.bias = bias
        self.activation = activation

    def feedforward(self, inputs):
        """Return the activated weighted sum of *inputs*."""
        weighted_sum = np.dot(self.weights, inputs) + self.bias
        return self.activation(weighted_sum)
# neural network with ReLU activation
class NeuralNetworkReLU:
    """3-input network: a hidden layer of three ReLU neurons feeding one ReLU output neuron.

    Every neuron shares the same weights [0.5, 0.5, 0.5] and zero bias.
    """

    def __init__(self):
        shared_weights = np.array([0.5, 0.5, 0.5])
        shared_bias = 0
        # Three hidden neurons plus the single output neuron, all identical.
        self.h1, self.h2, self.h3, self.o1 = (
            Neuron(shared_weights, shared_bias, relu) for _ in range(4)
        )

    def feedforward(self, x):
        """Propagate input vector x through the hidden layer, then the output neuron."""
        hidden = np.array(
            [neuron.feedforward(x) for neuron in (self.h1, self.h2, self.h3)]
        )
        return self.o1.feedforward(hidden)
# neural network with tanh activation
class NeuralNetworkTanh:
    """3-input network: a hidden layer of three tanh neurons feeding one tanh output neuron.

    Every neuron shares the same weights [0.5, 0.5, 0.5] and zero bias.
    """

    def __init__(self):
        shared_weights = np.array([0.5, 0.5, 0.5])
        shared_bias = 0
        # Three hidden neurons plus the single output neuron, all identical.
        self.h1, self.h2, self.h3, self.o1 = (
            Neuron(shared_weights, shared_bias, tanh) for _ in range(4)
        )

    def feedforward(self, x):
        """Propagate input vector x through the hidden layer, then the output neuron."""
        hidden = np.array(
            [neuron.feedforward(x) for neuron in (self.h1, self.h2, self.h3)]
        )
        return self.o1.feedforward(hidden)
def main():
    """Run both demo networks on a fixed input vector and print their outputs."""
    # input data shared by both demos
    x = np.array([2, 3, 4])

    # check the ReLU network
    network_relu = NeuralNetworkReLU()
    print("Результат нейросети с ReLU:")
    print(network_relu.feedforward(x))

    # check the tanh network
    network_tanh = NeuralNetworkTanh()
    print("Результат нейросети с tanh:")
    print(network_tanh.feedforward(x))


# Guard so the demo runs only when executed as a script, not on import.
if __name__ == "__main__":
    main()