Perceptron with two or more outputs

Problem description

I have finished a perceptron with one output, but now I would like to try it with at least two outputs. I could not find any examples on Google. Is it possible? Could you give me a minimal example? I want to create a neural network that predicts binary numbers. I do not know how to add more outputs. It would be best if I could create 8 output neurons.
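For 8 output neurons, each training target would presumably be an 8-element bit vector rather than a single number. A minimal sketch of that encoding, following the same least-significant-bit-first convention as the binary_to_int helper further down (the name int_to_binary is only illustrative and does not appear in the code):

import numpy as np

def int_to_binary(n: int, bits: int = 8):
    # inverse of binary_to_int below: least significant bit first
    return np.array([(n >> i) & 1 for i in range(bits)])

# int_to_binary(5) -> array([1, 0, 1, 0, 0, 0, 0, 0])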

This is what I have written:

# imports the snippet relies on (array, exp and the np.* calls below)
import numpy as np
from numpy import array, exp

def sigmoid(x):
    return 1.0 / (1 + exp(-x))

def sigmoid_deriv(x):
    return sigmoid(x) * (1 - sigmoid(x))

training_data = [
    (array([0,0,1]), [0, 0]),
    (array([1,0,1]), [1, 0]),
    (array([0,1,1]), [0, 1]),
    (array([1,1,1]), [1, 1]),
]

def binary_to_int(binary_number_array: list):
    #print(binary)
    decimal = 0
    i = 0
    #print(binary_number_array)
    for number in binary_number_array:
        if number == 1:
            decimal += pow(2, i)
        i += 1
    #print(decimal)
    return decimal



errors = []
eta = 0.6
input_layer = 3
hidden_layer = 2
output_layer = 2

epoch_count = 500

weights_input_to_hidden = np.random.uniform(-1, 1, (input_layer, hidden_layer))
weights_hidden_to_output = np.random.uniform(-1, 1, hidden_layer)

preActivation_H = np.zeros(hidden_layer)
postActivation_H = np.zeros(hidden_layer)

training_count = len(training_data)
#print(training_count)

for epoch in range(epoch_count):
    for sample in range(training_count):
        data, expected = training_data[sample]
        # forward pass: input layer -> hidden layer
        for node in range(hidden_layer):
            #data, expected = training_data[sample]
            #print(data, weights_input_to_hidden[:, node])
            preActivation_H[node] = np.dot(data, weights_input_to_hidden[:, node])
            postActivation_H[node] = sigmoid(preActivation_H[node])
            #print(data, weights_input_to_hidden[:, node])
            #print(preActivation_H[node], postActivation_H[node])

        # hidden layer -> single output (weights_hidden_to_output is a 1-D vector)
        preActivation_O = np.dot(postActivation_H, weights_hidden_to_output)
        postActivation_O = sigmoid(preActivation_O)

        # the expected bit pattern is collapsed to a decimal and compared with the single sigmoid output
        decimal_expected = binary_to_int(expected)
        error = postActivation_O - decimal_expected
        errors.append(error)
        #print(error)

        # back propagation
        for hidden_node in range(hidden_layer):
            S_error = error * sigmoid_deriv(preActivation_O)
            #print(S_error)
            gradient_hidden_to_output = S_error * postActivation_H[hidden_node]
            #print(gradient_hidden_to_output)

            for input_node in range(input_layer):
                # this might be wrong here
                input_value = data[input_node]

                print(input_value)
                gradient_input_to_hidden = S_error * weights_hidden_to_output[hidden_node] * \
                                           sigmoid_deriv(preActivation_H[hidden_node]) * input_value

                weights_input_to_hidden[input_node, hidden_node] -= eta * gradient_input_to_hidden

            weights_hidden_to_output[hidden_node] -= eta * gradient_hidden_to_output


validation_data = []
for i in range(training_count):
    data, expected = training_data[i]
    for node in range(hidden_layer):
        preActivation_H[node] = np.dot(data, weights_input_to_hidden[:, node])
        postActivation_H[node] = sigmoid(preActivation_H[node])

    preActivation_O = np.dot(postActivation_H, weights_hidden_to_output)
    postActivation_O = sigmoid(preActivation_O)

    #print(postActivation_O)
    if postActivation_O > 0.5:
        output = 1
    else:
        output = 0

    validation_data.append((postActivation_O, expected, output))

print(validation_data)

Tags: python, neural-network

Solution
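
The code above only ever produces a single output because weights_hidden_to_output is a 1-D vector of length hidden_layer, so np.dot(postActivation_H, weights_hidden_to_output) collapses to one number. To get two (or eight) output neurons, that vector has to become a (hidden_layer, output_layer) matrix, the forward pass then yields a vector of activations, and backpropagation needs one error term per output neuron. The following is a minimal sketch of that change, not a drop-in replacement: it assumes a squared-error loss and no bias weights, keeps the four training samples from the question, and uses shortened names (W_ih, W_ho, delta_O, delta_H) that are not from the original code. For eight outputs, set output_layer = 8 and encode each target as an 8-bit vector.

import numpy as np

def sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))

def sigmoid_deriv(x):
    s = sigmoid(x)
    return s * (1.0 - s)

# same four samples as in the question, targets as 2-bit vectors
training_data = [
    (np.array([0, 0, 1]), np.array([0, 0])),
    (np.array([1, 0, 1]), np.array([1, 0])),
    (np.array([0, 1, 1]), np.array([0, 1])),
    (np.array([1, 1, 1]), np.array([1, 1])),
]

eta = 0.6
input_layer, hidden_layer, output_layer = 3, 4, 2   # output_layer = 8 for eight bits
epoch_count = 5000

rng = np.random.default_rng(0)
W_ih = rng.uniform(-1, 1, (input_layer, hidden_layer))    # input  -> hidden
W_ho = rng.uniform(-1, 1, (hidden_layer, output_layer))   # hidden -> output, now a matrix

for epoch in range(epoch_count):
    for data, expected in training_data:
        # forward pass: every intermediate result is now a vector
        pre_H = data @ W_ih              # shape (hidden_layer,)
        post_H = sigmoid(pre_H)
        pre_O = post_H @ W_ho            # shape (output_layer,)
        post_O = sigmoid(pre_O)

        # backward pass: one error term per output neuron
        error = post_O - expected                            # shape (output_layer,)
        delta_O = error * sigmoid_deriv(pre_O)               # shape (output_layer,)
        delta_H = (W_ho @ delta_O) * sigmoid_deriv(pre_H)    # shape (hidden_layer,)

        # outer products give the full weight-matrix gradients
        W_ho -= eta * np.outer(post_H, delta_O)
        W_ih -= eta * np.outer(data, delta_H)

# check what the network has learned
for data, expected in training_data:
    post_O = sigmoid(sigmoid(data @ W_ih) @ W_ho)
    print(data, expected, (post_O > 0.5).astype(int))

Compared with the loop in the question, error, delta_O and the weight updates are vectors and outer products instead of scalars, so adding more output neurons only means changing output_layer and the length of each target vector (hidden_layer was widened to 4 here only to make training on so few samples more reliable).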

