Artificial Neural Networks
Related links: Inteligência Artificial
Artificial Neuron for Logic Gates in Python
"""
PROGRAM: Perceptron
author: Saulo Popov Zambiasi
created: 2019.05.17
modified: 2025.05.08
description:
This program implements a perceptron model for training and executing
logic operations such as OR, NOT OR, AND, NOT AND, and XOR.
The perceptron is trained using a set of examples and can be used to
predict the output for given inputs.
"""
from random import randint, seed
from time import time
# -------------------------------------------------------
# CLASS PERCEPTRON
# -------------------------------------------------------
class Perceptron:
"""Perceptron class for training and executing a perceptron model.
Attributes:
learning_rate (float): Learning rate for weight adjustment.
epoch (int): Number of epochs for training.
weights (list): List of weights for the perceptron.
example (list[list[int]]): Training examples.
option (int): Option for the desired logic operation.
"""
# -------------------------------------------------------
# CONSTRUCTOR
# -------------------------------------------------------
def __init__(self, option: int):
seed(time())
self.learning_rate: float = 0.01
self.epoch: int = 200
self.weights: list = []
# Example by columns:
# -1, x1, x2, y_or, y_not_or, y_and, y_not_and, y_xor
# -1 is the bias
self.example: list[list[int]] = [
[-1, 0, 0, 0, 1, 0, 1, 0],
[-1, 0, 1, 1, 0, 0, 1, 1],
[-1, 1, 0, 1, 0, 0, 1, 1],
[-1, 1, 1, 1, 0, 1, 0, 0],
]
self.option: int = option
# Initialize weights randomly
for i in range(3):
random_value: float = randint(1, 100) / 100.0
self.weights.append(random_value)
# -------------------------------------------------------
    # ADJUST WEIGHT
# -------------------------------------------------------
def adjust_weight(self, e: int, out: int) -> None:
"""Adjusts the weights of the perceptron based on the error.
Args:
e (int): The index of the example.
out (int): The output of the perceptron.
"""
for i in range(3):
wi: float = self.weights[i] # Current weight
N: float = self.learning_rate # Learning rate
t: int = self.example[e][self.option] # Desired output
y: int = out # Current output
xi: int = self.example[e][i] # Input value
wi = wi + ((N * (t - y)) * xi) # Adjusted weight
self.weights[i] = wi # Update weight
# -------------------------------------------------------
# TRAINING
# -------------------------------------------------------
def training(self) -> bool:
"""Trains the perceptron using the examples provided.
Returns:
bool: True if the perceptron was trained successfully,
False otherwise.
"""
trained: bool = False
epoch: int = 0
while not trained and epoch < self.epoch:
trained = True
for i in range(4):
x1: int = self.example[i][1] # Input 1
x2: int = self.example[i][2] # Input 2
y: int = self.net(x1, x2) # Output of the perceptron
desired: int = self.example[i][self.option] # Desired output
                if y != desired: # If the output is not the desired one
self.adjust_weight(i, y) # Adjust weights
trained = False # Set trained to false
epoch += 1 # Increment epoch
if trained:
print("Trained in", epoch, "epochs.")
return True
print("There was not possible to learn.")
return False
# -------------------------------------------------------
# NET AND ACTIVATION FUNCTION
# -------------------------------------------------------
    def net(self, x1: int, x2: int) -> int:
        w1: float = self.weights[0] # Weight for the bias
        w2: float = self.weights[1] # Weight for x1
        w3: float = self.weights[2] # Weight for x2
y: float = (-1 * w1) + (x1 * w2) + (x2 * w3) # Weighted sum
return 1 if y > 0 else 0 # Activation function
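    # Worked example (with hypothetical weights, for illustration only): if
    # weights = [0.3, 0.5, 0.5], then net(1, 0) = (-1 * 0.3) + (1 * 0.5) + (0 * 0.5) = 0.2,
    # which is greater than 0, so the activation function returns 1.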
# -------------------------------------------------------
# MAIN PROGRAM
# -------------------------------------------------------
def train_perceptron(option: int) -> Perceptron:
"""Trains a perceptron for the specified logic operation.
Args:
option (int): The option for the desired logic operation.
Returns:
Perceptron: The trained perceptron object.
Raises:
ValueError: If the training fails.
"""
neuron = Perceptron(option)
if neuron.training():
return neuron
else:
raise ValueError("Training failed.")
def execute_perceptron(neuron: Perceptron) -> None:
"""Executes the perceptron for user-defined inputs.
Args:
neuron (Perceptron): The trained perceptron object.
"""
exit_loop: bool = False
while not exit_loop:
print("-- Type 0 or 1 - Enter 'back' to return to training menu")
user_input = input("x1: ").strip().lower()
if user_input == "back":
exit_loop = True
else:
try:
x1: int = int(user_input) # Input 1
x2: int = int(input("x2: ")) # Input 2
y: int = neuron.net(x1, x2) # Output of the perceptron
print("y = ", y)
except ValueError:
print("Invalid input. Please enter 0, 1, or 'back'.")
def main():
print("PERCEPTRON: training to logic ports")
operations = {
"or": 3,
"not or": 4,
"and": 5,
"not and": 6,
"xor": 7,
"exit": 9,
}
opc: int = -1
while opc != 9:
print(
"Choose operation",
" [ or | not or | and | not and | xor ]",
" - Type 'exit' to quit",
)
user_input = input("operation: ").strip().lower()
if user_input in operations:
opc = operations[user_input]
if opc == 9:
break
print("Training...")
try:
neuron = train_perceptron(opc)
execute_perceptron(neuron)
except ValueError as e:
print(e)
else:
print("Invalid operation. Please try again...")
print("Terminated...")
# -------------------------------------------------------
if __name__ == "__main__":
main()
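A minimal usage sketch of the class above, assuming the listing is saved as perceptron.py (the file name is only an assumption). It trains the OR gate, whose targets are in column 3 of the example table, and prints the resulting truth table:
from perceptron import train_perceptron
# Train for OR (column 3 of the example table) and print its truth table.
neuron = train_perceptron(3)
for x1, x2 in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    print(x1, "OR", x2, "->", neuron.net(x1, x2))
Note that a single perceptron converges for OR, NOR, AND, and NAND, but not for XOR (option 7), which is not linearly separable: in that case training stops after 200 epochs and train_perceptron raises ValueError.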
Artificial Neuron for the OR Gate in C++11
#include <iostream>
#include <cstdlib> // rand()
#include <ctime>   // time(), used to seed rand()
using namespace std;
class Neuronio
{
private:
int saida;
float N = 0.01;
int epocas = 100;
float peso[3];
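    // Example columns: bias (-1), x1, x2, desired OR output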
int exemplo[4][4]
{
{ -1, 0, 0, 0 },
{ -1, 0, 1, 1 },
{ -1, 1, 0, 1 },
{ -1, 1, 1, 1 }
};
public:
Neuronio();
void transferencia();
void treinamento_pesos(int j);
void treinamento();
int net(int j);
int executar(int x[2]);
};
Neuronio::Neuronio()
{
for (int i = 0; i < 3; i++)
{
peso[i] = ((rand() % 200) - 100) / 100.0;
}
}
int Neuronio::net(int j)
{
float y = 0;
for (int i = 0; i < 3; i++)
{
y += exemplo[j][i] * peso[i];
}
if (y > 0)
{
return 1;
}
return 0;
}
void Neuronio::treinamento_pesos(int j)
{
for (int i = 0; i < 3; i++)
{
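        // Delta rule: peso[i] <- peso[i] + N * (desired - output) * input; exemplo[j][0] is the bias (-1)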
peso[i] = peso[i] + (N * (exemplo[j][3] - saida) * exemplo[j][i]);
}
}
void Neuronio::treinamento()
{
int e = 0;
bool errou;
do
{
cout << "Epoca(" << e << ")" << endl;
errou = false;
for (int i = 0; i < 4; i++)
{
saida = net(i);
if (saida != exemplo[i][3])
{
treinamento_pesos(i);
errou = true;
}
cout << "...Pesos: " << peso[0] << " " << peso[1] << " " << peso[2] << endl;
}
e++;
}
while((e < epocas) && (errou));
}
int Neuronio::executar(int x[2])
{
cout << "-1 * " << peso[0] << " + " << x[0] << " * " << peso[1] << " + " << x[1] << " + " << peso[2] << endl;
float y = (-1 * peso[0]) + (x[0] * peso[1]) + (x[1] * peso[2]);
if (y > 0)
{
return 1;
}
return 0;
}
int main()
{
    srand(time(nullptr)); // Seed rand() so the initial weights differ between runs
    Neuronio n;
n.treinamento();
int continuar = 1;
while (continuar == 1)
{
int in[2];
cout << "x1: ";
cin >> in[0];
cout << "x2: ";
cin >> in[1];
cout << "retorno: " << n.executar(in) << endl;
cout << "continuar? ";
cin >> continuar;
}
return 0;
}
Artificial Neuron for Logic Gates in Java
import java.util.*;
public class Perceptron {
private double[] w = new double[3];
private double y = 0;
private double N = 0.1;
private final int BIAS = -1;
private final int MAX_EPOCAS = 1000;
private int operacao = 0;
private Random rand = new Random();
Scanner entrada;
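    // Example columns: x1, x2, OR, AND, NOR, NAND, XOR (operacao = op + 2 selects one of them)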
private int[][] e = {
{ 0, 0, 0, 0, 1, 1, 0 },
{ 0, 1, 1, 0, 0, 1, 1 },
{ 1, 0, 1, 0, 0, 1, 1 },
{ 1, 1, 1, 1, 0, 0, 0 }
};
Perceptron(int op) {
if ((op >= 0) && (op < 5)) {
operacao = op + 2;
}
for (int i = 0; i < 3; i++) {
            w[i] = ((rand.nextInt(20) + 1) / 10.0) - 1; // random weight in (-0.9, 1.0]
}
entrada = new Scanner(System.in);
}
int executar(int x1, int x2) {
// Somatorio NET
y = ((BIAS) * w[0]) + (x1 * w[1]) + (x2 * w[2]);
// Funcao de Transferencia
if (y > 0) {
return 1;
}
return 0;
}
boolean treinar() {
boolean treinou;
int epoca = 0;
do {
treinou = true;
for (int i = 0; i < 4; i++) {
int s = executar(e[i][0], e[i][1]);
if (s != e[i][operacao]) {
corrigirPeso(i, s);
treinou = false;
}
}
epoca++;
} while ((treinou == false) && (epoca < MAX_EPOCAS));
System.out.println("O algoritmo treinou " + epoca + " epocas...");
if (treinou == false) {
System.out.println("O algoritmo nao conseguiu convergir...");
}
return treinou;
}
void corrigirPeso(int exemplo, int saida) {
w[0] = w[0] + (N * (e[exemplo][operacao] - saida) * (BIAS));
w[1] = w[1] + (N * (e[exemplo][operacao] - saida) * e[exemplo][0]);
w[2] = w[2] + (N * (e[exemplo][operacao] - saida) * e[exemplo][1]);
}
void testar() {
boolean sair = false;
while (!sair) {
System.out.println("-- Digite 9 para sair --");
System.out.print("x1 : ");
int x1 = entrada.nextInt();
if (x1 == 9) {
sair = true;
} else {
System.out.print("x2 : ");
int x2 = entrada.nextInt();
int y = executar(x1, x2);
System.out.println(" y : " + y);
}
}
}
public static void main(String[] arguments) {
boolean erro = false;
if (arguments.length == 1) {
int op = Integer.valueOf(arguments[0]);
erro = ((op < 0) || (op > 4));
if (!erro) {
Perceptron p = new Perceptron(op);
if (p.treinar()) {
p.testar();
}
}
} else {
erro = true;
}
if (erro) {
System.out.println("Use: Perceptron <operacao>");
System.out.println("operacao:");
System.out.println("\t0 - ou");
System.out.println("\t1 - e");
System.out.println("\t2 - nao ou");
System.out.println("\t3 - nao e");
System.out.println("\t4 - ou exclusivo");
}
}
}