Prezados, tenho o código abaixo para solucionar o problema da lógica XOR, utilizando a junção das duas técnicas. No entanto o algoritmo ao invés de convergir diminuindo o erro, aumenta o erro a cada iteração.
Alguem poderia me ajudar a solucinar este problema ?
Main
public class Main {
public static void main(String args[]){
CGeneticNet nnuga = new CGeneticNet();
simplenet sn = nnuga.Run();
if(sn != null){
System.out.println("Solution found!!");
System.out.println("0 xor 0 = " + sn.rodar(0,0,0));
System.out.println("0 xor 1 = " + sn.rodar(0,1,1));
System.out.println("1 xor 0 = " + sn.rodar(1,0,1));
System.out.println("1 xor 1 = " + sn.rodar(1,1,0));
} else {
System.out.println("Solution not found!!");
}
}
}
Backpropagation
public class simplenet {
public double m_fPesos[][] = new double[3][3];
public static void main(String args[]){
simplenet sn = new simplenet();
sn.iniciaMatriz();
sn.SaidaMatrizPesos();
}
public void SaidaMatrizPesos(){
for (int i=0;i<3;i++) {
for (int j=0;j<3;j++) {
System.out.print(m_fPesos[i][j] + " ");
}
System.out.println();
}
}
public void iniciaMatriz(){
for (int i=0;i<3;i++) {
for (int j=0;j<3;j++) {
// Número aleatório entre -3 and 3.
m_fPesos[i][j] = ((Math.random())/(32767/6) - 3);
}
}
}
//Método executar/rodar
public double rodar(double i1,double i2,double d){
// Essas são as principais variáveis usadas na
// rotina.
double net1, net2, i3, i4;
// Calcular o calor dos net para as camadas escondidas dos neurônios
net1 = 1 * m_fPesos[0][0] + i1 * m_fPesos[1][0] +
i2 * m_fPesos[2][0];
net2 = 1 * m_fPesos[0][1] + i1 * m_fPesos[1][1] +
i2 * m_fPesos[2][1];
// Use the hardlimiter function - the Sigmoid.
i3 = sigmoid(net1);
i4 = sigmoid(net2);
// Now, calculate the net for the final output layer.
net1 = 1 * m_fPesos[0][2] + i3 * m_fPesos[1][2] +
i4 * m_fPesos[2][2];
//System.out.println("Net > "+net1);
return sigmoid(net1);
}
//Treinamento
public double treinamento(double i1, double i2, double d){
// Função de treinamento retorna o erro
double var = (double) (Math.abs(rodar(i1,i2,d) - d));
//System.out.println("Erro BackPropagation > " + var);
return var;
}
public double sigmoid(double num) {
return (double)(1/(1+Math.exp(-num)));
}
public void alterarPesos(double montante){
SaidaMatrizPesos();
//System.out.println("Saída após modificação dos Pesos");
//System.out.println("Montante > "+ montante);
for (int i=0;i<3;i++) {
for (int j=0;j<3;j++) {
m_fPesos[i][j] += (double)(Math.random())/(32767/(montante * 2)) - montante;
}
System.out.println("");
}
System.out.println("");
}
//Pega Pesos
public void pegaPesos(double vetorPesos[]){
int z = 0;
for (int i=0;i<3;i++) {
for (int j=0;j<3;j++) {
vetorPesos[z] = m_fPesos[i][j];
z++;
}
}
}
public void configPesos(double vetorPesos[]) {
int z = 0;
for (int i=0;i<3;i++) {
for (int j=0;j<3;j++) {
m_fPesos[i][j] = vetorPesos[z];
z++;
}
}
}
}
Algoritmo Genético
public class CGeneticNet{
private int CGN_POPULATION = 150;
private double CGN_THRESHOLD = 0.001;
private int CGN_MAXITER = 1000;
private simplenet[] m_pcPopulation;
private double m_fErrors[];
public CGeneticNet(){
m_pcPopulation = new simplenet[CGN_POPULATION];
m_fErrors = new double[CGN_POPULATION];
for(int i=0;i < CGN_POPULATION; i++){
// Weights automatically initialized
// in neural network constructor.
m_pcPopulation[i] = new simplenet();
}
}
public simplenet Run(){
double error = 0;
for(int iter=0;iter < CGN_MAXITER; iter++){
for (int i=0;i<CGN_POPULATION;i++) {
error = ((m_pcPopulation[i].treinamento(0,0,0) + m_pcPopulation[i].treinamento(0,1,1) + m_pcPopulation[i].treinamento(1,0,1) + m_pcPopulation[i].treinamento(1,1,0)) / 4);
System.out.println("Erro AG = " + error);
if(error < CGN_THRESHOLD)
{
System.out.println("Venci");
return m_pcPopulation[i];
}
m_fErrors[i] = error;
}
SortFitnesses();
NewPopulation();
}
return null;
}
void SortFitnesses(){
int n = CGN_POPULATION;
int disorder = n;
while(disorder != 0){
disorder = 0;
for(int i = 1; i < n; i++){
if(m_fErrors[i] < m_fErrors[i-1]){
double m_fTemp = m_fErrors[i-1];
m_fErrors[i-1] = m_fErrors[i];
m_fErrors[i] = m_fTemp;
simplenet tempnn = m_pcPopulation[i-1];
m_pcPopulation[i-1] = m_pcPopulation[i];
m_pcPopulation[i] = tempnn;
disorder++;
}
}
n--;
}
}
void NewPopulation(){
int id1, id2;
double wp1[] = new double[9];
double wp2[] = new double[9];
for (int i = 0; i < CGN_POPULATION/2; i++){
id1 = (int) (Math.random() % CGN_POPULATION / 2);
id2 = (int) (Math.random() % CGN_POPULATION / 2 + CGN_POPULATION / 2);
m_pcPopulation[id1].pegaPesos(wp1);
m_pcPopulation[id2].pegaPesos(wp2);
for (int j = 0; j < 2; j++){
double temp;
temp = wp1[j+6];
wp1[j+6] = wp2[j+6];
wp2[j+6] = temp;
}
if(Math.random()%10 < 2){
for (int k = 0; k < 3; k++){
for (int j=0;j<3;j++) {
m_pcPopulation[id1].alterarPesos((double)(Math.random())/(32767/2) - 1);
m_pcPopulation[id2].alterarPesos((double)(Math.random())/(32767/2) - 1);
}
}
}
m_pcPopulation[id2].configPesos(wp1);
}
}
}