Andrew’s genetic algorithm

In questo articolo vi presento un possibile algoritmo genetico da me implementato in C++ per questioni di velocità di calcolo (e non solo…).

L’algoritmo è molto semplice e compatto, ma vi assicuro che funziona benissimo! Come esempio di problema da risolvere ho adottato il subset sum problem.

//==========================================================================
// Name        : aga.cpp
// Author      : Andrea Bianchini (2021)
// Version     : 1.0.0
// Copyright   : SOME RIGHTS RESERVED - CREATIVE COMMONS BY 4.0
// License     : https://creativecommons.org/licenses/by/4.0/
// Description : AGA - Andrew's Genetic Algorithm
// Use         : AGA p = AGA(PSIZE,ISIZE,RMIN,RMAX,IMAX,PCROSS,PMUT);
//             : p.evolve();
// Where       : PSIZE = Population size (number of individuals)
//             : ISIZE = Individual size (number of variables)
//             : RMIN  = Individual variables min value
//             : RMAX  = Individual variables max value
//             : IMAX  = Max number of iteration
//             : PCROSS= Crossover probability
//             : PMUT  = Mutation probability
// Example     : AGA p = AGA(500,20,0,1,10,0.5,0.3);
//             : p.evolve();
//             :
//             : Personalize your objective into evalIndividual().
//             : This file is personalized to solve subset sum problem.
//==========================================================================
#include <stdlib.h>
#include <limits.h>
#include <stdio.h>
#include <math.h>
#include <time.h>


// AGA - Andrew's Genetic Algorithm, specialized here for the subset sum
// problem: find a 0/1 selection of the weights w[] whose sum is as close
// as possible to (and not greater than) the capacity c.
class AGA
{
protected:
	int POP_SIZE = 500;   // population size (number of individuals)
	int IND_SIZE = 20;    // individual size (number of genes/variables)
	int RANGE_MIN = 0;    // minimum gene value
	int RANGE_MAX = 1;    // maximum gene value
	int MAXITER = 10;     // maximum number of generations
	float PCROSS = 0.5;   // crossover probability
	float PMUT = 0.3;     // mutation probability
	int **pop;            // pop[i][0..IND_SIZE-1] = genes, pop[i][IND_SIZE] = fitness
	int first=1;          // 1 until the initial random population is created
	int solfound;         // set to 1 once an individual with fitness == c is found
	int niter;            // current generation number

public:
	int *w = nullptr;     // subset-sum item weights (IND_SIZE entries); nullptr until initSSP()
	int *bestIndividual;  // best feasible individual found (+ fitness in slot IND_SIZE)
	int c = 400;          // subset-sum capacity (target sum)

	// (Re)generates the weights of the subset sum instance, each drawn
	// uniformly from [c*0.1, c*0.3].  Safe to call again after changing c:
	// the previously allocated array is released first (fixes a memory leak
	// when callers re-initialize the instance, as main() does).
	void initSSP()
	{
		delete[] w;
		w=new int [IND_SIZE];
		for(int i=0;i<IND_SIZE;i++)
			w[i]=c*0.1+(int)(rand()%(int)(c*0.3-c*0.1+1));
	}

	// Builds the GA: allocates the population (each row carries one extra
	// slot for the fitness value) plus a default subset-sum instance.
	// NOTE(review): the class owns raw arrays but declares no copy
	// ctor/assignment (rule of three); copying an AGA would double-free.
	// Existing callers only rely on copy elision, so nothing breaks here.
	AGA(int pops, int inds, int rmin, int rmax, int maxiter, float pcross, float pmute)
	{
		solfound=0;
		niter=0;
		first=1;
		POP_SIZE=pops;
		IND_SIZE=inds;
		RANGE_MIN=rmin;
		RANGE_MAX=rmax;
		MAXITER = maxiter;
		PCROSS = pcross;
		PMUT = pmute;
		pop = new int* [POP_SIZE];
		for(int i=0;i<POP_SIZE;i++)
		{
			pop[i] = new int [IND_SIZE+1]; // +1 for fitness
			pop[i][IND_SIZE]=0;
		}
		bestIndividual=new int [IND_SIZE+1];
		for(int j=0;j<=IND_SIZE;j++)
			bestIndividual[j]=0; // genes were previously left uninitialized
		initSSP();
	}

	~AGA()
	{
		for(int i=0;i<POP_SIZE;i++)
			delete[] pop[i];
		delete[] pop;
		delete[] bestIndividual;
		delete[] w;
	}

	// Returns a pointer to individual i (genes followed by the fitness slot).
	int * individual(int i)
	{
		return pop[i];
	}

	// Creates the next population.  On the first call it fills the whole
	// population with random genes; afterwards it keeps the best
	// POP_SIZE*PCROSS individuals, rebuilds the middle by crossover (with
	// some mutation) and fills the tail with fresh random individuals.
	void populate()
	{
		int ind;
		// Selection pressure: sort by |fitness - c| ascending so that the
		// individuals closest to the target come first.
		for(ind=0;ind<POP_SIZE-1;ind++)
			for(int k=ind+1;k<POP_SIZE;k++)
				if (abs(individual(ind)[IND_SIZE]-c)>abs(individual(k)[IND_SIZE]-c))
				{
					int *p=individual(ind);
					pop[ind]=pop[k];
					pop[k]=p;
				}
		if (first==1)
		{
			for(ind=0;ind<POP_SIZE;ind++)
			{
				int *in = individual(ind);
				for(int j=0;j<IND_SIZE;j++)
					in[j] = rndInRange();
				in[IND_SIZE]=0; // fitness not computed yet
			}
			first=0;
		}
		else
		{
			int p2=POP_SIZE*PCROSS+1; // first slot to overwrite with children
			int p1=0;                 // parent index among the elite
			while(p2<POP_SIZE && p1<POP_SIZE*PCROSS+1)
			{
				Cross(p1,p2);
				if (p2<POP_SIZE*PMUT)
					Mutate(p2);
				p1++;
				p2++;
			}
			// Any remaining slots become brand-new random individuals.
			while (p2<POP_SIZE)
			{
				int *in = individual(p2);
				for(int j=0;j<IND_SIZE;j++)
					in[j] = rndInRange();
				in[IND_SIZE]=0; // fitness not computed yet
				p2++;
			}
		}
	}

	// Overwrites individual p2 with a child of parents p1 and p1+1:
	// first half of the genes from p1, second half from p1+1.
	void Cross(int p1, int p2)
	{
		int *c1=individual(p1);
		int *c2=individual(p1+1);
		int *c3=individual(p2);

		for(int i=0;i<IND_SIZE/2;i++)
			c3[i]=c1[i];
		for(int i=IND_SIZE/2;i<IND_SIZE;i++)
			c3[i]=c2[i];
		c3[IND_SIZE]=0; // child fitness not computed yet
	}

	// Replaces one randomly chosen gene of individual p2 with a random value.
	void Mutate(int p2)
	{
		int gene = rand() % IND_SIZE; // renamed from 'c': it shadowed the capacity member
		int *d=individual(p2);
		d[gene] = rndInRange();
	}

	// Uniform random integer in [RANGE_MIN, RANGE_MAX].
	int rndInRange()
	{
		int a = RANGE_MIN+(int)(rand()%(RANGE_MAX-RANGE_MIN+1));
		if (a>RANGE_MAX) // defensive clamp; cannot trigger when RANGE_MAX >= RANGE_MIN
			a=RANGE_MAX;
		return a;
	}

	// Computes the fitness of every individual in the population.
	void evalPopulation()
	{
		for(int i=0;i<POP_SIZE;i++)
			evalIndividual(i);
	}

	// Fitness of individual i = sum of the weights of the selected items.
	// Also tracks the best feasible (sum <= c) individual seen so far and
	// raises solfound when the sum hits the capacity exactly.
	void evalIndividual(int i)
	{
		int z=0;
		int *p = individual(i);
		for(int j=0;j<IND_SIZE;j++)
			z+=w[j]*p[j];
		p[IND_SIZE]=z;
		if (z<=c && z>bestIndividual[IND_SIZE])
		{
			for(int j=0;j<IND_SIZE;j++)
				bestIndividual[j]=p[j];
			bestIndividual[IND_SIZE]=z;
			printf("Best Fit=%d\r",z);
			if (z==c)
			{
				solfound=1; // optimal: evolve() will stop
				printf("Optimal Solution z=%d found!\r",z);
			}
		}
	}

	// Main loop: evolves up to MAXITER generations, or until the optimum is
	// found, printing per-generation MAX/AVG/MIN fitness statistics.
	void evolve()
	{
		niter=0;
		while(niter<MAXITER && solfound==0)
		{
			printf("--------------------\rGenerazione n.%d\r",niter);
			populate();
			evalPopulation();
			// Sort by fitness descending while accumulating the average.
			int tot=0;
			int ind;
			for(ind=0;ind<POP_SIZE-1;ind++)
			{
				tot+=individual(ind)[IND_SIZE];
				for(int k=ind+1;k<POP_SIZE;k++)
					if (individual(ind)[IND_SIZE]<individual(k)[IND_SIZE])
					{
						int *p=individual(ind);
						pop[ind]=pop[k];
						pop[k]=p;
					}
			}
			tot+=individual(POP_SIZE-1)[IND_SIZE];
			tot/=POP_SIZE;
			printf("MAX=%d   AVG=%d   MIN=%d\r",pop[0][IND_SIZE],tot,pop[POP_SIZE-1][IND_SIZE]);
			niter++;
		}
	}
};

int main() {
	int POP_SIZE=500;
	int IND_SIZE=20;
	int RANGE_MIN=0;
	int RANGE_MAX=1;
	int MAXITER = 10;
	float PCROSS = 0.5;
	float PMUT = 0.3;

	AGA p = AGA(POP_SIZE,IND_SIZE,RANGE_MIN,RANGE_MAX,MAXITER,PCROSS,PMUT);
	p.c=600;
	p.initSSP();
	clock_t tStart = clock();
	p.evolve();
	printf("\rTempo di esecuzione: %.4fs\r", (double)(clock() - tStart)/CLOCKS_PER_SEC);

	printf("\r---------------\rSoluzione: %d\r",p.bestIndividual[IND_SIZE]);
	printf("Items:\r[");
	for(int i=0;i<IND_SIZE;i++)
		printf("%d ",p.w[i]);
	printf("]\r");
	printf("Items soluzione:\r[");
	for(int i=0;i<IND_SIZE;i++)
		printf("%d ",p.w[i]*p.bestIndividual[i]);
	printf("]\r");
	return 0;
}

Esempio di output:

--------------------
Generazione n.0
Best Fit=524
Best Fit=548
Best Fit=579
MAX=1833   AVG=382   MIN=339
--------------------
Generazione n.1
Best Fit=593
MAX=1689   AVG=333   MIN=283
--------------------
Generazione n.2
Best Fit=598
MAX=1189   AVG=351   MIN=283
--------------------
Generazione n.3
Best Fit=600
Optimal Solution z=600 found!
MAX=1189   AVG=406   MIN=203

Tempo di esecuzione: 0.0267s

---------------
Soluzione: 600
Items:
[92 123 109 82 174 110 148 147 96 136 169 67 114 129 100 124 122 101 62 81 ]
Items soluzione:
[92 0 0 0 0 0 0 0 0 136 0 67 0 0 100 124 0 0 0 81 ]

Algoritmi genetici

# DEAP_generico.py
#
# esempio di utilizzo della libreria per la computazione genetica evolutiva
# dei problemi denominata DEAP.
# in questo caso ho utilizzato lo scheletro della risoluzione di un
# problema generico per risolvere il problema di ottimizzazione
# noto come Knapsack Problem.
# documentazione su : https://deap.readthedocs.io/en/master/index.html
#
# by Andrea Bianchini (2021)



import random

from deap import base
from deap import creator
from deap import tools

# Problem constants: a knapsack with capacity C and IND_SIZE items whose
# weights are drawn uniformly between 1% and 7% of C.
IND_SIZE = 50
C = 10000
MIN_WEIGHT = int(C*0.01)
MAX_WEIGHT = int(C*0.07)
MAX=0    # best feasible (<= C) total weight seen so far (updated by evalOneMax)
sol=[]   # individual achieving MAX
items=[random.randint(MIN_WEIGHT,MAX_WEIGHT) for _ in range(IND_SIZE)]

# Single-objective maximization fitness; individuals are plain lists of genes.
creator.create("FitnessMax", base.Fitness, weights=(1.0,))
creator.create("Individual", list, fitness=creator.FitnessMax)

toolbox = base.Toolbox()
# Attribute generator: each gene is a 0/1 inclusion flag for one item.
toolbox.register("attr_bool", random.randint, 0, 1)
# Structure initializers: an individual is IND_SIZE flags; a population is a list of individuals.
toolbox.register("individual", tools.initRepeat, creator.Individual, 
    toolbox.attr_bool, IND_SIZE)
toolbox.register("population", tools.initRepeat, list, toolbox.individual)

def evalOneMax(individual):
    """DEAP evaluation function for the knapsack instance.

    Returns the total weight of the selected items as a 1-tuple (DEAP
    fitnesses are sequences).  As a side effect, tracks the best feasible
    (<= C) selection seen so far in the globals MAX and sol.
    """
    global sol
    global MAX
    e1 = sum(items[i]*individual[i] for i in range(len(individual)))
    if e1>MAX and e1<=C:
        MAX = e1
        # Store a snapshot rather than an alias: the individual object may
        # later be modified in place by the evolutionary loop.
        sol=list(individual)
    return e1,

# Genetic operators: two-point crossover, per-gene bit-flip mutation
# (5% probability per gene) and size-3 tournament selection.
toolbox.register("evaluate", evalOneMax)
toolbox.register("mate", tools.cxTwoPoint)
toolbox.register("mutate", tools.mutFlipBit, indpb=0.05)
toolbox.register("select", tools.selTournament, tournsize=3)

summa=0  # best feasible fitness observed inside main() (stored, never printed)
def main():
    """Evolutionary loop: evolves 300 bit-string individuals for up to
    1000 generations, printing per-generation statistics.

    NOTE(review): the stopping test is max(fits) < C, so the loop ends as
    soon as any individual's total weight reaches or exceeds the capacity;
    the best *feasible* value is tracked separately in the global MAX by
    evalOneMax().
    """
    global summa
    pop = toolbox.population(n=300)

    # Evaluate the entire population
    fitnesses = list(map(toolbox.evaluate, pop))
    for ind, fit in zip(pop, fitnesses):
        ind.fitness.values = fit

    # CXPB  is the probability with which two individuals
    #       are crossed
    #
    # MUTPB is the probability for mutating an individual
    CXPB, MUTPB = 0.5, 0.2

    # Extracting all the fitnesses of the population
    fits = [ind.fitness.values[0] for ind in pop]

    # Variable keeping track of the number of generations
    g = 0
    
    # Begin the evolution
    
    while max(fits) < C and g < 1000:
        # A new generation
        g = g + 1
        print("-- Generation %i --" % g)

        # Select the next generation individuals
        offspring = toolbox.select(pop, len(pop))
        # Clone the selected individuals so the genetic operators can
        # modify them without touching the current population
        offspring = list(map(toolbox.clone, offspring))

        # Apply crossover and mutation on the offspring
        # (consecutive pairs are mated; fitness is invalidated on change)
        for child1, child2 in zip(offspring[::2], offspring[1::2]):
            if random.random() < CXPB:
                toolbox.mate(child1, child2)
                del child1.fitness.values
                del child2.fitness.values

        for mutant in offspring:
            if random.random() < MUTPB:
                toolbox.mutate(mutant)
                del mutant.fitness.values

        # Evaluate the individuals with an invalid fitness
        # (only those touched by crossover or mutation)
        invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
        fitnesses = map(toolbox.evaluate, invalid_ind)
        for ind, fit in zip(invalid_ind, fitnesses):
            ind.fitness.values = fit

        pop[:] = offspring

        # Gather all the fitnesses in one list and print the stats
        fits = [ind.fitness.values[0] for ind in pop]
        
        length = len(pop)
        mean = sum(fits) / length
        sum2 = sum(x*x for x in fits)
        std = abs(sum2 / length - mean**2)**0.5
        
        print("  Min %s" % min(fits))
        print("  Max %s" % max(fits))
        print("  Avg %s" % mean)
        print("  Std %s" % std)

        # Remember the best feasible fitness of this run (global, unused in output)
        if max(fits) <=C and summa<max(fits):
            summa = max(fits)

# Run the evolution, then report the best feasible solution found.
main()
print()
print("Soluzione ottima = %d" %MAX)
print("Lista items completa :")
print(items)
print("Lista items soluzione :")
# Guard against the case in which no feasible individual was ever recorded:
# sol would still be the empty list and indexing it would raise IndexError.
osol = [items[i]*sol[i] for i in range(IND_SIZE)] if sol else []
print(osol)

Esempio:

Soluzione ottima = 10000
Lista items completa :
[481, 167, 361, 111, 670, 602, 119, 331, 654, 155, 400, 227, 651, 531, 413, 134, 409, 140, 488, 457, 495, 628, 153, 632, 327, 159, 593, 633, 682, 392, 368, 526, 599, 478, 408, 315, 466, 582, 302, 172, 427, 173, 551, 673, 272, 158, 502, 269, 685, 588]
Lista items soluzione :
[0, 0, 361, 0, 670, 0, 0, 331, 654, 0, 0, 0, 651, 0, 0, 0, 409, 140, 488, 0, 0, 628, 0, 632, 327, 0, 593, 0, 682, 0, 368, 0, 0, 478, 0, 315, 0, 582, 0, 172, 427, 0, 551, 0, 272, 0, 0, 269, 0, 0]
>>> 

Piccolo Sistema Esperto

# by Andrea Bianchini 2021
# This is the knowledge base of the Expert System.
# Each entry is [question-or-advice, next-index-if-yes, next-index-if-no];
# an index of -1 marks a leaf whose text is the final diagnosis/advice.

kb=[["C'è alimentazione di rete in casa ?",1,4],
["Con una lampadina sana si accende ?",2,3],
["La lampadina è fulminata; sostituiscila",-1,-1],
["C'è un problema elettrico o nella spina o nel portalampade",-1,-1],
["Collega la rete; (salvavita o interruttore generale)",-1,-1]]
# by Andrea Bianchini 2021
# Inference engine of the expert system: walks the decision tree stored in
# kb.kb, asking a yes/no question at each node until a leaf (-1) is reached.

import kb

print("Sistema esperto per risoluzione problema : non si accende lampadina")
print("-------------------------------------------------------------------")
print()

node = 0
while node != -1:
    question, if_yes, if_no = kb.kb[node]
    answer = input(question + "([s]/n): ")
    # An empty reply counts as "yes" (the [s] default in the prompt).
    node = if_yes if answer == "" or answer.upper() == "S" else if_no

print("fine")

Esempio:

Sistema esperto per risoluzione problema : non si accende lampadina
-------------------------------------------------------------------

C'è alimentazione di rete in casa ?([s]/n): s
Con una lampadina sana si accende ?([s]/n): n
C'è un problema elettrico o nella spina o nel portalampade([s]/n): 
fine
>>> 

Questo è un esempio di Sistema Esperto. La base della conoscenza è volutamente molto semplice. Anche il motore inferenziale è abbastanza semplice ma con questo modello potete costruire un Sistema Esperto veramente capace.

Nanointelligenza artificiale

import datetime

# Date the "agent" considers its birthday.
nascita = datetime.datetime(2000, 5, 17)

scelta = 1
while scelta != 2:
    # Minimal menu: the program's only skill is reporting its own age.
    print()
    print("------------")
    print("1-la mia età")
    print("2-end")

    scelta = int(input("Scegli 1 o 2 : "))

    if scelta == 1:
        # Age = elapsed days since birth, expressed in (approximate) years.
        eta = datetime.datetime.now() - nascita
        print()
        print("La mia età è di : {:.2f} anni".format(eta.days/365.0))

print()
print(":-) A presto!")

Questo è un semplicissimo programma dotato di intelligenza artificiale (nano e debole…). Se gli chiedete quanti anni ha, lui ve lo sa dire!

Esempio:

------------
1-la mia età
2-end
Scegli 1 o 2 : 1

La mia età è di : 21.32 anni

------------
1-la mia età
2-end
Scegli 1 o 2 : 2

:-) A presto!
>>> 

Rete Neuronale

import numpy as np

class NeuralNetwork():
    """A single sigmoid neuron with 3 inputs and 1 output, trained by
    simple gradient-style weight updates."""

    def __init__(self):
        # Fixed seed so every run starts from the same random weights.
        np.random.seed(1)

        # 3x1 weight matrix drawn uniformly from [-1, 1) with mean 0.
        self.synaptic_weights = 2 * np.random.random((3, 1)) - 1

    def sigmoid(self, x):
        """Logistic activation: squashes x into (0, 1)."""
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, x):
        """Derivative of the sigmoid, given x already equals sigmoid(z)."""
        return x * (1 - x)

    def train(self, training_inputs, training_outputs, training_iterations):
        """Repeatedly forward the training set and nudge the weights in the
        direction that reduces the prediction error."""
        for _ in range(training_iterations):
            # Forward pass through the neuron.
            prediction = self.think(training_inputs)

            # Error drives the back-propagation step.
            error = training_outputs - prediction

            # Weight update: inputs^T . (error * sigmoid'(prediction)).
            delta = error * self.sigmoid_derivative(prediction)
            self.synaptic_weights += np.dot(training_inputs.T, delta)

    def think(self, inputs):
        """Forward pass: cast inputs to float and return the neuron output."""
        inputs = inputs.astype(float)
        return self.sigmoid(np.dot(inputs, self.synaptic_weights))


if __name__ == "__main__":

    # Build the single-neuron network (deterministic thanks to its fixed seed).
    net = NeuralNetwork()

    print("Beginning Randomly Generated Weights: ")
    print(net.synaptic_weights)

    # Training set: four examples of three binary inputs and one target each.
    samples = np.array([[0, 0, 1],
                        [1, 1, 1],
                        [1, 0, 1],
                        [0, 1, 1]])
    targets = np.array([[0, 1, 1, 0]]).T

    # Fit the weights over 15000 passes through the training data.
    net.train(samples, targets, 15000)

    print("Ending Weights After Training: ")
    print(net.synaptic_weights)

    # Collect a new situation from the user; values stay strings here and
    # are converted to floats inside think().
    first = str(input("User Input One: "))
    second = str(input("User Input Two: "))
    third = str(input("User Input Three: "))

    print("Considering New Situation: ", first, second, third)
    print("New Output data: ")
    print(net.think(np.array([first, second, third])))
    print("Wow, we did it!")

by  Dr. Michael J. Garbade

https://www.kdnuggets.com/2018/10/simple-neural-network-python.html