@@ -215,8 +215,8 @@ elements in the output array must range from 0 to 4 inclusive.
Generally, the class labels start from ``0`` to ``N-1`` where ``N`` is
the number of classes.

- Note that the project only supports classification problems where each
- sample is assigned to only one class.
+ Note that the project only supports problems where each sample is
+ assigned to only one class.

.. _header-n89:

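For instance, in the 5-class problem referenced above (labels ``0`` to
``4``), a hypothetical output array for four samples could look like the
following sketch; the values are illustrative only, and each sample gets
exactly one label:

    import numpy

    # Four samples, one class label per sample, labels in the range 0..N-1 (here N=5).
    data_outputs = numpy.array([0, 3, 2, 4])
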
@@ -396,11 +396,10 @@ predicting the outputs based on the current solution's
attribute is updated by weights evolved by the genetic algorithm after
each generation.

- PyGAD 2.0.0 and higher has a new parameter accepted by the ``pygad.GA``
- class constructor named ``callback_generation``. It could be assigned to
- a function that is called after each generation. The function must
- accept a single parameter representing the instance of the ``pygad.GA``
- class.
+ The ``pygad.GA`` class constructor accepts a parameter named
+ ``on_generation``. It can be assigned to a function that is called
+ after each generation. The function must accept a single parameter
+ representing the instance of the ``pygad.GA`` class.

This callback function can be used to update the ``trained_weights``
attribute of layers of each network in the population.
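
As a minimal sketch of such a callback (the body shown here is
illustrative; any per-generation logic can go in it):

    def callback_generation(ga_instance):
        # PyGAD calls this function once after every generation and passes
        # the pygad.GA instance as the only argument.
        print("Generation = {generation}".format(generation=ga_instance.generations_completed))

    # The function is later passed to the constructor as
    # on_generation=callback_generation.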
@@ -470,7 +469,7 @@ number of generations is 10.
                       crossover_type=crossover_type,
                       mutation_type=mutation_type,
                       keep_parents=keep_parents,
-                      callback_generation=callback_generation)
+                      on_generation=callback_generation)

The last step for training the neural networks using the genetic
algorithm is calling the ``run()`` method.
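
As a small sketch, assuming ``ga_instance`` is the ``pygad.GA`` instance
created above:

    ga_instance.run()

    # After run() returns, the number of completed generations and the
    # best fitness recorded per generation can be inspected.
    print(ga_instance.generations_completed)
    print(ga_instance.best_solutions_fitness)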
@@ -618,12 +617,13 @@ complete code is listed below.
def callback_generation(ga_instance):
    global GACNN_instance, last_fitness

-     population_matrices = gacnn.population_as_matrices(population_networks=GACNN_instance.population_networks,
+     population_matrices = pygad.gacnn.population_as_matrices(population_networks=GACNN_instance.population_networks,
                                                              population_vectors=ga_instance.population)

    GACNN_instance.update_population_trained_weights(population_trained_weights=population_matrices)

    print("Generation = {generation}".format(generation=ga_instance.generations_completed))
+     print("Fitness = {fitness}".format(fitness=ga_instance.best_solutions_fitness))

data_inputs = numpy.load("dataset_inputs.npy")
data_outputs = numpy.load("dataset_outputs.npy")
@@ -634,35 +634,35 @@ complete code is listed below.
data_inputs = data_inputs
data_outputs = data_outputs

- input_layer = cnn.Input2D(input_shape=sample_shape)
- conv_layer1 = cnn.Conv2D(num_filters=2,
-                          kernel_size=3,
-                          previous_layer=input_layer,
-                          activation_function="relu")
- average_pooling_layer = cnn.AveragePooling2D(pool_size=5,
-                                              previous_layer=conv_layer1,
-                                              stride=3)
+ input_layer = pygad.cnn.Input2D(input_shape=sample_shape)
+ conv_layer1 = pygad.cnn.Conv2D(num_filters=2,
+                                kernel_size=3,
+                                previous_layer=input_layer,
+                                activation_function="relu")
+ average_pooling_layer = pygad.cnn.AveragePooling2D(pool_size=5,
+                                                    previous_layer=conv_layer1,
+                                                    stride=3)

- flatten_layer = cnn.Flatten(previous_layer=average_pooling_layer)
- dense_layer2 = cnn.Dense(num_neurons=num_classes,
-                          previous_layer=flatten_layer,
-                          activation_function="softmax")
+ flatten_layer = pygad.cnn.Flatten(previous_layer=average_pooling_layer)
+ dense_layer2 = pygad.cnn.Dense(num_neurons=num_classes,
+                                previous_layer=flatten_layer,
+                                activation_function="softmax")

- model = cnn.Model(last_layer=dense_layer2,
-                   epochs=1,
-                   learning_rate=0.01)
+ model = pygad.cnn.Model(last_layer=dense_layer2,
+                         epochs=1,
+                         learning_rate=0.01)

model.summary()


- GACNN_instance = gacnn.GACNN(model=model,
+ GACNN_instance = pygad.gacnn.GACNN(model=model,
                                     num_solutions=4)

# GACNN_instance.update_population_trained_weights(population_trained_weights=population_matrices)

# population does not hold the numerical weights of the network; instead, it holds a list of references to the last layer of each network (i.e. solution) in the population. A solution or a network can be used interchangeably.
# If there is a population with 3 solutions (i.e. networks), then the population is a list with 3 elements. Each element is a reference to the last layer of each network. Using such a reference, all details of the network can be accessed.
- population_vectors = gacnn.population_as_vectors(population_networks=GACNN_instance.population_networks)
+ population_vectors = pygad.gacnn.population_as_vectors(population_networks=GACNN_instance.population_networks)

# To prepare the initial population, there are 2 ways:
# 1) Prepare it yourself and pass it to the initial_population parameter. This way is useful when the user wants to start the genetic algorithm with a custom initial population.
@@ -692,7 +692,7 @@ complete code is listed below.
                       crossover_type=crossover_type,
                       mutation_type=mutation_type,
                       keep_parents=keep_parents,
-                      callback_generation=callback_generation)
+                      on_generation=callback_generation)

ga_instance.run()
