optimizers

class thefittest.optimizers.DifferentialEvolution(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, left_border: float | int | number | ndarray[Any, dtype[number]], right_border: float | int | number | ndarray[Any, dtype[number]], num_variables: int, mutation: str = 'rand_1', F: float = 0.5, CR: float = 0.5, elitism: bool = True, init_population: ndarray[Any, dtype[float64]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[float64]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Storn, Rainer & Price, Kenneth. (1995). Differential Evolution: A Simple and Efficient Adaptive Scheme for Global Optimization Over Continuous Spaces. Journal of Global Optimization, 23.

Methods

fit

float_population

get_fittest

get_remains_calls

get_stats

static float_population(pop_size: int, left_border: float | int | number | ndarray[Any, dtype[number]], right_border: float | int | number | ndarray[Any, dtype[number]], num_variables: int) ndarray[Any, dtype[float64]]
class thefittest.optimizers.GeneticAlgorithm(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, str_len: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 2, elitism: bool = True, selection: str = 'tournament_5', crossover: str = 'uniform_2', mutation: str = 'weak', init_population: ndarray[Any, dtype[int8]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[int8]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Holland, J. H. (1992). Genetic algorithms. Scientific American, 267(1), 66-72

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

static binary_string_population(pop_size: int, str_len: int) ndarray[Any, dtype[int8]]
class thefittest.optimizers.GeneticProgramming(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], uniset: UniversalSet, iters: int, pop_size: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 7, elitism: bool = True, selection: str = 'rank', crossover: str = 'gp_standard', mutation: str = 'gp_weak_grow', max_level: int = 16, init_level: int = 5, init_population: ndarray[Any, dtype[_ScalarType_co]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Koza, John R.. “Genetic programming - on the programming of computers by means of natural selection.” Complex Adaptive Systems (1993)

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

half_and_half

static half_and_half(pop_size: int, uniset: UniversalSet, max_level: int) ndarray[Any, dtype[_ScalarType_co]]
class thefittest.optimizers.PDPGA(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, str_len: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 2, elitism: bool = True, selections: Tuple[str, ...] = ('proportional', 'rank', 'tournament_3', 'tournament_5', 'tournament_7'), crossovers: Tuple[str, ...] = ('empty', 'one_point', 'two_point', 'uniform_2', 'uniform_7', 'uniform_prop_2', 'uniform_prop_7', 'uniform_rank_2', 'uniform_rank_7', 'uniform_tour_3', 'uniform_tour_7'), mutations: Tuple[str, ...] = ('weak', 'average', 'strong'), init_population: ndarray[Any, dtype[int8]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[int8]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Niehaus, J., Banzhaf, W. (2001). Adaption of Operator Probabilities in Genetic Programming. In: Miller, J., Tomassini, M., Lanzi, P.L., Ryan, C., Tettamanzi, A.G.B., Langdon, W.B. (eds) Genetic Programming. EuroGP 2001. Lecture Notes in Computer Science, vol 2038. Springer, Berlin, Heidelberg. https://doi.org/10.1007/3-540-45355-5_26

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

class thefittest.optimizers.PDPGP(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], uniset: UniversalSet, iters: int, pop_size: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 2, elitism: bool = True, selections: Tuple[str, ...] = ('proportional', 'rank', 'tournament_3', 'tournament_5', 'tournament_7'), crossovers: Tuple[str, ...] = ('gp_standard', 'gp_one_point', 'gp_uniform_rank_2'), mutations: Tuple[str, ...] = ('gp_weak_point', 'gp_average_point', 'gp_strong_point', 'gp_weak_grow', 'gp_average_grow', 'gp_strong_grow'), max_level: int = 16, init_level: int = 4, init_population: ndarray[Any, dtype[_ScalarType_co]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[_ScalarType_co]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Niehaus, J., Banzhaf, W. (2001). Adaption of Operator Probabilities in Genetic Programming. In: Miller, J., Tomassini, M., Lanzi, P.L., Ryan, C., Tettamanzi, A.G.B., Langdon, W.B. (eds) Genetic Programming. EuroGP 2001. Lecture Notes in Computer Science, vol 2038. Springer, Berlin, Heidelberg. https://doi.org/10.1007/3-540-45355-5_26

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

half_and_half

class thefittest.optimizers.SHADE(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, left_border: float | int | number | ndarray[Any, dtype[number]], right_border: float | int | number | ndarray[Any, dtype[number]], num_variables: int, elitism: bool = True, init_population: ndarray[Any, dtype[float64]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[float64]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Tanabe, Ryoji & Fukunaga, Alex. (2013). Success-history based parameter adaptation for Differential Evolution. 2013 IEEE Congress on Evolutionary Computation, CEC 2013. 71-78. 10.1109/CEC.2013.6557555.

Methods

fit

float_population

get_fittest

get_remains_calls

get_stats

class thefittest.optimizers.SHAGA(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, str_len: int, elitism: bool = True, init_population: ndarray[Any, dtype[int8]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[int8]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Stanovov, Vladimir & Akhmedova, Shakhnaz & Semenkin, Eugene. (2019). Genetic Algorithm with Success History based Parameter Adaptation. 180-187. 10.5220/0008071201800187.

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

static binary_string_population(pop_size: int, str_len: int) ndarray[Any, dtype[int8]]
class thefittest.optimizers.SelfCGA(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, str_len: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 2, elitism: bool = True, selections: Tuple[str, ...] = ('proportional', 'rank', 'tournament_3', 'tournament_5', 'tournament_7'), crossovers: Tuple[str, ...] = ('empty', 'one_point', 'two_point', 'uniform_2', 'uniform_7', 'uniform_prop_2', 'uniform_prop_7', 'uniform_rank_2', 'uniform_rank_7', 'uniform_tour_3', 'uniform_tour_7'), mutations: Tuple[str, ...] = ('weak', 'average', 'strong'), init_population: ndarray[Any, dtype[int8]] | None = None, K: float = 2, selection_threshold_proba: float = 0.05, crossover_threshold_proba: float = 0.05, mutation_threshold_proba: float = 0.05, genotype_to_phenotype: Callable[[ndarray[Any, dtype[int8]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Semenkin, E.S., Semenkina, M.E. Self-configuring Genetic Algorithm with Modified Uniform Crossover Operator. LNCS, 7331, 2012, pp. 414-421. https://doi.org/10.1007/978-3-642-30976-2_50

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

class thefittest.optimizers.SelfCGP(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], uniset: UniversalSet, iters: int, pop_size: int, tour_size: int = 2, mutation_rate: float = 0.05, parents_num: int = 2, elitism: bool = True, selections: Tuple[str, ...] = ('proportional', 'rank', 'tournament_3', 'tournament_5', 'tournament_7'), crossovers: Tuple[str, ...] = ('gp_standard', 'gp_one_point', 'gp_uniform_rank_2'), mutations: Tuple[str, ...] = ('gp_weak_point', 'gp_average_point', 'gp_strong_point', 'gp_weak_grow', 'gp_average_grow', 'gp_strong_grow'), max_level: int = 16, init_level: int = 4, init_population: ndarray[Any, dtype[_ScalarType_co]] | None = None, K: float = 2, selection_threshold_proba: float = 0.05, crossover_threshold_proba: float = 0.05, mutation_threshold_proba: float = 0.05, genotype_to_phenotype: Callable[[ndarray[Any, dtype[_ScalarType_co]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Semenkin, Eugene & Semenkina, Maria. (2012). Self-configuring genetic programming algorithm with modified uniform crossover. 1-6. http://dx.doi.org/10.1109/CEC.2012.6256587

Methods

binary_string_population

fit

get_fittest

get_remains_calls

get_stats

half_and_half

class thefittest.optimizers.jDE(fitness_function: Callable[[ndarray[Any, dtype[Any]]], ndarray[Any, dtype[float64]]], iters: int, pop_size: int, left_border: float | int | number | ndarray[Any, dtype[number]], right_border: float | int | number | ndarray[Any, dtype[number]], num_variables: int, mutation: str = 'rand_1', F_min: float = 0.1, F_max: float = 0.9, t_F: float = 0.1, t_CR: float = 0.1, elitism: bool = True, init_population: ndarray[Any, dtype[float64]] | None = None, genotype_to_phenotype: Callable[[ndarray[Any, dtype[float64]]], ndarray[Any, dtype[Any]]] | None = None, optimal_value: float | None = None, termination_error_value: float = 0.0, no_increase_num: int | None = None, minimization: bool = False, show_progress_each: int | None = None, keep_history: bool = False, n_jobs: int = 1, fitness_function_args: Dict | None = None, genotype_to_phenotype_args: Dict | None = None, random_state: int | RandomState | None = None, on_generation: Callable | None = None)

Brest, Janez & Greiner, Sašo & Bošković, Borko & Mernik, Marjan & Zumer, Viljem. (2006). Self-Adapting Control Parameters in Differential Evolution: A Comparative Study on Numerical Benchmark Problems. IEEE Transactions on Evolutionary Computation, 10(6), 646-657. 10.1109/TEVC.2006.872133.

Methods

fit

float_population

get_fittest

get_remains_calls

get_stats