diff --git a/pymoo/__init__.py b/pymoo/__init__.py index 6fa44e1fd..f8796a9e6 100644 --- a/pymoo/__init__.py +++ b/pymoo/__init__.py @@ -1,3 +1,19 @@ from pymoo.version import __version__ +import numpy as np +import threading +# Create the global random state Singleton +class PymooPRNG(object): + _lock = threading.Lock() + _instance = None + def __new__(cls, seed=None): + if cls._instance is None or seed is not None: + with cls._lock: + # Another thread could have created the instance before we + # acquired the lock, so re-check under the lock. If a seed was + # passed explicitly, rebuild the generator so runs can be re-seeded. + if cls._instance is None or seed is not None: + cls._instance = np.random.default_rng(seed) + + return cls._instance diff --git a/pymoo/algorithms/hyperparameters.py b/pymoo/algorithms/hyperparameters.py index 0c59dd8df..8cfcd8f3c 100644 --- a/pymoo/algorithms/hyperparameters.py +++ b/pymoo/algorithms/hyperparameters.py @@ -5,6 +5,7 @@ from pymoo.core.parameters import get_params, flatten, set_params, hierarchical from pymoo.core.problem import ElementwiseProblem from pymoo.optimize import minimize +import pymoo def create(algorithm, params): @@ -77,7 +78,7 @@ def __init__(self, problem, n_runs=None, seeds=None, func_stats=stats_single_obj if n_runs is None: raise Exception("Either provide number of runs or seeds directly.") - seeds = np.random.randint(1, 1000000, size=n_runs) + seeds = pymoo.PymooPRNG().integers(1, 1000000, size=n_runs) self.seeds = seeds self.func_stats = func_stats diff --git a/pymoo/algorithms/moo/ctaea.py b/pymoo/algorithms/moo/ctaea.py index f021ceac4..3e5e279b0 100644 --- a/pymoo/algorithms/moo/ctaea.py +++ b/pymoo/algorithms/moo/ctaea.py @@ -17,6 +17,7 @@ from pymoo.util.misc import has_feasible, random_permuations from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting +import pymoo # ========================================================================================================= # Implementation @@ -37,13 +38,13 @@ def comp_by_cv_dom_then_random(pop, P, **kwargs): elif rel == -1: S[i] = b else: - S[i] = np.random.choice([a, b]) + S[i] = pymoo.PymooPRNG().choice([a, b]) elif pop[a].CV <= 0.0: S[i] = a elif pop[b].CV <= 0.0: S[i] = b else: - S[i] = np.random.choice([a, b]) + S[i] = pymoo.PymooPRNG().choice([a, b]) return S[:, None].astype(int) @@ -68,7 +69,7 @@ def _do(self, problem, Hm, n_select, n_parents, **kwargs): if Pc <= Pd: # Choose from DA P[::n_parents, :] += n_pop - pf = np.random.random(n_select) + pf = pymoo.PymooPRNG().random(n_select) P[1::n_parents, :][pf >= Pc] += n_pop # compare using tournament function @@ -175,7 +176,7 @@ def _updateCA(self, pop, n_survive): if (delta_d[min_d_i] < 0) or ( delta_d[min_d_i] == 0 and (FV[crowdest[list(min_d_i)]] > niche_worst).any()): min_d_i = list(min_d_i) - np.random.shuffle(min_d_i) + pymoo.PymooPRNG().shuffle(min_d_i) closest = crowdest[min_d_i] niche_worst = closest[np.argmax(FV[closest])] if FV[niche_worst] > worst_fit: diff --git a/pymoo/algorithms/moo/dnsga2.py b/pymoo/algorithms/moo/dnsga2.py index 0e5a19070..f8e72f2b8 100644 --- a/pymoo/algorithms/moo/dnsga2.py +++ b/pymoo/algorithms/moo/dnsga2.py @@ -2,7 +2,7 @@ from pymoo.algorithms.moo.nsga2 import NSGA2 from pymoo.core.population import Population - +import pymoo class DNSGA2(NSGA2): @@ -32,7 +32,7 @@ def _advance(self, **kwargs): n_samples = int(np.ceil(len(pop) * self.perc_detect_change)) # choose randomly some individuals of the current population to test if there was a change - I = np.random.choice(np.arange(len(pop)), size=n_samples) + I = pymoo.PymooPRNG().choice(np.arange(len(pop)), size=n_samples)
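For orientation, a minimal sketch of how the shared generator is consumed throughout the rest of this patch (illustrative values only; the call sites mirror substitutions made below, e.g. in hyperparameters.py):

import pymoo

pymoo.PymooPRNG(42)                                       # seed (or re-seed) the global generator once per run
seeds = pymoo.PymooPRNG().integers(1, 1000000, size=5)    # was: np.random.randint(1, 1000000, size=5)
mask = pymoo.PymooPRNG().random(10) < 0.5                 # was: np.random.random(10) < 0.5
order = pymoo.PymooPRNG().permutation(10)                 # was: np.random.permutation(10)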
samples = self.evaluator.eval(self.problem, Population.new(X=X[I])) # calculate the differences between the old and newly evaluated pop @@ -47,7 +47,7 @@ def _advance(self, **kwargs): pop = Population.new(X=X) # find indices to be replaced (introduce diversity) - I = np.where(np.random.random(len(pop)) < self.perc_diversity)[0] + I = np.where(pymoo.PymooPRNG().random(len(pop)) < self.perc_diversity)[0] # replace with randomly sampled individuals if self.version == "A": diff --git a/pymoo/algorithms/moo/moead.py b/pymoo/algorithms/moo/moead.py index 53cc0dae7..438250388 100755 --- a/pymoo/algorithms/moo/moead.py +++ b/pymoo/algorithms/moo/moead.py @@ -14,6 +14,7 @@ from pymoo.util.display.multi import MultiObjectiveOutput from pymoo.util.reference_direction import default_ref_dirs +import pymoo class NeighborhoodSelection(Selection): @@ -28,10 +29,10 @@ def _do(self, problem, pop, n_select, n_parents, neighbors=None, **kwargs): prob = get(self.prob, size=n_select) for k in range(n_select): - if np.random.random() < prob[k]: - P[k] = np.random.choice(neighbors[k], n_parents, replace=False) + if pymoo.PymooPRNG().random() < prob[k]: + P[k] = pymoo.PymooPRNG().choice(neighbors[k], n_parents, replace=False) else: - P[k] = np.random.permutation(len(pop))[:n_parents] + P[k] = pymoo.PymooPRNG().permutation(len(pop))[:n_parents] return P @@ -98,12 +99,13 @@ def _next(self): pop = self.pop # iterate for each member of the population in random order - for k in np.random.permutation(len(pop)): + for k in pymoo.PymooPRNG().permutation(len(pop)): # get the parents using the neighborhood selection P = self.selection.do(self.problem, pop, 1, self.mating.crossover.n_parents, neighbors=[self.neighbors[k]]) # perform a mating using the default operators - if more than one offspring just pick the first - off = np.random.choice(self.mating.do(self.problem, pop, 1, parents=P, n_max_iterations=1)) + # TODO: this is not just taking the first it's drawing one randomly - is this intended? 
+ off = pymoo.PymooPRNG().choice(self.mating.do(self.problem, pop, 1, parents=P, n_max_iterations=1)) # evaluate the offspring off = yield off @@ -143,7 +145,7 @@ def _infill(self): pop_size, cross_parents, cross_off = self.pop_size, self.mating.crossover.n_parents, self.mating.crossover.n_offsprings # do the mating in a random order - indices = np.random.permutation(len(self.pop))[:self.n_offsprings] + indices = pymoo.PymooPRNG().permutation(len(self.pop))[:self.n_offsprings] # get the parents using the neighborhood selection P = self.selection.do(self.problem, self.pop, self.n_offsprings, cross_parents, @@ -153,7 +155,7 @@ def _infill(self): off = self.mating.do(self.problem, self.pop, 1e12, n_max_iterations=1, parents=P) # select a random offspring from each mating - off = Population.create(*[np.random.choice(pool) for pool in np.reshape(off, (self.n_offsprings, -1))]) + off = Population.create(*[pymoo.PymooPRNG().choice(pool) for pool in np.reshape(off, (self.n_offsprings, -1))]) # store the indices because of the neighborhood matching in advance self.indices = indices diff --git a/pymoo/algorithms/moo/nsga3.py b/pymoo/algorithms/moo/nsga3.py index f2d714895..ed6225828 100644 --- a/pymoo/algorithms/moo/nsga3.py +++ b/pymoo/algorithms/moo/nsga3.py @@ -15,6 +15,7 @@ from pymoo.util.misc import intersect, has_feasible from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting +import pymoo # ========================================================================================================= # Implementation @@ -32,7 +33,7 @@ def comp_by_cv_then_random(pop, P, **kwargs): # both solutions are feasible just set random else: - S[i] = np.random.choice([a, b]) + S[i] = pymoo.PymooPRNG().choice([a, b]) return S[:, None].astype(int) @@ -214,7 +215,7 @@ def niching(pop, n_remaining, niche_count, niche_of_individuals, dist_to_niche): # all niches with the minimum niche count (truncate if randomly if more niches than remaining individuals) next_niches = next_niches_list[np.where(next_niche_count == min_niche_count)[0]] - next_niches = next_niches[np.random.permutation(len(next_niches))[:n_select]] + next_niches = next_niches[pymoo.PymooPRNG().permutation(len(next_niches))[:n_select]] for next_niche in next_niches: @@ -222,7 +223,7 @@ def niching(pop, n_remaining, niche_count, niche_of_individuals, dist_to_niche): next_ind = np.where(np.logical_and(niche_of_individuals == next_niche, mask))[0] # shuffle to break random tie (equal perp. 
dist) or select randomly - np.random.shuffle(next_ind) + pymoo.PymooPRNG().shuffle(next_ind) if niche_count[next_niche] == 0: next_ind = next_ind[np.argmin(dist_to_niche[next_ind])] diff --git a/pymoo/algorithms/moo/sms.py b/pymoo/algorithms/moo/sms.py index f9ae526cb..54a589f60 100644 --- a/pymoo/algorithms/moo/sms.py +++ b/pymoo/algorithms/moo/sms.py @@ -17,6 +17,7 @@ from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting from pymoo.util.normalization import normalize +import pymoo # --------------------------------------------------------------------------------------------------------- # Environmental Survival - Remove the solution with the least HV contribution @@ -122,7 +123,7 @@ def cv_and_dom_tournament(pop, P, *args, **kwargs): # if rank or domination relation didn't make a decision compare by crowding if np.isnan(S[i]): - S[i] = np.random.choice([a, b]) + S[i] = pymoo.PymooPRNG().choice([a, b]) return S[:, None].astype(int, copy=False) diff --git a/pymoo/algorithms/moo/unsga3.py b/pymoo/algorithms/moo/unsga3.py index 4f47a9771..9f1f0ab46 100644 --- a/pymoo/algorithms/moo/unsga3.py +++ b/pymoo/algorithms/moo/unsga3.py @@ -3,6 +3,7 @@ from pymoo.algorithms.moo.nsga3 import NSGA3 from pymoo.operators.selection.tournament import compare, TournamentSelection +import pymoo # ========================================================================================================= # Implementation @@ -33,7 +34,7 @@ def comp_by_rank_and_ref_line_dist(pop, P, **kwargs): method='smaller_is_better') if np.isnan(S[i]): - S[i] = np.random.choice([a, b]) + S[i] = pymoo.PymooPRNG().choice([a, b]) return S[:, None].astype(int) diff --git a/pymoo/algorithms/soo/nonconvex/brkga.py b/pymoo/algorithms/soo/nonconvex/brkga.py index 2a6decd4f..e80809e9b 100755 --- a/pymoo/algorithms/soo/nonconvex/brkga.py +++ b/pymoo/algorithms/soo/nonconvex/brkga.py @@ -15,6 +15,7 @@ from pymoo.util.display.single import SingleObjectiveOutput from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting +import pymoo # ========================================================================================================= # Implementation @@ -68,8 +69,8 @@ def _do(self, problem, pop, n_select, n_parents, **kwargs): non_elites = elites # do the mating selection - always one elite and one non-elites - s_elite = np.random.choice(elites, size=n_select) - s_non_elite = np.random.choice(non_elites, size=n_select) + s_elite = pymoo.PymooPRNG().choice(elites, size=n_select) + s_non_elite = pymoo.PymooPRNG().choice(non_elites, size=n_select) return np.column_stack([s_elite, s_non_elite]) diff --git a/pymoo/algorithms/soo/nonconvex/cmaes.py b/pymoo/algorithms/soo/nonconvex/cmaes.py index 9e88fd029..118d13697 100755 --- a/pymoo/algorithms/soo/nonconvex/cmaes.py +++ b/pymoo/algorithms/soo/nonconvex/cmaes.py @@ -13,6 +13,7 @@ from pymoo.util.optimum import filter_optimum from pymoo.vendor.vendor_cmaes import my_fmin +import pymoo # ========================================================================================================= # Implementation @@ -282,6 +283,7 @@ def __init__(self, popsize : 4+int(3*np.log(N)) Population size, AKA lambda, number of new solution per iteration + # TODO is randn actually something that we can pass? doesn't appear to be used in code base. 
randn : np.random.randn Randn(lam, N) must return an np.array of shape (lam, N), see also cma.utilities.math.randhss diff --git a/pymoo/algorithms/soo/nonconvex/de.py b/pymoo/algorithms/soo/nonconvex/de.py index 3a5562f51..5aa255d86 100755 --- a/pymoo/algorithms/soo/nonconvex/de.py +++ b/pymoo/algorithms/soo/nonconvex/de.py @@ -38,6 +38,7 @@ from pymoo.util.display.single import SingleObjectiveOutput from pymoo.util.misc import where_is_what +import pymoo # ========================================================================================================= # Crossover @@ -54,7 +55,7 @@ def de_differential(X, F, jitter, alpha=0.001): for i in range(1, n_parents, 2): # create the weight vectors with jitter to give some variation _F = F[:, None].repeat(n_var, axis=1) - _F[jitter] *= (1 + alpha * (np.random.random((jitter.sum(), n_var)) - 0.5)) + _F[jitter] *= (1 + alpha * (pymoo.PymooPRNG().random((jitter.sum(), n_var)) - 0.5)) # add the difference to the vector delta += _F * (X[i] - X[i + 1]) @@ -129,7 +130,7 @@ def do(self, problem, pop, n_offsprings, algorithm=None, **kwargs): itself = np.array(targets)[:, None] - best = lambda: np.random.choice(np.where(pop.get("rank") == 0)[0], replace=True, size=n_matings) + best = lambda: pymoo.PymooPRNG().choice(np.where(pop.get("rank") == 0)[0], replace=True, size=n_matings) if sel_type == "rand": fast_fill_random(P, len(pop), columns=range(n_parents), Xp=itself) @@ -175,10 +176,10 @@ def do(self, problem, pop, n_offsprings, algorithm=None, **kwargs): _trial = np.copy(_target) _trial[M] = _donor[M] elif name == "line": - w = np.random.random((len(K), 1)) * _CR[:, None] + w = pymoo.PymooPRNG().random((len(K), 1)) * _CR[:, None] _trial = _target + w * (_donor - _target) elif name == "hypercube": - w = np.random.random((len(K), _target.shape[1])) * _CR[:, None] + w = pymoo.PymooPRNG().random((len(K), _target.shape[1])) * _CR[:, None] _trial = _target + w * (_donor - _target) else: raise Exception(f"Unknown crossover variant: {name}") @@ -252,7 +253,7 @@ def _infill(self): # if number of offsprings is set lower than pop_size - randomly select if self.n_offsprings < self.pop_size: - index = np.random.permutation(len(infills))[:self.n_offsprings] + index = pymoo.PymooPRNG().permutation(len(infills))[:self.n_offsprings] infills = infills[index] infills.set("index", index) diff --git a/pymoo/algorithms/soo/nonconvex/es.py b/pymoo/algorithms/soo/nonconvex/es.py index ca4c44545..b4d1eb5be 100644 --- a/pymoo/algorithms/soo/nonconvex/es.py +++ b/pymoo/algorithms/soo/nonconvex/es.py @@ -9,6 +9,7 @@ from pymoo.util.display.single import SingleObjectiveOutput from pymoo.util.optimum import filter_optimum +import pymoo class ES(GeneticAlgorithm): @@ -96,7 +97,7 @@ def _infill(self): sigmap = np.minimum(self.sigma_max, es_sigma(sigmap, self.tau, self.taup)) # execute the evolutionary strategy to calculate the offspring solutions - Xp = X + sigmap * np.random.normal(size=sigmap.shape) + Xp = X + sigmap * pymoo.PymooPRNG().normal(size=sigmap.shape) # if gamma is not none do the differential variation overwrite Xp and sigmap for the first mu-1 individuals if self.gamma is not None: @@ -127,7 +128,7 @@ def _set_optimum(self): def es_sigma(sigma, tau, taup): _lambda, _n = sigma.shape - return sigma * np.exp(taup * np.random.normal(size=(_lambda, 1)) + tau * np.random.normal(size=(_lambda, _n))) + return sigma * np.exp(taup * pymoo.PymooPRNG().normal(size=(_lambda, 1)) + tau * pymoo.PymooPRNG().normal(size=(_lambda, _n))) def es_intermediate_recomb(sigma): @@ -136,7 
+137,7 @@ def es_intermediate_recomb(sigma): for i in range(_lambda): for j in range(_n): - k = np.random.randint(_lambda) + k = pymoo.PymooPRNG().integers(_lambda) sigma_hat[i, j] = (sigma[i, j] + sigma[k, j]) / 2.0 return sigma_hat @@ -160,7 +161,7 @@ def es_mut_repair(Xp, X, sigma, xl, xu, n_trials): break else: # do the mutation again vectored for all values not in bound - Xp[i, j] = X[i, j] + sigma[i, j] * np.random.normal(size=len(i)) + Xp[i, j] = X[i, j] + sigma[i, j] * pymoo.PymooPRNG().normal(size=len(i)) # if there are still solutions which boundaries are violated, set them to the original X if not all_in_bounds: @@ -189,7 +190,7 @@ def es_mut_loop(X, sigmap, xl, xu, n_trials=10): for _ in range(n_trials): # calculate the mutated value - x = X[i, j] + sigmap[i, j] * np.random.normal() + x = X[i, j] + sigmap[i, j] * pymoo.PymooPRNG().normal() # if it is inside the bounds accept it - otherwise try again if xl[j] <= x <= xu[j]: diff --git a/pymoo/algorithms/soo/nonconvex/g3pcx.py b/pymoo/algorithms/soo/nonconvex/g3pcx.py index 7e8f3c66e..0785cd540 100644 --- a/pymoo/algorithms/soo/nonconvex/g3pcx.py +++ b/pymoo/algorithms/soo/nonconvex/g3pcx.py @@ -14,6 +14,7 @@ from pymoo.operators.selection.rnd import fast_fill_random from pymoo.util.display.single import SingleObjectiveOutput +import pymoo # ========================================================================================================= # Implementation @@ -77,7 +78,7 @@ def _next(self, **kwargs): pop, family_size = self.pop, get(self.family_size) - rnd = np.random.choice(np.arange(len(pop)), size=family_size, replace=False) + rnd = pymoo.PymooPRNG().choice(np.arange(len(pop)), size=family_size, replace=False) family = Population.merge(pop[rnd], off) pop[rnd] = FitnessSurvival().do(self.problem, family, n_survive=family_size) diff --git a/pymoo/algorithms/soo/nonconvex/isres.py b/pymoo/algorithms/soo/nonconvex/isres.py index 06622a3d1..632aaa4ad 100644 --- a/pymoo/algorithms/soo/nonconvex/isres.py +++ b/pymoo/algorithms/soo/nonconvex/isres.py @@ -7,6 +7,7 @@ from pymoo.core.population import Population from pymoo.docs import parse_doc_string +import pymoo class ISRES(SRES): @@ -57,7 +58,7 @@ def _infill(self): sigmap[mu - 1:] = np.minimum(self.sigma_max, es_sigma(sigma[mu - 1:], self.tau, self.taup)) # execute the evolutionary strategy to calculate the offspring solutions - Xp[mu - 1:] = X[mu - 1:] + sigmap[mu - 1:] * np.random.normal(size=sigmap[mu - 1:].shape) + Xp[mu - 1:] = X[mu - 1:] + sigmap[mu - 1:] * pymoo.PymooPRNG().normal(size=sigmap[mu - 1:].shape) # repair the individuals which are not feasible by sampling from sigma again Xp = es_mut_repair(Xp, X, sigmap, xl, xu, 10) diff --git a/pymoo/algorithms/soo/nonconvex/pattern.py b/pymoo/algorithms/soo/nonconvex/pattern.py index 370caff92..f431a960b 100644 --- a/pymoo/algorithms/soo/nonconvex/pattern.py +++ b/pymoo/algorithms/soo/nonconvex/pattern.py @@ -9,6 +9,7 @@ from pymoo.util.display.single import SingleObjectiveOutput from pymoo.util.optimum import filter_optimum +import pymoo # ========================================================================================================= # Implementation @@ -124,7 +125,7 @@ def exploration_move(problem, center, sign, delta, rho, randomize=True): # the order for the variable iteration if randomize: - K = np.random.permutation(n_var) + K = pymoo.PymooPRNG().permutation(n_var) else: K = np.arange(n_var) diff --git a/pymoo/algorithms/soo/nonconvex/pso.py b/pymoo/algorithms/soo/nonconvex/pso.py index 
d4dbe836b..fa719d122 100644 --- a/pymoo/algorithms/soo/nonconvex/pso.py +++ b/pymoo/algorithms/soo/nonconvex/pso.py @@ -19,6 +19,7 @@ from pymoo.visualization.fitness_landscape import FitnessLandscape from pymoo.visualization.video.callback_video import AnimationCallback +import pymoo # ========================================================================================================= # Display @@ -104,10 +105,10 @@ def pso_equation(X, P_X, S_X, V, V_max, w, c1, c2, r1=None, r2=None): n_particles, n_var = X.shape if r1 is None: - r1 = np.random.random((n_particles, n_var)) + r1 = pymoo.PymooPRNG().random((n_particles, n_var)) if r2 is None: - r2 = np.random.random((n_particles, n_var)) + r2 = pymoo.PymooPRNG().random((n_particles, n_var)) inerta = w * V cognitive = c1 * r1 * (P_X - X) @@ -212,7 +213,7 @@ def _initialize_advance(self, infills=None, **kwargs): particles = self.pop if self.initial_velocity == "random": - init_V = np.random.random((len(particles), self.problem.n_var)) * self.V_max[None, :] + init_V = pymoo.PymooPRNG().random((len(particles), self.problem.n_var)) * self.V_max[None, :] elif self.initial_velocity == "zero": init_V = np.zeros((len(particles), self.problem.n_var)) else: @@ -256,7 +257,7 @@ def _infill(self): # try to improve the current best with a pertubation if self.pertube_best: k = FitnessSurvival().do(problem, pbest, n_survive=1, return_indices=True)[0] - mut = PM(prob=0.9, eta=np.random.uniform(5, 30), at_least_once=False) + mut = PM(prob=0.9, eta=pymoo.PymooPRNG().uniform(5, 30), at_least_once=False) mutant = mut(problem, Population(Individual(X=pbest[k].X)))[0] off[k].set("X", mutant.X) @@ -302,7 +303,7 @@ def _adapt(self): S = np.array([S1_exploration(f), S2_exploitation(f), S3_convergence(f), S4_jumping_out(f)]) strategy = S.argmax() + 1 - delta = 0.05 + (np.random.random() * 0.05) + delta = 0.05 + (pymoo.PymooPRNG().random() * 0.05) if strategy == 1: c1 += delta diff --git a/pymoo/algorithms/soo/nonconvex/pso_ep.py b/pymoo/algorithms/soo/nonconvex/pso_ep.py index 40aab2967..ec014e367 100644 --- a/pymoo/algorithms/soo/nonconvex/pso_ep.py +++ b/pymoo/algorithms/soo/nonconvex/pso_ep.py @@ -34,6 +34,7 @@ from pymoo.util.display.single import SingleObjectiveOutput from pymoo.util.sliding_window import SlidingWindow +import pymoo # ========================================================================================================= # Mating @@ -42,7 +43,7 @@ def pso_canonical(V, X, P_X, L_X, w, c1, c2): n_particles, n_var = X.shape - r1, r2 = np.random.random((n_particles, n_var)), np.random.random((n_particles, n_var)) + r1, r2 = pymoo.PymooPRNG().random((n_particles, n_var)), pymoo.PymooPRNG().random((n_particles, n_var)) Vp = w * V + c1 * r1 * (P_X - X) + c2 * r2 * (L_X - X) return Vp @@ -50,10 +51,10 @@ def pso_canonical(V, X, P_X, L_X, w, c1, c2): def pso_rotation_invariant(V, X, P_X, L_X, inertia, c1, c2): n_particles, n_var = X.shape - r1 = np.random.random((n_particles, n_var)) + r1 = pymoo.PymooPRNG().random((n_particles, n_var)) p = X + c1 * r1 * (P_X - X) - r2 = np.random.random((n_particles, n_var)) + r2 = pymoo.PymooPRNG().random((n_particles, n_var)) l = X + c2 * r2 * (L_X - X) G = (X + p + l) / 3 @@ -67,10 +68,10 @@ def pso_rotation_invariant(V, X, P_X, L_X, inertia, c1, c2): def alea_sphere(G, radius): n, m = G.shape - x = np.random.normal(size=(n, m)) + x = pymoo.PymooPRNG().normal(size=(n, m)) l = np.sqrt(np.sum(x ** 2, axis=1, keepdims=True)) - r = np.random.random(size=(n, 1)) + r = pymoo.PymooPRNG().random(size=(n, 1)) x = r * 
radius * x / l return x + G @@ -168,7 +169,7 @@ def get_neighbors(name, N): K = 3 neighbors = [] for i in range(N): - vals = np.random.permutation(N)[:K] + vals = pymoo.PymooPRNG().permutation(N)[:K] neighbors.append([i] + vals.tolist()) return neighbors else: diff --git a/pymoo/algorithms/soo/univariate/backtracking.py b/pymoo/algorithms/soo/univariate/backtracking.py index 8e764451b..4773f46af 100644 --- a/pymoo/algorithms/soo/univariate/backtracking.py +++ b/pymoo/algorithms/soo/univariate/backtracking.py @@ -4,7 +4,7 @@ from pymoo.core.termination import NoTermination from pymoo.optimize import minimize from pymoo.problems.single import Sphere - +import pymoo class BacktrackingLineSearch(LineSearch): @@ -45,7 +45,7 @@ def step(self): problem = Sphere() - X = np.array(np.random.random(problem.n_var)) + X = np.array(pymoo.PymooPRNG().random(problem.n_var)) point = Solution(X=X) Evaluator(evaluate_values_of=["F", "dF"]).eval(problem, point) diff --git a/pymoo/core/algorithm.py b/pymoo/core/algorithm.py index f3fc071f5..e6de853f3 100644 --- a/pymoo/core/algorithm.py +++ b/pymoo/core/algorithm.py @@ -13,6 +13,7 @@ from pymoo.util.misc import termination_from_tuple from pymoo.util.optimum import filter_optimum +import pymoo class Algorithm: @@ -99,6 +100,8 @@ def __init__(self, self.start_time = None def setup(self, problem, **kwargs): + # Use the global random number generator + # the problem to be solved by the algorithm self.problem = problem @@ -110,14 +113,12 @@ def setup(self, problem, **kwargs): # if seed is a boolean and true, then randomly set a seed (useful to reproduce runs) seed = self.seed if isinstance(seed, bool) and seed: - seed = np.random.randint(0, 10000000) + seed = pymoo.PymooPRNG().integers(0, 10000000) self.seed = seed # if a seed is set, then use it to call the random number generators if seed is not None: - import random - random.seed(seed) - np.random.seed(seed) + pymoo.PymooPRNG(seed) # make sure that some type of termination criterion is set if self.termination is None: diff --git a/pymoo/core/crossover.py b/pymoo/core/crossover.py index ad35e80ec..ed10f2f2f 100644 --- a/pymoo/core/crossover.py +++ b/pymoo/core/crossover.py @@ -4,6 +4,7 @@ from pymoo.core.population import Population from pymoo.core.variable import Real, get +import pymoo class Crossover(Operator): @@ -39,7 +40,7 @@ def do(self, problem, pop, parents=None, **kwargs): prob = get(self.prob, size=n_matings) # a boolean mask when crossover is actually executed - cross = np.random.random(n_matings) < prob + cross = pymoo.PymooPRNG().random(n_matings) < prob # the design space from the parents used for the crossover if np.any(cross): @@ -52,13 +53,13 @@ def do(self, problem, pop, parents=None, **kwargs): # now set the parents whenever NO crossover has been applied for k in np.flatnonzero(~cross): if n_offsprings < n_parents: - s = np.random.choice(np.arange(self.n_parents), size=n_offsprings, replace=False) + s = pymoo.PymooPRNG().choice(np.arange(self.n_parents), size=n_offsprings, replace=False) elif n_offsprings == n_parents: s = np.arange(n_parents) else: s = [] while len(s) < n_offsprings: - s.extend(np.random.permutation(n_parents)) + s.extend(pymoo.PymooPRNG().permutation(n_parents)) s = s[:n_offsprings] Xp[:, k] = np.copy(X[s, k]) diff --git a/pymoo/core/mutation.py b/pymoo/core/mutation.py index 6a75fb5f5..6a3f6b690 100644 --- a/pymoo/core/mutation.py +++ b/pymoo/core/mutation.py @@ -4,7 +4,7 @@ from pymoo.core.operator import Operator from pymoo.core.variable import Real, get - +import pymoo 
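Most hunks in this patch apply the same mechanical mapping from the legacy np.random functions to the numpy.random.Generator API that PymooPRNG() returns. A short reference sketch in plain numpy (illustrative only); the two caveats flagged in the comments are easy to miss when porting:

import numpy as np

rng = np.random.default_rng()        # equivalent to what pymoo.PymooPRNG() hands back

rng.random((3, 2))                   # np.random.random / np.random.rand
rng.integers(1, 100, size=5)         # np.random.randint; both exclude the upper bound
rng.integers(0, 10)                  # caveat: stdlib random.randint(0, 9) includes 9, so ports
                                     # from the random module need high + 1 (or endpoint=True)
rng.normal(size=(3, 2))              # np.random.normal
rng.standard_normal((3, 2))          # np.random.randn; caveat: Generator has no .randn method
rng.uniform(5, 30)                   # np.random.uniform
rng.choice(np.arange(5), size=2, replace=False)   # np.random.choice
rng.permutation(10)                  # np.random.permutation
x = np.arange(10)
rng.shuffle(x)                       # np.random.shuffle (in place)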
class Mutation(Operator): @@ -29,7 +29,7 @@ def do(self, problem, pop, inplace=True, **kwargs): # the likelihood for a mutation on the individuals prob = get(self.prob, size=n_mut) - mut = np.random.random(size=n_mut) <= prob + mut = pymoo.PymooPRNG().random(size=n_mut) <= prob # store the mutated individual back to the population pop[mut].set("X", Xp[mut]) diff --git a/pymoo/core/variable.py b/pymoo/core/variable.py index 683f0ee4c..6ceac7210 100644 --- a/pymoo/core/variable.py +++ b/pymoo/core/variable.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo class Variable(object): @@ -49,7 +49,7 @@ class Real(BoundedVariable): def _sample(self, n): low, high = self.bounds - return np.random.uniform(low=low, high=high, size=n) + return pymoo.PymooPRNG().uniform(low=low, high=high, size=n) class Integer(BoundedVariable): @@ -57,14 +57,14 @@ class Integer(BoundedVariable): def _sample(self, n): low, high = self.bounds - return np.random.randint(low, high=high + 1, size=n) + return pymoo.PymooPRNG().integers(low, high=high + 1, size=n) class Binary(BoundedVariable): vtype = bool def _sample(self, n): - return np.random.random(size=n) < 0.5 + return pymoo.PymooPRNG().random(size=n) < 0.5 class Choice(Variable): @@ -79,7 +79,7 @@ def __init__(self, value=None, options=None, all=None, **kwargs) -> None: self.all = all def _sample(self, n): - return np.random.choice(self.options, size=n) + return pymoo.PymooPRNG().choice(self.options, size=n) def get(*args, size=None, **kwargs): diff --git a/pymoo/indicators/hv/monte_carlo.py b/pymoo/indicators/hv/monte_carlo.py index 5f4ce8931..5318b6533 100644 --- a/pymoo/indicators/hv/monte_carlo.py +++ b/pymoo/indicators/hv/monte_carlo.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.indicators.hv.exact import DynamicHypervolume @@ -49,7 +49,7 @@ def _calc(self, ref_point, F): ideal = F.min(axis=0) V = np.prod(ref_point - ideal) - S = np.random.uniform(low=ideal, high=ref_point, size=(self.n_samples, M)) + S = pymoo.PymooPRNG().uniform(low=ideal, high=ref_point, size=(self.n_samples, M)) dom = np.array([np.all(F[i] <= S, axis=1) for i in range(N)]) diff --git a/pymoo/operators/crossover/binx.py b/pymoo/operators/crossover/binx.py index 2c5d1f869..ce827e73b 100644 --- a/pymoo/operators/crossover/binx.py +++ b/pymoo/operators/crossover/binx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.variable import Real, get from pymoo.util.misc import row_at_least_once_true @@ -7,7 +7,7 @@ def mut_binomial(n, m, prob, at_least_once=True): prob = np.ones(n) * prob - M = np.random.random((n, m)) < prob[:, None] + M = pymoo.PymooPRNG().random((n, m)) < prob[:, None] if at_least_once: M = row_at_least_once_true(M) diff --git a/pymoo/operators/crossover/dex.py b/pymoo/operators/crossover/dex.py index 157856498..f3ca442b6 100644 --- a/pymoo/operators/crossover/dex.py +++ b/pymoo/operators/crossover/dex.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.population import Population from pymoo.operators.crossover.binx import mut_binomial @@ -24,13 +24,13 @@ def de_differential(X, F, dither=None, jitter=True, gamma=0.0001, return_differe for i, j in pairs: if dither == "vector": - F = (F + np.random.random(n_matings) * (1 - F)) + F = (F + pymoo.PymooPRNG().random(n_matings) * (1 - F)) elif dither == "scalar": - F = F + np.random.random() * (1 - F) + F = F + pymoo.PymooPRNG().random() * (1 - F) # 
http://www.cs.ndsu.nodak.edu/~siludwig/Publish/papers/SSCI20141.pdf if jitter: - F = (F * (1 + gamma * (np.random.random(n_matings) - 0.5))) + F = (F * (1 + gamma * (pymoo.PymooPRNG().random(n_matings) - 0.5))) # an add the difference to the first vector diffs += F[:, None] * (X[i] - X[j]) @@ -119,4 +119,4 @@ def do(self, problem, pop, parents=None, **kwargs): def rnd_F(m): - return 0.5 * (1 + np.random.uniform(size=len(m))) + return 0.5 * (1 + pymoo.PymooPRNG().uniform(size=len(m))) diff --git a/pymoo/operators/crossover/erx.py b/pymoo/operators/crossover/erx.py index 25ce9738d..019bb2d38 100644 --- a/pymoo/operators/crossover/erx.py +++ b/pymoo/operators/crossover/erx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover @@ -46,7 +46,7 @@ def erx(a, b): H = calc_adjency_matrix(b, H=H) # randomly select the first node - _next = np.random.choice(list(H.keys())) + _next = pymoo.PymooPRNG().choice(list(H.keys())) y = [] while True: @@ -64,7 +64,7 @@ def erx(a, b): # if the current node does not have any neighbors if len(neighbors) == 0: - _next = np.random.choice(list(H.keys())) + _next = pymoo.PymooPRNG().choice(list(H.keys())) # otherwise search in the neighbors for a node with the fewest neighbors else: @@ -74,7 +74,7 @@ def erx(a, b): _next = [neighbors[k] for k in range(len(neighbors)) if n_neighbors[k] == min_n_neighbors] # break the tie if they might have the same number of neighbors - _next = np.random.choice(_next) + _next = pymoo.PymooPRNG().choice(_next) return y diff --git a/pymoo/operators/crossover/expx.py b/pymoo/operators/crossover/expx.py index 18c47e11d..d85d51771 100644 --- a/pymoo/operators/crossover/expx.py +++ b/pymoo/operators/crossover/expx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.variable import get, Real from pymoo.util.misc import crossover_mask, row_at_least_once_true @@ -12,7 +12,7 @@ def mut_exp(n_matings, n_var, prob, at_least_once=True): M = np.full((n_matings, n_var), False) # start point of crossover - s = np.random.randint(0, n_var, size=n_matings) + s = pymoo.PymooPRNG().integers(0, n_var, size=n_matings) # create for each individual the crossover range for i in range(n_matings): @@ -25,7 +25,7 @@ def mut_exp(n_matings, n_var, prob, at_least_once=True): current = (start + j) % n_var # replace only if random value keeps being smaller than CR - if np.random.random() <= prob[i]: + if pymoo.PymooPRNG().random() <= prob[i]: M[i, current] = True else: break diff --git a/pymoo/operators/crossover/hux.py b/pymoo/operators/crossover/hux.py index 916bd5380..3ce4d8f7c 100644 --- a/pymoo/operators/crossover/hux.py +++ b/pymoo/operators/crossover/hux.py @@ -1,7 +1,7 @@ import math import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.util.misc import crossover_mask @@ -26,7 +26,7 @@ def _do(self, _, X, **kwargs): n = math.ceil(len(I) / 2) if n > 0: - _I = I[np.random.permutation(len(I))[:n]] + _I = I[pymoo.PymooPRNG().permutation(len(I))[:n]] M[i, _I] = True _X = crossover_mask(X, M) diff --git a/pymoo/operators/crossover/nox.py b/pymoo/operators/crossover/nox.py index bf895be47..5d41d83d3 100644 --- a/pymoo/operators/crossover/nox.py +++ b/pymoo/operators/crossover/nox.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.population import Population @@ -10,4 +10,4 @@ def __init__(self): super().__init__(1, 1, 0.0) def do(self, problem, pop, **kwargs): - return 
Population.create(*[np.random.choice(parents) for parents in pop]) + return Population.create(*[pymoo.PymooPRNG().choice(parents) for parents in pop]) diff --git a/pymoo/operators/crossover/ox.py b/pymoo/operators/crossover/ox.py index c1716683f..12e4144e1 100644 --- a/pymoo/operators/crossover/ox.py +++ b/pymoo/operators/crossover/ox.py @@ -1,10 +1,10 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover def random_sequence(n): - start, end = np.sort(np.random.choice(n, 2, replace=False)) + start, end = np.sort(pymoo.PymooPRNG().choice(n, 2, replace=False)) return tuple([start, end]) diff --git a/pymoo/operators/crossover/pcx.py b/pymoo/operators/crossover/pcx.py index 05297d772..0179e761b 100644 --- a/pymoo/operators/crossover/pcx.py +++ b/pymoo/operators/crossover/pcx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.variable import Real, get from pymoo.operators.repair.bounds_repair import repair_random_init @@ -40,13 +40,13 @@ def pcx(X, eta, zeta, index): # generating zero-mean normally distributed variables sigma = D_not[:, None] * eta.repeat(n_var, axis=1) - rnd = np.random.normal(loc=0.0, scale=sigma) + rnd = pymoo.PymooPRNG().normal(loc=0.0, scale=sigma) # implemented just like the c code - generate_new.h file inner_prod = np.sum(rnd * diff_to_centroid, axis=-1, keepdims=True) noise = rnd - (inner_prod * diff_to_centroid) / dist_to_centroid[:, None] ** 2 - bias_to_centroid = np.random.normal(0.0, zeta) * diff_to_centroid + bias_to_centroid = pymoo.PymooPRNG().normal(0.0, zeta) * diff_to_centroid # the array which is finally returned Xp = X[index] + noise + bias_to_centroid diff --git a/pymoo/operators/crossover/pntx.py b/pymoo/operators/crossover/pntx.py index 48b8bedd7..d1d3430c2 100644 --- a/pymoo/operators/crossover/pntx.py +++ b/pymoo/operators/crossover/pntx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.util.misc import crossover_mask @@ -16,7 +16,7 @@ def _do(self, _, X, **kwargs): _, n_matings, n_var = X.shape # start point of crossover - r = np.row_stack([np.random.permutation(n_var - 1) + 1 for _ in range(n_matings)])[:, :self.n_points] + r = np.row_stack([pymoo.PymooPRNG().permutation(n_var - 1) + 1 for _ in range(n_matings)])[:, :self.n_points] r.sort(axis=1) r = np.column_stack([r, np.full(n_matings, n_var)]) diff --git a/pymoo/operators/crossover/sbx.py b/pymoo/operators/crossover/sbx.py index 41a1c025a..b9153b01e 100644 --- a/pymoo/operators/crossover/sbx.py +++ b/pymoo/operators/crossover/sbx.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.core.variable import Real, get from pymoo.operators.repair.bounds_repair import repair_clamp @@ -14,7 +14,7 @@ def cross_sbx(X, xl, xu, eta, prob_var, prob_bin, eps=1.0e-14): n_parents, n_matings, n_var = X.shape # the probability of a crossover for each of the variables - cross = np.random.random((n_matings, n_var)) < prob_var + cross = pymoo.PymooPRNG().random((n_matings, n_var)) < prob_var # when solutions are too close -> do not apply sbx crossover too_close = np.abs(X[0] - X[1]) <= eps @@ -36,7 +36,7 @@ def cross_sbx(X, xl, xu, eta, prob_var, prob_bin, eps=1.0e-14): prob_bin = prob_bin.repeat(n_var, axis=1)[cross] # random values for each individual - rand = np.random.random(len(eta)) + rand = pymoo.PymooPRNG().random(len(eta)) def calc_betaq(beta): alpha = 2.0 - np.power(beta, -(eta + 1.0)) @@ -61,7 +61,7 @@ def 
calc_betaq(beta): c2 = 0.5 * ((y1 + y2) + betaq * delta) # with the given probability either assign the value from the first or second parent - b = np.random.random(len(prob_bin)) < prob_bin + b = pymoo.PymooPRNG().random(len(prob_bin)) < prob_bin tmp = np.copy(c1[b]) c1[b] = c2[b] c2[b] = tmp @@ -108,13 +108,13 @@ def _do(self, problem, X, **kwargs): size=(n_matings, 1)) # set the binomial probability to zero if no exchange between individuals shall happen - rand = np.random.random((len(prob_bin), 1)) + rand = pymoo.PymooPRNG().random((len(prob_bin), 1)) prob_bin[rand > prob_exch] = 0.0 Q = cross_sbx(X.astype(float), problem.xl, problem.xu, eta, prob_var, prob_bin) if self.n_offsprings == 1: - rand = np.random.random(size=n_matings) < 0.5 + rand = pymoo.PymooPRNG().random(size=n_matings) < 0.5 Q[0, rand] = Q[1, rand] Q = Q[[0]] diff --git a/pymoo/operators/crossover/ux.py b/pymoo/operators/crossover/ux.py index 32962c8c8..fd8d8d5e6 100644 --- a/pymoo/operators/crossover/ux.py +++ b/pymoo/operators/crossover/ux.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.crossover import Crossover from pymoo.util.misc import crossover_mask @@ -11,7 +11,7 @@ def __init__(self, **kwargs): def _do(self, _, X, **kwargs): _, n_matings, n_var = X.shape - M = np.random.random((n_matings, n_var)) < 0.5 + M = pymoo.PymooPRNG().random((n_matings, n_var)) < 0.5 _X = crossover_mask(X, M) return _X diff --git a/pymoo/operators/mutation/bitflip.py b/pymoo/operators/mutation/bitflip.py index 036da779b..35c21afc5 100644 --- a/pymoo/operators/mutation/bitflip.py +++ b/pymoo/operators/mutation/bitflip.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.mutation import Mutation @@ -8,7 +8,7 @@ class BitflipMutation(Mutation): def _do(self, problem, X, **kwargs): prob_var = self.get_prob_var(problem, size=(len(X), 1)) Xp = np.copy(X) - flip = np.random.random(X.shape) < prob_var + flip = pymoo.PymooPRNG().random(X.shape) < prob_var Xp[flip] = ~X[flip] return Xp diff --git a/pymoo/operators/mutation/gauss.py b/pymoo/operators/mutation/gauss.py index e2f7a9720..4d10b60f7 100644 --- a/pymoo/operators/mutation/gauss.py +++ b/pymoo/operators/mutation/gauss.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.mutation import Mutation from pymoo.core.variable import Real, get from pymoo.operators.repair.bounds_repair import repair_random_init @@ -17,7 +17,7 @@ def mut_gauss(X, xl, xu, sigma, prob): Xp = np.full(X.shape, np.inf) - mut = np.random.random(X.shape) < prob[:, None] + mut = pymoo.PymooPRNG().random(X.shape) < prob[:, None] Xp[:, :] = X @@ -25,7 +25,7 @@ def mut_gauss(X, xl, xu, sigma, prob): _xu = np.repeat(xu[None, :], X.shape[0], axis=0)[mut] sigma = sigma[:, None].repeat(n_var, axis=1)[mut] - Xp[mut] = np.random.normal(X[mut], sigma * (_xu * _xl)) + Xp[mut] = pymoo.PymooPRNG().normal(X[mut], sigma * (_xu * _xl)) Xp = repair_random_init(Xp, X, xl, xu) diff --git a/pymoo/operators/mutation/inversion.py b/pymoo/operators/mutation/inversion.py index e2a2a076b..75d1ceeed 100644 --- a/pymoo/operators/mutation/inversion.py +++ b/pymoo/operators/mutation/inversion.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.mutation import Mutation from pymoo.operators.crossover.ox import random_sequence @@ -35,7 +35,7 @@ def __init__(self, prob=1.0): def _do(self, problem, X, **kwargs): Y = X.copy() for i, y in enumerate(X): - if np.random.random() < self.prob: + if pymoo.PymooPRNG().random() < self.prob: seq = random_sequence(len(y)) Y[i] = 
inversion_mutation(y, seq, inplace=True) diff --git a/pymoo/operators/mutation/pm.py b/pymoo/operators/mutation/pm.py index 6cdcf467a..e1ce984f7 100644 --- a/pymoo/operators/mutation/pm.py +++ b/pymoo/operators/mutation/pm.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.mutation import Mutation from pymoo.core.variable import get, Real from pymoo.operators.crossover.binx import mut_binomial @@ -34,7 +34,7 @@ def mut_pm(X, xl, xu, eta, prob, at_least_once): mut_pow = 1.0 / (eta + 1.0) - rand = np.random.random(X.shape) + rand = pymoo.PymooPRNG().random(X.shape) mask = rand <= 0.5 mask_not = np.logical_not(mask) diff --git a/pymoo/operators/mutation/rm.py b/pymoo/operators/mutation/rm.py index c0e8d3cd5..e30dbfa21 100644 --- a/pymoo/operators/mutation/rm.py +++ b/pymoo/operators/mutation/rm.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.mutation import Mutation @@ -14,7 +14,7 @@ def _do(self, problem, X, **kwargs): prob_var = self.get_prob_var(problem, size=len(X)) for k, (_, var) in enumerate(problem.vars.items()): - mut = np.where(np.random.random(len(X)) < prob_var)[0] + mut = np.where(pymoo.PymooPRNG().random(len(X)) < prob_var)[0] v = var.sample(len(mut)) X[mut, k] = v diff --git a/pymoo/operators/repair/bounds_repair.py b/pymoo/operators/repair/bounds_repair.py index 66e27a904..4ca196268 100644 --- a/pymoo/operators/repair/bounds_repair.py +++ b/pymoo/operators/repair/bounds_repair.py @@ -1,7 +1,7 @@ import abc import numpy as np - +import pymoo from pymoo.core.population import Population from pymoo.core.repair import Repair @@ -59,11 +59,11 @@ def repair_random_init(Xp, X, xl, xu): i, j = np.where(Xp < XL) if len(i) > 0: - Xp[i, j] = XL[i, j] + np.random.random(len(i)) * (X[i, j] - XL[i, j]) + Xp[i, j] = XL[i, j] + pymoo.PymooPRNG().random(len(i)) * (X[i, j] - XL[i, j]) i, j = np.where(Xp > XU) if len(i) > 0: - Xp[i, j] = XU[i, j] - np.random.random(len(i)) * (XU[i, j] - X[i, j]) + Xp[i, j] = XU[i, j] - pymoo.PymooPRNG().random(len(i)) * (XU[i, j] - X[i, j]) return Xp diff --git a/pymoo/operators/repair/inverse_penalty.py b/pymoo/operators/repair/inverse_penalty.py index 8a3792360..c2476d4b1 100644 --- a/pymoo/operators/repair/inverse_penalty.py +++ b/pymoo/operators/repair/inverse_penalty.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.operators.repair.bounds_repair import BoundsRepair @@ -31,7 +31,7 @@ def inverse_penality(x, p, xl, xu, alpha=None): alpha = (normv - d) / normv alpha += 1e-32 - r = np.random.random() + r = pymoo.PymooPRNG().random() Y = d * (1.0 + alpha * np.tan(r * np.arctan((D - d) / (alpha * d)))) ret = x + (p - x) * Y / normv diff --git a/pymoo/operators/sampling/lhs.py b/pymoo/operators/sampling/lhs.py index 493598638..c6d64ec06 100644 --- a/pymoo/operators/sampling/lhs.py +++ b/pymoo/operators/sampling/lhs.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.sampling import Sampling from pymoo.util.misc import cdist @@ -38,11 +38,11 @@ def sampling_lhs(n_samples, n_var, xl=0, xu=1, smooth=True, criterion=criterion_ def sampling_lhs_unit(n_samples, n_var, smooth=True): - X = np.random.random(size=(n_samples, n_var)) + X = pymoo.PymooPRNG().random(size=(n_samples, n_var)) Xp = X.argsort(axis=0) + 1 if smooth: - Xp = Xp - np.random.random(Xp.shape) + Xp = Xp - pymoo.PymooPRNG().random(Xp.shape) else: Xp = Xp - 0.5 Xp /= n_samples diff --git a/pymoo/operators/sampling/rnd.py b/pymoo/operators/sampling/rnd.py index bac33c92c..3aba02b8f 100644 --- a/pymoo/operators/sampling/rnd.py +++ 
b/pymoo/operators/sampling/rnd.py @@ -1,10 +1,10 @@ import numpy as np - +import pymoo from pymoo.core.sampling import Sampling def random(problem, n_samples=1): - X = np.random.random((n_samples, problem.n_var)) + X = pymoo.PymooPRNG().random((n_samples, problem.n_var)) if problem.has_bounds(): xl, xu = problem.bounds() @@ -17,7 +17,7 @@ def random(problem, n_samples=1): class FloatRandomSampling(Sampling): def _do(self, problem, n_samples, **kwargs): - X = np.random.random((n_samples, problem.n_var)) + X = pymoo.PymooPRNG().random((n_samples, problem.n_var)) if problem.has_bounds(): xl, xu = problem.bounds() @@ -30,7 +30,7 @@ def _do(self, problem, n_samples, **kwargs): class BinaryRandomSampling(Sampling): def _do(self, problem, n_samples, **kwargs): - val = np.random.random((n_samples, problem.n_var)) + val = pymoo.PymooPRNG().random((n_samples, problem.n_var)) return (val < 0.5).astype(bool) @@ -38,7 +38,7 @@ class IntegerRandomSampling(FloatRandomSampling): def _do(self, problem, n_samples, **kwargs): n, (xl, xu) = problem.n_var, problem.bounds() - return np.column_stack([np.random.randint(xl[k], xu[k] + 1, size=n_samples) for k in range(n)]) + return np.column_stack([pymoo.PymooPRNG().integers(xl[k], xu[k] + 1, size=n_samples) for k in range(n)]) class PermutationRandomSampling(Sampling): @@ -46,5 +46,5 @@ class PermutationRandomSampling(Sampling): def _do(self, problem, n_samples, **kwargs): X = np.full((n_samples, problem.n_var), 0, dtype=int) for i in range(n_samples): - X[i, :] = np.random.permutation(problem.n_var) + X[i, :] = pymoo.PymooPRNG().permutation(problem.n_var) return X diff --git a/pymoo/operators/selection/rnd.py b/pymoo/operators/selection/rnd.py index 487460669..85323691c 100644 --- a/pymoo/operators/selection/rnd.py +++ b/pymoo/operators/selection/rnd.py @@ -1,7 +1,7 @@ import math import numpy as np - +import pymoo from pymoo.core.selection import Selection from pymoo.util.misc import random_permuations @@ -58,9 +58,9 @@ def fast_fill_random(X, N, columns=None, Xp=None, n_max_attempts=10): for _ in range(n_max_attempts): if len(rem) > N: - X[rem, col] = np.random.choice(N, replace=True, size=len(rem)) + X[rem, col] = pymoo.PymooPRNG().choice(N, replace=True, size=len(rem)) else: - X[rem, col] = np.random.permutation(N)[:len(rem)] + X[rem, col] = pymoo.PymooPRNG().permutation(N)[:len(rem)] rem = np.where((X[rem, col][:, None] == D[rem]).any(axis=1))[0] diff --git a/pymoo/operators/selection/tournament.py b/pymoo/operators/selection/tournament.py index 897e714f4..5469ba5f1 100644 --- a/pymoo/operators/selection/tournament.py +++ b/pymoo/operators/selection/tournament.py @@ -1,7 +1,7 @@ import math import numpy as np - +import pymoo from pymoo.core.selection import Selection from pymoo.util.misc import random_permuations @@ -59,7 +59,7 @@ def compare(a, a_val, b, b_val, method, return_random_if_equal=False): return b else: if return_random_if_equal: - return np.random.choice([a, b]) + return pymoo.PymooPRNG().choice([a, b]) else: return None elif method == 'smaller_is_better': @@ -69,7 +69,7 @@ def compare(a, a_val, b, b_val, method, return_random_if_equal=False): return b else: if return_random_if_equal: - return np.random.choice([a, b]) + return pymoo.PymooPRNG().choice([a, b]) else: return None else: diff --git a/pymoo/problems/many/wfg.py b/pymoo/problems/many/wfg.py index b69113b9e..ec3503a6b 100644 --- a/pymoo/problems/many/wfg.py +++ b/pymoo/problems/many/wfg.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo from pymoo.core.problem import Problem from 
pymoo.problems.many import generic_sphere, get_ref_dirs from pymoo.util.function_loader import load_function @@ -55,7 +55,7 @@ def _calculate(self, x, s, h): return x[:, -1][:, None] + s * np.column_stack(h) def _rand_optimal_position(self, n): - return np.random.random((n, self.k)) + return pymoo.PymooPRNG().random((n, self.k)) def _positional_to_optimal(self, K): suffix = np.full((len(K), self.l), 0.35) @@ -142,7 +142,7 @@ def _evaluate(self, x, out, *args, **kwargs): out["F"] = self._calculate(y, self.S, h) def _rand_optimal_position(self, n): - return np.power(np.random.random((n, self.k)), 50.0) + return np.power(pymoo.PymooPRNG().random((n, self.k)), 50.0) class WFG2(WFG): diff --git a/pymoo/problems/single/flowshop_scheduling.py b/pymoo/problems/single/flowshop_scheduling.py index 153a8ac14..62a8b804b 100644 --- a/pymoo/problems/single/flowshop_scheduling.py +++ b/pymoo/problems/single/flowshop_scheduling.py @@ -1,6 +1,6 @@ import matplotlib.pyplot as plt import numpy as np - +import pymoo from pymoo.core.problem import ElementwiseProblem @@ -77,8 +77,9 @@ def get_machine_times(self, x): def create_random_flowshop_problem(n_machines, n_jobs, seed=None): if seed is not None: - np.random.seed(seed) - T = np.random.random((n_machines, n_jobs)) * 50 + 50 + + pymoo.PymooPRNG(seed) + T = pymoo.PymooPRNG().random((n_machines, n_jobs)) * 50 + 50 return FlowshopScheduling(T) diff --git a/pymoo/problems/single/knapsack.py b/pymoo/problems/single/knapsack.py index 57b3c4010..e1174adc6 100644 --- a/pymoo/problems/single/knapsack.py +++ b/pymoo/problems/single/knapsack.py @@ -1,6 +1,6 @@ import numpy as np import numpy as np - +import pymoo from pymoo.core.problem import Problem @@ -34,10 +34,12 @@ def _evaluate(self, x, out, *args, **kwargs): out["G"] = (np.sum(self.W * x, axis=1) - self.C) -def create_random_knapsack_problem(n_items, seed=1, variant="single"): - np.random.seed(seed) - P = np.random.randint(1, 100, size=n_items) - W = np.random.randint(1, 100, size=n_items) +def create_random_knapsack_problem(n_items, seed=None, variant="single"): + if seed is not None: + + pymoo.PymooPRNG(seed) + P = pymoo.PymooPRNG().integers(1, 100, size=n_items) + W = pymoo.PymooPRNG().integers(1, 100, size=n_items) C = int(np.sum(W) / 10) if variant == "single": diff --git a/pymoo/problems/single/traveling_salesman.py b/pymoo/problems/single/traveling_salesman.py index fc612d79b..8be3acd85 100644 --- a/pymoo/problems/single/traveling_salesman.py +++ b/pymoo/problems/single/traveling_salesman.py @@ -1,7 +1,7 @@ import matplotlib.pyplot as plt import numpy as np from scipy.spatial.distance import cdist - +import pymoo from pymoo.core.problem import ElementwiseProblem @@ -48,9 +48,10 @@ def get_route_length(self, x): def create_random_tsp_problem(n_cities, grid_width=100.0, grid_height=None, seed=None): if seed is not None: - np.random.seed(seed) + + pymoo.PymooPRNG(seed) grid_height = grid_height if grid_height is not None else grid_width - cities = np.random.random((n_cities, 2)) * [grid_width, grid_height] + cities = pymoo.PymooPRNG().random((n_cities, 2)) * [grid_width, grid_height] return TravelingSalesman(cities) diff --git a/pymoo/util/archive.py b/pymoo/util/archive.py index d507db597..07b42761f 100644 --- a/pymoo/util/archive.py +++ b/pymoo/util/archive.py @@ -4,6 +4,7 @@ from pymoo.core.population import Population, merge from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting +import pymoo class Truncation: @@ -14,7 +15,7 @@ def __call__(self, sols, k): class 
RandomTruncation(Truncation): def __call__(self, sols, k): - return np.random.choice(sols, size=k, replace=False) + return pymoo.PymooPRNG().choice(sols, size=k, replace=False) class SurvivalTruncation(Truncation): diff --git a/pymoo/util/misc.py b/pymoo/util/misc.py index 392e718c2..8b8599e34 100644 --- a/pymoo/util/misc.py +++ b/pymoo/util/misc.py @@ -3,7 +3,7 @@ from itertools import combinations import numpy as np - +import pymoo from pymoo.core.population import Population from pymoo.core.sampling import Sampling @@ -61,7 +61,7 @@ def parameter_less_constraints(F, CV, F_max=None): def random_permuations(n, l, concat=True): P = [] for i in range(n): - P.append(np.random.permutation(l)) + P.append(pymoo.PymooPRNG().permutation(l)) if concat: P = np.concatenate(P) return P @@ -456,5 +456,5 @@ def crossover_mask(X, M): def row_at_least_once_true(M): _, d = M.shape for k in np.where(~np.any(M, axis=1))[0]: - M[k, np.random.randint(d)] = True + M[k, pymoo.PymooPRNG().integers(d)] = True return M \ No newline at end of file diff --git a/pymoo/util/randomized_argsort.py b/pymoo/util/randomized_argsort.py index 3fe94fefb..81ebc7117 100644 --- a/pymoo/util/randomized_argsort.py +++ b/pymoo/util/randomized_argsort.py @@ -1,11 +1,11 @@ import numpy as np - +import pymoo from pymoo.util.misc import swap def randomized_argsort(A, method="numpy", order='ascending'): if method == "numpy": - P = np.random.permutation(len(A)) + P = pymoo.PymooPRNG().permutation(len(A)) I = np.argsort(A[P], kind='quicksort') I = P[I] @@ -32,7 +32,7 @@ def quicksort(A): def _quicksort(A, I, left, right): if left < right: - index = np.random.randint(left, right + 1) + index = pymoo.PymooPRNG().integers(left, right + 1) swap(I, right, index) pivot = A[I[right]] diff --git a/pymoo/util/ref_dirs/construction.py b/pymoo/util/ref_dirs/construction.py index 237ac27d9..b30b7f9e2 100644 --- a/pymoo/util/ref_dirs/construction.py +++ b/pymoo/util/ref_dirs/construction.py @@ -5,6 +5,7 @@ from pymoo.util.ref_dirs.optimizer import Adam from pymoo.util.reference_direction import ReferenceDirectionFactory, map_onto_unit_simplex +import pymoo def calc_dist_to_others(x, X): return - np.sqrt(((x - X) ** 2).sum(axis=1)).min() @@ -54,7 +55,7 @@ def _do(self): def next(self): - x = np.random.random((self.n_samples, self.n_dim)) + x = pymoo.PymooPRNG().random((self.n_samples, self.n_dim)) x = map_onto_unit_simplex(x, "kraemer") x = x[vectorized_cdist(x, self.X).min(axis=1).argmax()] diff --git a/pymoo/util/reference_direction.py b/pymoo/util/reference_direction.py index f0f82f187..a45bca3d7 100644 --- a/pymoo/util/reference_direction.py +++ b/pymoo/util/reference_direction.py @@ -4,7 +4,7 @@ from scipy import special from pymoo.util.misc import find_duplicates, cdist - +import pymoo # ========================================================================================================= # Model @@ -40,7 +40,8 @@ def do(self): # set the random seed if it is provided if self.seed is not None: - np.random.seed(self.seed) + + pymoo.PymooPRNG(self.seed) if self.n_dim == 1: return np.array([[1.0]]) @@ -181,10 +182,10 @@ def get_rng(seed = None): def sample_on_unit_simplex(n_points, n_dim, unit_simplex_mapping="kraemer", seed = None): if unit_simplex_mapping == "sum": - rnd = map_onto_unit_simplex(get_rng(seed).random((n_points, n_dim)), "sum") + rnd = map_onto_unit_simplex(pymoo.PymooPRNG().random((n_points, n_dim)), "sum") elif unit_simplex_mapping == "kraemer": - rnd = map_onto_unit_simplex(get_rng(seed).random((n_points, n_dim)), "kraemer") + 
rnd = map_onto_unit_simplex(pymoo.PymooPRNG().random((n_points, n_dim)), "kraemer") elif unit_simplex_mapping == "das-dennis": n_partitions = get_partition_closest_to_points(n_points, n_dim) @@ -234,7 +235,7 @@ def select_points_with_maximum_distance(X, n_select, selected=[]): # if no selection provided pick randomly in the beginning if len(selected) == 0: - selected = [np.random.randint(len(X))] + selected = [pymoo.PymooPRNG().integers(len(X))] # create variables to store what selected and what not not_selected = [i for i in range(n_points) if i not in selected] diff --git a/pymoo/util/roulette.py b/pymoo/util/roulette.py index 2612bebcd..32cef4cbf 100644 --- a/pymoo/util/roulette.py +++ b/pymoo/util/roulette.py @@ -1,5 +1,5 @@ import numpy as np - +import pymoo class RouletteWheelSelection: @@ -12,9 +12,9 @@ def __init__(self, val, larger_is_better=True): def next(self, n=None): if n is None: - X = np.random.random((1, 1)) + X = pymoo.PymooPRNG().random((1, 1)) else: - X = np.random.random((n, 1)) + X = pymoo.PymooPRNG().random((n, 1)) if n > 1: X.repeat(n - 1, axis=1) diff --git a/pymoo/util/stochastic_ranking.py b/pymoo/util/stochastic_ranking.py index c02dd90e2..357a64544 100644 --- a/pymoo/util/stochastic_ranking.py +++ b/pymoo/util/stochastic_ranking.py @@ -1,7 +1,7 @@ from pymoo.util.misc import swap import numpy as np - +import pymoo def stochastic_ranking(f, phi, pr, I=None): _lambda = len(f) @@ -15,7 +15,7 @@ def stochastic_ranking(f, phi, pr, I=None): for j in range(_lambda - 1): - u = np.random.random() + u = pymoo.PymooPRNG().random() if u < pr or (phi[I[j]] == 0 and phi[I[j + 1]] == 0): if f[I[j]] > f[I[j + 1]]: diff --git a/pymoo/vendor/gta.py b/pymoo/vendor/gta.py index 754901c17..b7227b72f 100644 --- a/pymoo/vendor/gta.py +++ b/pymoo/vendor/gta.py @@ -10,7 +10,7 @@ # !/bin/python import numpy as np -from random import randint +import pymoo from math import floor, fabs, sin, pi, cos, sqrt ## Parameter configuration ## @@ -520,7 +520,7 @@ def dMOP3(X, tau, nt, taut, r, rIteration): """dMOP3 dynamic benchmark problem """ if tau % taut == 0 and rIteration != tau: - r = randint(0, 9) + r = pymoo.PymooPRNG().integers(0, 9) rIteration = tau XII = X[:r] + X[r + 1:] diff --git a/pymoo/vendor/vendor_cmaes.py b/pymoo/vendor/vendor_cmaes.py index ac17c1ec9..14efa6b1a 100644 --- a/pymoo/vendor/vendor_cmaes.py +++ b/pymoo/vendor/vendor_cmaes.py @@ -7,6 +7,8 @@ from cma.utilities import utils from cma.utilities.math import Mh +import pymoo + all_stoppings = [] def void(_): @@ -87,9 +89,9 @@ def my_fmin(x0, restarts += 1 # A small restart doesn't count in the total runs_with_small += 1 # _Before_ it's used in popsize_lastlarge - sigma_factor = 0.01 ** np.random.uniform() # Local search + sigma_factor = 0.01 ** pymoo.PymooPRNG().uniform() # Local search popsize_multiplier = fmin_options['incpopsize'] ** (irun - runs_with_small) - opts['popsize'] = np.floor(popsize0 * popsize_multiplier ** (np.random.uniform() ** 2)) + opts['popsize'] = np.floor(popsize0 * popsize_multiplier ** (pymoo.PymooPRNG().uniform() ** 2)) opts['maxiter'] = min(maxiter0, 0.5 * sum(large_i) / opts['popsize']) # print('small basemul %s --> %s; maxiter %s' % (popsize_multiplier, opts['popsize'], opts['maxiter'])) @@ -197,8 +199,8 @@ def objective_function(x, *args): if 11 < 3 and opts['vv']: # inject a solution # use option check_point = [0] - if 0 * np.random.randn() >= 0: - X[0] = 0 + opts['vv'] * es.sigma ** 0 * np.random.randn(es.N) + if 0 * pymoo.PymooPRNG().randn() >= 0: + X[0] = 0 + opts['vv'] * es.sigma ** 0 * 
diff --git a/pymoo/vendor/vendor_cmaes.py b/pymoo/vendor/vendor_cmaes.py
index ac17c1ec9..14efa6b1a 100644
--- a/pymoo/vendor/vendor_cmaes.py
+++ b/pymoo/vendor/vendor_cmaes.py
@@ -7,6 +7,8 @@
 from cma.utilities import utils
 from cma.utilities.math import Mh
 
+import pymoo
+
 all_stoppings = []
 
 def void(_):
@@ -87,9 +89,9 @@ def my_fmin(x0,
                     restarts += 1  # A small restart doesn't count in the total
                     runs_with_small += 1  # _Before_ it's used in popsize_lastlarge
 
-                    sigma_factor = 0.01 ** np.random.uniform()  # Local search
+                    sigma_factor = 0.01 ** pymoo.PymooPRNG().uniform()  # Local search
                     popsize_multiplier = fmin_options['incpopsize'] ** (irun - runs_with_small)
-                    opts['popsize'] = np.floor(popsize0 * popsize_multiplier ** (np.random.uniform() ** 2))
+                    opts['popsize'] = np.floor(popsize0 * popsize_multiplier ** (pymoo.PymooPRNG().uniform() ** 2))
                     opts['maxiter'] = min(maxiter0, 0.5 * sum(large_i) / opts['popsize'])
                     # print('small basemul %s --> %s; maxiter %s' % (popsize_multiplier, opts['popsize'], opts['maxiter']))
 
@@ -197,8 +199,8 @@ def objective_function(x, *args):
 
            if 11 < 3 and opts['vv']:  # inject a solution
                 # use option check_point = [0]
-                if 0 * np.random.randn() >= 0:
-                    X[0] = 0 + opts['vv'] * es.sigma ** 0 * np.random.randn(es.N)
+                if 0 * pymoo.PymooPRNG().standard_normal() >= 0:
+                    X[0] = 0 + opts['vv'] * es.sigma ** 0 * pymoo.PymooPRNG().standard_normal(es.N)
             fit[0] = yield X[0]  # print fit[0]
             if es.opts['verbose'] > 4:  # may be undesirable with dynamic fitness (e.g. Augmented Lagrangian)
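
Note: `np.random.default_rng()` returns a `Generator`, which does not expose the legacy `randn`/`rand`/`randint` names; that is why the two lines above call `standard_normal` instead of `randn`. For reference, the legacy calls touched by this patch map onto the Generator API roughly as follows (standard NumPy behaviour, shown only as a sketch, not part of the patch):

    import numpy as np

    rng = np.random.default_rng()
    rng.random((3, 2))               # was np.random.random(...)
    rng.standard_normal(5)           # was np.random.randn(5)
    rng.integers(0, 10)              # was np.random.randint(0, 10); upper bound stays exclusive
    rng.uniform()                    # was np.random.uniform()
    rng.choice(10, 2, replace=False) # was np.random.choice(...)
    rng.permutation(10)              # same name as before
    rng.shuffle(np.arange(10))       # same name as before
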
diff --git a/setup.py b/setup.py
index 02bbf3c9e..b5d18b79d 100644
--- a/setup.py
+++ b/setup.py
@@ -40,7 +40,25 @@
         'cma==3.2.2',
         'alive-progress',
         'dill',
-        'Deprecated'],
+        'Deprecated'
+    ],
+    extras_require={
+        'AGEMOEA': ["numba"],
+        'test': [
+            "numba",
+            "Cython",
+            "pytest",
+            "nbformat",
+            "wheel",
+            "jupyter",
+            "pyrecorder",
+            "optproblems",
+            "pandas",
+            "ipython",
+            "ipykernel",
+            "optuna",
+        ]
+    },
     platforms='any',
     classifiers=[
         'Intended Audience :: Developers',
diff --git a/tests/algorithms/test_algorithms.py b/tests/algorithms/test_algorithms.py
index 67e488628..b460cb11d 100644
--- a/tests/algorithms/test_algorithms.py
+++ b/tests/algorithms/test_algorithms.py
@@ -7,13 +7,19 @@
 from pymoo.optimize import minimize
 from pymoo.problems.multi import ZDT
 
+import pymoo
 
 def test_same_seed_same_result():
     problem = get_problem("zdt3")
     algorithm = NSGA2(pop_size=100, eliminate_duplicates=True)
+    # get the result specifying a fixed seed
     res1 = minimize(problem, algorithm, ('n_gen', 20), seed=1)
-    np.random.seed(200)
+
+    # set a new seed for the default global random number generator
+    pymoo.PymooPRNG(200)
+
+    # get the result with specifying the same seed
     res2 = minimize(problem, algorithm, ('n_gen', 20), seed=1)
 
     np.testing.assert_almost_equal(res1.X, res2.X)
diff --git a/tests/algorithms/test_ctaea.py b/tests/algorithms/test_ctaea.py
index 405263aba..3bc93ac06 100644
--- a/tests/algorithms/test_ctaea.py
+++ b/tests/algorithms/test_ctaea.py
@@ -225,7 +225,6 @@ def test_mating_comparison(ref_dirs, evaluator):
 
 
 def test_restricted_mating_selection(ref_dirs, evaluator):
-    np.random.seed(200)
     selection = RestrictedMating(func_comp=comp_by_cv_dom_then_random)
     problem = C3DTLZ4(n_var=12, n_obj=3)
diff --git a/tests/algorithms/test_pattern_search.py b/tests/algorithms/test_pattern_search.py
index 95d841251..fb83511c7 100644
--- a/tests/algorithms/test_pattern_search.py
+++ b/tests/algorithms/test_pattern_search.py
@@ -8,6 +8,7 @@
 from pymoo.optimize import minimize
 from pymoo.problems.single import Sphere, Himmelblau
 
+import pymoo
 
 PROBLEMS = [
     Himmelblau(),
@@ -21,7 +22,8 @@
 def test_against_original_implementation(problem, seed, bounds):
     problem = copy(problem)
 
-    np.random.seed(seed)
+
+    pymoo.PymooPRNG(seed)
 
     x0 = FloatRandomSampling().do(problem, 1)[0].X
 
     if not bounds:
diff --git a/tests/algorithms/test_rank_and_crowding.py b/tests/algorithms/test_rank_and_crowding.py
index b08adde36..607426341 100644
--- a/tests/algorithms/test_rank_and_crowding.py
+++ b/tests/algorithms/test_rank_and_crowding.py
@@ -11,6 +11,7 @@
 from pymoo.util.mnn import calc_mnn as calc_mnn_python
 from pymoo.util.mnn import calc_2nn as calc_2nn_python
 
+import pymoo
 
 calc_mnn = load_function("calc_mnn")
 calc_2nn = load_function("calc_2nn")
@@ -100,18 +101,19 @@ def test_mnn():
     surv_mnn_py = RankAndCrowding(crowding_func=calc_mnn_python)
     surv_2nn_py = RankAndCrowding(crowding_func=calc_2nn_python)
 
-    np.random.seed(12)
+
+    pymoo.PymooPRNG(12)
     pop_mnn = surv_mnn.do(problem, res.pop, n_survive=80)
 
-    np.random.seed(12)
+    pymoo.PymooPRNG(12)
     pop_mnn_py = surv_mnn_py.do(problem, res.pop, n_survive=80)
 
     assert np.sum(np.abs(pop_mnn.get("F") - pop_mnn_py.get("F"))) <= 1e-8
 
-    np.random.seed(12)
+    pymoo.PymooPRNG(12)
     pop_2nn = surv_2nn.do(problem, res.pop, n_survive=70)
 
-    np.random.seed(12)
+    pymoo.PymooPRNG(12)
     pop_2nn_py = surv_2nn_py.do(problem, res.pop, n_survive=70)
 
     assert np.sum(np.abs(pop_2nn.get("F") - pop_2nn_py.get("F"))) <= 1e-8
\ No newline at end of file
diff --git a/tests/algorithms/test_single_objective.py b/tests/algorithms/test_single_objective.py
index 078fdc3d4..65b16550c 100644
--- a/tests/algorithms/test_single_objective.py
+++ b/tests/algorithms/test_single_objective.py
@@ -1,6 +1,7 @@
 import numpy as np
 import pytest
 
+import pymoo
 from pymoo.algorithms.soo.nonconvex.de import DE
 from pymoo.algorithms.soo.nonconvex.ga import GA
 from pymoo.algorithms.soo.nonconvex.nelder import NelderMead
@@ -63,10 +64,11 @@ def test_sphere_with_constraints(algorithm, seed):
 @pytest.mark.parametrize('seed', SEEDS)
 @pytest.mark.parametrize('clazz', [NelderMead, PatternSearch], ids=["nelder", "pattern"])
 def test_sphere_no_bounds(clazz, seed):
-    np.random.seed(seed)
+
+    pymoo.PymooPRNG(seed)
     problem = SphereNoBounds()
 
-    x0 = np.random.random(problem.n_var)
+    x0 = pymoo.PymooPRNG().random(problem.n_var)
     algorithm = clazz(x0=x0)
 
     f, f_opt = run(problem, algorithm)
diff --git a/tests/gradients/test_gradient.py b/tests/gradients/test_gradient.py
index 8c0e4f322..dce4e0833 100644
--- a/tests/gradients/test_gradient.py
+++ b/tests/gradients/test_gradient.py
@@ -6,13 +6,14 @@
 from tests.gradients.problems_with_gradients import MySphereWithGradient, MySphere, ZDT1WithGradient, ElementwiseZDT1, \
     MyConstrainedSphereWithGradient, MyConstrainedSphere, ConstrainedZDT1WithGradient, ConstrainedZDT1
 
+import pymoo
 
 @pytest.mark.parametrize("correct, problem", [
     (MySphereWithGradient(), MySphere()),
     (ZDT1WithGradient(), ElementwiseZDT1())],
     ids=['elementwise_sphere', 'elementwise_zdt1'])
 def test_elementwise_eval_with_gradient(correct, problem):
-    X = np.random.random((100, correct.n_var))
+    X = pymoo.PymooPRNG().random((100, correct.n_var))
 
     autodiff = ElementwiseAutomaticDifferentiation(problem)
 
@@ -26,7 +27,7 @@
 @pytest.mark.parametrize("correct, problem", [(MyConstrainedSphereWithGradient(), MyConstrainedSphere())],
                          ids=['elementwise_constr_sphere'])
 def test_elementwise_constrained_eval_with_gradient(correct, problem):
-    X = np.random.random((100, correct.n_var))
+    X = pymoo.PymooPRNG().random((100, correct.n_var))
 
     autodiff = AutomaticDifferentiation(problem)
 
@@ -43,7 +44,7 @@
 @pytest.mark.parametrize("correct, problem", [
     (ZDT1WithGradient(), ZDT1())],
     ids=['vectorized_zdt1'])
 def test_vectorized_eval_with_gradient(correct, problem):
-    X = np.random.random((100, correct.n_var))
+    X = pymoo.PymooPRNG().random((100, correct.n_var))
 
     autodiff = AutomaticDifferentiation(problem)
 
@@ -57,7 +58,7 @@
 @pytest.mark.parametrize("correct, problem", [(ConstrainedZDT1WithGradient(), ConstrainedZDT1())],
                          ids=['vectorized_constr_zdt1'])
 def test_constrained_multi_eval_with_gradient(correct, problem):
-    X = np.random.random((100, correct.n_var))
+    X = pymoo.PymooPRNG().random((100, correct.n_var))
 
     autodiff = AutomaticDifferentiation(problem)
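
Note: the test changes above swap `np.random.seed(seed)` for `pymoo.PymooPRNG(seed)`. A minimal sketch of the intended pattern, assuming the seed passed to `PymooPRNG` takes effect when the shared generator is first created (not part of the patch):

    import pymoo

    pymoo.PymooPRNG(42)                # create/seed the shared generator
    x0 = pymoo.PymooPRNG().random(5)   # later calls reuse the same generator
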
diff --git a/tests/indicators/test_hv.py b/tests/indicators/test_hv.py
index 0bbbf036d..5bf669725 100644
--- a/tests/indicators/test_hv.py
+++ b/tests/indicators/test_hv.py
@@ -7,15 +7,17 @@
 from pymoo.problems.many import DTLZ1
 from pymoo.problems.multi import ZDT1
 
+import pymoo
 
 def case_2d():
-    np.random.seed(1)
+
+    pymoo.PymooPRNG(1)
 
     ref_point = np.array([1.5, 1.5])
     F = ZDT1().pareto_front()
     F = F[::10] * 1.2
-    F = F[np.random.permutation(len(F))]
+    F = F[pymoo.PymooPRNG().permutation(len(F))]
     return ref_point, F
 
 
@@ -27,19 +29,21 @@ def case_2d_smaller_ref():
 
 
 def case_3d():
-    np.random.seed(1)
+
+    pymoo.PymooPRNG(1)
 
     ref_point = np.array([1.5, 1.5, 1.5])
     F = DTLZ1().pareto_front()
     F = F[::10] * 1.2
-    F = F[np.random.permutation(len(F))]
+    F = F[pymoo.PymooPRNG().permutation(len(F))]
     return ref_point, F
 
 
 def test_hvc_2d():
-    np.random.seed(1)
+
+    pymoo.PymooPRNG(1)
 
     ref_point, F = case_2d()
 
     exact = ExactHypervolume(ref_point).add(F)
@@ -49,7 +53,7 @@
     np.testing.assert_allclose(exact.hvc, exact2d.hvc)
 
     for i in range(len(F)):
-        k = np.random.randint(low=0, high=len(F) - i)
+        k = pymoo.PymooPRNG().integers(low=0, high=len(F) - i)
 
         exact.delete(k)
         exact2d.delete(k)
@@ -69,7 +73,7 @@ def test_hvc_monte_carlo(case):
     np.testing.assert_allclose(exact.hvc, mc.hvc, rtol=0, atol=1e-1)
 
     for i in range(len(F)):
-        k = np.random.randint(low=0, high=len(F) - i)
+        k = pymoo.PymooPRNG().integers(low=0, high=len(F) - i)
 
         exact.delete(k)
         mc.delete(k)
diff --git a/tests/misc/test_kktpm.py b/tests/misc/test_kktpm.py
index 982b21578..9c215237e 100644
--- a/tests/misc/test_kktpm.py
+++ b/tests/misc/test_kktpm.py
@@ -36,7 +36,7 @@ def test_kktpm_correctness(str_problem, params):
         np.testing.assert_almost_equal(G, _G, decimal=5)
         np.testing.assert_almost_equal(dG, _dG, decimal=5)
 
-    # indices = np.random.permutation(X.shape[0])[:100]
+    # indices = pymoo.PymooPRNG().permutation(X.shape[0])[:100]
     n, _ = X.shape
     indices = np.arange(n)
diff --git a/tests/misc/test_non_dominated_sorting.py b/tests/misc/test_non_dominated_sorting.py
index 7163bc814..936d1cc89 100644
--- a/tests/misc/test_non_dominated_sorting.py
+++ b/tests/misc/test_non_dominated_sorting.py
@@ -8,6 +8,7 @@
 from pymoo.util.function_loader import load_function
 from pymoo.util.ref_dirs import get_reference_directions
 
+import pymoo
 
 def assert_fronts_equal(fronts_a, fronts_b):
     assert len(fronts_a) == len(fronts_b)
@@ -18,7 +19,7 @@ def assert_fronts_equal(fronts_a, fronts_b):
 
 
 def test_fast_non_dominated_sorting():
-    F = np.random.random((100, 2))
+    F = pymoo.PymooPRNG().random((100, 2))
     fronts = load_function("fast_non_dominated_sort", _type="python")(F)
     _fronts = load_function("fast_non_dominated_sort", _type="cython")(F)
     assert_fronts_equal(fronts, _fronts)
@@ -27,7 +28,7 @@
 def test_efficient_non_dominated_sort():
     print("Testing ENS...")
     F = np.ones((1000, 3))
-    F[:, 1:] = np.random.random((1000, 2))
+    F[:, 1:] = pymoo.PymooPRNG().random((1000, 2))
 
     nds = load_function("fast_non_dominated_sort", _type="python")(F)
 
@@ -47,7 +48,7 @@
 def test_tree_based_non_dominated_sort():
     print("Testing T-ENS...")
     F = np.ones((1000, 3))
-    F[:, 1:] = np.random.random((1000, 2))
+    F[:, 1:] = pymoo.PymooPRNG().random((1000, 2))
 
     _fronts = load_function("fast_non_dominated_sort", _type="python")(F)
     fronts = load_function("tree_based_non_dominated_sort", _type="python")(F)
diff --git a/tests/misc/test_normalization.py b/tests/misc/test_normalization.py
index fa5127d50..191989dea 100644
--- a/tests/misc/test_normalization.py
+++ b/tests/misc/test_normalization.py
@@ -2,6 +2,7 @@
 import pytest
 
 from pymoo.util.normalization import ZeroToOneNormalization
+import pymoo
 
 n_var = 10
 
@@ -9,14 +10,14 @@
 @pytest.fixture
 def matrix_input():
     xl, xu = np.full(n_var, -5.0), np.full(n_var, 5.0)
-    X = np.random.random((200, n_var)) * (xu - xl) + xl
+    X = pymoo.PymooPRNG().random((200, n_var)) * (xu - xl) + xl
     return X, xl, xu
 
 
 @pytest.fixture
 def vector_input():
     xl, xu = np.full(n_var, -5.0), np.full(n_var, 5.0)
-    X = np.random.random(n_var) * (xu - xl) + xl
+    X = pymoo.PymooPRNG().random(n_var) * (xu - xl) + xl
     return X, xl, xu
diff --git a/tests/misc/test_population.py b/tests/misc/test_population.py
index b97d4712d..aacdfd314 100644
--- a/tests/misc/test_population.py
+++ b/tests/misc/test_population.py
@@ -4,7 +4,7 @@
 from pymoo.core.population import Population
 
 import numpy as np
-
+import pymoo
 
 def test_init():
     pop = Population.empty(100)
@@ -15,7 +15,7 @@
 
 
 def test_copy():
-    a = Population.new(X=np.random.random((100, 2)))
+    a = Population.new(X=pymoo.PymooPRNG().random((100, 2)))
     b = deepcopy(a)
 
     b[0].X[:] = -1
diff --git a/tests/operators/test_order_crossover.py b/tests/operators/test_order_crossover.py
index 4364cfdec..15439c04f 100644
--- a/tests/operators/test_order_crossover.py
+++ b/tests/operators/test_order_crossover.py
@@ -1,7 +1,7 @@
 import numpy as np
 
 from pymoo.operators.crossover.ox import ox
-
+import pymoo
 
 def order_crossover_contributed_no_shift(x1, x2, seq=None):
     assert len(x1) == len(x2)
@@ -9,7 +9,7 @@ def order_crossover_contributed_no_shift(x1, x2, seq=None):
     if seq is not None:
         start, end = seq
     else:
-        start, end = np.sort(np.random.choice(len(x1), 2, replace=False))
+        start, end = np.sort(pymoo.PymooPRNG().choice(len(x1), 2, replace=False))
 
     y1 = x1.copy()
     y2 = x2.copy()
@@ -51,10 +51,10 @@ def test_example_to_bound():
 
 def test_equal_constribution_no_shift():
     for _ in range(100):
-        a = np.random.permutation(10)
-        b = np.random.permutation(10)
+        a = pymoo.PymooPRNG().permutation(10)
+        b = pymoo.PymooPRNG().permutation(10)
 
-        start, end = np.sort(np.random.choice(len(a), 2, replace=False))
+        start, end = np.sort(pymoo.PymooPRNG().choice(len(a), 2, replace=False))
 
         y1 = ox(a, b, seq=(start, end), shift=False)
         y2 = ox(b, a, seq=(start, end), shift=False)
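
Note: the crossover tests above draw two distinct cut points with `choice(len(x), 2, replace=False)` and sort them. A small illustration of that idiom with the Generator API (values are illustrative only, not part of the patch):

    import numpy as np

    rng = np.random.default_rng()
    start, end = np.sort(rng.choice(10, 2, replace=False))  # two distinct, ordered indices
    # e.g. start=2, end=7 define the crossover segment passed to ox(..., seq=(start, end))
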
diff --git a/tests/problems/test_elementwise.py b/tests/problems/test_elementwise.py
index 37197cfb7..b18ed7f63 100644
--- a/tests/problems/test_elementwise.py
+++ b/tests/problems/test_elementwise.py
@@ -1,7 +1,7 @@
 import numpy as np
 
 from pymoo.core.problem import Problem, ElementwiseProblem
-
+import pymoo
 
 class MyElementwiseProblem(ElementwiseProblem):
 
@@ -24,7 +24,7 @@ def _evaluate(self, x, out, *args, **kwargs):
 
 
 def test_elementwise_evaluation():
-    X = np.random.random((100, 2))
+    X = pymoo.PymooPRNG().random((100, 2))
 
     vectorized = MyProblem()
     elementwise = MyElementwiseProblem()
@@ -32,7 +32,7 @@
 
 
 def test_misc_value():
-    X = np.random.random((100, 2))
+    X = pymoo.PymooPRNG().random((100, 2))
 
     vectorized = MyProblem()
     elementwise = MyElementwiseProblem()
diff --git a/tests/problems/test_evaluator.py b/tests/problems/test_evaluator.py
index 497d651f3..c2385e8a2 100644
--- a/tests/problems/test_evaluator.py
+++ b/tests/problems/test_evaluator.py
@@ -4,10 +4,11 @@
 from pymoo.core.evaluator import Evaluator
 from pymoo.core.individual import Individual
 from pymoo.core.population import Population
+import pymoo
 
 problem = get_problem("Rastrigin")
 
-X = np.random.random((100, problem.n_var))
+X = pymoo.PymooPRNG().random((100, problem.n_var))
 F = problem.evaluate(X)
diff --git a/tests/problems/test_g.py b/tests/problems/test_g.py
index 87087a388..e0f294efb 100644
--- a/tests/problems/test_g.py
+++ b/tests/problems/test_g.py
@@ -6,6 +6,8 @@
 from pymoo.util.misc import at_least_2d_array, at_least_2d
 from tests.problems.test_correctness import load
 
+import pymoo
+
 problems = [
     ('G1', []), ('G2', []), ('G3', []), ('G4', []), ('G5', []), ('G6', []), ('G7', []), ('G8', []),
     ('G9', []), ('G10', []), ('G11', []), ('G12', []), ('G13', []), ('G14', []), ('G15', []), ('G16', []),
@@ -41,6 +43,6 @@ def test_problems(name, params):
 @pytest.mark.parametrize('name,params', problems)
 def test_autodiff(name, params):
     problem = AutomaticDifferentiation(get_problem(name, *params))
-    X = np.random.random((100, problem.n_var))
+    X = pymoo.PymooPRNG().random((100, problem.n_var))
     problem.evaluate(X)
     assert True
diff --git a/tests/test_archive.py b/tests/test_archive.py
index ee8819cdc..f2174fba6 100644
--- a/tests/test_archive.py
+++ b/tests/test_archive.py
@@ -5,7 +5,9 @@
 from pymoo.core.population import Population
 from pymoo.util.archive import SingleObjectiveArchive, MultiObjectiveArchive, SurvivalTruncation
 from pymoo.util.nds.non_dominated_sorting import NonDominatedSorting
+import pymoo
 
+pymoo.PymooPRNG(1)
 
 def test_unconstr_add_to_archive():
     archive = SingleObjectiveArchive()
@@ -101,8 +103,7 @@ def test_multi_objective_archive():
 
 
 def test_multi_objective_archive_multi():
-    np.random.seed(1)
-    X, F = np.random.random(size=(100, 10)), np.random.random(size=(100, 3))
+    X, F = pymoo.PymooPRNG().random(size=(100, 10)), pymoo.PymooPRNG().random(size=(100, 3))
 
     pop = Population.new(X=X, F=F)
 
     archive = MultiObjectiveArchive().add(pop)
diff --git a/tests/test_decomposition.py b/tests/test_decomposition.py
index c7c52396e..2635b7375 100644
--- a/tests/test_decomposition.py
+++ b/tests/test_decomposition.py
@@ -4,9 +4,12 @@
 from pymoo.decomposition.perp_dist import PerpendicularDistance
 from pymoo.decomposition.weighted_sum import WeightedSum
 
+import pymoo
+
+pymoo.PymooPRNG(1)
 
 def test_one_to_one():
-    F = np.random.random((2, 2))
+    F = pymoo.PymooPRNG().random((2, 2))
     weights = np.array([[0.5, 0.5], [0.25, 0.25]])
 
     val = WeightedSum().do(F, weights=weights)
@@ -15,7 +18,7 @@
 
 
 def test_one_to_many():
-    F = np.random.random((1, 2))
+    F = pymoo.PymooPRNG().random((1, 2))
     weights = np.array([[0.5, 0.5], [0.25, 0.25]])
 
     val = WeightedSum().do(F, weights=weights)
@@ -24,7 +27,7 @@
 
 
 def test_many_to_one():
-    F = np.random.random((10, 2))
+    F = pymoo.PymooPRNG().random((10, 2))
     weights = np.array([[0.5, 0.5]])
 
     val = WeightedSum().do(F, weights=weights)
@@ -33,7 +36,7 @@
 
 
 def test_many_to_many():
-    F = np.random.random((10, 2))
+    F = pymoo.PymooPRNG().random((10, 2))
     weights = np.array([[0.5, 0.5], [0.25, 0.25]])
 
     val = WeightedSum().do(F, weights=weights)
@@ -42,9 +45,8 @@
 
 
 def test_perp_dist():
-    np.random.seed(1)
-    F = np.random.random((100, 3))
-    weights = np.random.random((10, 3))
+    F = pymoo.PymooPRNG().random((100, 3))
+    weights = pymoo.PymooPRNG().random((10, 3))
 
     correct = Remote.get_instance().load("tests", "perp_dist")