From 971b5493b6b0b577d1b28f25807281d76f0238c2 Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Mon, 3 Feb 2025 09:41:42 +0530 Subject: [PATCH 01/16] create a draft of things to don CGA --- src/qutip_qoc/_genetic.py | 294 +++++++++++++++++++++++++++++++++++ src/qutip_qoc/pulse_optim.py | 16 ++ tests/test_result.py | 39 +++++ 3 files changed, 349 insertions(+) create mode 100644 src/qutip_qoc/_genetic.py diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py new file mode 100644 index 0000000..f6dd00f --- /dev/null +++ b/src/qutip_qoc/_genetic.py @@ -0,0 +1,294 @@ +import numpy as np +from joblib import Parallel, delayed +import scipy as sci +import time +from qutip_qoc.result import Result + +class _GENETIC(): + ### Template for a genetic algorithm optimizer + ### Copied from GOAT or RL + ### Modified to be a genetic algorithm optimizer + ### Contributed by Jonathan Brown + + def __init__( + self, + objectives, + control_parameters, + time_interval, + time_options, + alg_kwargs, + optimizer_kwargs, + minimizer_kwargs, + integrator_kwargs, + qtrl_optimizers, + ): + """ + Initialize the genetic algorithm with the given parameters. + + Args: + objectives: The objectives for the optimization. + control_parameters: Parameters for the control. + time_interval: The time interval for the optimization. + time_options: Options related to time discretization. + alg_kwargs: Additional arguments for the algorithm. + optimizer_kwargs: Arguments for the optimizer. + minimizer_kwargs: Arguments for the minimizer. + integrator_kwargs: Arguments for the integrator. + qtrl_optimizers: Quantum control optimizers. + """ + self.objectives = objectives + self.control_parameters = control_parameters + self.time_interval = time_interval + self.time_options = time_options + self.alg_kwargs = alg_kwargs + + # Initialize the genetic algorithm parameters + self.N_pop = alg_kwargs.get('population_size', 50) + self.N_var = alg_kwargs.get('number_variables', 10) + self.parent_rate = alg_kwargs.get('parent_rate', 0.5) + self.N_parents = int(np.floor(self.N_pop * self.parent_rate)) + self.N_survivors = max(1, int(self.N_parents * alg_kwargs.get('survival_rate', 1.0))) + self.N_offspring = self.N_pop - self.N_survivors + self.mutation_rate = alg_kwargs.get('mutation_rate', 0.1) + self.workers = alg_kwargs.get('workers', 1) + + # Initialize the Result object + self._result = Result( + objectives=objectives, + time_interval=time_interval, + start_local_time=time.time(), # initial optimization time + n_iters=0, # Number of iterations(episodes) until convergence + iter_seconds=[], # list containing the time taken for each iteration(episode) of the optimization + var_time=True, # Whether the optimization was performed with variable time + guess_params=[], + ) + "Initialize a first population" + def initial_population(self): + """Randomly generates an initial popuation to act as generation 0. + + Returns: + Array: Contains all randomly initialised chromosomes in a single matrix. + """ + return np.random.uniform(-1,1, (self.N_pop, self.N_var)) + + + def darwin(self, population, fitness): + """A function for creating the parent pool which will be used to repopulate the next + generation. + + Args: + population (Array): Array of chromosome vectors for the current generation stacked into a single array. + fitness (Vector): The corresponding fitness of each chromosome in the population. + + Returns: + Array: The pool of parent chromosomes. + Vector: The corresponding fitness of the parent chromosomes. 
+ + """ + # takes the current generation of solutions (population) and kills the weakest + # Returns the ordered survivors (with the elite survivor at index 0) and their costs + # np.argsort sorts the indices from smallest at zeroth element to largest thus to get the largest elem at 0 + # we pass -1 X our array to be sorted, then take the first + indices = np.argsort(-fitness)[:self.N_parents] + parents = population[indices] + parent_fitness = fitness[indices] + return parents, parent_fitness + + def pairing(self, parents, parent_fitness): + """From the parent pool, probabilistically generates pairs to act as mother and father + chromosomes to reproduce, with the probability of pairng determined by the relative fitness + of each chromosome. + + Args: + parents (Array): Array of stacked chromosome vectors to act as the parent pool. + fitness (Vector): The corresponding fitness of each chromosome in the parent pool. + + Returns: + Array: The mother chromosomes. + Array: The corresponding father chromosomes. + + """ + # Select pair of chromosomes to mate from the survivors + # ------------ could use a softmax here ------------ # + # positive_pf = parent_fitness + np.abs(np.min(parent_fitness)) + # prob_dist = (positive_pf)/np.sum(positive_pf) + prob_dist = sci.special.softmax(parent_fitness) + # -------------------------------------------------- # + ma_indices = np.random.choice(np.arange(self.N_parents), + size=int(self.N_offspring), p=prob_dist, replace=True) + da_indices = np.zeros_like(ma_indices) + index_ls = np.arange(self.N_parents) + for i, ma in enumerate(ma_indices): + inter_prob_dist = parent_fitness.copy() + inter_prob_dist[ma] = -np.inf + inter_prob_dist_normd = sci.special.softmax(inter_prob_dist) + da_indices[i] = np.random.choice(index_ls, size=1, p=inter_prob_dist_normd) + father_chromes = parents[da_indices] + mother_chromes = parents[ma_indices] + return mother_chromes, father_chromes + + def mating_procedure(self, ma, da): + """Takes the pair of parent chromosomes and combines them in a way to produce offspring. + (Ma and Da - slang used for mother and father in Ireland, Scotland and northern England) + + Args: + Ma (Array): The list of mother chromosomes. + Da (Array): The corresponding list of father chromosomes. + + Returns: + Array: The pool of parent chromosomes. + + """ + # define an array of combination parameters beta to use to combine the parent chromosomes + # in a continuous way + beta = np.random.uniform(0,1, size=(self.N_offspring, self.N_var)) + beta_inverse = 1 - beta + # Randomly select an array of ones to mask the beta array (this also randomly selects) + # the indices to be swapped + swap_array = np.random.randint(low=0,high=2,size=(self.N_offspring, self.N_var)) + masked_beta = swap_array * beta + masked_inverse_beta = swap_array * beta_inverse + not_swap_array = np.mod((swap_array + 1),2) + # Implement the b*ma + (1-b)*da on each chosen element + offspring_array = masked_beta * ma + masked_inverse_beta * da + not_swap_array * ma + return offspring_array + + + def build_next_gen(self, parents, offspring): + """Takes the parent pool and keeps N_survivor parent chromosomes to survive to the next generation. + Builds said generation by stacking surviving parents with the offspring chromosomes. + + Args: + parents (Array): The complete parent pool array. + offspring (Array): The offspring array. + + Returns: + Array: The population of unmutated chromosomes which will constitute the next generation. 
+ """ + # build next generation + # get the elite survivors to propagate to the next gen + survivors = parents[:self.N_survivors] + return np.concatenate((survivors, offspring), axis=0) + + + def mutate(self, population): + """Takes the unmutated new generation population and randomly applies continuous mutations. + + Args: + population (Array): The unmutated population which will constitute the new generation of chromosomes. + + Returns: + Array: The population for the new generation. + """ + # Mutate the new generation + number_of_mutations = int((population.shape[0] - 1) * population.shape[-1] * self.mutation_rate) + row_indices = np.random.choice(np.arange(1,int((population.shape[0]))), size=number_of_mutations) + col_indices = np.random.choice(np.arange(0,int((population.shape[-1]))), size=number_of_mutations) + mutated_population = np.copy(population) + # for some reason clipped normal noise works better. + # mutated_population[row_indices, col_indices] = np.random.uniform(-1,1,size=number_of_mutations) # for uniform random mutation + mutated_population[row_indices, col_indices] = np.clip(population[row_indices, col_indices] + np.random.normal(loc=0, scale=0.2, size=number_of_mutations), a_min=-1, a_max=1) # for normal mutation + return mutated_population + + + def optimize(self, fitness_func, iterations = 1000): + """Implements entire procedure for continuous genetic algorithm optimizer. This is a higher order function + which accepts a black box fitness function. The fitness function which will accept a single chromosome of + N_var variables in the range [-1,1], scale and process them returning a figure of merit which describes how useful said parameter + values were at acheiving the general task. This figure of merit will act as the fitness of the input chromosome. + + Args: + fitness_func (Function): Black box fitness function to evaluate a chromosome fitness. + iterations (Int): The number of generations for which to optimize. + + Returns: + Float: The maximum acheived fitness value from every generation of the optimization. + Vector: The chromosome that acheived the highest fitness. + Vector: The history of maximum fitness improvement for the entire optimization. + + """ + population = self.initial_population() # initialise a random seroth generation + bench = -1e10 # the initial benchmark fitness - we assume fitness >= 0 + done = False # terminal flag + count = 0 # iteration count + maxfit_hist = [] # to record the maximum fitness at each gen during optimizing + while not done: + if self.workers > 1: + fitness_ls = Parallel(n_jobs=self.workers)(delayed(fitness_func)(chromosome) for chromosome in population) + else: + fitness_ls = [] + for chromosome in population: + # The fitness function should be able to take a raw chromosome (whose constituent values will be \in [-1, 1]) + # scale the values to get the true parameter values, then use those parameters to do "stuff" (in our case + # build a control pulse and simulate the dynamics) then return a figure of merit which tells us how good + # this set of parameters has performed (could be fidelity to a target state for example). 
+ # essentially all of the problem under consideration happens inside fitness_func + f = fitness_func(chromosome) + fitness_ls.append(f) + fitness = np.array(fitness_ls) + max_fit = np.max(fitness) + if max_fit>bench: + print(" EPISODE: {}, average cost: {}, Best Cost: {}".format(count, np.round(np.average(fitness), decimals=4), + np.max(fitness))) + # update benchmark and add max fitness to history + bench = max_fit + maxfit_hist.append(max_fit) + best_index = np.argmax(fitness) # get best index + best_chromosome = population[best_index,:] # so that we can get the best chromosome + + # May want to checkpoint here + if count == iterations: # if max iterations reached + done=True + + survivors, survivor_fitness = self.darwin(population,fitness) + mothers, fathers = self.pairing(survivors, survivor_fitness) + offspring = self.mating_procedure(ma=mothers, da=fathers) + unmutated_next_gen = self.build_next_gen(survivors,offspring) + mutated_next_gen = self.mutate(unmutated_next_gen) + population = mutated_next_gen + count+=1 + + return max_fit, best_chromosome, maxfit_hist + + def _save_result(self): + """ + Save the results of the optimization process, including the optimized + pulse sequences, final states, and performance metrics. + """ + result_obj = self._backup_result if self._use_backup_result else self._result + + if self._use_backup_result: + self._backup_result.iter_seconds = self._result.iter_seconds.copy() + self._backup_result._final_states = self._result._final_states.copy() + self._backup_result.infidelity = self._result.infidelity + + result_obj.end_local_time = time.time() + result_obj.n_iters = len(self._result.iter_seconds) + result_obj.optimized_params = self._actions.copy() + [ + self._result.total_seconds + ] # If var_time is True, the last parameter is the evolution time + result_obj._optimized_controls = self._actions.copy() + result_obj._guess_controls = [] + result_obj._optimized_H = [self._H] + + def result(self): + """ + Final conversions and return of optimization results + """ + if self._use_backup_result: + self._backup_result.start_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._backup_result.start_local_time) + ) + self._backup_result.end_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._backup_result.end_local_time) + ) + return self._backup_result + else: + self._save_result() + self._result.start_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._result.start_local_time) + ) + self._result.end_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._result.end_local_time) + ) + return self._result \ No newline at end of file diff --git a/src/qutip_qoc/pulse_optim.py b/src/qutip_qoc/pulse_optim.py index 5f65e46..3cb4fb2 100644 --- a/src/qutip_qoc/pulse_optim.py +++ b/src/qutip_qoc/pulse_optim.py @@ -11,6 +11,7 @@ from qutip_qoc._optimizer import _global_local_optimization from qutip_qoc._time import _TimeInterval from qutip_qoc._rl import _RL +from qutip_qoc._genetic import _GENETIC __all__ = ["optimize_pulses"] @@ -366,6 +367,21 @@ def optimize_pulses( ) rl_env.train() return rl_env.result() + + elif alg == "GENETIC": + gen_env = _GENETIC( + objectives, + control_parameters, + time_interval, + time_options, + algorithm_kwargs, + optimizer_kwargs, + minimizer_kwargs, + integrator_kwargs, + qtrl_optimizers, + ) + gen_env.optimize() + return gen_env.result() return _global_local_optimization( objectives, diff --git a/tests/test_result.py b/tests/test_result.py index 
7146b04..00f3b7b 100644 --- a/tests/test_result.py +++ b/tests/test_result.py @@ -198,6 +198,44 @@ def sin_z_jax(t, r, **kwargs): }, ) +# --------------------------- Genetic --------------------------- + +# TODO: this is the input for optimiz_pulses() function +# you can use this routine to test your implementation + +# state to state transfer +init = qt.basis(2, 0) +target = qt.basis(2, 1) + +H_c = [qt.sigmax(), qt.sigmay(), qt.sigmaz()] # control Hamiltonians + +w, d, y = 0.1, 1.0, 0.1 +H_d = 1 / 2 * (w * qt.sigmaz() + d * qt.sigmax()) # drift Hamiltonian + +H = [H_d] + H_c # total Hamiltonian + +state2state_genetic = Case( + objectives=[Objective(initial, H, target)], + control_parameters={"bounds": [-13, 13]}, # TODO: for now only consider bounds + tlist=np.linspace(0, 10, 100), # TODO: derive single step duration and max evo time / max num steps from this + algorithm_kwargs={ + "fid_err_targ": 0.01, + "alg": "GENETIC", + "max_iter": 100, + }, + optimizer_kwargs={}, +) + +# TODO: no big difference for unitary evolution + +initial = qt.qeye(2) # Identity +target = qt.gates.hadamard_transform() + +unitary_genetic = state2state_genetic._replace( + objectives=[Objective(initial, H, target)], +) + + @pytest.fixture( params=[ @@ -207,6 +245,7 @@ def sin_z_jax(t, r, **kwargs): pytest.param(state2state_goat, id="State to state (GOAT)"), pytest.param(state2state_jax, id="State to state (JAX)"), pytest.param(state2state_rl, id="State to state (RL)"), + pytest.param(unitary_genetic, id="Unitary (Genetic)"), pytest.param(unitary_rl, id="Unitary (RL)"), ] ) From 55d9389e9463846b95f0fcdb8db46dc88c1670bf Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Thu, 6 Feb 2025 11:01:17 +0530 Subject: [PATCH 02/16] add infidelity instead of fitness function --- src/qutip_qoc/_genetic.py | 98 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 95 insertions(+), 3 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index f6dd00f..1d74548 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -3,6 +3,7 @@ import scipy as sci import time from qutip_qoc.result import Result +import qutip as qt class _GENETIC(): ### Template for a genetic algorithm optimizer @@ -62,6 +63,89 @@ def __init__( var_time=True, # Whether the optimization was performed with variable time guess_params=[], ) + + self._Hd_lst, self._Hc_lst = [], [] + for objective in objectives: + # extract drift and control Hamiltonians from the objective + self._Hd_lst.append(objective.H[0]) + self._Hc_lst.append( + [H[0] if isinstance(H, list) else H for H in objective.H[1:]] + ) + + def create_pulse_func(idx): + """ + Create a control pulse lambda function for a given index. 
+ """ + return lambda t, args: self._pulse(t, args, idx + 1) + + # create the QobjEvo with Hd, Hc and controls(args) + self._H_lst = [self._Hd_lst[0]] + dummy_args = {f"alpha{i+1}": 1.0 for i in range(len(self._Hc_lst[0]))} + for i, Hc in enumerate(self._Hc_lst[0]): + self._H_lst.append([Hc, create_pulse_func(i)]) + self._H = qt.QobjEvo(self._H_lst, args=dummy_args) + + self.shorter_pulses = alg_kwargs.get( + "shorter_pulses", False + ) # lengthen the training to look for pulses of shorter duration, therefore episodes with fewer steps + + # extract bounds for control_parameters + bounds = [] + for key in control_parameters.keys(): + bounds.append(control_parameters[key].get("bounds")) + self._lbound = [b[0][0] for b in bounds] + self._ubound = [b[0][1] for b in bounds] + + self._alg_kwargs = alg_kwargs + + self._initial = objectives[0].initial + self._target = objectives[0].target + self._state = None + self._dim = self._initial.shape[0] + + self._result = Result( + objectives=objectives, + time_interval=time_interval, + start_local_time=time.time(), # initial optimization time + n_iters=0, # Number of iterations(episodes) until convergence + iter_seconds=[], # list containing the time taken for each iteration(episode) of the optimization + var_time=True, # Whether the optimization was performed with variable time + guess_params=[], + ) + + self._backup_result = Result( # used as a backup in case the algorithm with shorter_pulses does not find an episode with infid 1: - fitness_ls = Parallel(n_jobs=self.workers)(delayed(fitness_func)(chromosome) for chromosome in population) + fitness_ls = Parallel(n_jobs=self.workers)(delayed(infidelity)(chromosome) for chromosome in population) else: fitness_ls = [] for chromosome in population: @@ -223,7 +308,14 @@ def optimize(self, fitness_func, iterations = 1000): # build a control pulse and simulate the dynamics) then return a figure of merit which tells us how good # this set of parameters has performed (could be fidelity to a target state for example). # essentially all of the problem under consideration happens inside fitness_func - f = fitness_func(chromosome) + alphas = [ + ((chromosome + 1) / 2 * (self._ubound[0] - self._lbound[0])) + + self._lbound[0] + for i in range(len(chromosome)) + ] + args = {f"alpha{i+1}": value for i, value in enumerate(alphas)} + infidelity = self._infid(args) + f = infidelity(chromosome) fitness_ls.append(f) fitness = np.array(fitness_ls) max_fit = np.max(fitness) From 9f21eaf12d0982f8cdb3f1a7a858ebf2931cb96c Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Tue, 25 Feb 2025 12:32:15 +0530 Subject: [PATCH 03/16] fix init --- src/qutip_qoc/_genetic.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 1d74548..7d15c20 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -102,7 +102,7 @@ def create_pulse_func(idx): self._target = objectives[0].target self._state = None self._dim = self._initial.shape[0] - + self._result = Result( objectives=objectives, time_interval=time_interval, @@ -122,7 +122,13 @@ def create_pulse_func(idx): var_time=True, guess_params=[], ) - + def _pulse(self, t, args, idx): + """ + Returns the control pulse value at time t for a given index. 
+ """ + alpha = args[f"alpha{idx}"] + return alpha + def _infid(self, params): """ Calculate infidelity to be minimized From 0ce0863e991442a969b679281d56261bec635bc5 Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Thu, 27 Feb 2025 08:26:15 +0530 Subject: [PATCH 04/16] fix solver functions --- src/qutip_qoc/_genetic.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 7d15c20..17e5e08 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -37,6 +37,8 @@ def __init__( integrator_kwargs: Arguments for the integrator. qtrl_optimizers: Quantum control optimizers. """ + + super(_GENETIC, self).__init__() self.objectives = objectives self.control_parameters = control_parameters self.time_interval = time_interval @@ -122,6 +124,14 @@ def create_pulse_func(idx): var_time=True, guess_params=[], ) + + if self._Hd_lst[0].issuper: + self._fid_type = self._alg_kwargs.get("fid_type", "TRACEDIFF") + self._solver = qt.MESolver(H=self._H, options=self._integrator_kwargs) + else: + self._fid_type = self._alg_kwargs.get("fid_type", "PSU") + self._solver = qt.SESolver(H=self._H, options=self._integrator_kwargs) + def _pulse(self, t, args, idx): """ Returns the control pulse value at time t for a given index. From a3164c138c3335163ceacdbabe696d248e9d7e52 Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Mon, 3 Mar 2025 08:45:38 +0530 Subject: [PATCH 05/16] initializing right parameters --- src/qutip_qoc/_genetic.py | 55 +++++++++++++++++++++++++-------------- 1 file changed, 36 insertions(+), 19 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 17e5e08..30cfe49 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -37,13 +37,16 @@ def __init__( integrator_kwargs: Arguments for the integrator. qtrl_optimizers: Quantum control optimizers. 
""" - + super(_GENETIC, self).__init__() self.objectives = objectives self.control_parameters = control_parameters self.time_interval = time_interval self.time_options = time_options self.alg_kwargs = alg_kwargs + self._integrator_kwargs = integrator_kwargs + self._rtol = self._integrator_kwargs.get("rtol", 1e-5) + self._atol = self._integrator_kwargs.get("atol", 1e-5) # Initialize the genetic algorithm parameters self.N_pop = alg_kwargs.get('population_size', 50) @@ -55,17 +58,6 @@ def __init__( self.mutation_rate = alg_kwargs.get('mutation_rate', 0.1) self.workers = alg_kwargs.get('workers', 1) - # Initialize the Result object - self._result = Result( - objectives=objectives, - time_interval=time_interval, - start_local_time=time.time(), # initial optimization time - n_iters=0, # Number of iterations(episodes) until convergence - iter_seconds=[], # list containing the time taken for each iteration(episode) of the optimization - var_time=True, # Whether the optimization was performed with variable time - guess_params=[], - ) - self._Hd_lst, self._Hc_lst = [], [] for objective in objectives: # extract drift and control Hamiltonians from the objective @@ -99,12 +91,12 @@ def create_pulse_func(idx): self._ubound = [b[0][1] for b in bounds] self._alg_kwargs = alg_kwargs - + self._initial = objectives[0].initial self._target = objectives[0].target self._state = None self._dim = self._initial.shape[0] - + self._result = Result( objectives=objectives, time_interval=time_interval, @@ -124,7 +116,32 @@ def create_pulse_func(idx): var_time=True, guess_params=[], ) + self._use_backup_result = ( + False # if true, use self._backup_result as the final optimization result + ) + + # for the reward + self._step_penalty = 1 + + # To check if it exceeds the maximum number of steps in an episode + self._current_step = 0 + + self.terminated = False + self.truncated = False + + self._fid_err_targ = alg_kwargs["fid_err_targ"] + # inferred attributes + self._norm_fac = 1 / self._target.norm() + + # integrator options + self._integrator_kwargs = integrator_kwargs + self._rtol = self._integrator_kwargs.get("rtol", 1e-5) + self._atol = self._integrator_kwargs.get("atol", 1e-5) + + self._step_duration = 1e7 #change + + # create the solver if self._Hd_lst[0].issuper: self._fid_type = self._alg_kwargs.get("fid_type", "TRACEDIFF") self._solver = qt.MESolver(H=self._H, options=self._integrator_kwargs) @@ -138,14 +155,15 @@ def _pulse(self, t, args, idx): """ alpha = args[f"alpha{idx}"] return alpha - - def _infid(self, params): + + def _infid(self, args): """ - Calculate infidelity to be minimized + The agent performs a step, then calculate infidelity to be minimized of the current state against the target state. 
""" X = self._solver.run( - self._initial, [0.0, self._evo_time], args={"p": params} + self._state, [0.0, self._step_duration], args=args ).final_state + self._state = X if self._fid_type == "TRACEDIFF": diff = X - self._target @@ -159,7 +177,6 @@ def _infid(self, params): infid = 1 - np.abs(g) elif self._fid_type == "SU": # f_SU (incl global phase) infid = 1 - np.real(g) - return infid "Initialize a first population" From 876b274488d79b225114cce5d2745e08d8f4d2bc Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Tue, 11 Mar 2025 09:36:33 +0530 Subject: [PATCH 06/16] calculate infidelity of genetic algorithm --- src/qutip_qoc/_genetic.py | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 30cfe49..012d97a 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -94,7 +94,7 @@ def create_pulse_func(idx): self._initial = objectives[0].initial self._target = objectives[0].target - self._state = None + self._state = qt.ket2dm(self._initial) self._dim = self._initial.shape[0] self._result = Result( @@ -139,7 +139,7 @@ def create_pulse_func(idx): self._rtol = self._integrator_kwargs.get("rtol", 1e-5) self._atol = self._integrator_kwargs.get("atol", 1e-5) - self._step_duration = 1e7 #change + self._step_duration = 1e-7 #change # create the solver if self._Hd_lst[0].issuper: @@ -160,8 +160,7 @@ def _infid(self, args): """ The agent performs a step, then calculate infidelity to be minimized of the current state against the target state. """ - X = self._solver.run( - self._state, [0.0, self._step_duration], args=args + X = self._solver.run(self._state, [0.0, self._step_duration] ).final_state self._state = X @@ -348,8 +347,8 @@ def optimize(self, infidelity, iterations = 1000): ] args = {f"alpha{i+1}": value for i, value in enumerate(alphas)} infidelity = self._infid(args) - f = infidelity(chromosome) - fitness_ls.append(f) + #f = infidelity(chromosome) + fitness_ls.append(infidelity) fitness = np.array(fitness_ls) max_fit = np.max(fitness) if max_fit>bench: From 51601907169e366af5ef8144f3523d6d719872b6 Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Wed, 12 Mar 2025 11:24:06 +0530 Subject: [PATCH 07/16] result class --- src/qutip_qoc/_genetic.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 012d97a..507a9ef 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -388,10 +388,9 @@ def _save_result(self): result_obj.end_local_time = time.time() result_obj.n_iters = len(self._result.iter_seconds) - result_obj.optimized_params = self._actions.copy() + [ + result_obj.optimized_params = [ self._result.total_seconds ] # If var_time is True, the last parameter is the evolution time - result_obj._optimized_controls = self._actions.copy() result_obj._guess_controls = [] result_obj._optimized_H = [self._H] From 260837fca1d4ce27791887d398c70b931ab9ef93 Mon Sep 17 00:00:00 2001 From: Rochisha Agarwal Date: Mon, 7 Jul 2025 15:00:57 +0900 Subject: [PATCH 08/16] fix the infidelity function --- src/qutip_qoc/_genetic.py | 151 +++++++++++++++++++++----------------- 1 file changed, 85 insertions(+), 66 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 507a9ef..ee0325d 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -139,8 +139,7 @@ def create_pulse_func(idx): self._rtol = self._integrator_kwargs.get("rtol", 1e-5) self._atol = 
self._integrator_kwargs.get("atol", 1e-5) - self._step_duration = 1e-7 #change - + self._step_duration = 10 # create the solver if self._Hd_lst[0].issuper: self._fid_type = self._alg_kwargs.get("fid_type", "TRACEDIFF") @@ -156,16 +155,62 @@ def _pulse(self, t, args, idx): alpha = args[f"alpha{idx}"] return alpha + def _infid(self, args): """ - The agent performs a step, then calculate infidelity to be minimized of the current state against the target state. + Compute infidelity of the final state/unitary using provided control args. + + Parameters: + args (dict): Dictionary of control amplitudes {"alpha1": val1, ...} + + Returns: + float: Infidelity value ∈ [0, 1] """ - X = self._solver.run(self._state, [0.0, self._step_duration] + X = self._solver.run( + self._state, [0.0, self._step_duration], args=args ).final_state self._state = X - + + # Ensure density matrix + # if not initial.issuper and not initial.isoper: + # initial = qt.ket2dm(initial) + + # try: + # self._solver._H.args = args + # result = self._solver.run(initial, tlist, args=args) + # final_state = result.final_state + # except Exception as e: + # print(f"[WARNING] Solver failed: {e}") + # return 1.0 # max infidelity + + # # Fidelity computation + # if self._fid_type == "TRACEDIFF": + # diff = final_state - target + # diff_dag = qt.Qobj(diff.data.adjoint(), dims=diff.dims) + # g = 0.5 * (diff_dag * diff).data.trace() + # infid = np.real(self._norm_fac * g) + # else: + # g = self._norm_fac * target.overlap(final_state) + # if self._fid_type == "PSU": + # infid = 1 - np.abs(g) + # elif self._fid_type == "SU": + # infid = 1 - np.real(g) + # else: + # raise ValueError(f"Unknown fidelity type: {self._fid_type}") + + # infid = 1 - qt.metrics.fidelity(X, self._target) + # print(infid) + # print(X.data) + print(f"Control args: {args}") + + # Store initial state for comparison + + # Your evolution code here... + X = self._solver.run(self._state, [0.0, self._step_duration], args=args).final_state + self._state = X + target_dm = qt.ket2dm(self._target) if self._fid_type == "TRACEDIFF": - diff = X - self._target + diff = X - target_dm # to prevent if/else in qobj.dag() and qobj.tr() diff_dag = qt.Qobj(diff.data.adjoint(), dims=diff.dims) g = 1 / 2 * (diff_dag * diff).data.trace() @@ -177,6 +222,7 @@ def _infid(self, args): elif self._fid_type == "SU": # f_SU (incl global phase) infid = 1 - np.real(g) return infid + "Initialize a first population" def initial_population(self): @@ -307,72 +353,45 @@ def mutate(self, population): return mutated_population - def optimize(self, infidelity, iterations = 1000): - """Implements entire procedure for continuous genetic algorithm optimizer. This is a higher order function - which accepts a black box fitness function. The fitness function which will accept a single chromosome of - N_var variables in the range [-1,1], scale and process them returning a figure of merit which describes how useful said parameter - values were at acheiving the general task. This figure of merit will act as the fitness of the input chromosome. - - Args: - fitness_func (Function): Black box fitness function to evaluate a chromosome fitness. - iterations (Int): The number of generations for which to optimize. 
+ def optimize(self, iterations=1000): + population = self.initial_population() + best_fitness = -np.inf + best_chromosome = None + fitness_history = [] + + for count in range(iterations): + print(f"Generation {count + 1}/{iterations}") + + fitness_ls = [] + for chromosome in population: + alphas = [ + ((chromosome[i] + 1) / 2 * (self._ubound[i] - self._lbound[i]) + self._lbound[i]) + for i in range(len(chromosome)) + ] + args = {f"alpha{i+1}": float(val) for i, val in enumerate(alphas)} # force float + infid_val = self._infid(args) + fitness_ls.append(infid_val) - Returns: - Float: The maximum acheived fitness value from every generation of the optimization. - Vector: The chromosome that acheived the highest fitness. - Vector: The history of maximum fitness improvement for the entire optimization. - - """ - population = self.initial_population() # initialise a random seroth generation - bench = -1e10 # the initial benchmark fitness - we assume fitness >= 0 - done = False # terminal flag - count = 0 # iteration count - maxfit_hist = [] # to record the maximum fitness at each gen during optimizing - - while not done: - if self.workers > 1: - fitness_ls = Parallel(n_jobs=self.workers)(delayed(infidelity)(chromosome) for chromosome in population) - else: - fitness_ls = [] - for chromosome in population: - # The fitness function should be able to take a raw chromosome (whose constituent values will be \in [-1, 1]) - # scale the values to get the true parameter values, then use those parameters to do "stuff" (in our case - # build a control pulse and simulate the dynamics) then return a figure of merit which tells us how good - # this set of parameters has performed (could be fidelity to a target state for example). - # essentially all of the problem under consideration happens inside fitness_func - alphas = [ - ((chromosome + 1) / 2 * (self._ubound[0] - self._lbound[0])) - + self._lbound[0] - for i in range(len(chromosome)) - ] - args = {f"alpha{i+1}": value for i, value in enumerate(alphas)} - infidelity = self._infid(args) - #f = infidelity(chromosome) - fitness_ls.append(infidelity) fitness = np.array(fitness_ls) - max_fit = np.max(fitness) - if max_fit>bench: - print(" EPISODE: {}, average cost: {}, Best Cost: {}".format(count, np.round(np.average(fitness), decimals=4), - np.max(fitness))) - # update benchmark and add max fitness to history - bench = max_fit - maxfit_hist.append(max_fit) - best_index = np.argmax(fitness) # get best index - best_chromosome = population[best_index,:] # so that we can get the best chromosome - - # May want to checkpoint here - if count == iterations: # if max iterations reached - done=True - - survivors, survivor_fitness = self.darwin(population,fitness) + max_fit = np.min(fitness) + fitness_history.append(max_fit) + + if max_fit > best_fitness: + best_fitness = max_fit + best_index = np.argmax(fitness) + best_chromosome = population[best_index, :] + + print(f" Best infidelity: {-max_fit:.6f}, Avg fitness: {np.mean(fitness):.6f}") + + survivors, survivor_fitness = self.darwin(population, fitness) mothers, fathers = self.pairing(survivors, survivor_fitness) offspring = self.mating_procedure(ma=mothers, da=fathers) - unmutated_next_gen = self.build_next_gen(survivors,offspring) + unmutated_next_gen = self.build_next_gen(survivors, offspring) mutated_next_gen = self.mutate(unmutated_next_gen) population = mutated_next_gen - count+=1 - - return max_fit, best_chromosome, maxfit_hist + + return best_fitness, best_chromosome, fitness_history + def 
_save_result(self): """ From feec8c1cf888f23021c6bfaa71b730b4e4dc8ac8 Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Mon, 7 Jul 2025 15:15:05 +0900 Subject: [PATCH 09/16] fix infid fn --- src/qutip_qoc/_genetic.py | 37 ------------------------------------- 1 file changed, 37 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index ee0325d..41c92d8 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -171,43 +171,6 @@ def _infid(self, args): ).final_state self._state = X - # Ensure density matrix - # if not initial.issuper and not initial.isoper: - # initial = qt.ket2dm(initial) - - # try: - # self._solver._H.args = args - # result = self._solver.run(initial, tlist, args=args) - # final_state = result.final_state - # except Exception as e: - # print(f"[WARNING] Solver failed: {e}") - # return 1.0 # max infidelity - - # # Fidelity computation - # if self._fid_type == "TRACEDIFF": - # diff = final_state - target - # diff_dag = qt.Qobj(diff.data.adjoint(), dims=diff.dims) - # g = 0.5 * (diff_dag * diff).data.trace() - # infid = np.real(self._norm_fac * g) - # else: - # g = self._norm_fac * target.overlap(final_state) - # if self._fid_type == "PSU": - # infid = 1 - np.abs(g) - # elif self._fid_type == "SU": - # infid = 1 - np.real(g) - # else: - # raise ValueError(f"Unknown fidelity type: {self._fid_type}") - - # infid = 1 - qt.metrics.fidelity(X, self._target) - # print(infid) - # print(X.data) - print(f"Control args: {args}") - - # Store initial state for comparison - - # Your evolution code here... - X = self._solver.run(self._state, [0.0, self._step_duration], args=args).final_state - self._state = X target_dm = qt.ket2dm(self._target) if self._fid_type == "TRACEDIFF": diff = X - target_dm From 6d8eed0b3b6a652163e60757d47f2e21c308d2cb Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Wed, 9 Jul 2025 15:02:09 +0900 Subject: [PATCH 10/16] make the code working --- src/qutip_qoc/_genetic.py | 105 +++++++++++++++++++++----------------- 1 file changed, 57 insertions(+), 48 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 41c92d8..dde2c7c 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -41,8 +41,6 @@ def __init__( super(_GENETIC, self).__init__() self.objectives = objectives self.control_parameters = control_parameters - self.time_interval = time_interval - self.time_options = time_options self.alg_kwargs = alg_kwargs self._integrator_kwargs = integrator_kwargs self._rtol = self._integrator_kwargs.get("rtol", 1e-5) @@ -55,7 +53,7 @@ def __init__( self.N_parents = int(np.floor(self.N_pop * self.parent_rate)) self.N_survivors = max(1, int(self.N_parents * alg_kwargs.get('survival_rate', 1.0))) self.N_offspring = self.N_pop - self.N_survivors - self.mutation_rate = alg_kwargs.get('mutation_rate', 0.1) + self.mutation_rate = alg_kwargs.get('mutation_rate', 0.2) self.workers = alg_kwargs.get('workers', 1) self._Hd_lst, self._Hc_lst = [], [] @@ -138,8 +136,10 @@ def create_pulse_func(idx): self._integrator_kwargs = integrator_kwargs self._rtol = self._integrator_kwargs.get("rtol", 1e-5) self._atol = self._integrator_kwargs.get("atol", 1e-5) - - self._step_duration = 10 + #self.max_episode_time = time_interval.evo_time # maximum time for an episode + #self.max_steps = time_interval.n_tslots # maximum number of steps in an episode + self._step_duration = 2 + # create the solver if self._Hd_lst[0].issuper: self._fid_type = self._alg_kwargs.get("fid_type", "TRACEDIFF") @@ -169,25 +169,26 @@ def 
_infid(self, args): X = self._solver.run( self._state, [0.0, self._step_duration], args=args ).final_state - self._state = X + #self._state = X - target_dm = qt.ket2dm(self._target) - if self._fid_type == "TRACEDIFF": - diff = X - target_dm - # to prevent if/else in qobj.dag() and qobj.tr() - diff_dag = qt.Qobj(diff.data.adjoint(), dims=diff.dims) - g = 1 / 2 * (diff_dag * diff).data.trace() - infid = np.real(self._norm_fac * g) + # target_dm = qt.ket2dm(self._target) + # if self._fid_type == "TRACEDIFF": + # diff = X - target_dm + # # to prevent if/else in qobj.dag() and qobj.tr() + # diff_dag = qt.Qobj(diff.data.adjoint(), dims=diff.dims) + # g = 1 / 2 * (diff_dag * diff).data.trace() + # infid = np.real(self._norm_fac * g) + # else: + # g = self._norm_fac * self._target.overlap(X) + # if self._fid_type == "PSU": # f_PSU (drop global phase) + # infid = 1 - np.abs(g) + # elif self._fid_type == "SU": # f_SU (incl global phase) + # infid = 1 - np.real(g) + if self._target.isket: + return 1-qt.fidelity(self._target, X) else: - g = self._norm_fac * self._target.overlap(X) - if self._fid_type == "PSU": # f_PSU (drop global phase) - infid = 1 - np.abs(g) - elif self._fid_type == "SU": # f_SU (incl global phase) - infid = 1 - np.real(g) - return infid + return 1-qt.fidelity(qt.ket2dm(self._target), X) - - "Initialize a first population" def initial_population(self): """Randomly generates an initial popuation to act as generation 0. @@ -312,50 +313,58 @@ def mutate(self, population): mutated_population = np.copy(population) # for some reason clipped normal noise works better. # mutated_population[row_indices, col_indices] = np.random.uniform(-1,1,size=number_of_mutations) # for uniform random mutation - mutated_population[row_indices, col_indices] = np.clip(population[row_indices, col_indices] + np.random.normal(loc=0, scale=0.2, size=number_of_mutations), a_min=-1, a_max=1) # for normal mutation + mutated_population[row_indices, col_indices] = np.clip( + population[row_indices, col_indices] + np.random.normal(loc=0, scale=0.2, size=number_of_mutations), a_min=-1, a_max=1) return mutated_population def optimize(self, iterations=1000): - population = self.initial_population() - best_fitness = -np.inf - best_chromosome = None - fitness_history = [] - - for count in range(iterations): - print(f"Generation {count + 1}/{iterations}") - + population = self.initial_population() # initialise a random seroth generation + bench = -1e10 # the initial benchmark fitness - we assume fitness >= 0 + done = False # terminal flag + count = 0 # iteration count + maxfit_hist = [] # to record the maximum fitness at each gen during optimizing + while not done: fitness_ls = [] for chromosome in population: + # The fitness function should be able to take a raw chromosome (whose constituent values will be \in [-1, 1]) + # scale the values to get the true parameter values, then use those parameters to do "stuff" (in our case + # build a control pulse and simulate the dynamics) then return a figure of merit which tells us how good + # this set of parameters has performed (could be fidelity to a target state for example). 
+ # essentially all of the problem under consideration happens inside fitness_func alphas = [ ((chromosome[i] + 1) / 2 * (self._ubound[i] - self._lbound[i]) + self._lbound[i]) for i in range(len(chromosome)) ] - args = {f"alpha{i+1}": float(val) for i, val in enumerate(alphas)} # force float - infid_val = self._infid(args) - fitness_ls.append(infid_val) - + args = {f"alpha{i+1}": float(val) for i, val in enumerate(alphas)} + f = 1-self._infid(args) + fitness_ls.append(f) fitness = np.array(fitness_ls) - max_fit = np.min(fitness) - fitness_history.append(max_fit) - - if max_fit > best_fitness: - best_fitness = max_fit - best_index = np.argmax(fitness) - best_chromosome = population[best_index, :] - - print(f" Best infidelity: {-max_fit:.6f}, Avg fitness: {np.mean(fitness):.6f}") - - survivors, survivor_fitness = self.darwin(population, fitness) + max_fit = np.max(fitness) + if max_fit>bench: + print(" EPISODE: {}, average cost: {}, Best Cost: {}".format(count, np.round(np.average(fitness), decimals=4), + np.max(fitness))) + # update benchmark and add max fitness to history + bench = max_fit + maxfit_hist.append(max_fit) + best_index = np.argmax(fitness) # get best index + best_chromosome = population[best_index,:] # so that we can get the best chromosome + + # May want to checkpoint here + if count == iterations: # if max iterations reached + done=True + + survivors, survivor_fitness = self.darwin(population,fitness) mothers, fathers = self.pairing(survivors, survivor_fitness) offspring = self.mating_procedure(ma=mothers, da=fathers) - unmutated_next_gen = self.build_next_gen(survivors, offspring) + unmutated_next_gen = self.build_next_gen(survivors,offspring) mutated_next_gen = self.mutate(unmutated_next_gen) population = mutated_next_gen + count+=1 + + return max_fit, best_chromosome, maxfit_hist - return best_fitness, best_chromosome, fitness_history - def _save_result(self): """ Save the results of the optimization process, including the optimized From 2438a168516314586bb750522796366740beba6b Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Tue, 15 Jul 2025 13:54:26 +0900 Subject: [PATCH 11/16] fix genetic algorithm --- src/qutip_qoc/_genetic.py | 550 +++++++++++++------------------------- tests/test_result.py | 5 +- 2 files changed, 188 insertions(+), 367 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index dde2c7c..e09490f 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -1,16 +1,9 @@ import numpy as np -from joblib import Parallel, delayed -import scipy as sci -import time -from qutip_qoc.result import Result import qutip as qt +import time +from qutip_qoc.result import Result -class _GENETIC(): - ### Template for a genetic algorithm optimizer - ### Copied from GOAT or RL - ### Modified to be a genetic algorithm optimizer - ### Contributed by Jonathan Brown - +class _GENETIC: def __init__( self, objectives, @@ -23,386 +16,211 @@ def __init__( integrator_kwargs, qtrl_optimizers, ): - """ - Initialize the genetic algorithm with the given parameters. - - Args: - objectives: The objectives for the optimization. - control_parameters: Parameters for the control. - time_interval: The time interval for the optimization. - time_options: Options related to time discretization. - alg_kwargs: Additional arguments for the algorithm. - optimizer_kwargs: Arguments for the optimizer. - minimizer_kwargs: Arguments for the minimizer. - integrator_kwargs: Arguments for the integrator. - qtrl_optimizers: Quantum control optimizers. 
- """ - - super(_GENETIC, self).__init__() - self.objectives = objectives - self.control_parameters = control_parameters - self.alg_kwargs = alg_kwargs - self._integrator_kwargs = integrator_kwargs - self._rtol = self._integrator_kwargs.get("rtol", 1e-5) - self._atol = self._integrator_kwargs.get("atol", 1e-5) - - # Initialize the genetic algorithm parameters - self.N_pop = alg_kwargs.get('population_size', 50) - self.N_var = alg_kwargs.get('number_variables', 10) - self.parent_rate = alg_kwargs.get('parent_rate', 0.5) - self.N_parents = int(np.floor(self.N_pop * self.parent_rate)) - self.N_survivors = max(1, int(self.N_parents * alg_kwargs.get('survival_rate', 1.0))) - self.N_offspring = self.N_pop - self.N_survivors - self.mutation_rate = alg_kwargs.get('mutation_rate', 0.2) - self.workers = alg_kwargs.get('workers', 1) - - self._Hd_lst, self._Hc_lst = [], [] - for objective in objectives: - # extract drift and control Hamiltonians from the objective - self._Hd_lst.append(objective.H[0]) - self._Hc_lst.append( - [H[0] if isinstance(H, list) else H for H in objective.H[1:]] - ) - - def create_pulse_func(idx): - """ - Create a control pulse lambda function for a given index. - """ - return lambda t, args: self._pulse(t, args, idx + 1) - - # create the QobjEvo with Hd, Hc and controls(args) - self._H_lst = [self._Hd_lst[0]] - dummy_args = {f"alpha{i+1}": 1.0 for i in range(len(self._Hc_lst[0]))} - for i, Hc in enumerate(self._Hc_lst[0]): - self._H_lst.append([Hc, create_pulse_func(i)]) - self._H = qt.QobjEvo(self._H_lst, args=dummy_args) - - self.shorter_pulses = alg_kwargs.get( - "shorter_pulses", False - ) # lengthen the training to look for pulses of shorter duration, therefore episodes with fewer steps - - # extract bounds for control_parameters - bounds = [] - for key in control_parameters.keys(): - bounds.append(control_parameters[key].get("bounds")) - self._lbound = [b[0][0] for b in bounds] - self._ubound = [b[0][1] for b in bounds] + self._objective = objectives[0] + self._Hd = self._objective.H[0] + self._Hc_lst = [H[0] if isinstance(H, list) else H for H in self._objective.H[1:]] + self._initial = self._objective.initial + self._target = self._objective.target + self._norm_fac = 1 / self._target.norm() + + self._evo_time = time_interval.evo_time + self.N_steps = time_interval.n_tslots + self.N_controls = len(self._Hc_lst) + self.N_var = self.N_controls * self.N_steps self._alg_kwargs = alg_kwargs - - self._initial = objectives[0].initial - self._target = objectives[0].target - self._state = qt.ket2dm(self._initial) - self._dim = self._initial.shape[0] + self.N_pop = alg_kwargs.get("population_size", 100) + self.generations = alg_kwargs.get("generations", 100) + self.mutation_rate = alg_kwargs.get("mutation_rate", 0.3) + self.fid_err_targ = alg_kwargs.get("fid_err_targ", 1e-4) + self._stagnation_patience = 20 # Internally fixed - self._result = Result( - objectives=objectives, - time_interval=time_interval, - start_local_time=time.time(), # initial optimization time - n_iters=0, # Number of iterations(episodes) until convergence - iter_seconds=[], # list containing the time taken for each iteration(episode) of the optimization - var_time=True, # Whether the optimization was performed with variable time - guess_params=[], - ) + self._integrator_kwargs = integrator_kwargs + self._fid_type = alg_kwargs.get("fid_type", "PSU") + + self._generator = self._prepare_generator() + self._solver = qt.MESolver(H=self._generator, options=self._integrator_kwargs) \ + if self._Hd.issuper else 
qt.SESolver(H=self._generator, options=self._integrator_kwargs) - self._backup_result = Result( # used as a backup in case the algorithm with shorter_pulses does not find an episode with infid best_fit: + best_fit = max_fit + best_chrom = population[np.argmax(fitness)] + no_improvement_counter = 0 + else: + no_improvement_counter += 1 + + self._result.infidelity = min(self._result.infidelity, -max_fit) + + if -max_fit <= self.fid_err_targ: + break + + if no_improvement_counter >= self._stagnation_patience: + break + + survivors, survivor_fit = self.darwin(population, fitness) + mothers, fathers = self.pairing(survivors, survivor_fit) + offspring = self.mating_procedure(mothers, fathers) + population = self.build_next_gen(survivors, offspring) + population = self.mutate(population) + + self._result.optimized_params = best_chrom.tolist() + self._result.infidelity = -best_fit + self._result.end_local_time = time.time() + self._result.n_iters = len(history) + self._result.new_params = self._result.optimized_params + self._result._optimized_controls = best_chrom.tolist() + self._result._optimized_H = [self._generator] + self._result._final_states = self._result._final_states # expose final_states + self._result.guess_params = self._result.guess_params or [] + self._result.var_time = False + + self._result.message = ( + f"Stopped early: reached infidelity target {self.fid_err_targ}" + if -best_fit <= self.fid_err_targ else + f"Stopped due to stagnation after {self._stagnation_patience} generations" + if no_improvement_counter >= self._stagnation_patience else + "Optimization completed successfully" + ) + return self._result + def result(self): + self._result.start_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._result.start_local_time) + ) + self._result.end_local_time = time.strftime( + "%Y-%m-%d %H:%M:%S", time.localtime(self._result.end_local_time) + ) + return self._result - def optimize(self, iterations=1000): - population = self.initial_population() # initialise a random seroth generation - bench = -1e10 # the initial benchmark fitness - we assume fitness >= 0 - done = False # terminal flag - count = 0 # iteration count - maxfit_hist = [] # to record the maximum fitness at each gen during optimizing - while not done: - fitness_ls = [] - for chromosome in population: - # The fitness function should be able to take a raw chromosome (whose constituent values will be \in [-1, 1]) - # scale the values to get the true parameter values, then use those parameters to do "stuff" (in our case - # build a control pulse and simulate the dynamics) then return a figure of merit which tells us how good - # this set of parameters has performed (could be fidelity to a target state for example). 
- # essentially all of the problem under consideration happens inside fitness_func - alphas = [ - ((chromosome[i] + 1) / 2 * (self._ubound[i] - self._lbound[i]) + self._lbound[i]) - for i in range(len(chromosome)) - ] - args = {f"alpha{i+1}": float(val) for i, val in enumerate(alphas)} - f = 1-self._infid(args) - fitness_ls.append(f) - fitness = np.array(fitness_ls) - max_fit = np.max(fitness) - if max_fit>bench: - print(" EPISODE: {}, average cost: {}, Best Cost: {}".format(count, np.round(np.average(fitness), decimals=4), - np.max(fitness))) - # update benchmark and add max fitness to history - bench = max_fit - maxfit_hist.append(max_fit) - best_index = np.argmax(fitness) # get best index - best_chromosome = population[best_index,:] # so that we can get the best chromosome - - # May want to checkpoint here - if count == iterations: # if max iterations reached - done=True - - survivors, survivor_fitness = self.darwin(population,fitness) - mothers, fathers = self.pairing(survivors, survivor_fitness) - offspring = self.mating_procedure(ma=mothers, da=fathers) - unmutated_next_gen = self.build_next_gen(survivors,offspring) - mutated_next_gen = self.mutate(unmutated_next_gen) - population = mutated_next_gen - count+=1 - - return max_fit, best_chromosome, maxfit_hist - - - def _save_result(self): - """ - Save the results of the optimization process, including the optimized - pulse sequences, final states, and performance metrics. - """ - result_obj = self._backup_result if self._use_backup_result else self._result - - if self._use_backup_result: - self._backup_result.iter_seconds = self._result.iter_seconds.copy() - self._backup_result._final_states = self._result._final_states.copy() - self._backup_result.infidelity = self._result.infidelity - - result_obj.end_local_time = time.time() - result_obj.n_iters = len(self._result.iter_seconds) - result_obj.optimized_params = [ - self._result.total_seconds - ] # If var_time is True, the last parameter is the evolution time - result_obj._guess_controls = [] - result_obj._optimized_H = [self._H] - def result(self): - """ - Final conversions and return of optimization results - """ - if self._use_backup_result: - self._backup_result.start_local_time = time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(self._backup_result.start_local_time) - ) - self._backup_result.end_local_time = time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(self._backup_result.end_local_time) - ) - return self._backup_result - else: - self._save_result() - self._result.start_local_time = time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(self._result.start_local_time) - ) - self._result.end_local_time = time.strftime( - "%Y-%m-%d %H:%M:%S", time.localtime(self._result.end_local_time) - ) - return self._result \ No newline at end of file + +from qutip_qoc import Objective, _TimeInterval + +initial = qt.basis(2, 0) +target = (qt.basis(2, 0) + qt.basis(2, 1)).unit() +H_d = qt.sigmaz() +H_c = [qt.sigmax()] + +objective = Objective(H=[H_d] + H_c, initial=initial, target=target) +time_interval = _TimeInterval(evo_time=1.0, n_tslots=20) + +control_parameters = {"guess": [0.0] * 20} +alg_kwargs = { + "population_size": 50, + "generations": 100, + "mutation_rate": 0.3, + "fid_err_targ": 1e-3 +} +integrator_kwargs = {"rtol": 1e-5, "atol": 1e-6} + +ga = _GENETIC(objectives=[objective], + control_parameters=control_parameters, + time_interval=time_interval, + time_options=None, + alg_kwargs=alg_kwargs, + optimizer_kwargs=None, + minimizer_kwargs=None, + 
integrator_kwargs=integrator_kwargs, + qtrl_optimizers=None) + +result = ga.optimize() +print("Best fidelity:", 1 - result.infidelity) diff --git a/tests/test_result.py b/tests/test_result.py index 00f3b7b..3afad6f 100644 --- a/tests/test_result.py +++ b/tests/test_result.py @@ -216,7 +216,9 @@ def sin_z_jax(t, r, **kwargs): state2state_genetic = Case( objectives=[Objective(initial, H, target)], - control_parameters={"bounds": [-13, 13]}, # TODO: for now only consider bounds + control_parameters={ + "p": {"bounds": [(-13, 13)]}, + }, tlist=np.linspace(0, 10, 100), # TODO: derive single step duration and max evo time / max num steps from this algorithm_kwargs={ "fid_err_targ": 0.01, @@ -245,6 +247,7 @@ def sin_z_jax(t, r, **kwargs): pytest.param(state2state_goat, id="State to state (GOAT)"), pytest.param(state2state_jax, id="State to state (JAX)"), pytest.param(state2state_rl, id="State to state (RL)"), + pytest.param(state2state_genetic, id="State to state (Genetic)"), pytest.param(unitary_genetic, id="Unitary (Genetic)"), pytest.param(unitary_rl, id="Unitary (RL)"), ] From 2a426b3af89e98e40c66bf159990aa55e35da11a Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Tue, 15 Jul 2025 14:01:57 +0900 Subject: [PATCH 12/16] fix merge issues and remove unused code --- src/qutip_qoc/_genetic.py | 34 --------------------- src/qutip_qoc/pulse_optim.py | 58 ++++++++++++++++++++++++++++++++++-- 2 files changed, 55 insertions(+), 37 deletions(-) diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index e09490f..cebb89c 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -190,37 +190,3 @@ def result(self): "%Y-%m-%d %H:%M:%S", time.localtime(self._result.end_local_time) ) return self._result - - - -from qutip_qoc import Objective, _TimeInterval - -initial = qt.basis(2, 0) -target = (qt.basis(2, 0) + qt.basis(2, 1)).unit() -H_d = qt.sigmaz() -H_c = [qt.sigmax()] - -objective = Objective(H=[H_d] + H_c, initial=initial, target=target) -time_interval = _TimeInterval(evo_time=1.0, n_tslots=20) - -control_parameters = {"guess": [0.0] * 20} -alg_kwargs = { - "population_size": 50, - "generations": 100, - "mutation_rate": 0.3, - "fid_err_targ": 1e-3 -} -integrator_kwargs = {"rtol": 1e-5, "atol": 1e-6} - -ga = _GENETIC(objectives=[objective], - control_parameters=control_parameters, - time_interval=time_interval, - time_options=None, - alg_kwargs=alg_kwargs, - optimizer_kwargs=None, - minimizer_kwargs=None, - integrator_kwargs=integrator_kwargs, - qtrl_optimizers=None) - -result = ga.optimize() -print("Best fidelity:", 1 - result.infidelity) diff --git a/src/qutip_qoc/pulse_optim.py b/src/qutip_qoc/pulse_optim.py index 3cb4fb2..4b70658 100644 --- a/src/qutip_qoc/pulse_optim.py +++ b/src/qutip_qoc/pulse_optim.py @@ -10,9 +10,16 @@ from qutip_qoc._optimizer import _global_local_optimization from qutip_qoc._time import _TimeInterval -from qutip_qoc._rl import _RL + +import qutip as qt from qutip_qoc._genetic import _GENETIC +try: + from qutip_qoc._rl import _RL + _rl_available = True +except ImportError: + _rl_available = False + __all__ = ["optimize_pulses"] @@ -24,6 +31,7 @@ def optimize_pulses( optimizer_kwargs=None, minimizer_kwargs=None, integrator_kwargs=None, + optimization_type=None, ): """ Run GOAT, JOPT, GRAPE, CRAB or RL optimization. @@ -120,6 +128,11 @@ def optimize_pulses( Options for the solver, see :obj:`MESolver.options` and `Integrator <./classes.html#classes-ode>`_ for a list of all options. + optimization_type : str, optional + Type of optimization. 
By default, QuTiP-QOC will try to automatically determine + whether this is a *state transfer* or a *gate synthesis* problem. Set this + flag to ``"state_transfer"`` or ``"gate_synthesis"`` to set the mode manually. + Returns ------- result : :class:`qutip_qoc.Result` @@ -183,10 +196,43 @@ def optimize_pulses( "maxiter": algorithm_kwargs.get("max_iter", 1000), "gtol": algorithm_kwargs.get("min_grad", 0.0 if alg == "CRAB" else 1e-8), } + # Iterate over objectives and convert initial and target states based on the optimization type + for objective in objectives: + H_list = objective.H if isinstance(objective.H, list) else [objective.H] + if any(qt.issuper(H_i) for H_i in H_list): + if isinstance(optimization_type, str) and optimization_type.lower() == "state_transfer": + if qt.isket(objective.initial): + objective.initial = qt.operator_to_vector(qt.ket2dm(objective.initial)) + elif qt.isoper(objective.initial): + objective.initial = qt.operator_to_vector(objective.initial) + if qt.isket(objective.target): + objective.target = qt.operator_to_vector(qt.ket2dm(objective.target)) + elif qt.isoper(objective.target): + objective.target = qt.operator_to_vector(objective.target) + elif isinstance(optimization_type, str) and optimization_type.lower() == "gate_synthesis": + objective.initial = qt.to_super(objective.initial) + objective.target = qt.to_super(objective.target) + elif optimization_type is None: + if qt.isoper(objective.initial) and qt.isoper(objective.target): + if np.isclose((objective.initial).tr(), 1) and np.isclose((objective.target).tr(), 1): + objective.initial = qt.operator_to_vector(objective.initial) + objective.target = qt.operator_to_vector(objective.target) + else: + objective.initial = qt.to_super(objective.initial) + objective.target = qt.to_super(objective.target) + if qt.isket(objective.initial): + objective.initial = qt.operator_to_vector(qt.ket2dm(objective.initial)) + if qt.isket(objective.target): + objective.target = qt.operator_to_vector(qt.ket2dm(objective.target)) # prepare qtrl optimizers qtrl_optimizers = [] if alg == "CRAB" or alg == "GRAPE": + dyn_type = "GEN_MAT" + for objective in objectives: + if any(qt.isoper(H_i) for H_i in (objective.H if isinstance(objective.H, list) else [objective.H])): + dyn_type = "UNIT" + if alg == "GRAPE": # algorithm specific kwargs use_as_amps = True minimizer_kwargs.setdefault("method", "L-BFGS-B") # gradient @@ -243,7 +289,7 @@ def optimize_pulses( "accuracy_factor": None, # deprecated "alg_params": alg_params, "optim_params": algorithm_kwargs.get("optim_params", None), - "dyn_type": algorithm_kwargs.get("dyn_type", "GEN_MAT"), + "dyn_type": algorithm_kwargs.get("dyn_type", dyn_type), "dyn_params": algorithm_kwargs.get("dyn_params", None), "prop_type": algorithm_kwargs.get( "prop_type", "DEF" @@ -354,6 +400,12 @@ def optimize_pulses( qtrl_optimizers.append(qtrl_optimizer) elif alg == "RL": + if not _rl_available: + raise ImportError( + "The required dependencies (gymnasium, stable-baselines3) for " + "the reinforcement learning algorithm are not available." 
+ ) + rl_env = _RL( objectives, control_parameters, @@ -393,4 +445,4 @@ def optimize_pulses( minimizer_kwargs, integrator_kwargs, qtrl_optimizers, - ) + ) \ No newline at end of file From 4dccfd52a5dcc1f47be244a0786588fa44065d0b Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Tue, 15 Jul 2025 14:27:07 +0900 Subject: [PATCH 13/16] add documentation --- doc/guide/guide-control.rst | 96 +++++++++++++++++++++++++++++++++++++ src/qutip_qoc/_genetic.py | 8 ++++ 2 files changed, 104 insertions(+) diff --git a/doc/guide/guide-control.rst b/doc/guide/guide-control.rst index 915b6b6..e1a0aa5 100644 --- a/doc/guide/guide-control.rst +++ b/doc/guide/guide-control.rst @@ -310,6 +310,102 @@ Eventually, the optimization for a desired `fid_err_targ` can be started by call }, ) +The Genetic Algorithm (GA) +========================== + +The Genetic Algorithm (GA) is a global optimization technique inspired by natural selection. +Unlike gradient-based methods like GRAPE or CRAB, GA explores the solution space stochastically, +making it robust against local minima and suitable for problems with non-differentiable or noisy objectives. + +In QuTiP, the GA-based optimizer evolves a population of candidate control pulses across multiple +generations to minimize the infidelity between the system's final and target states. + +The GA optimization consists of the following components: + +**Population**: + A collection of candidate solutions (chromosomes), where each chromosome encodes a full set + of control amplitudes over time for the given control Hamiltonians. + +**Fitness Function**: + The fitness of each candidate is evaluated using a fidelity-based measure, such as: + + - **PSU (Projective State Overlap)**: + :math:`1 - |\langle \psi_{\text{target}} | \psi_{\text{final}} \rangle|` + - **TRACEDIFF**: + Trace distance between final and target density matrices. + +**Genetic Operations**: + +- **Selection**: + A subset of the best-performing candidates (based on fitness) are chosen to survive. + +- **Crossover (Mating)**: + New candidates are generated by combining genes from selected parents using arithmetic crossover. + +- **Mutation**: + Random perturbations are added to the new candidates' genes to maintain diversity and escape local minima. + +This process continues until either a target fidelity error is reached or a fixed number of generations +have passed without improvement (stagnation criterion). + +Each generation represents a full evaluation of the population, making the method inherently parallelizable +and effective in high-dimensional control landscapes. + +In QuTiP, the GA optimization is implemented via the ``_GENETIC`` class, and can be invoked using the +standard ``optimize_pulses`` interface by setting the algorithm to ``"GENETIC"``. + +Optimal Quantum Control in QuTiP (Genetic Algorithm) +==================================================== + +Defining a control problem in QuTiP using the Genetic Algorithm follows the same pattern as with other algorithms. + +.. 
code-block:: python + + import qutip as qt + import qutip_qoc as qoc + import numpy as np + + # state to state transfer + initial = qt.basis(2, 0) + target = qt.basis(2, 1) + + # define the drift and control Hamiltonians + drift = qt.sigmaz() + controls = [qt.sigmax(), qt.sigmay()] + + H = [drift] + controls + + # define the objective + objective = qoc.Objective(initial, H, target) + + # discretized time grid (e.g., 100 steps over 10 units of time) + tlist = np.linspace(0, 10, 100) + + # define control parameter bounds (same for all controls in GA) + control_parameters = { + "p": {"bounds": [(-1.0, 1.0)]} + } + + # set genetic algorithm hyperparameters + algorithm_kwargs = { + "alg": "GENETIC", + "population_size": 50, + "generations": 100, + "mutation_rate": 0.2, + "fid_err_targ": 1e-3, + } + + # run the optimization + result = qoc.optimize_pulses( + objectives=[objective], + control_parameters=control_parameters, + tlist=tlist, + algorithm_kwargs=algorithm_kwargs, + ) + + print("Final infidelity:", result.infidelity) + print("Best control parameters:", result.optimized_params) + .. TODO: add examples Examples for Liouvillian dynamics and multi-objective optimization will follow soon. diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index cebb89c..0aa9cda 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -4,6 +4,14 @@ from qutip_qoc.result import Result class _GENETIC: + """ + Genetic Algorithm-based optimizer for quantum control problems. + + This class implements a global optimization routine using a Genetic Algorithm + to optimize control pulses that drive a quantum system from an initial state + to a target state (or unitary). + """ + def __init__( self, objectives, From a944a89870a6e67eecf0d3e0f2f7d070f7655cd9 Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Fri, 18 Jul 2025 13:59:49 +0900 Subject: [PATCH 14/16] change GENETIC to Genetic --- doc/guide/guide-control.rst | 6 ++-- src/qutip_qoc/_genetic.py | 62 ++++++++++++++++++++++++++----------- tests/test_result.py | 9 ++---- 3 files changed, 49 insertions(+), 28 deletions(-) diff --git a/doc/guide/guide-control.rst b/doc/guide/guide-control.rst index e1a0aa5..c48f5c1 100644 --- a/doc/guide/guide-control.rst +++ b/doc/guide/guide-control.rst @@ -351,8 +351,8 @@ have passed without improvement (stagnation criterion). Each generation represents a full evaluation of the population, making the method inherently parallelizable and effective in high-dimensional control landscapes. -In QuTiP, the GA optimization is implemented via the ``_GENETIC`` class, and can be invoked using the -standard ``optimize_pulses`` interface by setting the algorithm to ``"GENETIC"``. +In QuTiP, the GA optimization is implemented via the ``_Genetic`` class, and can be invoked using the +standard ``optimize_pulses`` interface by setting the algorithm to ``"Genetic"``. 
Optimal Quantum Control in QuTiP (Genetic Algorithm) ==================================================== @@ -388,7 +388,7 @@ Defining a control problem in QuTiP using the Genetic Algorithm follows the same # set genetic algorithm hyperparameters algorithm_kwargs = { - "alg": "GENETIC", + "alg": "Genetic", "population_size": 50, "generations": 100, "mutation_rate": 0.2, diff --git a/src/qutip_qoc/_genetic.py b/src/qutip_qoc/_genetic.py index 0aa9cda..ede3ee8 100644 --- a/src/qutip_qoc/_genetic.py +++ b/src/qutip_qoc/_genetic.py @@ -3,13 +3,16 @@ import time from qutip_qoc.result import Result -class _GENETIC: + +class _Genetic: """ Genetic Algorithm-based optimizer for quantum control problems. This class implements a global optimization routine using a Genetic Algorithm - to optimize control pulses that drive a quantum system from an initial state - to a target state (or unitary). + to optimize control pulses that drive a quantum system from an initial state + to a target state (or unitary). + + Based on the code from Jonathan Brown """ def __init__( @@ -26,7 +29,9 @@ def __init__( ): self._objective = objectives[0] self._Hd = self._objective.H[0] - self._Hc_lst = [H[0] if isinstance(H, list) else H for H in self._objective.H[1:]] + self._Hc_lst = [ + H[0] if isinstance(H, list) else H for H in self._objective.H[1:] + ] self._initial = self._objective.initial self._target = self._objective.target self._norm_fac = 1 / self._target.norm() @@ -41,14 +46,17 @@ def __init__( self.generations = alg_kwargs.get("generations", 100) self.mutation_rate = alg_kwargs.get("mutation_rate", 0.3) self.fid_err_targ = alg_kwargs.get("fid_err_targ", 1e-4) - self._stagnation_patience = 20 # Internally fixed + self._stagnation_patience = 50 # Internally fixed self._integrator_kwargs = integrator_kwargs self._fid_type = alg_kwargs.get("fid_type", "PSU") self._generator = self._prepare_generator() - self._solver = qt.MESolver(H=self._generator, options=self._integrator_kwargs) \ - if self._Hd.issuper else qt.SESolver(H=self._generator, options=self._integrator_kwargs) + self._solver = ( + qt.MESolver(H=self._generator, options=self._integrator_kwargs) + if self._Hd.issuper + else qt.SESolver(H=self._generator, options=self._integrator_kwargs) + ) self._result = Result( objectives=[self._objective], @@ -64,10 +72,18 @@ def __init__( self._result._final_states = [] def _prepare_generator(self): - args = {f"p{i+1}_{j}": 0.0 for i in range(self.N_controls) for j in range(self.N_steps)} + args = { + f"p{i+1}_{j}": 0.0 + for i in range(self.N_controls) + for j in range(self.N_steps) + } def make_coeff(i, j): - return lambda t, args: args[f"p{i+1}_{j}"] if int(t / (self._evo_time / self.N_steps)) == j else 0 + return lambda t, args: ( + args[f"p{i+1}_{j}"] + if int(t / (self._evo_time / self.N_steps)) == j + else 0 + ) H_qev = [self._Hd] for i, Hc in enumerate(self._Hc_lst): @@ -77,7 +93,11 @@ def make_coeff(i, j): return qt.QobjEvo(H_qev, args=args) def _infid(self, params): - args = {f"p{i+1}_{j}": params[i * self.N_steps + j] for i in range(self.N_controls) for j in range(self.N_steps)} + args = { + f"p{i+1}_{j}": params[i * self.N_steps + j] + for i in range(self.N_controls) + for j in range(self.N_steps) + } result = self._solver.run(self._initial, [0.0, self._evo_time], args=args) final_state = result.final_state self._result._final_states.append(final_state) @@ -87,7 +107,9 @@ def _infid(self, params): fid = 0.5 * np.real((diff.dag() * diff).tr()) else: overlap = self._norm_fac * 
self._target.overlap(final_state) - fid = 1 - np.abs(overlap) if self._fid_type == "PSU" else 1 - np.real(overlap) + fid = ( + 1 - np.abs(overlap) if self._fid_type == "PSU" else 1 - np.real(overlap) + ) return fid @@ -101,7 +123,7 @@ def initial_population(self): return np.random.uniform(-1, 1, (self.N_pop, self.N_var)) def darwin(self, population, fitness): - indices = np.argsort(-fitness)[:self.N_pop // 2] + indices = np.argsort(-fitness)[: self.N_pop // 2] return population[indices], fitness[indices] def pairing(self, survivors, survivor_fitness): @@ -130,7 +152,9 @@ def build_next_gen(self, survivors, offspring): return np.vstack((survivors, offspring)) def mutate(self, population): - n_mut = int((population.shape[0] - 1) * population.shape[1] * self.mutation_rate) + n_mut = int( + (population.shape[0] - 1) * population.shape[1] * self.mutation_rate + ) row = np.random.randint(1, population.shape[0], size=n_mut) col = np.random.randint(0, population.shape[1], size=n_mut) population[row, col] += np.random.normal(0, 0.3, size=n_mut) @@ -183,13 +207,15 @@ def optimize(self): self._result.message = ( f"Stopped early: reached infidelity target {self.fid_err_targ}" - if -best_fit <= self.fid_err_targ else - f"Stopped due to stagnation after {self._stagnation_patience} generations" - if no_improvement_counter >= self._stagnation_patience else - "Optimization completed successfully" + if -best_fit <= self.fid_err_targ + else ( + f"Stopped due to stagnation after {self._stagnation_patience} generations" + if no_improvement_counter >= self._stagnation_patience + else "Optimization completed successfully" + ) ) return self._result - + def result(self): self._result.start_local_time = time.strftime( "%Y-%m-%d %H:%M:%S", time.localtime(self._result.start_local_time) diff --git a/tests/test_result.py b/tests/test_result.py index 3afad6f..d316e12 100644 --- a/tests/test_result.py +++ b/tests/test_result.py @@ -200,8 +200,6 @@ def sin_z_jax(t, r, **kwargs): # --------------------------- Genetic --------------------------- -# TODO: this is the input for optimiz_pulses() function -# you can use this routine to test your implementation # state to state transfer init = qt.basis(2, 0) @@ -219,16 +217,15 @@ def sin_z_jax(t, r, **kwargs): control_parameters={ "p": {"bounds": [(-13, 13)]}, }, - tlist=np.linspace(0, 10, 100), # TODO: derive single step duration and max evo time / max num steps from this + tlist=np.linspace(0, 10, 100), algorithm_kwargs={ "fid_err_targ": 0.01, - "alg": "GENETIC", + "alg": "Genetic", "max_iter": 100, }, optimizer_kwargs={}, ) -# TODO: no big difference for unitary evolution initial = qt.qeye(2) # Identity target = qt.gates.hadamard_transform() @@ -237,8 +234,6 @@ def sin_z_jax(t, r, **kwargs): objectives=[Objective(initial, H, target)], ) - - @pytest.fixture( params=[ pytest.param(state2state_grape, id="State to state (GRAPE)"), From 7a167e51b1df4e9131b5754ff2645825f3e8398e Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Fri, 18 Jul 2025 14:05:01 +0900 Subject: [PATCH 15/16] add refs --- doc/guide/guide-control.rst | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/doc/guide/guide-control.rst b/doc/guide/guide-control.rst index c48f5c1..09caedb 100644 --- a/doc/guide/guide-control.rst +++ b/doc/guide/guide-control.rst @@ -406,6 +406,11 @@ Defining a control problem in QuTiP using the Genetic Algorithm follows the same print("Final infidelity:", result.infidelity) print("Best control parameters:", result.optimized_params) +References +========== + +.. [Brown2023] J. Brown, M. 
Paternostro, and A. Ferraro, "Optimal quantum control via genetic algorithms for quantum state engineering in driven‑resonator mediated networks," *Quantum Sci. Technol.*, vol. 8, no. 2, p. 025004, 2023. https://doi.org/10.1088/2058-9565/acb2f2 + .. TODO: add examples Examples for Liouvillian dynamics and multi-objective optimization will follow soon. From 601f3b9a602649e473eecd4f69c51babc9be2a8a Mon Sep 17 00:00:00 2001 From: rochisha0 Date: Fri, 18 Jul 2025 14:13:56 +0900 Subject: [PATCH 16/16] GENETIC to genetic --- src/qutip_qoc/pulse_optim.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/qutip_qoc/pulse_optim.py b/src/qutip_qoc/pulse_optim.py index 4b70658..6d7079f 100644 --- a/src/qutip_qoc/pulse_optim.py +++ b/src/qutip_qoc/pulse_optim.py @@ -12,7 +12,7 @@ from qutip_qoc._time import _TimeInterval import qutip as qt -from qutip_qoc._genetic import _GENETIC +from qutip_qoc._genetic import _Genetic try: from qutip_qoc._rl import _RL @@ -420,8 +420,8 @@ def optimize_pulses( rl_env.train() return rl_env.result() - elif alg == "GENETIC": - gen_env = _GENETIC( + elif alg == "Genetic": + gen_env = _Genetic( objectives, control_parameters, time_interval,
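A note on the genetic operators: PATCH 13 describes selection, arithmetic crossover, and mutation in prose, and PATCH 14 implements them inside ``_Genetic`` (the ``darwin``, ``pairing``, ``build_next_gen`` and ``mutate`` methods). The standalone sketch below runs the same operator pipeline on a toy fitness function so the data flow is visible without QuTiP. It is a minimal sketch, not the package API: ``toy_fitness`` and the hyperparameter values are illustrative, the father is drawn independently of the mother (the patch excludes the mother when drawing the father), and the crossover is a plain gene-wise blend.

.. code-block:: python

    import numpy as np
    from scipy.special import softmax

    rng = np.random.default_rng(seed=0)
    N_pop, N_var, mutation_rate = 40, 8, 0.2

    def toy_fitness(population):
        # Illustrative objective: maximize -||x||^2, optimum at the origin.
        return -np.sum(population**2, axis=1)

    def select(population, fitness, n_parents):
        # Elitist truncation selection: keep the fittest half, best first.
        idx = np.argsort(-fitness)[:n_parents]
        return population[idx], fitness[idx]

    def pair_and_cross(parents, parent_fitness, n_offspring):
        # Draw parent pairs with probabilities given by a softmax over fitness,
        # then blend each pair gene-wise with random weights (arithmetic crossover).
        probs = softmax(parent_fitness)
        ma = rng.choice(len(parents), size=n_offspring, p=probs)
        da = rng.choice(len(parents), size=n_offspring, p=probs)
        beta = rng.uniform(0.0, 1.0, size=(n_offspring, parents.shape[1]))
        return beta * parents[ma] + (1.0 - beta) * parents[da]

    def mutate(population):
        # Perturb a random subset of genes; row 0 (the elite) is never touched.
        n_mut = int((population.shape[0] - 1) * population.shape[1] * mutation_rate)
        rows = rng.integers(1, population.shape[0], size=n_mut)
        cols = rng.integers(0, population.shape[1], size=n_mut)
        population[rows, cols] += rng.normal(0.0, 0.3, size=n_mut)
        return np.clip(population, -1.0, 1.0)

    population = rng.uniform(-1.0, 1.0, size=(N_pop, N_var))
    for generation in range(100):
        fitness = toy_fitness(population)
        parents, parent_fitness = select(population, fitness, N_pop // 2)
        offspring = pair_and_cross(parents, parent_fitness, N_pop - len(parents))
        population = mutate(np.vstack((parents, offspring)))

    best = population[np.argmax(toy_fitness(population))]
    print("best chromosome:", np.round(best, 3))

Keeping the elite chromosome in row 0 and exempting it from mutation is what makes the best fitness monotonically non-decreasing across generations, which is the property the stagnation criterion in ``_Genetic.optimize`` relies on.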
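A note on fitness evaluation: in PATCH 14 each chromosome is interpreted as a set of piecewise-constant amplitudes for the control Hamiltonians over ``N_steps`` time slots (``_prepare_generator`` / ``_infid``), and the bounds rescaling of genes from [-1, 1] follows the convention visible in the earlier patches. The sketch below reproduces that mapping for a single chromosome using plain matrix exponentials instead of the package's ``QobjEvo``/solver machinery; the function name ``infidelity`` and the chosen system are illustrative assumptions, not qutip-qoc internals.

.. code-block:: python

    import numpy as np
    import qutip as qt

    H_d = qt.sigmaz()                 # drift Hamiltonian
    H_c = [qt.sigmax(), qt.sigmay()]  # control Hamiltonians
    initial = qt.basis(2, 0)
    target = (qt.basis(2, 0) + qt.basis(2, 1)).unit()

    n_steps, evo_time = 20, 1.0
    dt = evo_time / n_steps
    bounds = (-1.0, 1.0)

    def infidelity(chromosome):
        """PSU-style infidelity for one chromosome of length len(H_c) * n_steps."""
        # Map genes from [-1, 1] onto the amplitude bounds, one row per control.
        lo, hi = bounds
        amps = (np.asarray(chromosome) + 1.0) / 2.0 * (hi - lo) + lo
        amps = amps.reshape(len(H_c), n_steps)

        state = initial
        for j in range(n_steps):
            # Hamiltonian is constant on time slot j.
            H_j = sum((amps[i, j] * H_c[i] for i in range(len(H_c))), start=H_d)
            state = (-1j * H_j * dt).expm() * state
        # Phase-insensitive (PSU-like) infidelity.
        return 1.0 - abs(target.overlap(state))

    rng = np.random.default_rng(1)
    chromosome = rng.uniform(-1.0, 1.0, size=len(H_c) * n_steps)
    print("infidelity of a random chromosome:", infidelity(chromosome))

In the package itself this evaluation is done with QuTiP's ``SESolver``/``MESolver`` so that open-system generators and integrator options are handled uniformly; the matrix-exponential loop above is only meant to make the chromosome-to-pulse mapping explicit.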
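A note on the ``optimization_type`` preprocessing added in PATCH 12: when the generator is a superoperator, ``optimize_pulses`` vectorizes the states for ``"state_transfer"`` problems and lifts operators to superoperators for ``"gate_synthesis"`` problems. The snippet below only exercises the standard QuTiP conversion functions used there, on a small example of my own choosing, so the expected ``dims`` are visible; it does not call ``optimize_pulses``.

.. code-block:: python

    import qutip as qt

    # Open-system generator: a Liouvillian is a superoperator.
    H = qt.sigmaz()
    c_ops = [0.1 * qt.sigmam()]
    L = qt.liouvillian(H, c_ops)
    print(qt.issuper(L))      # True

    # State transfer: kets become vectorized density matrices,
    # which can be propagated by the superoperator.
    initial_ket = qt.basis(2, 0)
    initial_vec = qt.operator_to_vector(qt.ket2dm(initial_ket))
    print(initial_vec.dims)   # [[[2], [2]], [1]]

    # Gate synthesis: target unitaries are lifted to superoperators.
    target_gate = qt.gates.hadamard_transform()
    target_super = qt.to_super(target_gate)
    print(target_super.dims)  # [[[2], [2]], [[2], [2]]]

When ``optimization_type`` is left as ``None``, the patch infers the mode from the objective itself (trace-one operators or kets are treated as states, everything else as a gate), which matches the branching shown in the diff above.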