From c78233e072a49e7f1aef33a631f106d10d33e611 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 21 Nov 2023 15:20:15 -0800 Subject: [PATCH 01/32] First pass at IBMQ checkpointing. --- pygsti/extras/ibmq/ibmqcore.py | 66 ++++++++++++++++++++++++++-------- 1 file changed, 52 insertions(+), 14 deletions(-) diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index 74b36c836..f8dc0a893 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -8,8 +8,7 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. #*************************************************************************************************** -from ... import data as _data -from ...protocols import ProtocolData as _ProtocolData +from contextlib import contextmanager import numpy as _np import time as _time import json as _json @@ -20,6 +19,9 @@ try: import qiskit as _qiskit except: _qiskit = None +from ... import data as _data +from ...protocols import ProtocolData as _ProtocolData + # Most recent version of QisKit that this has been tested on: #qiskit.__qiskit_version__ = { # 'qiskit-terra': '0.25.3', @@ -34,7 +36,7 @@ #} #qiskit_ibm_provider.__version__ = '0.7.2' -_attribute_to_json = ['remove_duplicates', 'randomized_order', 'circuits_per_batch', 'num_shots', 'job_ids'] +_attribute_to_json = ['remove_duplicates', 'randomized_order', 'circuits_per_batch', 'num_shots', 'job_ids', 'seed'] _attribute_to_pickle = ['pspec', 'pygsti_circuits', 'pygsti_openqasm_circuits', 'qiskit_QuantumCircuits', 'qiskit_QuantumCircuits_as_openqasm', 'submit_time_calibration_data', 'qobj', 'batch_result_object' @@ -72,11 +74,20 @@ def q_list_to_ordered_target_indices(q_list, num_qubits): output.append(int(q[1:])) return output +@contextmanager +def checkpointed_ibmq(*args, **kwargs): + """Checkpointing context manager for IBMQExperiment. 
+ """ + exp = IBMQExperiment.from_dir(*args, **kwargs) + try: + yield exp + finally: + exp.write() class IBMQExperiment(dict): def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, - num_shots=1024): + num_shots=1024, seed=None): """ A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these jobs to IBM Q and receives the results. @@ -109,6 +120,12 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True num_shots: int, optional The number of samples from / repeats of each circuit. + + seed: int, optional + Seed for RNG during order randomization of circuits. + + checkpoint_dirname: str, optional + Name of checkpoint directory. If None, no checkpointing is used. Returns ------- @@ -120,6 +137,9 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True IBM Q). """ + rand_state = _np.random.RandomState(seed) + + self.dirty_attributes = {} self['edesign'] = edesign self['pspec'] = pspec @@ -127,6 +147,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self['randomized_order'] = randomized_order self['circuits_per_batch'] = circuits_per_batch self['num_shots'] = num_shots + self['seed'] = seed # Populated when submitting to IBM Q with .submit() self['qjob'] = None self['job_ids'] = None @@ -138,7 +159,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True if randomized_order: if remove_duplicates: circuits = list(set(circuits)) - _np.random.shuffle(circuits) + rand_state.shuffle(circuits) else: assert(not remove_duplicates), "Can only remove duplicates if randomizing order!" 
@@ -152,7 +173,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self['qobj'] = [] batch_idx = 0 - for circ_idx, circ in enumerate(circuits): + for circ in circuits: self['pygsti_circuits'][batch_idx].append(circ) if len(self['pygsti_circuits'][batch_idx]) == circuits_per_batch: batch_idx += 1 @@ -163,8 +184,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True #start = _time.time() for batch_idx, circuit_batch in enumerate(self['pygsti_circuits']): print("Constructing job for circuit batch {} of {}".format(batch_idx + 1, num_batches)) - #openqasm_circuit_ids = [] - for circ_idx, circ in enumerate(circuit_batch): + for circ in circuit_batch: pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=pspec.num_qubits, standard_gates_version='x-sx-rz') qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) @@ -177,6 +197,18 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True total_num += 1 self['qobj'].append(_qiskit.compiler.assemble(self['qiskit_QuantumCircuits'][batch_idx], shots=num_shots)) + + def __getitem__(self, __key): + # It is not guaranteed that a __getitem__ will result in the returned attribute value being modified + # However, that is almost always the case internally, so this is the "safer" option + # with the downside that accessing dict elements will mark them as dirty even if not changed + self.dirty_attributes[__key] = True + return super().__getitem__(__key) + + def __setitem__(self, __key, __value): + # Keep track of which items we've explicitly modified + self.dirty_attributes[__key] = True + return super().__setitem__(__key, __value) def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wait_time=1, wait_steps=10): @@ -361,6 +393,9 @@ def write(self, dirname=None): format and saving all of the IBM Q submission information stored in this object, written into the subdirectory 'ibmqexperiment'. 
+ If an existing IBMQExperiment is in the specified directory, only "dirty", i.e. modified, + attributes will be rewritten to file. + Parameters ---------- dirname : str @@ -374,18 +409,21 @@ def write(self, dirname=None): if dirname is None: dirname = self['edesign']._loaded_from if dirname is None: raise ValueError("`dirname` must be given because there's no default directory") + + if self.dirty_attributes.get('data', False): + self['data'].write(dirname) + + _os.mkdir(dirname + '/ibmqexperiment') - self['data'].write(dirname) - + # Dump JSON items regardless of dirty (cheaper than load/diff/rewrite) dict_to_json = {atr: self[atr] for atr in _attribute_to_json} - - _os.mkdir(dirname + '/ibmqexperiment') with open(dirname + '/ibmqexperiment/meta.json', 'w') as f: _json.dump(dict_to_json, f, indent=4) for atr in _attribute_to_pickle: - with open(dirname + '/ibmqexperiment/{}.pkl'.format(atr), 'wb') as f: - _pickle.dump(self[atr], f) + if self.dirty_attributes.get(atr, False): + with open(dirname + '/ibmqexperiment/{}.pkl'.format(atr), 'wb') as f: + _pickle.dump(self[atr], f) @classmethod def from_dir(cls, dirname, provider=None): From db63eb8f0e3da334db5d827e1921405d344372b7 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 28 Nov 2023 15:26:59 -0700 Subject: [PATCH 02/32] First pass reworking IBMQExperiment The goal is to add checkpointing, which will be facilitated by making this properly serializable. --- pygsti/extras/ibmq/ibmqcore.py | 470 ++++++++++++++++++--------------- 1 file changed, 256 insertions(+), 214 deletions(-) diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index f8dc0a893..612748ff7 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -8,19 +8,17 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. 
#*************************************************************************************************** -from contextlib import contextmanager import numpy as _np +import pathlib as _pathlib import time as _time -import json as _json -import pickle as _pickle -import os as _os import warnings as _warnings try: import qiskit as _qiskit except: _qiskit = None -from ... import data as _data -from ...protocols import ProtocolData as _ProtocolData +from ... import data as _data, io as _io +from ...protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec +from ...protocols.protocol import _TreeNode # Most recent version of QisKit that this has been tested on: #qiskit.__qiskit_version__ = { @@ -36,55 +34,8 @@ #} #qiskit_ibm_provider.__version__ = '0.7.2' -_attribute_to_json = ['remove_duplicates', 'randomized_order', 'circuits_per_batch', 'num_shots', 'job_ids', 'seed'] -_attribute_to_pickle = ['pspec', 'pygsti_circuits', 'pygsti_openqasm_circuits', - 'qiskit_QuantumCircuits', 'qiskit_QuantumCircuits_as_openqasm', - 'submit_time_calibration_data', 'qobj', 'batch_result_object' - ] - -def reverse_dict_key_bits(counts_dict): - new_dict = {} - for key in counts_dict.keys(): - new_dict[key[::-1]] = counts_dict[key] - return new_dict - - -# NOTE: This is probably duplicative of some other code in pyGSTi -def partial_trace(ordered_target_indices, input_dict): - output_dict = {} - for bitstring in input_dict.keys(): - new_string = '' - for index in ordered_target_indices: - new_string += bitstring[index] - try: - output_dict[new_string] += input_dict[bitstring] - except: - output_dict[new_string] = input_dict[bitstring] - return output_dict - - -def q_list_to_ordered_target_indices(q_list, num_qubits): - if q_list is None: - return list(range(num_qubits)) - else: - output = [] - for q in q_list: - assert q[0] == 'Q' - output.append(int(q[1:])) - return output - -@contextmanager -def checkpointed_ibmq(*args, **kwargs): - """Checkpointing context manager for 
IBMQExperiment. - """ - exp = IBMQExperiment.from_dir(*args, **kwargs) - try: - yield exp - finally: - exp.write() - -class IBMQExperiment(dict): +class IBMQExperiment(_TreeNode, _HasPSpec): def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, num_shots=1024, seed=None): @@ -137,81 +88,113 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True IBM Q). """ - rand_state = _np.random.RandomState(seed) - - self.dirty_attributes = {} - - self['edesign'] = edesign - self['pspec'] = pspec - self['remove_duplicates'] = remove_duplicates - self['randomized_order'] = randomized_order - self['circuits_per_batch'] = circuits_per_batch - self['num_shots'] = num_shots - self['seed'] = seed + _TreeNode.__init__(self, edesign._dirs) + _HasPSpec.__init__(self, pspec) + + self.edesign = edesign + self.processor_spec = pspec # Must be called processor_spec for _HasPSpec class + self.remove_duplicates = remove_duplicates + self.randomized_order = randomized_order + self.circuits_per_batch = circuits_per_batch + self.num_shots = num_shots + self.seed = seed + # Populated with transpiling to IBMQ with .transpile() + self.pygsti_circuit_batches = [] + self.qasm_circuit_batches = [] + self.qiskit_circuit_batches = [] # Populated when submitting to IBM Q with .submit() - self['qjob'] = None - self['job_ids'] = None + self.qjobs = [] + self.job_ids = [] + self.submit_time_calibration_data = [] # Populated when grabbing results from IBM Q with .retrieve_results() - self['batch_result_object'] = None - self['data'] = None - - circuits = edesign.all_circuits_needing_data.copy() - if randomized_order: - if remove_duplicates: - circuits = list(set(circuits)) - rand_state.shuffle(circuits) - else: - assert(not remove_duplicates), "Can only remove duplicates if randomizing order!" 
- - num_batches = int(_np.ceil(len(circuits) / circuits_per_batch)) - - self['pygsti_circuits'] = [[] for i in range(num_batches)] - self['pygsti_openqasm_circuits'] = [[] for i in range(num_batches)] - self['qiskit_QuantumCircuits'] = [[] for i in range(num_batches)] - self['qiskit_QuantumCircuits_as_openqasm'] = [[] for i in range(num_batches)] - self['submit_time_calibration_data'] = [] - self['qobj'] = [] - - batch_idx = 0 - for circ in circuits: - self['pygsti_circuits'][batch_idx].append(circ) - if len(self['pygsti_circuits'][batch_idx]) == circuits_per_batch: - batch_idx += 1 + self.batch_result_objects = [] + self.data = None + + # If not in this list, will be automatically dumped to meta.json + # 'none' means it will not be read in, 'reset' means it will come back in as None + # Several of these could be stored in the meta.json but are kept external for easy chkpts + self.auxfile_types['edesign'] = 'none' + self.auxfile_types['data'] = 'reset' + # self.processor_spec is handled by _HasPSpec base class + self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' + self.auxfile_types['qasm_circuit_batches'] = 'list:json' + self.auxfile_types['qiskit_circuit_batches'] = 'none' + self.auxfile_types['qjobs'] = 'none' + self.auxfile_types['job_ids'] = 'json' + self.auxfile_types['submit_time_calibration_data'] = 'list:json' + self.auxfile_types['batch_result_objects'] = 'none' + + def transpile(self, dirname=None): + """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. - #create Qiskit quantum circuit for each circuit defined in experiment list - total_num = 0 + Parameters + ---------- + dirname: str, optional + If provided, the root directory (i.e. 
same as passed to write() + and from_dir()) to use for checkpointing + """ + num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) + + if self.pygsti_circuit_batches is None: + self.pygsti_circuit_batches = [] + rand_state = _np.random.RandomState(self.seed) + + circuits = self.edesign.all_circuits_needing_data.copy() + if self.randomized_order: + if self.remove_duplicates: + circuits = list(set(circuits)) + rand_state.shuffle(circuits) + else: + assert(not self.remove_duplicates), "Can only remove duplicates if randomizing order!" + + for batch_idx in range(num_batches): + start = batch_idx*self.circuits_per_batch + end = min(len(circuits), (batch_idx+1)*self.circuits_per_batch) + self.pygsti_circuit_batches[batch_idx] = circuits[start:end] + + if dirname is not None: + exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' + exp_dir.mkdir(parents=True, exist_ok=True) + + # Checkpoint by writing this member as .write() would + _io.metadir._write_auxfile_member(exp_dir, 'pygsti_circuit_batches', + self.auxfile_types['pygsti_circuit_batches'], + self.pygsti_circuit_batches) - #start = _time.time() - for batch_idx, circuit_batch in enumerate(self['pygsti_circuits']): - print("Constructing job for circuit batch {} of {}".format(batch_idx + 1, num_batches)) - for circ in circuit_batch: - pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=pspec.num_qubits, + #create Qiskit quantum circuit for each circuit defined in experiment list + self.qasm_circuit_batches = [] if self.qasm_circuit_batches is None else self.qasm_circuit_batches + self.qiskit_circuit_batches = [] if self.qiskit_circuit_batches is None else self.qiskit_circuit_batches + + if len(self.qiskit_circuit_batches): + print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') + + for batch_idx in range(len(self.qiskit_circuit_batches, num_batches)): + print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") + batch = [] + batch_strs 
= [] + for circ in self.pygsti_circuit_batches[batch_idx]: + pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=self.pspec.num_qubits, standard_gates_version='x-sx-rz') - qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) - - self['pygsti_openqasm_circuits'][batch_idx].append(pygsti_openqasm_circ) - self['qiskit_QuantumCircuits'][batch_idx].append(qiskit_qc) - self['qiskit_QuantumCircuits_as_openqasm'][batch_idx].append(qiskit_qc.qasm()) - - #print(batch_idx, circ_idx, len(submitted_openqasm_circuits), total_num) - total_num += 1 + batch_strs.append(pygsti_openqasm_circ) - self['qobj'].append(_qiskit.compiler.assemble(self['qiskit_QuantumCircuits'][batch_idx], shots=num_shots)) - - def __getitem__(self, __key): - # It is not guaranteed that a __getitem__ will result in the returned attribute value being modified - # However, that is almost always the case internally, so this is the "safer" option - # with the downside that accessing dict elements will mark them as dirty even if not changed - self.dirty_attributes[__key] = True - return super().__getitem__(__key) - - def __setitem__(self, __key, __value): - # Keep track of which items we've explicitly modified - self.dirty_attributes[__key] = True - return super().__setitem__(__key, __value) + qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) + batch.append(qiskit_qc) + + self.qasm_circuit_batches.append(batch_strs) + self.qiskit_circuit_batches.append(batch) + + if dirname is not None: + exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' + + # Checkpoint by writing this member as .write() would + # Here, we go to a lower level than pygsti_circuit_batches and write each list entry as they are completed + subtypes = self.auxfile_types['qasm_circuit_batches'].split(':') + assert subtypes[0] == 'list', "Checkpointing of qasm_circuit_batches only possible when auxfile_type is list:" + subtype = ':'.join(subtypes[1:]) + _io.metadir._write_auxfile_member(exp_dir, 
f'qasm_circuit_batches{batch_idx}', subtype, batch_strs) def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, - wait_time=1, wait_steps=10): + wait_time=1, wait_steps=10, dirname=None): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. @@ -249,85 +232,88 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, ------- None """ + assert len(self.qiskit_circuit_batches) == len(self.pygsti_circuit_batches), \ + "Transpilation missing! Either run .transpile() first, or if loading from file, " + \ + "use the regen_qiskit_circs=True option in from_dir()." #Get the backend version backend_version = ibmq_backend.version total_waits = 0 - self['qjob'] = [] if self['qjob'] is None else self['qjob'] - self['job_ids'] = [] if self['job_ids'] is None else self['job_ids'] + self.qjobs = [] if self.qjobs is None else self.qjobs + self.job_ids = [] if self.job_ids is None else self.job_ids # Set start and stop to submit the next unsubmitted jobs if not specified if start is None: - start = len(self['qjob']) + start = len(self.qjobs) - if stop is not None: - stop = min(stop, len(self['qobj'])) - elif ignore_job_limit: - stop = len(self['qobj']) - else: + stop = len(self.qiskit_circuit_batches) if stop is None else min(stop, len(self.qiskit_circuit_batches)) + if not ignore_job_limit: job_limit = ibmq_backend.job_limit() allowed_jobs = job_limit.maximum_jobs - job_limit.active_jobs - if start + allowed_jobs < len(self['qobj']): + if start + allowed_jobs < stop: print(f'Given job limit and active jobs, only {allowed_jobs} can be submitted') - stop = min(start + allowed_jobs, len(self['qobj'])) - - #if the backend version is 1 I believe this should correspond to the use of the legacy - #qiskit-ibmq-provider API which supports passing in Qobj objects for specifying experiments - #if the backend version is 2 I believe this should correspond to the new API in 
qiskit-ibm-provider. - #This new API doesn't support passing in Qobjs into the run method for backends, so we need - #to pass in the list of QuantumCircuit objects directly. - if backend_version == 1: - batch_iterator = enumerate(self['qobj']) - elif backend_version >= 2: - batch_iterator = enumerate(self['qiskit_QuantumCircuits']) + stop = min(start + allowed_jobs, stop) - for batch_idx, batch in batch_iterator: + for batch_idx, batch in self.qiskit_circuit_batches: if batch_idx < start or batch_idx >= stop: continue - print("Submitting batch {}".format(batch_idx + 1)) + print(f"Submitting batch {batch_idx + 1}") submit_status = False batch_waits = 0 while not submit_status: try: - backend_properties = ibmq_backend.properties() - #If using a simulator backend then backend_properties is None + #If submitting to a real device, get calibration data if not ibmq_backend.simulator: - self['submit_time_calibration_data'].append(backend_properties.to_dict()) - #if using the new API we need to pass in the number of shots. 
+ backend_properties = ibmq_backend.properties() + self.submit_time_calibration_data.append(backend_properties.to_dict()) + if backend_version == 1: - self['qjob'].append(ibmq_backend.run(batch)) + # If using qiskit-ibmq-provider API, assemble into Qobj first + qobj = _qiskit.compiler.assemble(batch, shots=self.num_shots) + self.qjobs.append(ibmq_backend.run(qobj)) else: - self['qjob'].append(ibmq_backend.run(batch, shots = self['num_shots'])) + # Newer qiskit-ibm-provider can take list of Qiskit circuits directly + self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) - status = self['qjob'][-1].status() + status = self.qjobs[-1].status() initializing = True initializing_steps = 0 while initializing: if status.name == 'INITIALIZING' or status.name == 'VALIDATING': - #print(status) - status = self['qjob'][-1].status() - print(' - {} (query {})'.format(status, initializing_steps)) - _time.sleep(1) + status = self.qjobs[-1].status() + print(f' - {status} (query {initializing_steps})') + _time.sleep(wait_time) initializing_steps += 1 else: initializing = False - #print(" -Done intializing. 
Job status is {}".format(status.name)) - #print(status) + try: - job_id = self['qjob'][-1].job_id() - print(' - Job ID is {}'.format(job_id)) - self['job_ids'].append(job_id) + job_id = self.qjobs.job_id() + print(f' - Job ID is {job_id}') + self.job_ids.append(job_id) except: print(' - Failed to get job_id.') - self['job_ids'].append(None) + self.job_ids.append(None) + try: - print(' - Queue position is {}'.format(self['qjob'][-1].queue_info().position)) + print(f' - Queue position is {self.qjobs[-1].queue_info().position)}') except: - print(' - Failed to get queue position for batch {}'.format(batch_idx + 1)) + print(f' - Failed to get queue position for batch {batch_idx + 1}') submit_status = True + + # Checkpoint calibration and job id data + if dirname is not None: + p = _pathlib.Path(dirname) + + exp_dir = p / 'ibmqexperiment' + # Write as .write() would for checkpointing + _io.metadir._write_auxfile_member(exp_dir, 'job_ids', 'json', self.job_ids) + _io.metadir._write_auxfile_member(exp_dir, 'submit_time_calibration_data', 'json', + self.submit_time_calibration_data) + except Exception as ex: template = "An exception of type {0} occurred. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) @@ -353,15 +339,18 @@ def monitor(self): """ Queries IBM Q for the status of the jobs. """ - for counter, qjob in enumerate(self['qjob']): + assert len(self.qjobs) == len(self.job_ids), \ + "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." 
+ + for counter, qjob in enumerate(self.qjobs): status = qjob.status() - print("Batch {}: {}".format(counter + 1, status)) + print(f"Batch {counter + 1}: {status}") if status.name == 'QUEUED': - print(' - Queue position is {}'.format(qjob.queue_info().position)) + print(f' - Queue position is {qjob.queue_info().position}') # Print unsubmitted for any entries in qobj but not qjob - for counter in range(len(self['qjob']), len(self['qobj'])): - print("Batch {}: NOT SUBMITTED".format(counter + 1)) + for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): + print(f"Batch {counter + 1}: NOT SUBMITTED") def retrieve_results(self): """ @@ -371,21 +360,46 @@ def retrieve_results(self): was a GST experiment, it can input into a GST protocol object that will analyze the data). """ - self['batch_result_object'] = [] + assert len(self.qjobs) == len(self.job_ids), \ + "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." + + def reverse_dict_key_bits(counts_dict): + new_dict = {} + for key in counts_dict.keys(): + new_dict[key[::-1]] = counts_dict[key] + return new_dict + + # NOTE: This is probably duplicative of some other code in pyGSTi + def partial_trace(ordered_target_indices, input_dict): + output_dict = {} + for bitstring in input_dict.keys(): + new_string = '' + for index in ordered_target_indices: + new_string += bitstring[index] + try: + output_dict[new_string] += input_dict[bitstring] + except: + output_dict[new_string] = input_dict[bitstring] + return output_dict + #get results from backend jobs and add to dict + self.batch_result_objects = [] ds = _data.DataSet() - for exp_idx, qjob in enumerate(self['qjob']): - print("Querying IBMQ for results objects for batch {}...".format(exp_idx + 1)) + for exp_idx, qjob in enumerate(self.qjobs): + print(f"Querying IBMQ for results objects for batch {exp_idx + 1}...") batch_result = qjob.result() - self['batch_result_object'].append(batch_result) - 
#exp_dict['batch_data'] = [] - for i, circ in enumerate(self['pygsti_circuits'][exp_idx]): - ordered_target_indices = [self['pspec'].qubit_labels.index(q) for q in circ.line_labels] + self.batch_result_objects.append(batch_result) + + # TODO: Checkpoint + + for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): + ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result.get_counts(i))) - #exp_dict['batch_data'].append(counts_data) ds.add_count_dict(circ, counts_data) - self['data'] = _ProtocolData(self['edesign'], ds) + self.data = _ProtocolData(self['edesign'], ds) + + # TODO: Checkpoint def write(self, dirname=None): """ @@ -393,9 +407,6 @@ def write(self, dirname=None): format and saving all of the IBM Q submission information stored in this object, written into the subdirectory 'ibmqexperiment'. - If an existing IBMQExperiment is in the specified directory, only "dirty", i.e. modified, - attributes will be rewritten to file. 
- Parameters ---------- dirname : str @@ -407,26 +418,26 @@ def write(self, dirname=None): """ if dirname is None: - dirname = self['edesign']._loaded_from + dirname = self.edesign._loaded_from if dirname is None: raise ValueError("`dirname` must be given because there's no default directory") - if self.dirty_attributes.get('data', False): - self['data'].write(dirname) + dirname = _pathlib.Path(dirname) + + self.edesign.write(dirname) - _os.mkdir(dirname + '/ibmqexperiment') + if self.data is not None: + self.data.write(dirname, edesign_already_written=True) - # Dump JSON items regardless of dirty (cheaper than load/diff/rewrite) - dict_to_json = {atr: self[atr] for atr in _attribute_to_json} - with open(dirname + '/ibmqexperiment/meta.json', 'w') as f: - _json.dump(dict_to_json, f, indent=4) + exp_dir = dirname / 'ibmqexperiment' + exp_dir.mkdir(parents=True, exist_ok=True) + _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') - for atr in _attribute_to_pickle: - if self.dirty_attributes.get(atr, False): - with open(dirname + '/ibmqexperiment/{}.pkl'.format(atr), 'wb') as f: - _pickle.dump(self[atr], f) + # Handle nonstandard serializations + # TODO: batch_result_objs? @classmethod - def from_dir(cls, dirname, provider=None): + def from_dir(cls, dirname, regen_qiskit_circs=False, + regen_runtime_jobs=False, provider=None): """ Initialize a new IBMQExperiment object from `dirname`. @@ -435,37 +446,68 @@ def from_dir(cls, dirname, provider=None): dirname : str The directory name. + regen_qiskit_circs: bool, optional + Whether to recreate the Qiskit circuits from the transpiled + OpenQASM strings. Defaults to False. You should set this to True + if you would like to call submit(). + + regen_runtime_jobs: bool, optional + Whether to recreate the RuntimeJobs from IBMQ based on the job ides. + Defaults to False. You should set this to True if you would like to + call monitor() or retrieve_results(). 
+ provider: IBMProvider - Provider used to retrieve qjob objects from job_ids + Provider used to retrieve RuntimeJobs from IBMQ based on job_ids + (if lazy_qiskit_load is False) Returns ------- IBMQExperiment """ - ret = cls.__new__(cls) - with open(dirname + '/ibmqexperiment/meta.json', 'r') as f: - from_json = _json.load(f) - ret.update(from_json) - - for atr in _attribute_to_pickle: - with open(dirname + '/ibmqexperiment/{}.pkl'.format(atr), 'rb') as f: - try: - ret[atr] = _pickle.load(f) - except: - _warnings.warn("Couldn't unpickle {}, so skipping this attribute.".format(atr)) - ret[atr] = None - - if provider is None: - _warnings.warn("No provider specified, cannot retrieve IBM jobs") - else: - ret['qjob'] = [] - for i, jid in enumerate(ret['job_ids']): - print(f"Loading job {i+1}/{len(ret['job_ids'])}...") - ret['qjob'].append(provider.backend.retrieve_job(jid)) + p = _pathlib.Path(dirname) + edesign = _io.read_edesign_from_dir(dirname) + + exp_dir = p / 'ibmqexperiment' + attributes_from_meta = _io.load_meta_based_dir(exp_dir) + + ret = cls(edesign, None) + ret.__dict__.update(attributes_from_meta) + # Handle nonstandard serialization try: - ret['data'] = _ProtocolData.from_dir(dirname) + data = _ProtocolData.from_dir(p, preloaded_edesign=edesign) + ret.data = data except: pass + ret.qiskit_circuit_batches = [] + ret.qjobs = [] + if regen_qiskit_circs: + # Regenerate Qiskit circuits + for batch_strs in attributes_from_meta['qasm_circuit_batches']: + batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] + ret.qiskit_circuit_batches.append(batch) + + if regen_runtime_jobs: + # Regenerate RuntimeJobs (if have provider) + if provider is None: + _warnings.warn("No provider specified, cannot retrieve IBM jobs") + else: + ret._retrieve_jobs(provider) + + # Handle nonstandard serializations + # TODO: batch_result_objs? + return ret + + def _retrieve_jobs(self, provider): + """Retrieves RuntimeJobs from IBMQ based on job_ids. 
+ + Parameters + ---------- + provider: IBMProvider + Provider used to retrieve RuntimeJobs from IBMQ based on job_ids + """ + for i, jid in enumerate(self.job_ids): + print(f"Loading job {i+1}/{len(self.job_ids)}...") + self.qjobs.append(provider.backend.retrieve_job(jid)) From e5ac24630111bc855e0be06d823a4d5ccf32fd54 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 1 Dec 2023 14:25:28 -0700 Subject: [PATCH 03/32] Updates to FreeformDesign serialization Avoid pickling and don't write all_circuits_needing_data. Should cut on-disk space in half, and circuits can be reinitialized from keys (should also save on circ construction time) --- pygsti/protocols/protocol.py | 42 +++++++++++++++++++++++++++++++++++- 1 file changed, 41 insertions(+), 1 deletion(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index ae326b23c..499b12132 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1625,6 +1625,43 @@ class FreeformDesign(ExperimentDesign): line labels of the first circuit is used. """ + @classmethod + def from_dir(cls, dirname, parent=None, name=None, quick_load=False): + """ + Initialize a new ExperimentDesign object from `dirname`. + + Parameters + ---------- + dirname : str + The *root* directory name (under which there is a 'edesign' + subdirectory). + + parent : ExperimentDesign, optional + The parent design object, if there is one. Primarily used + internally - if in doubt, leave this as `None`. + + name : str, optional + The sub-name of the design object being loaded, i.e. the + key of this data object beneath `parent`. Only used when + `parent` is not None. + + quick_load : bool, optional + Setting this to True skips the loading of the potentially long + circuit lists. This can be useful when loading takes a long time + and all the information of interest lies elsewhere, e.g. in an + encompassing results object. 
+ + Returns + ------- + ExperimentDesign + """ + edesign = ExperimentDesign.from_dir(dirname, parent=parent, name=name, quick_load=quick_load) + + # Reset circuits (which were not saved for space savings) from aux_info keys + edesign.all_circuits_needing_data = list(edesign.aux_info.keys()) + + return cls.from_edesign(edesign) + @classmethod def from_dataframe(cls, df, qubit_labels=None): """ @@ -1683,7 +1720,10 @@ def __init__(self, circuits, qubit_labels=None): else: self.aux_info = {c: None for c in circuits} super().__init__(circuits, qubit_labels) - self.auxfile_types['aux_info'] = 'pickle' + + # Don't save all_circuits_needing_data, it's redundant with aux_info keys + self.auxfile_types['all_circuits_needing_data'] = 'reset' + self.auxfile_types['aux_info'] = 'circuit-str-json' def _truncate_to_circuits_inplace(self, circuits_to_keep): truncated_aux_info = {k: v for k, v in self.aux_info.items() if k in circuits_to_keep} From 7962383a7979a1b51e369e0f83c0472c623d40b5 Mon Sep 17 00:00:00 2001 From: "Stefan K. 
Seritan" Date: Fri, 1 Dec 2023 14:53:09 -0700 Subject: [PATCH 04/32] Further update to FreeformDesign serialization --- pygsti/protocols/protocol.py | 33 +++++++++++++++++++++++++++++++-- 1 file changed, 31 insertions(+), 2 deletions(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 499b12132..d2e9fef1b 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1656,8 +1656,11 @@ def from_dir(cls, dirname, parent=None, name=None, quick_load=False): ExperimentDesign """ edesign = ExperimentDesign.from_dir(dirname, parent=parent, name=name, quick_load=quick_load) + + # Convert back to circuits + edesign.aux_info = {_circuits.Circuit(k, check=False, expand_subcircuits=False): v for k,v in edesign.aux_info.items()} - # Reset circuits (which were not saved for space savings) from aux_info keys + # Reset all_circuits_needing_data (which were not saved for space savings) from aux_info keys edesign.all_circuits_needing_data = list(edesign.aux_info.keys()) return cls.from_edesign(edesign) @@ -1723,7 +1726,8 @@ def __init__(self, circuits, qubit_labels=None): # Don't save all_circuits_needing_data, it's redundant with aux_info keys self.auxfile_types['all_circuits_needing_data'] = 'reset' - self.auxfile_types['aux_info'] = 'circuit-str-json' + # Currently not jsonable, but will be fixed in write() + self.auxfile_types['aux_info'] = 'json' def _truncate_to_circuits_inplace(self, circuits_to_keep): truncated_aux_info = {k: v for k, v in self.aux_info.items() if k in circuits_to_keep} @@ -1755,6 +1759,31 @@ def map_qubit_labels(self, mapper): mapped_circuits = [c.map_state_space_labels(mapper) for c in self.all_circuits_needing_data] mapped_qubit_labels = self._mapped_qubit_labels(mapper) return FreeformDesign(mapped_circuits, mapped_qubit_labels) + + def write(self, dirname=None, parent=None): + """ + Write this experiment design to a directory. 
+ + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). + + parent : ExperimentDesign, optional + The parent experiment design, when a parent is writing this + design as a sub-experiment-design. Otherwise leave as None. + + Returns + ------- + None + """ + # Convert circuits to string for then-jsonable serialization + self.aux_info = {str(k):v for k,v in self.aux_info.items()} + super().write(dirname, parent) class ProtocolData(_TreeNode, _MongoSerializable): From fad56e24e693056129cc25dafb8a41ec90756053 Mon Sep 17 00:00:00 2001 From: Stefan Seritan Date: Fri, 1 Dec 2023 14:31:01 -0800 Subject: [PATCH 05/32] Fix new FreeformDesign serialization --- pygsti/protocols/protocol.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index d2e9fef1b..65316f309 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1658,12 +1658,9 @@ def from_dir(cls, dirname, parent=None, name=None, quick_load=False): edesign = ExperimentDesign.from_dir(dirname, parent=parent, name=name, quick_load=quick_load) # Convert back to circuits - edesign.aux_info = {_circuits.Circuit(k, check=False, expand_subcircuits=False): v for k,v in edesign.aux_info.items()} + edesign.aux_info = {_circuits.Circuit(k, check=False): v for k,v in edesign.aux_info.items()} - # Reset all_circuits_needing_data (which were not saved for space savings) from aux_info keys - edesign.all_circuits_needing_data = list(edesign.aux_info.keys()) - - return cls.from_edesign(edesign) + return cls(edesign.aux_info, edesign.qubit_labels) @classmethod def from_dataframe(cls, df, qubit_labels=None): @@ -1760,7 +1757,7 @@ def map_qubit_labels(self, 
mapper): mapped_qubit_labels = self._mapped_qubit_labels(mapper) return FreeformDesign(mapped_circuits, mapped_qubit_labels) - def write(self, dirname=None, parent=None): + def write(self, dirname=None, parent=None): """ Write this experiment design to a directory. @@ -1782,8 +1779,10 @@ def write(self, dirname=None, parent=None): None """ # Convert circuits to string for then-jsonable serialization - self.aux_info = {str(k):v for k,v in self.aux_info.items()} + aux_info = self.aux_info + self.aux_info = {repr(k)[8:-1]: v for k,v in self.aux_info.items()} super().write(dirname, parent) + self.aux_info = aux_info class ProtocolData(_TreeNode, _MongoSerializable): From f74dce502055c5066791669c12fba086700f5fdf Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 1 Dec 2023 22:50:32 -0700 Subject: [PATCH 06/32] Complete rework of IBMQExperiment with checkpointing Checkpointing is facilitated by moving this from a dict to a class that inherits from TreeNode and serializes more "pyGSTi-like". All major IMBQExperiment stages (transpile, submit, retrieve results) are accompanied by internal writes of the ibmqexperiment directory, which can be loaded from as checkpoints. 
--- .gitignore | 1 + .../objects/advanced/IBMQExperiment.ipynb | 128 ++++++++++------- pygsti/extras/ibmq/ibmqcore.py | 129 +++++++++--------- pygsti/io/metadir.py | 45 +++++- setup.py | 2 + 5 files changed, 184 insertions(+), 121 deletions(-) diff --git a/.gitignore b/.gitignore index 026134b95..b09ced543 100644 --- a/.gitignore +++ b/.gitignore @@ -49,6 +49,7 @@ jupyter_notebooks/Tutorials/tutorial_files/exampleBriefReport jupyter_notebooks/Tutorials/tutorial_files/*.ipynb jupyter_notebooks/Tutorials/tutorial_files/tempTest jupyter_notebooks/Tutorials/tutorial_files/*checkpoints +jupyter_notebooks/Tutorials/objects/advanced/test_ibmq diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 334c5cf73..0f34183b2 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -11,7 +11,7 @@ ] }, { - "cell_type": "raw", + "cell_type": "markdown", "metadata": {}, "source": [ "qiskit.__qiskit_version__ = {'qiskit-terra': '0.25.3', 'qiskit': '0.44.3', 'qiskit-aer': None, 'qiskit-ignis': None, 'qiskit-ibmq-provider': '0.20.2', 'qiskit-nature': None, 'qiskit-finance': None, 'qiskit-optimization': None, 'qiskit-machine-learning': None}\n", @@ -20,7 +20,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "tags": [] }, @@ -55,7 +55,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -65,7 +65,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -82,7 +82,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -90,8 +90,14 @@ }, "outputs": [], "source": [ - "dev_name = 'ibm_lagos'\n", - "backend = provider.get_backend(dev_name)" + "# Use 
backends() to see what backends you have access to\n", + "#provider.backends()\n", + "\n", + "# Can use a physical device...\n", + "backend = provider.get_backend('ibm_hanoi')\n", + "\n", + "# ... or can use a simulator\n", + "sim_backend = provider.get_backend('ibmq_qasm_simulator')" ] }, { @@ -112,7 +118,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -122,7 +128,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -226,9 +232,9 @@ "metadata": {}, "source": [ "## Running on IBM Q\n", - "We're now ready to run on the IBM Q processor. We do this using an `IBMQExperiment` object, which \n", + "We're now ready to run on the IBM Q processor. We do this using an `IBMQExperiment` object.\n", "\n", - "First it converts pyGSTi circuits into jobs that can be submitted to IBM Q. **This step includes transpiling of the pyGSTi circuits into OpenQASM** (and then into QisKit objects)." + "We can enable checkpointing for `IBMQExperiment` objects by writing the object to disk now, and then providing the directory name in downstream calls." ] }, { @@ -239,54 +245,64 @@ }, "outputs": [], "source": [ - "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024)" + "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201)\n", + "exp.write('test_ibmq')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We're now ready to submit this experiment to IBM Q." + "First we convert pyGSTi circuits into jobs that can be submitted to IBM Q. **This step includes transpiling of the pyGSTi circuits into OpenQASM** (and then into QisKit objects)." 
] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [ - "nbval-skip" - ] - }, + "metadata": {}, "outputs": [], "source": [ - "exp.submit(backend)" + "# Provide the directory name to enable transpilation checkpointing\n", + "exp.transpile(dirname='test_ibmq')" ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# We can simulate having been interrupted by removing the last few transpiled batches\n", + "del exp.qasm_circuit_batches[4:]\n", + "del exp.qiskit_circuit_batches[4:]\n", + "\n", + "# And now transpilation should only redo the missing batches\n", + "exp.transpile(dirname='test_ibmq')\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "You can then monitor the jobs. If get an error message, you can query the error using `exp['qjob'][i].error_message()` for batch `i`." + "If the `IBMQExperiment` object is lost and needs to be reloaded (i.e. notebook restarts), it can be loaded from file now.\n", + "\n", + "However, the Qiskit circuits are not automatically regenerated from the transpiled QASM during loading for speed. They can be regenerated (and need to be if calling `submit()`) by passing in the `regen_qiskit_circs=True` flag to `from_dir()`." ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [ - "nbval-skip" - ] - }, + "metadata": {}, "outputs": [], "source": [ - "exp.monitor()" + "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_qiskit_circs=True)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "You can then grab the results, **Once you see that all the jobs are complete** (`.retrieve_results()` will just hang if the jobs have not yet completed)." + "We're now ready to submit this experiment to IBM Q. Note that we can submit using a different backend than what was used to generate the experiment design. 
In general, it is not a good idea to mix and match backends for physical devices unless they have the exact same connectivity and qubit labeling; however, it **is** often useful for debugging purposes to use the simulator backend rather than a physical device." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, "outputs": [], "source": [ - "exp.retrieve_results()" + "# Again, we can checkpoint by passing in dirname\n", + "exp2.submit(sim_backend, dirname='test_ibmq')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "This `IBMQExperiment` object now contains the results of your experiment. It contains much of the information about exactly what was submitted to IBM Q, and raw results objects that IBM Q returned." + "You can then monitor the jobs. If you get an error message, you can query the error using `exp.qjobs[i].error_message()` for batch `i`." ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "tags": [] + "tags": [ + "nbval-skip" + ] }, "outputs": [], "source": [ - "print(exp.keys())" + "exp2.monitor()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "But, most importantly, it contains the data formatted into a pyGSTi `ProtocolData` object, which is the packaged-up data that pyGSTi analysis proctols use." + "Again, the `IBMQExperiment` can be loaded from file if checkpointing is being used. The Qiskit RuntimeJobs are not serialized; however, they can be retrieved from the IBMQ service from their job ids. In order to do this, pass `regen_runtime_jobs=True` and a `provider` to the `from_dir()` call." 
] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [ - "nbval-skip" - ] - }, + "metadata": {}, "outputs": [], "source": [ - "data = exp['data']" + "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_runtime_jobs=True, provider=provider)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp3.monitor()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We can write this data to disk, which saves the `ProtocolData` in the standard pyGSTi format. It also pickles (or JSONs) up all of the additional information contained then `IBMQExperiment` object, e.g., the job IDs, in a subfolder `ibmqexperiment`." + "You can then grab the results, **Once you see that all the jobs are complete** (`.retrieve_results()` will just hang if the jobs have not yet completed)." ] }, { @@ -357,29 +381,32 @@ }, "outputs": [], "source": [ - "exp.write('test_ibmq_experiment')" + "exp3.retrieve_results(dirname='test_ibmq')" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "If you only want to load the `ProtocolData` you can do this using pyGSTi's standard `io` functions. We can also load the `IBMQExperiment` object, which will skip unpickling any objects when the unpickling fails (e.g., due to changes in `QisKit`).\n", - "\n", - "New in '0.9.12': IBM jobs are no longer pickle-able. Instead, they will be retrieved from the server. However, this requires the provider to be passed in at load time." + "This `IBMQExperiment` object now contains the results of your experiment. It contains much of the information about exactly what was submitted to IBM Q, and raw results objects that IBM Q returned." 
] }, { "cell_type": "code", "execution_count": null, "metadata": { - "tags": [ - "nbval-skip" - ] + "tags": [] }, "outputs": [], "source": [ - "loaded_exp = ibmq.IBMQExperiment.from_dir('test_ibmq_experiment', provider)" + "print(exp3.keys())" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "But, most importantly, it contains the data formatted into a pyGSTi `ProtocolData` object, which is the packaged-up data that pyGSTi analysis protocols use." ] }, { "cell_type": "code", "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, "outputs": [], "source": [ - "# Now we can run as before\n", - "loaded_exp.monitor()" + "data = exp3.data" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Analzing the results\n", + "## Analyzing the results\n", "Because `retrieve_results()` has formatted the data into a `ProctocolData` object, we can just hand this to the analysis protocol(s) that are designed for analyzing this type of data. Here we'll analyze this data using a standard RB curve-fitting analysis." ] }, @@ -473,7 +499,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.10.13" } }, "nbformat": 4, diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index 612748ff7..07d8f54fb 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -16,6 +16,11 @@ try: import qiskit as _qiskit except: _qiskit = None +try: + from bson import json_util as _json_util +except ImportError: + _json_util = None + from ... 
import data as _data, io as _io from ...protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec from ...protocols.protocol import _TreeNode @@ -89,6 +94,8 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True """ _TreeNode.__init__(self, edesign._dirs) + + self.auxfile_types = {} _HasPSpec.__init__(self, pspec) self.edesign = edesign @@ -107,7 +114,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.job_ids = [] self.submit_time_calibration_data = [] # Populated when grabbing results from IBM Q with .retrieve_results() - self.batch_result_objects = [] + self.batch_results = [] self.data = None # If not in this list, will be automatically dumped to meta.json @@ -121,8 +128,13 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['qiskit_circuit_batches'] = 'none' self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' - self.auxfile_types['submit_time_calibration_data'] = 'list:json' - self.auxfile_types['batch_result_objects'] = 'none' + if _json_util is not None: + self.auxfile_types['submit_time_calibration_data'] = 'list:json' + self.auxfile_types['batch_results'] = 'list:json' + else: + # Fall back to pickles if we do not have bson to deal with datetime.datetime + self.auxfile_types['submit_time_calibration_data'] = 'pickle' + self.auxfile_types['batch_results'] = 'pickle' def transpile(self, dirname=None): """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. @@ -133,13 +145,12 @@ def transpile(self, dirname=None): If provided, the root directory (i.e. 
same as passed to write() and from_dir()) to use for checkpointing """ + circuits = self.edesign.all_circuits_needing_data.copy() num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) - if self.pygsti_circuit_batches is None: - self.pygsti_circuit_batches = [] + if not len(self.pygsti_circuit_batches): rand_state = _np.random.RandomState(self.seed) - circuits = self.edesign.all_circuits_needing_data.copy() if self.randomized_order: if self.remove_duplicates: circuits = list(set(circuits)) @@ -150,30 +161,19 @@ def transpile(self, dirname=None): for batch_idx in range(num_batches): start = batch_idx*self.circuits_per_batch end = min(len(circuits), (batch_idx+1)*self.circuits_per_batch) - self.pygsti_circuit_batches[batch_idx] = circuits[start:end] + self.pygsti_circuit_batches.append(circuits[start:end]) - if dirname is not None: - exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' - exp_dir.mkdir(parents=True, exist_ok=True) - - # Checkpoint by writing this member as .write() would - _io.metadir._write_auxfile_member(exp_dir, 'pygsti_circuit_batches', - self.auxfile_types['pygsti_circuit_batches'], - self.pygsti_circuit_batches) - - #create Qiskit quantum circuit for each circuit defined in experiment list - self.qasm_circuit_batches = [] if self.qasm_circuit_batches is None else self.qasm_circuit_batches - self.qiskit_circuit_batches = [] if self.qiskit_circuit_batches is None else self.qiskit_circuit_batches + self._write_checkpoint(dirname) if len(self.qiskit_circuit_batches): print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') - for batch_idx in range(len(self.qiskit_circuit_batches, num_batches)): + for batch_idx in range(len(self.qiskit_circuit_batches), num_batches): print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") batch = [] batch_strs = [] for circ in self.pygsti_circuit_batches[batch_idx]: - pygsti_openqasm_circ = 
circ.convert_to_openqasm(num_qubits=self.pspec.num_qubits, + pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=self.processor_spec.num_qubits, standard_gates_version='x-sx-rz') batch_strs.append(pygsti_openqasm_circ) @@ -183,15 +183,7 @@ def transpile(self, dirname=None): self.qasm_circuit_batches.append(batch_strs) self.qiskit_circuit_batches.append(batch) - if dirname is not None: - exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' - - # Checkpoint by writing this member as .write() would - # Here, we go to a lower level than pygsti_circuit_batches and write each list entry as they are completed - subtypes = self.auxfile_types['qasm_circuit_batches'].split(':') - assert subtypes[0] == 'list', "Checkpointing of qasm_circuit_batches only possible when auxfile_type is list:" - subtype = ':'.join(subtypes[1:]) - _io.metadir._write_auxfile_member(exp_dir, f'qasm_circuit_batches{batch_idx}', subtype, batch_strs) + self._write_checkpoint(dirname) def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wait_time=1, wait_steps=10, dirname=None): @@ -256,7 +248,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, stop = min(start + allowed_jobs, stop) - for batch_idx, batch in self.qiskit_circuit_batches: + for batch_idx, batch in enumerate(self.qiskit_circuit_batches): if batch_idx < start or batch_idx >= stop: continue @@ -291,7 +283,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, initializing = False try: - job_id = self.qjobs.job_id() + job_id = self.qjobs[-1].job_id() print(f' - Job ID is {job_id}') self.job_ids.append(job_id) except: @@ -299,21 +291,11 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, self.job_ids.append(None) try: - print(f' - Queue position is {self.qjobs[-1].queue_info().position)}') + print(f' - Queue position is {self.qjobs[-1].queue_info().position}') except: print(f' - Failed to get queue position for batch {batch_idx + 
1}') submit_status = True - # Checkpoint calibration and job id data - if dirname is not None: - p = _pathlib.Path(dirname) - - exp_dir = p / 'ibmqexperiment' - # Write as .write() would for checkpointing - _io.metadir._write_auxfile_member(exp_dir, 'job_ids', 'json', self.job_ids) - _io.metadir._write_auxfile_member(exp_dir, 'submit_time_calibration_data', 'json', - self.submit_time_calibration_data) - except Exception as ex: template = "An exception of type {0} occurred. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) @@ -334,6 +316,9 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, print('{} '.format(step), end='') _time.sleep(wait_time) print() + finally: + # Checkpoint calibration and job id data + self._write_checkpoint(dirname) def monitor(self): """ @@ -346,13 +331,17 @@ def monitor(self): status = qjob.status() print(f"Batch {counter + 1}: {status}") if status.name == 'QUEUED': - print(f' - Queue position is {qjob.queue_info().position}') + info = qjob.queue_info() + if info is not None: + print(f' - Queue position is {info.position}') + else: + print(' - Unable to retrieve queue position') # Print unsubmitted for any entries in qobj but not qjob for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): print(f"Batch {counter + 1}: NOT SUBMITTED") - def retrieve_results(self): + def retrieve_results(self, dirname=None): """ Gets the results of the completed jobs from IBM Q, and processes them into a pyGSTi DataProtocol object (stored as the key 'data'), @@ -382,24 +371,28 @@ def partial_trace(ordered_target_indices, input_dict): output_dict[new_string] = input_dict[bitstring] return output_dict + if len(self.batch_results): + print(f'Already retrieved results of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') + #get results from backend jobs and add to dict - self.batch_result_objects = [] ds = _data.DataSet() - for exp_idx, qjob in enumerate(self.qjobs): + for 
exp_idx in range(len(self.batch_results), len(self.qjobs)): + qjob = self.qjobs[exp_idx] print(f"Querying IBMQ for results objects for batch {exp_idx + 1}...") batch_result = qjob.result() - self.batch_result_objects.append(batch_result) + self.batch_results.append(batch_result.to_dict()) - # TODO: Checkpoint + self._write_checkpoint(dirname) for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result.get_counts(i))) ds.add_count_dict(circ, counts_data) - self.data = _ProtocolData(self['edesign'], ds) + self.data = _ProtocolData(self.edesign, ds) - # TODO: Checkpoint + if dirname is not None: + self.data.write(dirname, edesign_already_written=True) def write(self, dirname=None): """ @@ -428,12 +421,7 @@ def write(self, dirname=None): if self.data is not None: self.data.write(dirname, edesign_already_written=True) - exp_dir = dirname / 'ibmqexperiment' - exp_dir.mkdir(parents=True, exist_ok=True) - _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') - - # Handle nonstandard serializations - # TODO: batch_result_objs? 
+ self._write_checkpoint(dirname) @classmethod def from_dir(cls, dirname, regen_qiskit_circs=False, @@ -472,6 +460,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, ret = cls(edesign, None) ret.__dict__.update(attributes_from_meta) + ret.edesign = edesign # Handle nonstandard serialization try: @@ -480,25 +469,39 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, except: pass + # Regenerate Qiskit circuits ret.qiskit_circuit_batches = [] - ret.qjobs = [] if regen_qiskit_circs: - # Regenerate Qiskit circuits for batch_strs in attributes_from_meta['qasm_circuit_batches']: batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] ret.qiskit_circuit_batches.append(batch) + # Regenerate Qiskit RuntimeJobs + ret.qjobs = [] if regen_runtime_jobs: - # Regenerate RuntimeJobs (if have provider) if provider is None: _warnings.warn("No provider specified, cannot retrieve IBM jobs") else: ret._retrieve_jobs(provider) - # Handle nonstandard serializations - # TODO: batch_result_objs? - return ret + + def _write_checkpoint(self, dirname): + """Write only the ibmqexperiment part of .write(). + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). + """ + if dirname is not None: + exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' + exp_dir.mkdir(parents=True, exist_ok=True) + _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') def _retrieve_jobs(self, provider): """Retrieves RuntimeJobs from IBMQ based on job_ids. diff --git a/pygsti/io/metadir.py b/pygsti/io/metadir.py index 849da9729..d9bfbdf2f 100644 --- a/pygsti/io/metadir.py +++ b/pygsti/io/metadir.py @@ -10,6 +10,7 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. 
#*************************************************************************************************** +import datetime as _dt import numpy as _np import scipy.sparse as _sps import importlib as _importlib @@ -23,6 +24,13 @@ except ImportError: _ObjectId = None +try: + # If available, use bson's JSON converter utilities + # Allows us to serialize datetime objects, for example + from bson import json_util as _json_util +except ImportError: + _json_util=None + from pygsti.io import readers as _load from pygsti.io import writers as _write from pygsti.baseobjs.nicelyserializable import NicelySerializable as _NicelySerializable @@ -146,7 +154,10 @@ def load_meta_based_dir(root_dir, auxfile_types_member='auxfile_types', ret = {} with open(str(root_dir / 'meta.json'), 'r') as f: - meta = _json.load(f) + if _json_util is not None: + meta = _json.load(f, object_hook=_json_util.object_hook) + else: + meta = _json.load(f) #Convert lists => tuples, as we prefer immutable tuples #for key in meta: @@ -302,7 +313,10 @@ def should_skip_loading(path): val = _np.load(pth) elif typ == 'json': with open(str(pth), 'r') as f: - val = _json.load(f) + if _json_util is not None: + val = _json.load(f, object_hook=_json_util.object_hook) + else: + val = _json.load(f) elif typ == 'pickle': with open(str(pth), 'rb') as f: val = _pickle.load(f) @@ -386,7 +400,10 @@ def write_meta_based_dir(root_dir, valuedict, auxfile_types=None, init_meta=None with open(str(root_dir / 'meta.json'), 'w') as f: _check_jsonable(meta) - _json.dump(meta, f) + if _json_util is not None: + _json.dump(meta, f, indent=4, default=_json_util.default) + else: + _json.dump(meta, f, indent=4) def _write_auxfile_member(root_dir, filenm, typ, val): @@ -451,7 +468,10 @@ def _write_auxfile_member(root_dir, filenm, typ, val): elif typ == 'json': with open(str(pth), 'w') as f: _check_jsonable(val) - _json.dump(val, f, indent=4) + if _json_util is not None: + _json.dump(val, f, indent=4, default=_json_util.default) + else: + 
_json.dump(val, f, indent=4) elif typ == 'pickle': with open(str(pth), 'wb') as f: _pickle.dump(val, f) @@ -475,7 +495,10 @@ def _cls_from_meta_json(dirname): class """ with open(str(_pathlib.Path(dirname) / 'meta.json'), 'r') as f: - meta = _json.load(f) + if _json_util is not None: + meta = _json.load(f, object_hook=_json_util.object_hook) + else: + meta = _json.load(f) return _class_for_name(meta['type']) # class of object to create @@ -501,7 +524,10 @@ def _obj_to_meta_json(obj, dirname): meta = {'type': _full_class_name(obj)} with open(str(_pathlib.Path(dirname) / 'meta.json'), 'w') as f: _check_jsonable(meta) - _json.dump(meta, f) + if _json_util is not None: + _json.dump(meta, f, indent=4, default=_json_util.default) + else: + _json.dump(meta, f, indent=4) def write_obj_to_meta_based_dir(obj, dirname, auxfile_types_member, omit_attributes=(), @@ -633,7 +659,10 @@ def write_dict_to_json_or_pkl_files(d, dirname): jsonable = _to_jsonable(val) _check_jsonable(jsonable) with open(dirname / (key + '.json'), 'w') as f: - _json.dump(jsonable, f) + if _json_util is not None: + _json.dump(jsonable, f, indent=4, default=_json_util.default) + else: + _json.dump(jsonable, f, indent=4) except Exception as e: fn = str(dirname / (key + '.json')) _warnings.warn("Could not write %s (falling back on pickle format):\n" % fn + str(e)) @@ -673,6 +702,8 @@ def _check_jsonable(x): doesn't contain dicts with non-string-valued keys """ if x is None or isinstance(x, (float, int, str)): pass # no problem + elif _json_util is not None and isinstance(x, _dt.datetime): + pass # No problem for datetime.datetime so long as we have bson.json_utils elif isinstance(x, (tuple, list)): for i, v in enumerate(x): try: diff --git a/setup.py b/setup.py index 271c6cc91..3f85a8c60 100644 --- a/setup.py +++ b/setup.py @@ -58,11 +58,13 @@ 'flake8' ], 'interpygate': ['csaps'], + 'serialization': ['bson'], 'testing': [ 'pytest', 'pytest-xdist', 'pytest-cov', 'nbval', + 'bson', 'csaps', 'cvxopt<=1.3.0.1', 
'cvxpy', From 2782417f89585042ffdab7af3c6a8bed1e39316a Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 1 Dec 2023 23:22:09 -0700 Subject: [PATCH 07/32] Add deserialization support for old pickle format --- .gitignore | 2 +- .../objects/advanced/IBMQExperiment.ipynb | 74 +++++++++++++++++-- pygsti/extras/ibmq/ibmqcore.py | 43 +++++++++-- 3 files changed, 104 insertions(+), 15 deletions(-) diff --git a/.gitignore b/.gitignore index b09ced543..4c9c82883 100644 --- a/.gitignore +++ b/.gitignore @@ -49,7 +49,7 @@ jupyter_notebooks/Tutorials/tutorial_files/exampleBriefReport jupyter_notebooks/Tutorials/tutorial_files/*.ipynb jupyter_notebooks/Tutorials/tutorial_files/tempTest jupyter_notebooks/Tutorials/tutorial_files/*checkpoints -jupyter_notebooks/Tutorials/objects/advanced/test_ibmq +jupyter_notebooks/Tutorials/objects/advanced/test_ibmq* diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 0f34183b2..6ce2d42c0 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -11,16 +11,18 @@ ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": null, "metadata": {}, + "outputs": [], "source": [ - "qiskit.__qiskit_version__ = {'qiskit-terra': '0.25.3', 'qiskit': '0.44.3', 'qiskit-aer': None, 'qiskit-ignis': None, 'qiskit-ibmq-provider': '0.20.2', 'qiskit-nature': None, 'qiskit-finance': None, 'qiskit-optimization': None, 'qiskit-machine-learning': None}\n", - "qiskit_ibm_provider.__version__ = '0.7.2'" + "#qiskit.__qiskit_version__ = {'qiskit-terra': '0.25.3', 'qiskit': '0.44.3', 'qiskit-aer': None, 'qiskit-ignis': None, 'qiskit-ibmq-provider': '0.20.2', 'qiskit-nature': None, 'qiskit-finance': None, 'qiskit-optimization': None, 'qiskit-machine-learning': None}\n", + "#qiskit_ibm_provider.__version__ = '0.7.2'" ] }, { "cell_type": "code", - 
"execution_count": null, + "execution_count": 1, "metadata": { "tags": [] }, @@ -34,7 +36,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "tags": [ "nbval-skip" @@ -45,6 +47,66 @@ "from qiskit_ibm_provider import IBMProvider" ] }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/Users/sserita/Documents/repos/subcirc-vb/src/pygsti/pygsti/extras/ibmq/ibmqcore.py:468: UserWarning: Failed to load ibmqexperiment, falling back to old serialization format logic\n", + " _warnings.warn(\"Failed to load ibmqexperiment, falling back to old serialization format logic\")\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading job 1/7...\n", + "Loading job 2/7...\n", + "Loading job 3/7...\n", + "Loading job 4/7...\n", + "Loading job 5/7...\n", + "Loading job 6/7...\n", + "Loading job 7/7...\n" + ] + } + ], + "source": [ + "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq_experiment', regen_qiskit_circs=True, regen_runtime_jobs=True, provider=provider)" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Already retrieved results of 7/7 circuit batches\n" + ] + }, + { + "ename": "AttributeError", + "evalue": "'IBMQExperiment' object has no attribute 'edesign'", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", + "\u001b[1;32m/Users/sserita/Documents/repos/subcirc-vb/src/pygsti/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb Cell 6\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> 1\u001b[0m exp2\u001b[39m.\u001b[39;49mretrieve_results()\n", + "File 
\u001b[0;32m~/Documents/repos/subcirc-vb/src/pygsti/pygsti/extras/ibmq/ibmqcore.py:394\u001b[0m, in \u001b[0;36mIBMQExperiment.retrieve_results\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 391\u001b[0m counts_data \u001b[39m=\u001b[39m partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result\u001b[39m.\u001b[39mget_counts(i)))\n\u001b[1;32m 392\u001b[0m ds\u001b[39m.\u001b[39madd_count_dict(circ, counts_data)\n\u001b[0;32m--> 394\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdata \u001b[39m=\u001b[39m _ProtocolData(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49medesign, ds)\n\u001b[1;32m 396\u001b[0m \u001b[39mif\u001b[39;00m dirname \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 397\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdata\u001b[39m.\u001b[39mwrite(dirname, edesign_already_written\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m)\n", + "\u001b[0;31mAttributeError\u001b[0m: 'IBMQExperiment' object has no attribute 'edesign'" + ] + } + ], + "source": [ + "exp2.retrieve_results()" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -65,7 +127,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "tags": [ "nbval-skip" diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index 07d8f54fb..e1aa53142 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -8,8 +8,10 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. 
#*************************************************************************************************** +import json as _json import numpy as _np import pathlib as _pathlib +import pickle as _pickle import time as _time import warnings as _warnings @@ -372,7 +374,7 @@ def partial_trace(ordered_target_indices, input_dict): return output_dict if len(self.batch_results): - print(f'Already retrieved results of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') + print(f'Already retrieved results of {len(self.batch_results)}/{len(self.qiskit_circuit_batches)} circuit batches') #get results from backend jobs and add to dict ds = _data.DataSet() @@ -455,12 +457,37 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, p = _pathlib.Path(dirname) edesign = _io.read_edesign_from_dir(dirname) - exp_dir = p / 'ibmqexperiment' - attributes_from_meta = _io.load_meta_based_dir(exp_dir) - - ret = cls(edesign, None) - ret.__dict__.update(attributes_from_meta) - ret.edesign = edesign + try: + exp_dir = p / 'ibmqexperiment' + attributes_from_meta = _io.load_meta_based_dir(exp_dir) + + ret = cls(edesign, None) + ret.__dict__.update(attributes_from_meta) + ret.edesign = edesign + except KeyError: + _warnings.warn("Failed to load ibmqexperiment, falling back to old serialization format logic") + + ret = cls(edesign, None) + with open(p / 'ibmqexperiment/meta.json', 'r') as f: + from_json = _json.load(f) + ret.__dict__.update(from_json) + + # Old keys to new class members + key_attr_map = { + 'pspec': ('processor_spec', None), + 'pygsti_circuits': ('pygsti_circuit_batches', []), + 'pygsti_openqasm_circuits': ('qasm_circuit_batches', []), + 'submit_time_calibration_data': ('submit_time_calibration_data', []), + 'batch_result_object': ('batch_results', []) + } + + for key, (attr, def_val) in key_attr_map.items(): + with open(p / f'ibmqexperiment/{key}.pkl', 'rb') as f: + try: + setattr(ret, attr, _pickle.load(f)) + except: + _warnings.warn(f"Couldn't unpickle {key}, so 
setting {attr} to {def_val}.") + setattr(ret, attr, def_val) # Handle nonstandard serialization try: @@ -472,7 +499,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, # Regenerate Qiskit circuits ret.qiskit_circuit_batches = [] if regen_qiskit_circs: - for batch_strs in attributes_from_meta['qasm_circuit_batches']: + for batch_strs in ret.qasm_circuit_batches: batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] ret.qiskit_circuit_batches.append(batch) From b325ef73c878435442a09ce2eb9605c30adcee67 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 1 Dec 2023 23:22:51 -0700 Subject: [PATCH 08/32] Clean up tutorial. --- .../objects/advanced/IBMQExperiment.ipynb | 66 +------------------ 1 file changed, 3 insertions(+), 63 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 6ce2d42c0..c4821ad90 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -22,7 +22,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": { "tags": [] }, @@ -36,7 +36,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -47,66 +47,6 @@ "from qiskit_ibm_provider import IBMProvider" ] }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "/Users/sserita/Documents/repos/subcirc-vb/src/pygsti/pygsti/extras/ibmq/ibmqcore.py:468: UserWarning: Failed to load ibmqexperiment, falling back to old serialization format logic\n", - " _warnings.warn(\"Failed to load ibmqexperiment, falling back to old serialization format logic\")\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loading job 1/7...\n", - "Loading job 2/7...\n", - 
"Loading job 3/7...\n", - "Loading job 4/7...\n", - "Loading job 5/7...\n", - "Loading job 6/7...\n", - "Loading job 7/7...\n" - ] - } - ], - "source": [ - "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq_experiment', regen_qiskit_circs=True, regen_runtime_jobs=True, provider=provider)" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Already retrieved results of 7/7 circuit batches\n" - ] - }, - { - "ename": "AttributeError", - "evalue": "'IBMQExperiment' object has no attribute 'edesign'", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mAttributeError\u001b[0m Traceback (most recent call last)", - "\u001b[1;32m/Users/sserita/Documents/repos/subcirc-vb/src/pygsti/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb Cell 6\u001b[0m line \u001b[0;36m1\n\u001b[0;32m----> 1\u001b[0m exp2\u001b[39m.\u001b[39;49mretrieve_results()\n", - "File \u001b[0;32m~/Documents/repos/subcirc-vb/src/pygsti/pygsti/extras/ibmq/ibmqcore.py:394\u001b[0m, in \u001b[0;36mIBMQExperiment.retrieve_results\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 391\u001b[0m counts_data \u001b[39m=\u001b[39m partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result\u001b[39m.\u001b[39mget_counts(i)))\n\u001b[1;32m 392\u001b[0m ds\u001b[39m.\u001b[39madd_count_dict(circ, counts_data)\n\u001b[0;32m--> 394\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdata \u001b[39m=\u001b[39m _ProtocolData(\u001b[39mself\u001b[39;49m\u001b[39m.\u001b[39;49medesign, ds)\n\u001b[1;32m 396\u001b[0m \u001b[39mif\u001b[39;00m dirname \u001b[39mis\u001b[39;00m \u001b[39mnot\u001b[39;00m \u001b[39mNone\u001b[39;00m:\n\u001b[1;32m 397\u001b[0m \u001b[39mself\u001b[39m\u001b[39m.\u001b[39mdata\u001b[39m.\u001b[39mwrite(dirname, 
edesign_already_written\u001b[39m=\u001b[39m\u001b[39mTrue\u001b[39;00m)\n", - "\u001b[0;31mAttributeError\u001b[0m: 'IBMQExperiment' object has no attribute 'edesign'" - ] - } - ], - "source": [ - "exp2.retrieve_results()" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -127,7 +67,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" From f8fda1faab04c423f9e0023b4aa657ce1091a76e Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 12 Dec 2023 13:05:35 -0800 Subject: [PATCH 09/32] Make IBMQExperiment chkpting in line with GST chkpting --- .../objects/advanced/IBMQExperiment.ipynb | 36 +++++---- pygsti/extras/ibmq/ibmqcore.py | 74 +++++++++++-------- 2 files changed, 66 insertions(+), 44 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index c4821ad90..b8095e42a 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -139,7 +139,7 @@ "outputs": [], "source": [ "# Using the active backend to pull current device specification\n", - "device = ExperimentalDevice.from_qiskit_backend(backend)" + "device = ExperimentalDevice.from_qiskit_backend(sim_backend)" ] }, { @@ -236,7 +236,7 @@ "## Running on IBM Q\n", "We're now ready to run on the IBM Q processor. We do this using an `IBMQExperiment` object.\n", "\n", - "We can enable checkpointing for `IBMQExperiment` objects by writing the object to disk now, and then providing the directory name in downstream calls." + "We can enable checkpointing for `IBMQExperiment` objects by providing a path. This is the default and is recommended!" 
] }, { @@ -247,8 +247,7 @@ }, "outputs": [], "source": [ - "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201)\n", - "exp.write('test_ibmq')" + "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201, checkpoint_path='test_ibmq')" ] }, { @@ -265,7 +264,7 @@ "outputs": [], "source": [ "# Provide the directory name to enable transpilation checkpointing\n", - "exp.transpile(dirname='test_ibmq')" + "exp.transpile()" ] }, { @@ -279,7 +278,7 @@ "del exp.qiskit_circuit_batches[4:]\n", "\n", "# And now transpilation should only redo the missing batches\n", - "exp.transpile(dirname='test_ibmq')\n" + "exp.transpile()\n" ] }, { @@ -318,7 +317,7 @@ "outputs": [], "source": [ "# Again, we can checkpoint by passing in dirname\n", - "exp2.submit(sim_backend, dirname='test_ibmq')" + "exp2.submit(sim_backend)" ] }, { @@ -351,7 +350,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, "outputs": [], "source": [ "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_runtime_jobs=True, provider=provider)" @@ -360,7 +363,11 @@ { "cell_type": "code", "execution_count": null, - "metadata": {}, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, "outputs": [], "source": [ "exp3.monitor()" @@ -383,7 +390,7 @@ }, "outputs": [], "source": [ - "exp3.retrieve_results(dirname='test_ibmq')" + "exp3.retrieve_results()" ] }, { @@ -397,11 +404,14 @@ "cell_type": "code", "execution_count": null, "metadata": { - "tags": [] + "tags": [ + "nbval-skip" + ] }, "outputs": [], "source": [ - "print(exp3.keys())" + "display(exp3.qjobs)\n", + "display(exp3.batch_results)" ] }, { @@ -501,7 +511,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.13" + "version": "3.11.5" } }, "nbformat": 4, diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index 
e1aa53142..038a5a19e 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -45,7 +45,7 @@ class IBMQExperiment(_TreeNode, _HasPSpec): def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, - num_shots=1024, seed=None): + num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False): """ A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these jobs to IBM Q and receives the results. @@ -82,17 +82,23 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True seed: int, optional Seed for RNG during order randomization of circuits. - checkpoint_dirname: str, optional - Name of checkpoint directory. If None, no checkpointing is used. + checkpoint_path: str, optional + A string for the path to use for writing intermediate checkpoint + files to disk. This should match the `dirname` kwarg used in + serialization, i.e. `from_dir()` or `write()`. + If None, will attempt to use load location for `edesign`. + + disable_checkpointing : bool, optional (default False) + When set to True checkpoint objects will not be constructed and written + to disk during the course of this protocol. It is strongly recommended + that this be kept set to False without good reason to disable the checkpoints. Returns ------- IBMQExperiment - An object containing jobs to be submitted to IBM Q, which can then be submitted - using the methods .submit() and whose results can be grabbed from IBM Q using - the method .retrieve_results(). This object has dictionary-like access for all of - the objects it contains (e.g., ['qobj'] is a list of the objects to be submitted to - IBM Q). + An object containing jobs to be submitted to IBM Q created by `.transpile()`, + which can then be submitted using the methods `.submit()` and whose results + can be grabbed from IBM Q using the method `.retrieve_results()`. 
""" _TreeNode.__init__(self, edesign._dirs) @@ -101,12 +107,13 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True _HasPSpec.__init__(self, pspec) self.edesign = edesign - self.processor_spec = pspec # Must be called processor_spec for _HasPSpec class self.remove_duplicates = remove_duplicates self.randomized_order = randomized_order self.circuits_per_batch = circuits_per_batch self.num_shots = num_shots self.seed = seed + self.checkpoint_path = checkpoint_path if checkpoint_path is not None else self.edesign._loaded_from + self.disable_checkpointing = disable_checkpointing # Populated with transpiling to IBMQ with .transpile() self.pygsti_circuit_batches = [] self.qasm_circuit_batches = [] @@ -137,15 +144,15 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True # Fall back to pickles if we do not have bson to deal with datetime.datetime self.auxfile_types['submit_time_calibration_data'] = 'pickle' self.auxfile_types['batch_results'] = 'pickle' + + if not disable_checkpointing: + if self.checkpoint_path is None: + raise SyntaxError("Default checkpointing is enabled, either provide " + \ + "`checkpoint_path` or `disable_checkpointing=True` (not recommended).") + self.write(self.checkpoint_path) - def transpile(self, dirname=None): + def transpile(self): """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. - - Parameters - ---------- - dirname: str, optional - If provided, the root directory (i.e. 
same as passed to write() - and from_dir()) to use for checkpointing """ circuits = self.edesign.all_circuits_needing_data.copy() num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) @@ -165,7 +172,8 @@ def transpile(self, dirname=None): end = min(len(circuits), (batch_idx+1)*self.circuits_per_batch) self.pygsti_circuit_batches.append(circuits[start:end]) - self._write_checkpoint(dirname) + if not self.disable_checkpointing: + self._write_checkpoint() if len(self.qiskit_circuit_batches): print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') @@ -185,10 +193,11 @@ def transpile(self, dirname=None): self.qasm_circuit_batches.append(batch_strs) self.qiskit_circuit_batches.append(batch) - self._write_checkpoint(dirname) + if not self.disable_checkpointing: + self._write_checkpoint() def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, - wait_time=1, wait_steps=10, dirname=None): + wait_time=1, wait_steps=10): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. 
@@ -320,7 +329,8 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, print() finally: # Checkpoint calibration and job id data - self._write_checkpoint(dirname) + if not self.disable_checkpointing: + self._write_checkpoint() def monitor(self): """ @@ -343,7 +353,7 @@ def monitor(self): for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): print(f"Batch {counter + 1}: NOT SUBMITTED") - def retrieve_results(self, dirname=None): + def retrieve_results(self): """ Gets the results of the completed jobs from IBM Q, and processes them into a pyGSTi DataProtocol object (stored as the key 'data'), @@ -384,7 +394,8 @@ def partial_trace(ordered_target_indices, input_dict): batch_result = qjob.result() self.batch_results.append(batch_result.to_dict()) - self._write_checkpoint(dirname) + if not self.disable_checkpointing: + self._write_checkpoint() for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] @@ -393,8 +404,8 @@ def partial_trace(ordered_target_indices, input_dict): self.data = _ProtocolData(self.edesign, ds) - if dirname is not None: - self.data.write(dirname, edesign_already_written=True) + if not self.disable_checkpointing: + self.data.write(self.checkpoint_path, edesign_already_written=True) def write(self, dirname=None): """ @@ -413,8 +424,9 @@ def write(self, dirname=None): """ if dirname is None: - dirname = self.edesign._loaded_from - if dirname is None: raise ValueError("`dirname` must be given because there's no default directory") + dirname = self.checkpoint_path + if dirname is None: + raise ValueError("`dirname` must be given because there's no checkpoint or default edesign directory") dirname = _pathlib.Path(dirname) @@ -513,7 +525,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, return ret - def _write_checkpoint(self, dirname): + def _write_checkpoint(self, dirname=None): """Write only the 
ibmqexperiment part of .write(). Parameters @@ -525,10 +537,10 @@ def _write_checkpoint(self, dirname): was loaded from is used (if this object wasn't loaded from disk, an error is raised). """ - if dirname is not None: - exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' - exp_dir.mkdir(parents=True, exist_ok=True) - _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') + dirname = dirname if dirname is not None else self.checkpoint_path + exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' + exp_dir.mkdir(parents=True, exist_ok=True) + _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') def _retrieve_jobs(self, provider): """Retrieves RuntimeJobs from IBMQ based on job_ids. From b58f5b3767306f82909a5f95425266b438235fd9 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 12 Dec 2023 13:20:21 -0800 Subject: [PATCH 10/32] Provide opt-out for saving all_circuits_needing_data for CombinedExperimentDesign The thought process is that in many cases, the all_circuits_needing_data for CombinedExperimentDesign is simply a union of subdesign lists. In this case, a user can opt out of saving this and just regenerate it on serialization (thereby saving 2x disk space and save/load time). --- pygsti/protocols/protocol.py | 82 ++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 65316f309..b21ea8921 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1403,6 +1403,88 @@ def map_qubit_labels(self, mapper): mapped_sub_designs = {key: child.map_qubit_labels(mapper) for key, child in self._vals.items()} return CombinedExperimentDesign(mapped_sub_designs, mapped_circuits, mapped_qubit_labels, self._dirs) + @classmethod + def from_dir(cls, dirname, parent=None, name=None, quick_load=False): + """ + Initialize a new ExperimentDesign object from `dirname`. 
+ + This is specialized for CombinedExperimentDesign so that it can reset + all_circuits_needing_data in case that was skipped in write(). + + Parameters + ---------- + dirname : str + The *root* directory name (under which there is a 'edesign' + subdirectory). + + parent : ExperimentDesign, optional + The parent design object, if there is one. Primarily used + internally - if in doubt, leave this as `None`. + + name : str, optional + The sub-name of the design object being loaded, i.e. the + key of this data object beneath `parent`. Only used when + `parent` is not None. + + quick_load : bool, optional + Setting this to True skips the loading of the potentially long + circuit lists. This can be useful when loading takes a long time + and all the information of interest lies elsewhere, e.g. in an + encompassing results object. + + Returns + ------- + ExperimentDesign + """ + ret = ExperimentDesign.from_dir(dirname=dirname, parent=parent, name=name, quick_load=quick_load) + + if ret.auxfile_types['all_circuits_needing_data'] == 'reset': + all_circuits = [] + for des in ret.sub_designs.values(): + all_circuits.extend(des.all_circuits_needing_data) + _lt.remove_duplicates_in_place(all_circuits) + + ret.all_circuits_needing_data = all_circuits + ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type + del ret.old_all_circuits_type + + return ret + + def write(self, dirname=None, parent=None, skip_all_circuits=False): + """ + Write this experiment design to a directory. + + This is exactly the same + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). 
+ + parent : ExperimentDesign, optional + The parent experiment design, when a parent is writing this + design as a sub-experiment-design. Otherwise leave as None. + + skip_all_circuits : bool, optional + If True (not the default), then this will skip writing + all_circuits_needing_data. This is intended to be used + when all_circuits_needing_data is just the union of the + subdesigns as a space-saving mechanism. + + Returns + ------- + None + """ + if skip_all_circuits: + self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] + self.auxfile_types['all_circuits_needing_data'] = 'reset' + + ExperimentDesign.write(self, dirname=dirname, parent=parent) + class SimultaneousExperimentDesign(ExperimentDesign): """ From df4bcc825ac5210da0a257f7489fabd5eb8a8df2 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 12 Dec 2023 13:21:53 -0800 Subject: [PATCH 11/32] Finish docstring --- pygsti/protocols/protocol.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index b21ea8921..077cb7a5f 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1454,7 +1454,11 @@ def write(self, dirname=None, parent=None, skip_all_circuits=False): """ Write this experiment design to a directory. - This is exactly the same + This is the same as ExperimentDesign writing, except the user can + opt out of saving the all_circuits_needing_data. This is intended + to be used when all_circuits_needing_data is simply the union of + the subdesign all_circuits_needing_data, thus saving on disk space + and load times. 
Parameters ---------- From 8f5b05a29ad6dc8cb41b927f73c91357616aab4f Mon Sep 17 00:00:00 2001 From: Stefan Seritan Date: Tue, 12 Dec 2023 14:31:50 -0800 Subject: [PATCH 12/32] Bugfixes for serialization updates --- pygsti/extras/ibmq/ibmqcore.py | 2 +- pygsti/protocols/protocol.py | 146 +++++++++++++-------------------- 2 files changed, 60 insertions(+), 88 deletions(-) diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py index 038a5a19e..150543312 100644 --- a/pygsti/extras/ibmq/ibmqcore.py +++ b/pygsti/extras/ibmq/ibmqcore.py @@ -112,7 +112,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.circuits_per_batch = circuits_per_batch self.num_shots = num_shots self.seed = seed - self.checkpoint_path = checkpoint_path if checkpoint_path is not None else self.edesign._loaded_from + self.checkpoint_path = str(checkpoint_path) if checkpoint_path is not None else self.edesign._loaded_from self.disable_checkpointing = disable_checkpointing # Populated with transpiling to IBMQ with .transpile() self.pygsti_circuit_batches = [] diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 077cb7a5f..48c034078 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1239,6 +1239,51 @@ class CombinedExperimentDesign(ExperimentDesign): # for multiple designs on the form the circuit ordering of this experiment design. """ + @classmethod + def from_dir(cls, dirname, parent=None, name=None, quick_load=False): + """ + Initialize a new ExperimentDesign object from `dirname`. + + This is specialized for CombinedExperimentDesign so that it can reset + all_circuits_needing_data in case that was skipped in write(). + + Parameters + ---------- + dirname : str + The *root* directory name (under which there is a 'edesign' + subdirectory). + + parent : ExperimentDesign, optional + The parent design object, if there is one. Primarily used + internally - if in doubt, leave this as `None`. 
+ + name : str, optional + The sub-name of the design object being loaded, i.e. the + key of this data object beneath `parent`. Only used when + `parent` is not None. + + quick_load : bool, optional + Setting this to True skips the loading of the potentially long + circuit lists. This can be useful when loading takes a long time + and all the information of interest lies elsewhere, e.g. in an + encompassing results object. + + Returns + ------- + ExperimentDesign + """ + ret = super().from_dir(dirname=dirname, parent=parent, name=name, quick_load=quick_load) + + if ret.skip_writing_all_circuits: + all_circuits = [] + for des in ret._vals.values(): + all_circuits.extend(des.all_circuits_needing_data) + _lt.remove_duplicates_in_place(all_circuits) + + ret.all_circuits_needing_data = all_circuits + + return ret + @classmethod def from_edesign(cls, edesign, name): """ @@ -1268,7 +1313,7 @@ def from_edesign(cls, edesign, name): raise ValueError("Cannot convert a %s to a %s!" % (str(type(edesign)), str(cls))) def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design_dirs=None, - interleave=False): + interleave=False, skip_writing_all_circuits=False): """ Create a new CombinedExperimentDesign object. @@ -1301,6 +1346,15 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design Whether the circuits of the `sub_designs` should be interleaved to form the circuit ordering of this experiment design. + skip_writing_all_circuits : bool, optional + If True, all_circuits_needing_data will be skipped during `write()` + and regenerated as the union of `all_circuits_needing_data` from + `sub_designs` upon reading with `from_dir()`. + This can have save space on disk and cut down on read/write times, + but the user needs to guarantee that `all_circuits_needing_data` + is initialized as the union of sublists and remains unmodified, + so this is False by default. 
+ Returns ------- CombinedExperimentDesign @@ -1327,6 +1381,10 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design super().__init__(all_circuits, qubit_labels, sub_designs, sub_design_dirs) + self.skip_writing_all_circuits = skip_writing_all_circuits + if self.skip_writing_all_circuits: + self.auxfile_types['all_circuits_needing_data'] = 'reset' + def _create_subdata(self, sub_name, dataset): """ Creates a :class:`ProtocolData` object for a sub-experiment-design. @@ -1403,92 +1461,6 @@ def map_qubit_labels(self, mapper): mapped_sub_designs = {key: child.map_qubit_labels(mapper) for key, child in self._vals.items()} return CombinedExperimentDesign(mapped_sub_designs, mapped_circuits, mapped_qubit_labels, self._dirs) - @classmethod - def from_dir(cls, dirname, parent=None, name=None, quick_load=False): - """ - Initialize a new ExperimentDesign object from `dirname`. - - This is specialized for CombinedExperimentDesign so that it can reset - all_circuits_needing_data in case that was skipped in write(). - - Parameters - ---------- - dirname : str - The *root* directory name (under which there is a 'edesign' - subdirectory). - - parent : ExperimentDesign, optional - The parent design object, if there is one. Primarily used - internally - if in doubt, leave this as `None`. - - name : str, optional - The sub-name of the design object being loaded, i.e. the - key of this data object beneath `parent`. Only used when - `parent` is not None. - - quick_load : bool, optional - Setting this to True skips the loading of the potentially long - circuit lists. This can be useful when loading takes a long time - and all the information of interest lies elsewhere, e.g. in an - encompassing results object. 
- - Returns - ------- - ExperimentDesign - """ - ret = ExperimentDesign.from_dir(dirname=dirname, parent=parent, name=name, quick_load=quick_load) - - if ret.auxfile_types['all_circuits_needing_data'] == 'reset': - all_circuits = [] - for des in ret.sub_designs.values(): - all_circuits.extend(des.all_circuits_needing_data) - _lt.remove_duplicates_in_place(all_circuits) - - ret.all_circuits_needing_data = all_circuits - ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type - del ret.old_all_circuits_type - - return ret - - def write(self, dirname=None, parent=None, skip_all_circuits=False): - """ - Write this experiment design to a directory. - - This is the same as ExperimentDesign writing, except the user can - opt out of saving the all_circuits_needing_data. This is intended - to be used when all_circuits_needing_data is simply the union of - the subdesign all_circuits_needing_data, thus saving on disk space - and load times. - - Parameters - ---------- - dirname : str - The *root* directory to write into. This directory will have - an 'edesign' subdirectory, which will be created if needed and - overwritten if present. If None, then the path this object - was loaded from is used (if this object wasn't loaded from disk, - an error is raised). - - parent : ExperimentDesign, optional - The parent experiment design, when a parent is writing this - design as a sub-experiment-design. Otherwise leave as None. - - skip_all_circuits : bool, optional - If True (not the default), then this will skip writing - all_circuits_needing_data. This is intended to be used - when all_circuits_needing_data is just the union of the - subdesigns as a space-saving mechanism. 
- - Returns - ------- - None - """ - if skip_all_circuits: - self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] - self.auxfile_types['all_circuits_needing_data'] = 'reset' - - ExperimentDesign.write(self, dirname=dirname, parent=parent) - class SimultaneousExperimentDesign(ExperimentDesign): """ From fc7d7bf9dd39f0c44b8c7446223256806b57b293 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 15 Dec 2023 10:35:39 -0800 Subject: [PATCH 13/32] Standardize and add tests for edesign serialization. This adds checks for Combined and FreeformDesigns to ensure we only skip all_circuits_needing_data when it can be regenerated. This also adds unit tests that (roughly) tests serialization/deserialization for all edesigns. That test would be improved by edesign equality code... but this is a first step. --- pygsti/protocols/protocol.py | 88 ++++++++++++++++++++------- test/unit/protocols/test_protocols.py | 64 +++++++++++++------ 2 files changed, 113 insertions(+), 39 deletions(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 48c034078..3bf676d9e 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1274,13 +1274,16 @@ def from_dir(cls, dirname, parent=None, name=None, quick_load=False): """ ret = super().from_dir(dirname=dirname, parent=parent, name=name, quick_load=quick_load) - if ret.skip_writing_all_circuits: + if ret.auxfile_types['all_circuits_needing_data'] == 'reset': all_circuits = [] for des in ret._vals.values(): all_circuits.extend(des.all_circuits_needing_data) _lt.remove_duplicates_in_place(all_circuits) ret.all_circuits_needing_data = all_circuits + + ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type + del ret.old_all_circuits_type return ret @@ -1313,7 +1316,7 @@ def from_edesign(cls, edesign, name): raise ValueError("Cannot convert a %s to a %s!" 
% (str(type(edesign)), str(cls))) def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design_dirs=None, - interleave=False, skip_writing_all_circuits=False): + interleave=False): """ Create a new CombinedExperimentDesign object. @@ -1346,15 +1349,6 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design Whether the circuits of the `sub_designs` should be interleaved to form the circuit ordering of this experiment design. - skip_writing_all_circuits : bool, optional - If True, all_circuits_needing_data will be skipped during `write()` - and regenerated as the union of `all_circuits_needing_data` from - `sub_designs` upon reading with `from_dir()`. - This can have save space on disk and cut down on read/write times, - but the user needs to guarantee that `all_circuits_needing_data` - is initialized as the union of sublists and remains unmodified, - so this is False by default. - Returns ------- CombinedExperimentDesign @@ -1381,10 +1375,6 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design super().__init__(all_circuits, qubit_labels, sub_designs, sub_design_dirs) - self.skip_writing_all_circuits = skip_writing_all_circuits - if self.skip_writing_all_circuits: - self.auxfile_types['all_circuits_needing_data'] = 'reset' - def _create_subdata(self, sub_name, dataset): """ Creates a :class:`ProtocolData` object for a sub-experiment-design. @@ -1461,6 +1451,45 @@ def map_qubit_labels(self, mapper): mapped_sub_designs = {key: child.map_qubit_labels(mapper) for key, child in self._vals.items()} return CombinedExperimentDesign(mapped_sub_designs, mapped_circuits, mapped_qubit_labels, self._dirs) + def write(self, dirname=None, parent=None): + """ + Write this experiment design to a directory. + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. 
If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). + + parent : ExperimentDesign, optional + The parent experiment design, when a parent is writing this + design as a sub-experiment-design. Otherwise leave as None. + + Returns + ------- + None + """ + all_subcircuits = [] + for des in self._vals.values(): + all_subcircuits.extend(des.all_circuits_needing_data) + _lt.remove_duplicates_in_place(all_subcircuits) + + # If equal, we can just regenerate from subdesigns on load (saves space and time) + # This is not set equality so that we don't do this just in case interleave is ever implemented + if all_subcircuits == self.all_circuits_needing_data: + self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] + self.auxfile_types['all_circuits_needing_data'] = 'reset' + + super().write(dirname=dirname, parent=parent) + + # Undo auxfile_type modifications if we made them + if self.auxfile_types['all_circuits_needing_data'] == 'reset': + self.auxfile_types['all_circuits_needing_data'] = self.old_all_circuits_type + del self.old_all_circuits_type + class SimultaneousExperimentDesign(ExperimentDesign): """ @@ -1713,12 +1742,18 @@ def from_dir(cls, dirname, parent=None, name=None, quick_load=False): ------- ExperimentDesign """ - edesign = ExperimentDesign.from_dir(dirname, parent=parent, name=name, quick_load=quick_load) + ret = super().from_dir(dirname, parent=parent, name=name, quick_load=quick_load) # Convert back to circuits - edesign.aux_info = {_circuits.Circuit(k, check=False): v for k,v in edesign.aux_info.items()} - - return cls(edesign.aux_info, edesign.qubit_labels) + ret.aux_info = {_circuits.Circuit(k, check=False): v for k,v in ret.aux_info.items()} + + if ret.auxfile_types['all_circuits_needing_data'] == 'reset': + ret.all_circuits_needing_data = list(ret.aux_info.keys()) + + ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type + del 
ret.old_all_circuits_type + + return ret @classmethod def from_dataframe(cls, df, qubit_labels=None): @@ -1779,9 +1814,7 @@ def __init__(self, circuits, qubit_labels=None): self.aux_info = {c: None for c in circuits} super().__init__(circuits, qubit_labels) - # Don't save all_circuits_needing_data, it's redundant with aux_info keys - self.auxfile_types['all_circuits_needing_data'] = 'reset' - # Currently not jsonable, but will be fixed in write() + # Currently not jsonable due to Circuits, but will be fixed in write() self.auxfile_types['aux_info'] = 'json' def _truncate_to_circuits_inplace(self, circuits_to_keep): @@ -1836,12 +1869,23 @@ def write(self, dirname=None, parent=None): ------- None """ + # Check if all_circuits_needing_data are just aux_info keys + # If yes, do not write them and regenerate on load + if self.all_circuits_needing_data == list(self.aux_info.keys()): + self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] + self.auxfile_types['all_circuits_needing_data'] = 'reset' + # Convert circuits to string for then-jsonable serialization aux_info = self.aux_info self.aux_info = {repr(k)[8:-1]: v for k,v in self.aux_info.items()} super().write(dirname, parent) self.aux_info = aux_info + # Undo auxfile_type modificiations if we made them + if self.auxfile_types['all_circuits_needing_data'] == 'reset': + self.auxfile_types['all_circuits_needing_data'] = self.old_all_circuits_type + del self.old_all_circuits_type + class ProtocolData(_TreeNode, _MongoSerializable): """ diff --git a/test/unit/protocols/test_protocols.py b/test/unit/protocols/test_protocols.py index 1385707b6..5ffbd475b 100644 --- a/test/unit/protocols/test_protocols.py +++ b/test/unit/protocols/test_protocols.py @@ -89,6 +89,52 @@ def test_create_edesign_fromdir_subdirs(self, root_path): self.assertTrue(all([a == b for a,b in zip(edesign3['subdir2'].all_circuits_needing_data, self.gst_design.circuit_lists[1])])) def test_map_edesign_sslbls(self): + edesigns = 
self._get_tester_edesigns() + for edesign in edesigns: + print("Testing edesign of type: ", str(type(edesign))) + orig_qubits = edesign.qubit_labels + for c in edesign.all_circuits_needing_data: + self.assertTrue(set(c.line_labels).issubset(orig_qubits)) + + if orig_qubits == (0,): + mapper = {0: 4}; mapped_qubits = (4,) + if orig_qubits == (1,): + mapper = {1: 5}; mapped_qubits = (5,) + if orig_qubits == (0,1): + mapper = {0:4, 1: 5}; mapped_qubits = (4,5) + mapped_edesign = edesign.map_qubit_labels(mapper) + self.assertEqual(mapped_edesign.qubit_labels, mapped_qubits) + for c in mapped_edesign.all_circuits_needing_data: + self.assertTrue(set(c.line_labels).issubset(mapped_qubits)) + + @with_temp_path + def test_serialization(self, root_path): + edesigns = self._get_tester_edesigns() + for i, edesign in enumerate(edesigns): + print("Testing edesign of type: ", str(type(edesign))) + root = pathlib.Path(root_path) / str(i) + edesign.write(root) + loaded_edesign = type(edesign).from_dir(root) + # TODO: We don't have good edesign equality + self.assertEqual(set(edesign.all_circuits_needing_data), set(loaded_edesign.all_circuits_needing_data)) + self.assertEqual(edesign.auxfile_types, loaded_edesign.auxfile_types) + self.assertEqual(edesign._vals.keys(), loaded_edesign._vals.keys()) + + if isinstance(edesign, (pygsti.protocols.CombinedExperimentDesign, pygsti.protocols.FreeformDesign)): + # We also need to test that all_circuits_needing_data is not dumped by default + self.assertTrue(not (root / 'edesign' / 'all_circuits_needing_data.txt').exists()) + + root2 = pathlib.Path(root_path) / f'{i}_2' + edesign.all_circuits_needing_data = [] + edesign.write(root2) + loaded_edesign = type(edesign).from_dir(root2) + # TODO: We don't have good edesign equality + self.assertEqual(set(edesign.all_circuits_needing_data), set(loaded_edesign.all_circuits_needing_data)) + self.assertEqual(edesign.auxfile_types, loaded_edesign.auxfile_types) + self.assertEqual(edesign._vals.keys(), 
loaded_edesign._vals.keys()) + self.assertTrue((root2 / 'edesign' / 'all_circuits_needing_data.txt').exists()) + + def _get_tester_edesigns(self): #Create a bunch of experiment designs: from pygsti.protocols import ExperimentDesign, CircuitListsDesign, CombinedExperimentDesign, \ SimultaneousExperimentDesign, FreeformDesign, StandardGSTDesign, GateSetTomographyDesign, \ @@ -118,7 +164,6 @@ def test_map_edesign_sslbls(self): "paulieq": CCR.create_standard(pspec1Q, "paulieq", ("1Qcliffords", "allcnots"), verbosity=0), } - edesigns = [] edesigns.append(ExperimentDesign(circuits_on0)) edesigns.append(CircuitListsDesign([circuits_on0, circuits_on0b])) @@ -134,19 +179,4 @@ def test_map_edesign_sslbls(self): edesigns.append(MirrorRBDesign(pspec1Q, depths=[0,2,4], circuits_per_depth=4, clifford_compilations=compilations1Q)) - for edesign in edesigns: - print("Testing edesign of type: ", str(type(edesign))) - orig_qubits = edesign.qubit_labels - for c in edesign.all_circuits_needing_data: - self.assertTrue(set(c.line_labels).issubset(orig_qubits)) - - if orig_qubits == (0,): - mapper = {0: 4}; mapped_qubits = (4,) - if orig_qubits == (1,): - mapper = {1: 5}; mapped_qubits = (5,) - if orig_qubits == (0,1): - mapper = {0:4, 1: 5}; mapped_qubits = (4,5) - mapped_edesign = edesign.map_qubit_labels(mapper) - self.assertEqual(mapped_edesign.qubit_labels, mapped_qubits) - for c in mapped_edesign.all_circuits_needing_data: - self.assertTrue(set(c.line_labels).issubset(mapped_qubits)) + return edesigns \ No newline at end of file From a7ea0c4ec46825e27917ac88e1b94e383d52da46 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 15 Dec 2023 11:40:39 -0800 Subject: [PATCH 14/32] Add CanCreateAllCircuitsDesign class. This generalizes the previous changes to CombinedExperimentDesign and FreeformDesign. For classes that can create all_circuits_needing_data, it will be automatically not saved and regenerated in cases where it has not changed. 
Allows removal of from_dir and write from Combined and FreeformDesigns. --- pygsti/protocols/protocol.py | 311 +++++++++++--------------- test/unit/protocols/test_protocols.py | 2 +- 2 files changed, 136 insertions(+), 177 deletions(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 3bf676d9e..2cbe438c4 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1012,6 +1012,102 @@ def map_qubit_labels(self, mapper): return ExperimentDesign(mapped_circuits, mapped_qubit_labels, mapped_children, self._dirs) +class CanCreateAllCircuitsDesign(ExperimentDesign): + """A type of ExperimentDesign that can create + all_circuits_needing_data from subdesigns or other information. + + In cases where all_circuits_needing_data *can* be recreated, + i.e. it has not been modified by the user in some unexpected way, + this class will ensure that all_circuits_needing_data is skipped + during serialization and regenerated during deserialization. + """ + def _create_all_circuits_needing_data(self): + """Create all_circuits_needing_data for other information. + + This interface is needed to ensure that all_circuits_needing_data + can be regenerated consistently during construction and deserialization. + """ + raise NotImplementedError("Derived classes should implement this") + + @classmethod + def from_dir(cls, dirname, parent=None, name=None, quick_load=False): + """ + Initialize a new ExperimentDesign object from `dirname`. + + This is specialized to regenerate all_circuits_needing_data + if it was not serialized. + + Parameters + ---------- + dirname : str + The *root* directory name (under which there is a 'edesign' + subdirectory). + + parent : ExperimentDesign, optional + The parent design object, if there is one. Primarily used + internally - if in doubt, leave this as `None`. + + name : str, optional + The sub-name of the design object being loaded, i.e. the + key of this data object beneath `parent`. 
Only used when + `parent` is not None. + + quick_load : bool, optional + Setting this to True skips the loading of the potentially long + circuit lists. This can be useful when loading takes a long time + and all the information of interest lies elsewhere, e.g. in an + encompassing results object. + + Returns + ------- + ExperimentDesign + """ + ret = super().from_dir(dirname, parent=parent, name=name, quick_load=quick_load) + + if ret.auxfile_types['all_circuits_needing_data'] == 'reset': + ret.all_circuits_needing_data = ret._create_all_circuits_needing_data() + + ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type + del ret.old_all_circuits_type + + return ret + + def write(self, dirname=None, parent=None): + """ + Write this experiment design to a directory. + + This is specialized to skip writing all_circuits_needing_data + if it can be regenerated from other class information. + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). + + parent : ExperimentDesign, optional + The parent experiment design, when a parent is writing this + design as a sub-experiment-design. Otherwise leave as None. 
+ + Returns + ------- + None + """ + initial_circuits = self._create_all_circuits_needing_data() + if self.all_circuits_needing_data == initial_circuits: + self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] + self.auxfile_types['all_circuits_needing_data'] = 'reset' + + super().write(dirname=dirname, parent=parent) + + if self.auxfile_types['all_circuits_needing_data'] == 'reset': + self.auxfile_types['all_circuits_needing_data'] = self.old_all_circuits_type + del self.old_all_circuits_type + + class CircuitListsDesign(ExperimentDesign): """ Experiment design specification that is comprised of multiple circuit lists. @@ -1200,7 +1296,7 @@ def map_qubit_labels(self, mapper): self.nested, remove_duplicates=False) # no need to remove duplicates -class CombinedExperimentDesign(ExperimentDesign): # for multiple designs on the same dataset +class CombinedExperimentDesign(CanCreateAllCircuitsDesign): # for multiple designs on the same dataset """ An experiment design that combines the specifications of one or more "sub-designs". @@ -1236,56 +1332,33 @@ class CombinedExperimentDesign(ExperimentDesign): # for multiple designs on the interleave : bool, optional Whether the circuits of the `sub_designs` should be interleaved to - form the circuit ordering of this experiment design. + form the circuit ordering of this experiment design. DEPRECATED """ - @classmethod - def from_dir(cls, dirname, parent=None, name=None, quick_load=False): - """ - Initialize a new ExperimentDesign object from `dirname`. - - This is specialized for CombinedExperimentDesign so that it can reset - all_circuits_needing_data in case that was skipped in write(). + def _create_all_circuits_needing_data(self, subdesigns=None): + """Create all_circuits_needing_data for other information. + + This interface is needed to ensure that all_circuits_needing_data + can be regenerated consistently during construction and deserialization. 
Parameters ---------- - dirname : str - The *root* directory name (under which there is a 'edesign' - subdirectory). - - parent : ExperimentDesign, optional - The parent design object, if there is one. Primarily used - internally - if in doubt, leave this as `None`. - - name : str, optional - The sub-name of the design object being loaded, i.e. the - key of this data object beneath `parent`. Only used when - `parent` is not None. - - quick_load : bool, optional - Setting this to True skips the loading of the potentially long - circuit lists. This can be useful when loading takes a long time - and all the information of interest lies elsewhere, e.g. in an - encompassing results object. - + subdesigns: list of ExperimentDesigns, optional + List of subdesigns to use. If not provided, will use self._vals.values() + as the subdesigns. Primarily used during initialization when self._vals + is not set yet. + Returns ------- - ExperimentDesign + all_circuits: list of Circuits + Union of all_circuits_needing_data from subdesigns without duplicates """ - ret = super().from_dir(dirname=dirname, parent=parent, name=name, quick_load=quick_load) - - if ret.auxfile_types['all_circuits_needing_data'] == 'reset': - all_circuits = [] - for des in ret._vals.values(): - all_circuits.extend(des.all_circuits_needing_data) - _lt.remove_duplicates_in_place(all_circuits) - - ret.all_circuits_needing_data = all_circuits - - ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type - del ret.old_all_circuits_type - - return ret + subdesigns = self._vals if subdesigns is None else subdesigns + all_circuits = [] + for des in subdesigns.values(): + all_circuits.extend(des.all_circuits_needing_data) + _lt.remove_duplicates_in_place(all_circuits) # Maybe don't always do this? 
+ return all_circuits @classmethod def from_edesign(cls, edesign, name): @@ -1357,14 +1430,11 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design if not isinstance(sub_designs, dict): sub_designs = {("**%d" % i): des for i, des in enumerate(sub_designs)} - if all_circuits is None: - all_circuits = [] - if not interleave: - for des in sub_designs.values(): - all_circuits.extend(des.all_circuits_needing_data) - else: - raise NotImplementedError("Interleaving not implemented yet") - _lt.remove_duplicates_in_place(all_circuits) # Maybe don't always do this? + if interleave: + raise NotImplementedError("Interleaving not implemented and will be removed in 0.9.13") + + all_circuits = self._create_all_circuits_needing_data(sub_designs) if all_circuits is None \ + else all_circuits if qubit_labels is None and len(sub_designs) > 0: first = sub_designs[list(sub_designs.keys())[0]].qubit_labels @@ -1451,45 +1521,6 @@ def map_qubit_labels(self, mapper): mapped_sub_designs = {key: child.map_qubit_labels(mapper) for key, child in self._vals.items()} return CombinedExperimentDesign(mapped_sub_designs, mapped_circuits, mapped_qubit_labels, self._dirs) - def write(self, dirname=None, parent=None): - """ - Write this experiment design to a directory. - - Parameters - ---------- - dirname : str - The *root* directory to write into. This directory will have - an 'edesign' subdirectory, which will be created if needed and - overwritten if present. If None, then the path this object - was loaded from is used (if this object wasn't loaded from disk, - an error is raised). - - parent : ExperimentDesign, optional - The parent experiment design, when a parent is writing this - design as a sub-experiment-design. Otherwise leave as None. 
- - Returns - ------- - None - """ - all_subcircuits = [] - for des in self._vals.values(): - all_subcircuits.extend(des.all_circuits_needing_data) - _lt.remove_duplicates_in_place(all_subcircuits) - - # If equal, we can just regenerate from subdesigns on load (saves space and time) - # This is not set equality so that we don't do this just in case interleave is ever implemented - if all_subcircuits == self.all_circuits_needing_data: - self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] - self.auxfile_types['all_circuits_needing_data'] = 'reset' - - super().write(dirname=dirname, parent=parent) - - # Undo auxfile_type modifications if we made them - if self.auxfile_types['all_circuits_needing_data'] == 'reset': - self.auxfile_types['all_circuits_needing_data'] = self.old_all_circuits_type - del self.old_all_circuits_type - class SimultaneousExperimentDesign(ExperimentDesign): """ @@ -1698,7 +1729,7 @@ def map_qubit_labels(self, mapper): return SimultaneousExperimentDesign(mapped_edesigns, mapped_circuits, mapped_qubit_labels) -class FreeformDesign(ExperimentDesign): +class FreeformDesign(CanCreateAllCircuitsDesign): """ Experiment design holding an arbitrary circuit list and meta data. @@ -1711,49 +1742,19 @@ class FreeformDesign(ExperimentDesign): The qubits that this experiment design applies to. If None, the line labels of the first circuit is used. """ + + def _create_all_circuits_needing_data(self): + """Create all_circuits_needing_data for other information. - @classmethod - def from_dir(cls, dirname, parent=None, name=None, quick_load=False): - """ - Initialize a new ExperimentDesign object from `dirname`. - - Parameters - ---------- - dirname : str - The *root* directory name (under which there is a 'edesign' - subdirectory). - - parent : ExperimentDesign, optional - The parent design object, if there is one. Primarily used - internally - if in doubt, leave this as `None`. 
- - name : str, optional - The sub-name of the design object being loaded, i.e. the - key of this data object beneath `parent`. Only used when - `parent` is not None. - - quick_load : bool, optional - Setting this to True skips the loading of the potentially long - circuit lists. This can be useful when loading takes a long time - and all the information of interest lies elsewhere, e.g. in an - encompassing results object. - + This interface is needed to ensure that all_circuits_needing_data + can be regenerated consistently during construction and deserialization. + Returns ------- - ExperimentDesign + list of Circuits + Keys of self.aux_info """ - ret = super().from_dir(dirname, parent=parent, name=name, quick_load=quick_load) - - # Convert back to circuits - ret.aux_info = {_circuits.Circuit(k, check=False): v for k,v in ret.aux_info.items()} - - if ret.auxfile_types['all_circuits_needing_data'] == 'reset': - ret.all_circuits_needing_data = list(ret.aux_info.keys()) - - ret.auxfile_types['all_circuits_needing_data'] = ret.old_all_circuits_type - del ret.old_all_circuits_type - - return ret + return list(self.aux_info.keys()) @classmethod def from_dataframe(cls, df, qubit_labels=None): @@ -1807,15 +1808,11 @@ def from_edesign(cls, edesign): raise ValueError("Cannot convert a %s to a %s!" 
% (str(type(edesign)), str(cls))) def __init__(self, circuits, qubit_labels=None): - if isinstance(circuits, dict): - self.aux_info = circuits.copy() - circuits = list(circuits.keys()) - else: - self.aux_info = {c: None for c in circuits} - super().__init__(circuits, qubit_labels) + self.aux_info = circuits.copy() if isinstance(circuits, dict) else {c: None for c in circuits} - # Currently not jsonable due to Circuits, but will be fixed in write() - self.auxfile_types['aux_info'] = 'json' + super().__init__(self._create_all_circuits_needing_data(), qubit_labels) + + self.auxfile_types['aux_info'] = 'circuit-str-json' def _truncate_to_circuits_inplace(self, circuits_to_keep): truncated_aux_info = {k: v for k, v in self.aux_info.items() if k in circuits_to_keep} @@ -1847,44 +1844,6 @@ def map_qubit_labels(self, mapper): mapped_circuits = [c.map_state_space_labels(mapper) for c in self.all_circuits_needing_data] mapped_qubit_labels = self._mapped_qubit_labels(mapper) return FreeformDesign(mapped_circuits, mapped_qubit_labels) - - def write(self, dirname=None, parent=None): - """ - Write this experiment design to a directory. - - Parameters - ---------- - dirname : str - The *root* directory to write into. This directory will have - an 'edesign' subdirectory, which will be created if needed and - overwritten if present. If None, then the path this object - was loaded from is used (if this object wasn't loaded from disk, - an error is raised). - - parent : ExperimentDesign, optional - The parent experiment design, when a parent is writing this - design as a sub-experiment-design. Otherwise leave as None. 
- - Returns - ------- - None - """ - # Check if all_circuits_needing_data are just aux_info keys - # If yes, do not write them and regenerate on load - if self.all_circuits_needing_data == list(self.aux_info.keys()): - self.old_all_circuits_type = self.auxfile_types['all_circuits_needing_data'] - self.auxfile_types['all_circuits_needing_data'] = 'reset' - - # Convert circuits to string for then-jsonable serialization - aux_info = self.aux_info - self.aux_info = {repr(k)[8:-1]: v for k,v in self.aux_info.items()} - super().write(dirname, parent) - self.aux_info = aux_info - - # Undo auxfile_type modificiations if we made them - if self.auxfile_types['all_circuits_needing_data'] == 'reset': - self.auxfile_types['all_circuits_needing_data'] = self.old_all_circuits_type - del self.old_all_circuits_type class ProtocolData(_TreeNode, _MongoSerializable): diff --git a/test/unit/protocols/test_protocols.py b/test/unit/protocols/test_protocols.py index 5ffbd475b..e267ddd2d 100644 --- a/test/unit/protocols/test_protocols.py +++ b/test/unit/protocols/test_protocols.py @@ -120,7 +120,7 @@ def test_serialization(self, root_path): self.assertEqual(edesign.auxfile_types, loaded_edesign.auxfile_types) self.assertEqual(edesign._vals.keys(), loaded_edesign._vals.keys()) - if isinstance(edesign, (pygsti.protocols.CombinedExperimentDesign, pygsti.protocols.FreeformDesign)): + if isinstance(edesign, (pygsti.protocols.CanCreateAllCircuitsDesign)): # We also need to test that all_circuits_needing_data is not dumped by default self.assertTrue(not (root / 'edesign' / 'all_circuits_needing_data.txt').exists()) From f03cb358be09f67913ff1418d733dc237b5bc21f Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 15 Dec 2023 12:23:10 -0800 Subject: [PATCH 15/32] Add FreeformDesign tests and fix discovered bugs. 
In addition to testing to/from_dataframe, this fixes (or at least clarifies) bugs when aux_info values are not dicts and when parsing circuit-str-json members with strings that are not circuits. --- pygsti/io/readers.py | 6 +++++- pygsti/protocols/protocol.py | 5 ++++- test/unit/protocols/test_protocols.py | 17 +++++++++++++++-- 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/pygsti/io/readers.py b/pygsti/io/readers.py index 573826bfe..30fb430c5 100644 --- a/pygsti/io/readers.py +++ b/pygsti/io/readers.py @@ -374,7 +374,11 @@ def _replace_strs_with_circuits(x): if isinstance(x, dict): # this case isn't written anymore - just to read old-format files (TODO REMOVE LATER) return {_replace_strs_with_circuits(k): _replace_strs_with_circuits(v) for k, v in x.items()} if isinstance(x, str): - return std.parse_circuit(x, create_subcircuits=not _Circuit.default_expand_subcircuits) + try: + return std.parse_circuit(x, create_subcircuits=not _Circuit.default_expand_subcircuits) + except ValueError: + # Failed to parse, possible this string is not a circuit + pass return x return _replace_strs_with_circuits(obj) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 2cbe438c4..a157b5a83 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1822,7 +1822,10 @@ def _truncate_to_circuits_inplace(self, circuits_to_keep): def to_dataframe(self, pivot_valuename=None, pivot_value="Value", drop_columns=False): cdict = _NamedDict('Circuit', None) for cir, info in self.aux_info.items(): - cdict[cir.str] = _NamedDict('ValueName', 'category', items=info) + try: + cdict[cir.str] = _NamedDict('ValueName', 'category', items=info) + except TypeError: + raise TypeError("Failed to cast to dataframe. 
Ensure that aux_info values are dicts!") df = cdict.to_dataframe() return _process_dataframe(df, pivot_valuename, pivot_value, drop_columns, preserve_order=True) diff --git a/test/unit/protocols/test_protocols.py b/test/unit/protocols/test_protocols.py index e267ddd2d..f9ff688aa 100644 --- a/test/unit/protocols/test_protocols.py +++ b/test/unit/protocols/test_protocols.py @@ -134,6 +134,18 @@ def test_serialization(self, root_path): self.assertEqual(edesign._vals.keys(), loaded_edesign._vals.keys()) self.assertTrue((root2 / 'edesign' / 'all_circuits_needing_data.txt').exists()) + def test_dataframe_conversion(self): + # Currently this is just FreeformDesign, but who knows if we add dataframe support to others in the future + edesigns = self._get_tester_edesigns() + freeform_design = edesigns[4] + + df = freeform_design.to_dataframe() + freeform_design2 = pygsti.protocols.FreeformDesign.from_dataframe(df) + + for (c1, aux1), (c2, aux2) in zip(freeform_design.aux_info.items(), freeform_design2.aux_info.items()): + self.assertEqual(str(c1), str(c2)) + self.assertEqual(aux1, aux2) + def _get_tester_edesigns(self): #Create a bunch of experiment designs: from pygsti.protocols import ExperimentDesign, CircuitListsDesign, CombinedExperimentDesign, \ @@ -171,7 +183,7 @@ def _get_tester_edesigns(self): 'two': ExperimentDesign(circuits_on1), 'three': ExperimentDesign(circuits_on01)}, qubit_labels=(0,1))) edesigns.append(SimultaneousExperimentDesign([ExperimentDesign(circuits_on0), ExperimentDesign(circuits_on1)])) - edesigns.append(FreeformDesign(circuits_on01)) + edesigns.append(FreeformDesign({c: {'id': i} for i,c in enumerate(circuits_on01)})) edesigns.append(std.create_gst_experiment_design(2)) edesigns.append(GateSetTomographyDesign(gst_pspec, [circuits_on0, circuits_on0b])) edesigns.append(CliffordRBDesign(pspec, compilations, depths=[0,2,5], circuits_per_depth=4)) @@ -179,4 +191,5 @@ def _get_tester_edesigns(self): edesigns.append(MirrorRBDesign(pspec1Q, 
depths=[0,2,4], circuits_per_depth=4, clifford_compilations=compilations1Q)) - return edesigns \ No newline at end of file + return edesigns + \ No newline at end of file From b726a4016274326684dd1588350e965a64fb3ae1 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Wed, 10 Jan 2024 11:08:47 -0800 Subject: [PATCH 16/32] Rename ibmqcore to ibmqexperiment --- pygsti/extras/ibmq/__init__.py | 4 +- pygsti/extras/ibmq/ibmqexperiment.py | 580 +++++++++++++++++++++++++++ 2 files changed, 582 insertions(+), 2 deletions(-) create mode 100644 pygsti/extras/ibmq/ibmqexperiment.py diff --git a/pygsti/extras/ibmq/__init__.py b/pygsti/extras/ibmq/__init__.py index 1ad464321..244c24c75 100644 --- a/pygsti/extras/ibmq/__init__.py +++ b/pygsti/extras/ibmq/__init__.py @@ -1,4 +1,4 @@ -""" Exteneral Device Specifications Sub-package """ +""" External Device Specifications Sub-package """ #*************************************************************************************************** # Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS). # Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights @@ -8,4 +8,4 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. #*************************************************************************************************** -from .ibmqcore import * +from .ibmqexperiment import * diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py new file mode 100644 index 000000000..a3731fcb0 --- /dev/null +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -0,0 +1,580 @@ +""" Functions for sending experiments to IBMQ devices and converting the results to pyGSTi objects """ +#*************************************************************************************************** +# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS). 
+# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights +# in this software. +# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except +# in compliance with the License. You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. +#*************************************************************************************************** + +import json as _json +import numpy as _np +import pathlib as _pathlib +import pickle as _pickle +import time as _time +import warnings as _warnings + +try: import qiskit as _qiskit +except: _qiskit = None + +try: + from bson import json_util as _json_util +except ImportError: + _json_util = None + +from ... import data as _data, io as _io +from ...protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec +from ...protocols.protocol import _TreeNode + +# Most recent version of QisKit that this has been tested on: +#qiskit.__qiskit_version__ = { +# 'qiskit-terra': '0.25.3', +# 'qiskit': '0.44.3', +# 'qiskit-aer': None, +# 'qiskit-ignis': None, +# 'qiskit-ibmq-provider': '0.20.2', +# 'qiskit-nature': None, +# 'qiskit-finance': None, +# 'qiskit-optimization': None, +# 'qiskit-machine-learning': None +#} +#qiskit_ibm_provider.__version__ = '0.7.2' + + +class IBMQExperiment(_TreeNode, _HasPSpec): + """ + A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these + jobs to IBM Q and receives the results. + + Parameters + ---------- + edesign: ExperimentDesign + The ExperimentDesign to be run on IBM Q. This can be a combined experiment design (e.g., a GST + design combined with an RB design). + + pspec: QubitProcessorSpec + A QubitProcessorSpec that represents the IBM Q device being used. This can be created using the + extras.devices.create_processor_spec(). 
The ProcessorSpecs qubit ordering *must* correspond + to that of the IBM device (which will be the case if you create it using that function). + I.e., pspecs qubits should be labelled Q0 through Qn-1 and the labelling of the qubits + should agree with IBM's labelling. + + remove_duplicates: bool, optional + If true, each distinct circuit in `edesign` is run only once. If false, if a circuit is + repeated multiple times in `edesign` it is run multiple times. + + randomized_order: bool, optional + Whether or not to randomize the order of the circuits in `edesign` before turning them + into jobs to be submitted to IBM Q. + + circuits_per_batch: int, optional + The circuits in edesign are divded into batches, each containing at most this many + circuits. The default value of 75 is (or was) the maximal value allowed on the public + IBM Q devices. + + num_shots: int, optional + The number of samples from / repeats of each circuit. + + seed: int, optional + Seed for RNG during order randomization of circuits. + + checkpoint_path: str, optional + A string for the path to use for writing intermediate checkpoint + files to disk. Defaults to `ibmqexperiment_checkpoint`, but can also be + the desired {dirname} for an eventual `write({dirname})` call, i.e. the + serialized IBMQExperiment checkpoint after a successful `retrieve_results()` + is equivalent to the serialized IBMQExperiment after `write()`. + + disable_checkpointing : bool, optional (default False) + When set to True checkpoint objects will not be constructed and written + to disk during the course of this protocol. It is strongly recommended + that this be kept set to False without good reason to disable the checkpoints. + + Returns + ------- + IBMQExperiment + An object containing jobs to be submitted to IBM Q created by `.transpile()`, + which can then be submitted using the methods `.submit()` and whose results + can be grabbed from IBM Q using the method `.retrieve_results()`. 
+ + """ + + @classmethod + def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, provider=None, + new_checkpoint_path=None): + """ + Initialize a new IBMQExperiment object from `dirname`. + + Parameters + ---------- + dirname : str + The directory name. + + regen_qiskit_circs: bool, optional + Whether to recreate the Qiskit circuits from the transpiled + OpenQASM strings. Defaults to False. You should set this to True + if you would like to call submit(). + + regen_runtime_jobs: bool, optional + Whether to recreate the RuntimeJobs from IBMQ based on the job ides. + Defaults to False. You should set this to True if you would like to + call monitor() or retrieve_results(). + + provider: IBMProvider + Provider used to retrieve RuntimeJobs from IBMQ based on job_ids + (if lazy_qiskit_load is False) + + checkpoint_path: str, optional + A string for the path to use for writing intermediate checkpoint + files to disk. If None, this defaults to using the same checkpoint + as the serialized IBMQExperiment object. If provided, this will be + the new checkpoint path used moving forward. Note that this can be + the desired {dirname} for an eventual `write({dirname})` call, i.e. the + serialized IBMQExperiment checkpoint after a successful `retrieve_results()` + is equivalent to the serialized IBMQExperiment after `write()`. 
+ + Returns + ------- + IBMQExperiment + """ + p = _pathlib.Path(dirname) + edesign = _io.read_edesign_from_dir(dirname) + + try: + exp_dir = p / 'ibmqexperiment' + attributes_from_meta = _io.load_meta_based_dir(exp_dir) + + ret = cls(edesign, None) + ret.__dict__.update(attributes_from_meta) + ret.edesign = edesign + except KeyError: + _warnings.warn("Failed to load ibmqexperiment, falling back to old serialization format logic") + + ret = cls(edesign, None) + with open(p / 'ibmqexperiment/meta.json', 'r') as f: + from_json = _json.load(f) + ret.__dict__.update(from_json) + + # Old keys to new class members + key_attr_map = { + 'pspec': ('processor_spec', None), + 'pygsti_circuits': ('pygsti_circuit_batches', []), + 'pygsti_openqasm_circuits': ('qasm_circuit_batches', []), + 'submit_time_calibration_data': ('submit_time_calibration_data', []), + 'batch_result_object': ('batch_results', []) + } + + for key, (attr, def_val) in key_attr_map.items(): + with open(p / f'ibmqexperiment/{key}.pkl', 'rb') as f: + try: + setattr(ret, attr, _pickle.load(f)) + except: + _warnings.warn(f"Couldn't unpickle {key}, so setting {attr} to {def_val}.") + setattr(ret, attr, def_val) + + # Handle nonstandard serialization + try: + data = _ProtocolData.from_dir(p, preloaded_edesign=edesign) + ret.data = data + except: + pass + + # Regenerate Qiskit circuits + ret.qiskit_circuit_batches = [] + if regen_qiskit_circs: + for batch_strs in ret.qasm_circuit_batches: + batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] + ret.qiskit_circuit_batches.append(batch) + + # Regenerate Qiskit RuntimeJobs + ret.qjobs = [] + if regen_runtime_jobs: + if provider is None: + _warnings.warn("No provider specified, cannot retrieve IBM jobs") + else: + ret._retrieve_jobs(provider) + + # Update checkpoint path if requested + if checkpoint_path is not None: + ret.checkpoint_path = checkpoint_path + if _pathlib.Path(ret.checkpoint_path).exists(): + msg = "Checkpoint path already exists! 
If trying to load an existing checkpoint," \ + +" use .from_dir({checkpoint_path}) instead. Otherwise, choose a different" \ + +" checkpoint path, remove the existing checkpoint, or move the existing" \ + +" checkpoint to a different directory." + raise RuntimeError(msg) + if not ret.disable_checkpointing: + ret.write(ret.checkpoint_path) + + return ret + + def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, + num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False): + _TreeNode.__init__(self, edesign._dirs, edesign._vals) + + self.auxfile_types = {} + _HasPSpec.__init__(self, pspec) + + self.edesign = edesign + self.remove_duplicates = remove_duplicates + self.randomized_order = randomized_order + self.circuits_per_batch = circuits_per_batch + self.num_shots = num_shots + self.seed = seed + self.checkpoint_path = str(checkpoint_path) if checkpoint_path is not None else 'ibmqexperiment_checkpoint' + self.disable_checkpointing = disable_checkpointing + # Populated with transpiling to IBMQ with .transpile() + self.pygsti_circuit_batches = [] + self.qasm_circuit_batches = [] + self.qiskit_circuit_batches = [] + # Populated when submitting to IBM Q with .submit() + self.qjobs = [] + self.job_ids = [] + self.submit_time_calibration_data = [] + # Populated when grabbing results from IBM Q with .retrieve_results() + self.batch_results = [] + self.data = None + + # If not in this list, will be automatically dumped to meta.json + # 'none' means it will not be read in, 'reset' means it will come back in as None + # Several of these could be stored in the meta.json but are kept external for easy chkpts + self.auxfile_types['edesign'] = 'none' + self.auxfile_types['data'] = 'reset' + # self.processor_spec is handled by _HasPSpec base class + self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' + self.auxfile_types['qasm_circuit_batches'] = 'list:json' + 
self.auxfile_types['qiskit_circuit_batches'] = 'none' + self.auxfile_types['qjobs'] = 'none' + self.auxfile_types['job_ids'] = 'json' + if _json_util is not None: + self.auxfile_types['submit_time_calibration_data'] = 'list:json' + self.auxfile_types['batch_results'] = 'list:json' + else: + # Fall back to pickles if we do not have bson to deal with datetime.datetime + self.auxfile_types['submit_time_calibration_data'] = 'pickle' + self.auxfile_types['batch_results'] = 'pickle' + + if not self.disable_checkpointing: + if _pathlib.Path(self.checkpoint_path).exists(): + msg = "Checkpoint path already exists! If trying to load an existing checkpoint," \ + +" use .from_dir({checkpoint_path}) instead. Otherwise, choose a different" \ + +" checkpoint path, remove the existing checkpoint, or move the existing" \ + +" checkpoint to a different directory." + raise RuntimeError(msg) + self.write(self.checkpoint_path) + + def monitor(self): + """ + Queries IBM Q for the status of the jobs. + """ + assert len(self.qjobs) == len(self.job_ids), \ + "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." + + for counter, qjob in enumerate(self.qjobs): + status = qjob.status() + print(f"Batch {counter + 1}: {status}") + if status.name == 'QUEUED': + info = qjob.queue_info() + if info is not None: + print(f' - Queue position is {info.position}') + else: + print(' - Unable to retrieve queue position') + + # Print unsubmitted for any entries in qobj but not qjob + for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): + print(f"Batch {counter + 1}: NOT SUBMITTED") + + def retrieve_results(self): + """ + Gets the results of the completed jobs from IBM Q, and processes + them into a pyGSTi DataProtocol object (stored as the key 'data'), + which can then be used in pyGSTi data analysis routines (e.g., if this + was a GST experiment, it can input into a GST protocol object that will + analyze the data). 
+ """ + assert len(self.qjobs) == len(self.job_ids), \ + "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." + + def reverse_dict_key_bits(counts_dict): + new_dict = {} + for key in counts_dict.keys(): + new_dict[key[::-1]] = counts_dict[key] + return new_dict + + # NOTE: This is probably duplicative of some other code in pyGSTi + def partial_trace(ordered_target_indices, input_dict): + output_dict = {} + for bitstring in input_dict.keys(): + new_string = '' + for index in ordered_target_indices: + new_string += bitstring[index] + try: + output_dict[new_string] += input_dict[bitstring] + except: + output_dict[new_string] = input_dict[bitstring] + return output_dict + + if len(self.batch_results): + print(f'Already retrieved results of {len(self.batch_results)}/{len(self.qiskit_circuit_batches)} circuit batches') + + #get results from backend jobs and add to dict + ds = _data.DataSet() + for exp_idx in range(len(self.batch_results), len(self.qjobs)): + qjob = self.qjobs[exp_idx] + print(f"Querying IBMQ for results objects for batch {exp_idx + 1}...") + batch_result = qjob.result() + self.batch_results.append(batch_result.to_dict()) + + if not self.disable_checkpointing: + self._write_checkpoint() + + for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): + ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] + counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result.get_counts(i))) + ds.add_count_dict(circ, counts_data) + + self.data = _ProtocolData(self.edesign, ds) + + if not self.disable_checkpointing: + self.data.write(self.checkpoint_path, edesign_already_written=True) + + def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, + wait_time=1, wait_steps=10): + """ + Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign + used to create this object. 
+ + Parameters + ---------- + ibmq_backend: qiskit.providers.ibmq.ibmqbackend.IBMQBackend + The IBM Q backend to submit the jobs to. Should be the backend corresponding to the + processor that this experiment has been designed for. + + start: int, optional + Batch index to start submission (inclusive). Defaults to None, + which will start submission on the first unsubmitted job. + Jobs can be resubmitted by manually specifying this, + i.e. start=0 will start resubmitting jobs from the beginning. + + stop: int, optional + Batch index to stop submission (exclusive). Defaults to None, + which will submit as many jobs as possible given the backend's + maximum job limit. + + ignore_job_limit: bool, optional + If True, then stop is set to submit all remaining jobs. This is set + as True to maintain backwards compatibility. Note that is more jobs + are needed than the max limit, this will enter a wait loop until all + jobs have been successfully submitted. + + wait_time: int + Number of seconds for each waiting step. + + wait_steps: int + Number of steps to take before retrying job submission. + + Returns + ------- + None + """ + assert len(self.qiskit_circuit_batches) == len(self.pygsti_circuit_batches), \ + "Transpilation missing! Either run .transpile() first, or if loading from file, " + \ + "use the regen_qiskit_circs=True option in from_dir()." 
+ + #Get the backend version + backend_version = ibmq_backend.version + + total_waits = 0 + self.qjobs = [] if self.qjobs is None else self.qjobs + self.job_ids = [] if self.job_ids is None else self.job_ids + + # Set start and stop to submit the next unsubmitted jobs if not specified + if start is None: + start = len(self.qjobs) + + stop = len(self.qiskit_circuit_batches) if stop is None else min(stop, len(self.qiskit_circuit_batches)) + if not ignore_job_limit: + job_limit = ibmq_backend.job_limit() + allowed_jobs = job_limit.maximum_jobs - job_limit.active_jobs + if start + allowed_jobs < stop: + print(f'Given job limit and active jobs, only {allowed_jobs} can be submitted') + + stop = min(start + allowed_jobs, stop) + + for batch_idx, batch in enumerate(self.qiskit_circuit_batches): + if batch_idx < start or batch_idx >= stop: + continue + + print(f"Submitting batch {batch_idx + 1}") + submit_status = False + batch_waits = 0 + while not submit_status: + try: + #If submitting to a real device, get calibration data + if not ibmq_backend.simulator: + backend_properties = ibmq_backend.properties() + self.submit_time_calibration_data.append(backend_properties.to_dict()) + + if backend_version == 1: + # If using qiskit-ibmq-provider API, assemble into Qobj first + qobj = _qiskit.compiler.assemble(batch, shots=self.num_shots) + self.qjobs.append(ibmq_backend.run(qobj)) + else: + # Newer qiskit-ibm-provider can take list of Qiskit circuits directly + self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) + + status = self.qjobs[-1].status() + initializing = True + initializing_steps = 0 + while initializing: + if status.name == 'INITIALIZING' or status.name == 'VALIDATING': + status = self.qjobs[-1].status() + print(f' - {status} (query {initializing_steps})') + _time.sleep(wait_time) + initializing_steps += 1 + else: + initializing = False + + try: + job_id = self.qjobs[-1].job_id() + print(f' - Job ID is {job_id}') + self.job_ids.append(job_id) + except: 
+ print(' - Failed to get job_id.') + self.job_ids.append(None) + + try: + print(f' - Queue position is {self.qjobs[-1].queue_info().position}') + except: + print(f' - Failed to get queue position for batch {batch_idx + 1}') + submit_status = True + + except Exception as ex: + template = "An exception of type {0} occurred. Arguments:\n{1!r}" + message = template.format(type(ex).__name__, ex.args) + print(message) + try: + print('Machine status is {}.'.format(ibmq_backend.status().status_msg)) + except Exception as ex1: + print('Failed to get machine status!') + template = "An exception of type {0} occurred. Arguments:\n{1!r}" + message = template.format(type(ex).__name__, ex1.args) + print(message) + total_waits += 1 + batch_waits += 1 + print("This batch has failed {0} times and there have been {1} total failures".format( + batch_waits, total_waits)) + print('Waiting ', end='') + for step in range(wait_steps): + print('{} '.format(step), end='') + _time.sleep(wait_time) + print() + finally: + # Checkpoint calibration and job id data + if not self.disable_checkpointing: + self._write_checkpoint() + + def transpile(self): + """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. + """ + circuits = self.edesign.all_circuits_needing_data.copy() + num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) + + if not len(self.pygsti_circuit_batches): + rand_state = _np.random.RandomState(self.seed) + + if self.randomized_order: + if self.remove_duplicates: + circuits = list(set(circuits)) + rand_state.shuffle(circuits) + else: + assert(not self.remove_duplicates), "Can only remove duplicates if randomizing order!" 
+ + for batch_idx in range(num_batches): + start = batch_idx*self.circuits_per_batch + end = min(len(circuits), (batch_idx+1)*self.circuits_per_batch) + self.pygsti_circuit_batches.append(circuits[start:end]) + + if not self.disable_checkpointing: + self._write_checkpoint() + + if len(self.qiskit_circuit_batches): + print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') + + for batch_idx in range(len(self.qiskit_circuit_batches), num_batches): + print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") + batch = [] + batch_strs = [] + for circ in self.pygsti_circuit_batches[batch_idx]: + pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=self.processor_spec.num_qubits, + standard_gates_version='x-sx-rz') + batch_strs.append(pygsti_openqasm_circ) + + qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) + batch.append(qiskit_qc) + + self.qasm_circuit_batches.append(batch_strs) + self.qiskit_circuit_batches.append(batch) + + if not self.disable_checkpointing: + self._write_checkpoint() + + def write(self, dirname=None): + """ + Writes to disk, storing both the pyGSTi DataProtocol object in pyGSTi's standard + format and saving all of the IBM Q submission information stored in this object, + written into the subdirectory 'ibmqexperiment'. + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). 
+ + """ + if dirname is None: + dirname = self.checkpoint_path + if dirname is None: + raise ValueError("`dirname` must be given because there's no checkpoint or default edesign directory") + + dirname = _pathlib.Path(dirname) + + self.edesign.write(dirname) + + if self.data is not None: + self.data.write(dirname, edesign_already_written=True) + + self._write_checkpoint(dirname) + + def _write_checkpoint(self, dirname=None): + """Write only the ibmqexperiment part of .write(). + + Parameters + ---------- + dirname : str + The *root* directory to write into. This directory will have + an 'edesign' subdirectory, which will be created if needed and + overwritten if present. If None, then the path this object + was loaded from is used (if this object wasn't loaded from disk, + an error is raised). + """ + dirname = dirname if dirname is not None else self.checkpoint_path + exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' + exp_dir.mkdir(parents=True, exist_ok=True) + _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') + + def _retrieve_jobs(self, provider): + """Retrieves RuntimeJobs from IBMQ based on job_ids. + + Parameters + ---------- + provider: IBMProvider + Provider used to retrieve RuntimeJobs from IBMQ based on job_ids + """ + for i, jid in enumerate(self.job_ids): + print(f"Loading job {i+1}/{len(self.job_ids)}...") + self.qjobs.append(provider.backend.retrieve_job(jid)) From 7a28b2049139cd6d9008ddbf64e6048af38703a1 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Thu, 18 Jan 2024 13:49:52 -0800 Subject: [PATCH 17/32] Initial tests for IBMQExperiment. Also includes some QOL changes for debugging tests with VSCode and pytest. 
--- .gitignore | 1 + .../objects/advanced/IBMQExperiment.ipynb | 98 +++- pygsti/extras/ibmq/__init__.py | 2 +- pygsti/extras/ibmq/ibmqcore.py | 555 ------------------ pygsti/extras/ibmq/ibmqexperiment.py | 20 +- .../{mpi_test.py => mpi_timings.py} | 0 test/performance/mpi_2D_scaling/run.sh | 2 +- test/unit/conftest.py | 20 + test/unit/extras/ibmq/__init__.py | 0 test/unit/extras/ibmq/test_ibmqexperiment.py | 40 ++ 10 files changed, 150 insertions(+), 588 deletions(-) delete mode 100644 pygsti/extras/ibmq/ibmqcore.py rename test/performance/mpi_2D_scaling/{mpi_test.py => mpi_timings.py} (100%) create mode 100644 test/unit/conftest.py create mode 100644 test/unit/extras/ibmq/__init__.py create mode 100644 test/unit/extras/ibmq/test_ibmqexperiment.py diff --git a/.gitignore b/.gitignore index 436d93a2d..d4594c370 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ .ipynb_checkpoints test/test_packages/temp_test_files/* *_checkpoints/ +*_checkpoint/ jupyter_notebooks/**/offline test/test_packages/offline hooks/etc/permissions.yml diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 2bac0fa54..1019523d1 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -22,7 +22,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": { "tags": [] }, @@ -36,7 +36,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "tags": [ "nbval-skip" @@ -57,7 +57,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": {}, "outputs": [], "source": [ @@ -67,7 +67,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": { "tags": [ "nbval-skip" @@ -84,7 +84,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 5, 
"metadata": { "tags": [ "nbval-skip" @@ -120,7 +120,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -130,7 +130,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": { "tags": [ "nbval-skip" @@ -144,7 +144,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": { "tags": [] }, @@ -165,11 +165,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "total circuits: 480\n", + "full total circuits: 480\n" + ] + } + ], "source": [ "#circuit design parameters\n", "depths = [0, 2, 4, 16, 32, 64]\n", @@ -200,7 +209,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ @@ -210,11 +219,42 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 375328\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 375348\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 375368\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 375388\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 375408\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 375428\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 981521\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 981541\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 981561\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 981581\n", + "- Sampling 20 circuits at MRB 
length 32 (5 of 6 depths) with seed 981601\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 981621\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 200349\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 200369\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 200389\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 200409\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 200429\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 200449\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 836421\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 836441\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 836461\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 836481\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 836501\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 836521\n" + ] + } + ], "source": [ "edesigns_dict = {}\n", "edesign_index = 1\n", @@ -241,13 +281,41 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "ename": "ValueError", + "evalue": "_vals key : 1 key : Cannot serialize object of type '' to JSON!", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:720\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 719\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 720\u001b[0m \u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mv\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 
721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:724\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 724\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot serialize object of type \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m to JSON!\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28mtype\u001b[39m(x)))\n", + "\u001b[0;31mValueError\u001b[0m: Cannot serialize object of type '' to JSON!", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:720\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 719\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 720\u001b[0m \u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mv\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:722\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m--> 722\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m((\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m key : \u001b[39m\u001b[38;5;124m\"\u001b[39m 
\u001b[38;5;241m%\u001b[39m k) \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mstr\u001b[39m(e))\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "\u001b[0;31mValueError\u001b[0m: 1 key : Cannot serialize object of type '' to JSON!", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m exp \u001b[38;5;241m=\u001b[39m \u001b[43mibmq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mIBMQExperiment\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcombined_edesign\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpspec\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcircuits_per_batch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m75\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_shots\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1024\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mseed\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m20231201\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcheckpoint_path\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtest_ibmq\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:265\u001b[0m, in \u001b[0;36mIBMQExperiment.__init__\u001b[0;34m(self, edesign, pspec, remove_duplicates, randomized_order, circuits_per_batch, num_shots, seed, checkpoint_path, disable_checkpointing)\u001b[0m\n\u001b[1;32m 260\u001b[0m msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCheckpoint path already exists! 
If trying to load an existing checkpoint,\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 261\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m use .from_dir(\u001b[39m\u001b[38;5;132;01m{checkpoint_path}\u001b[39;00m\u001b[38;5;124m) instead. Otherwise, choose a different\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 262\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m checkpoint path, remove the existing checkpoint, or move the existing\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 263\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m checkpoint to a different directory.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 264\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(msg)\n\u001b[0;32m--> 265\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwrite\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcheckpoint_path\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:551\u001b[0m, in \u001b[0;36mIBMQExperiment.write\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 548\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdata \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 549\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mwrite(dirname, edesign_already_written\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m--> 551\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_write_checkpoint\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdirname\u001b[49m\u001b[43m)\u001b[49m\n", + "File 
\u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:568\u001b[0m, in \u001b[0;36mIBMQExperiment._write_checkpoint\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 566\u001b[0m exp_dir \u001b[38;5;241m=\u001b[39m _pathlib\u001b[38;5;241m.\u001b[39mPath(dirname) \u001b[38;5;241m/\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mibmqexperiment\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 567\u001b[0m exp_dir\u001b[38;5;241m.\u001b[39mmkdir(parents\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, exist_ok\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m--> 568\u001b[0m \u001b[43m_io\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmetadir\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwrite_obj_to_meta_based_dir\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexp_dir\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mauxfile_types\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:598\u001b[0m, in \u001b[0;36mwrite_obj_to_meta_based_dir\u001b[0;34m(obj, dirname, auxfile_types_member, omit_attributes, include_attributes, additional_meta)\u001b[0m\n\u001b[1;32m 595\u001b[0m vals \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__dict__\u001b[39m\n\u001b[1;32m 596\u001b[0m auxtypes \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__dict__\u001b[39m[auxfile_types_member]\n\u001b[0;32m--> 598\u001b[0m \u001b[43mwrite_meta_based_dir\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdirname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvals\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mauxtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43minit_meta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmeta\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:402\u001b[0m, in \u001b[0;36mwrite_meta_based_dir\u001b[0;34m(root_dir, valuedict, auxfile_types, init_meta)\u001b[0m\n\u001b[1;32m 399\u001b[0m meta[auxnm] \u001b[38;5;241m=\u001b[39m auxmeta \u001b[38;5;66;03m# metadata about auxfile(s) for this auxnm\u001b[39;00m\n\u001b[1;32m 401\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mopen\u001b[39m(\u001b[38;5;28mstr\u001b[39m(root_dir \u001b[38;5;241m/\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmeta.json\u001b[39m\u001b[38;5;124m'\u001b[39m), \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m f:\n\u001b[0;32m--> 402\u001b[0m \u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmeta\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 403\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _json_util \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 404\u001b[0m _json\u001b[38;5;241m.\u001b[39mdump(meta, f, indent\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m4\u001b[39m, default\u001b[38;5;241m=\u001b[39m_json_util\u001b[38;5;241m.\u001b[39mdefault)\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:722\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 720\u001b[0m _check_jsonable(v)\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m--> 722\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m((\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m key : \u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m k) \u001b[38;5;241m+\u001b[39m 
\u001b[38;5;28mstr\u001b[39m(e))\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 724\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot serialize object of type \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m to JSON!\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28mtype\u001b[39m(x)))\n", + "\u001b[0;31mValueError\u001b[0m: _vals key : 1 key : Cannot serialize object of type '' to JSON!" + ] + } + ], "source": [ "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201, checkpoint_path='test_ibmq')" ] diff --git a/pygsti/extras/ibmq/__init__.py b/pygsti/extras/ibmq/__init__.py index 244c24c75..6cfb71200 100644 --- a/pygsti/extras/ibmq/__init__.py +++ b/pygsti/extras/ibmq/__init__.py @@ -1,4 +1,4 @@ -""" External Device Specifications Sub-package """ +""" IBMQ Experiment Sub-package """ #*************************************************************************************************** # Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS). # Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights diff --git a/pygsti/extras/ibmq/ibmqcore.py b/pygsti/extras/ibmq/ibmqcore.py deleted file mode 100644 index 150543312..000000000 --- a/pygsti/extras/ibmq/ibmqcore.py +++ /dev/null @@ -1,555 +0,0 @@ -""" Functions for sending experiments to IBMQ devices and converting the results to pyGSTi objects """ -#*************************************************************************************************** -# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS). -# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights -# in this software. 
-# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except -# in compliance with the License. You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. -#*************************************************************************************************** - -import json as _json -import numpy as _np -import pathlib as _pathlib -import pickle as _pickle -import time as _time -import warnings as _warnings - -try: import qiskit as _qiskit -except: _qiskit = None - -try: - from bson import json_util as _json_util -except ImportError: - _json_util = None - -from ... import data as _data, io as _io -from ...protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec -from ...protocols.protocol import _TreeNode - -# Most recent version of QisKit that this has been tested on: -#qiskit.__qiskit_version__ = { -# 'qiskit-terra': '0.25.3', -# 'qiskit': '0.44.3', -# 'qiskit-aer': None, -# 'qiskit-ignis': None, -# 'qiskit-ibmq-provider': '0.20.2', -# 'qiskit-nature': None, -# 'qiskit-finance': None, -# 'qiskit-optimization': None, -# 'qiskit-machine-learning': None -#} -#qiskit_ibm_provider.__version__ = '0.7.2' - - -class IBMQExperiment(_TreeNode, _HasPSpec): - - def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, - num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False): - """ - A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these - jobs to IBM Q and receives the results. - - Parameters - ---------- - edesign: ExperimentDesign - The ExperimentDesign to be run on IBM Q. This can be a combined experiment design (e.g., a GST - design combined with an RB design). - - pspec: QubitProcessorSpec - A QubitProcessorSpec that represents the IBM Q device being used. 
This can be created using the - extras.devices.create_processor_spec(). The ProcessorSpecs qubit ordering *must* correspond - to that of the IBM device (which will be the case if you create it using that function). - I.e., pspecs qubits should be labelled Q0 through Qn-1 and the labelling of the qubits - should agree with IBM's labelling. - - remove_duplicates: bool, optional - If true, each distinct circuit in `edesign` is run only once. If false, if a circuit is - repeated multiple times in `edesign` it is run multiple times. - - randomized_order: bool, optional - Whether or not to randomize the order of the circuits in `edesign` before turning them - into jobs to be submitted to IBM Q. - - circuits_per_batch: int, optional - The circuits in edesign are divded into batches, each containing at most this many - circuits. The default value of 75 is (or was) the maximal value allowed on the public - IBM Q devices. - - num_shots: int, optional - The number of samples from / repeats of each circuit. - - seed: int, optional - Seed for RNG during order randomization of circuits. - - checkpoint_path: str, optional - A string for the path to use for writing intermediate checkpoint - files to disk. This should match the `dirname` kwarg used in - serialization, i.e. `from_dir()` or `write()`. - If None, will attempt to use load location for `edesign`. - - disable_checkpointing : bool, optional (default False) - When set to True checkpoint objects will not be constructed and written - to disk during the course of this protocol. It is strongly recommended - that this be kept set to False without good reason to disable the checkpoints. - - Returns - ------- - IBMQExperiment - An object containing jobs to be submitted to IBM Q created by `.transpile()`, - which can then be submitted using the methods `.submit()` and whose results - can be grabbed from IBM Q using the method `.retrieve_results()`. 
- - """ - _TreeNode.__init__(self, edesign._dirs) - - self.auxfile_types = {} - _HasPSpec.__init__(self, pspec) - - self.edesign = edesign - self.remove_duplicates = remove_duplicates - self.randomized_order = randomized_order - self.circuits_per_batch = circuits_per_batch - self.num_shots = num_shots - self.seed = seed - self.checkpoint_path = str(checkpoint_path) if checkpoint_path is not None else self.edesign._loaded_from - self.disable_checkpointing = disable_checkpointing - # Populated with transpiling to IBMQ with .transpile() - self.pygsti_circuit_batches = [] - self.qasm_circuit_batches = [] - self.qiskit_circuit_batches = [] - # Populated when submitting to IBM Q with .submit() - self.qjobs = [] - self.job_ids = [] - self.submit_time_calibration_data = [] - # Populated when grabbing results from IBM Q with .retrieve_results() - self.batch_results = [] - self.data = None - - # If not in this list, will be automatically dumped to meta.json - # 'none' means it will not be read in, 'reset' means it will come back in as None - # Several of these could be stored in the meta.json but are kept external for easy chkpts - self.auxfile_types['edesign'] = 'none' - self.auxfile_types['data'] = 'reset' - # self.processor_spec is handled by _HasPSpec base class - self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' - self.auxfile_types['qasm_circuit_batches'] = 'list:json' - self.auxfile_types['qiskit_circuit_batches'] = 'none' - self.auxfile_types['qjobs'] = 'none' - self.auxfile_types['job_ids'] = 'json' - if _json_util is not None: - self.auxfile_types['submit_time_calibration_data'] = 'list:json' - self.auxfile_types['batch_results'] = 'list:json' - else: - # Fall back to pickles if we do not have bson to deal with datetime.datetime - self.auxfile_types['submit_time_calibration_data'] = 'pickle' - self.auxfile_types['batch_results'] = 'pickle' - - if not disable_checkpointing: - if self.checkpoint_path is None: - raise SyntaxError("Default 
checkpointing is enabled, either provide " + \ - "`checkpoint_path` or `disable_checkpointing=True` (not recommended).") - self.write(self.checkpoint_path) - - def transpile(self): - """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. - """ - circuits = self.edesign.all_circuits_needing_data.copy() - num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) - - if not len(self.pygsti_circuit_batches): - rand_state = _np.random.RandomState(self.seed) - - if self.randomized_order: - if self.remove_duplicates: - circuits = list(set(circuits)) - rand_state.shuffle(circuits) - else: - assert(not self.remove_duplicates), "Can only remove duplicates if randomizing order!" - - for batch_idx in range(num_batches): - start = batch_idx*self.circuits_per_batch - end = min(len(circuits), (batch_idx+1)*self.circuits_per_batch) - self.pygsti_circuit_batches.append(circuits[start:end]) - - if not self.disable_checkpointing: - self._write_checkpoint() - - if len(self.qiskit_circuit_batches): - print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') - - for batch_idx in range(len(self.qiskit_circuit_batches), num_batches): - print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") - batch = [] - batch_strs = [] - for circ in self.pygsti_circuit_batches[batch_idx]: - pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=self.processor_spec.num_qubits, - standard_gates_version='x-sx-rz') - batch_strs.append(pygsti_openqasm_circ) - - qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) - batch.append(qiskit_qc) - - self.qasm_circuit_batches.append(batch_strs) - self.qiskit_circuit_batches.append(batch) - - if not self.disable_checkpointing: - self._write_checkpoint() - - def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, - wait_time=1, wait_steps=10): - """ - Submits the jobs to IBM Q, that implements the experiment specified by the 
ExperimentDesign - used to create this object. - - Parameters - ---------- - ibmq_backend: qiskit.providers.ibmq.ibmqbackend.IBMQBackend - The IBM Q backend to submit the jobs to. Should be the backend corresponding to the - processor that this experiment has been designed for. - - start: int, optional - Batch index to start submission (inclusive). Defaults to None, - which will start submission on the first unsubmitted job. - Jobs can be resubmitted by manually specifying this, - i.e. start=0 will start resubmitting jobs from the beginning. - - stop: int, optional - Batch index to stop submission (exclusive). Defaults to None, - which will submit as many jobs as possible given the backend's - maximum job limit. - - ignore_job_limit: bool, optional - If True, then stop is set to submit all remaining jobs. This is set - as True to maintain backwards compatibility. Note that is more jobs - are needed than the max limit, this will enter a wait loop until all - jobs have been successfully submitted. - - wait_time: int - Number of seconds for each waiting step. - - wait_steps: int - Number of steps to take before retrying job submission. - - Returns - ------- - None - """ - assert len(self.qiskit_circuit_batches) == len(self.pygsti_circuit_batches), \ - "Transpilation missing! Either run .transpile() first, or if loading from file, " + \ - "use the regen_qiskit_circs=True option in from_dir()." 
- - #Get the backend version - backend_version = ibmq_backend.version - - total_waits = 0 - self.qjobs = [] if self.qjobs is None else self.qjobs - self.job_ids = [] if self.job_ids is None else self.job_ids - - # Set start and stop to submit the next unsubmitted jobs if not specified - if start is None: - start = len(self.qjobs) - - stop = len(self.qiskit_circuit_batches) if stop is None else min(stop, len(self.qiskit_circuit_batches)) - if not ignore_job_limit: - job_limit = ibmq_backend.job_limit() - allowed_jobs = job_limit.maximum_jobs - job_limit.active_jobs - if start + allowed_jobs < stop: - print(f'Given job limit and active jobs, only {allowed_jobs} can be submitted') - - stop = min(start + allowed_jobs, stop) - - for batch_idx, batch in enumerate(self.qiskit_circuit_batches): - if batch_idx < start or batch_idx >= stop: - continue - - print(f"Submitting batch {batch_idx + 1}") - submit_status = False - batch_waits = 0 - while not submit_status: - try: - #If submitting to a real device, get calibration data - if not ibmq_backend.simulator: - backend_properties = ibmq_backend.properties() - self.submit_time_calibration_data.append(backend_properties.to_dict()) - - if backend_version == 1: - # If using qiskit-ibmq-provider API, assemble into Qobj first - qobj = _qiskit.compiler.assemble(batch, shots=self.num_shots) - self.qjobs.append(ibmq_backend.run(qobj)) - else: - # Newer qiskit-ibm-provider can take list of Qiskit circuits directly - self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) - - status = self.qjobs[-1].status() - initializing = True - initializing_steps = 0 - while initializing: - if status.name == 'INITIALIZING' or status.name == 'VALIDATING': - status = self.qjobs[-1].status() - print(f' - {status} (query {initializing_steps})') - _time.sleep(wait_time) - initializing_steps += 1 - else: - initializing = False - - try: - job_id = self.qjobs[-1].job_id() - print(f' - Job ID is {job_id}') - self.job_ids.append(job_id) - except: 
- print(' - Failed to get job_id.') - self.job_ids.append(None) - - try: - print(f' - Queue position is {self.qjobs[-1].queue_info().position}') - except: - print(f' - Failed to get queue position for batch {batch_idx + 1}') - submit_status = True - - except Exception as ex: - template = "An exception of type {0} occurred. Arguments:\n{1!r}" - message = template.format(type(ex).__name__, ex.args) - print(message) - try: - print('Machine status is {}.'.format(ibmq_backend.status().status_msg)) - except Exception as ex1: - print('Failed to get machine status!') - template = "An exception of type {0} occurred. Arguments:\n{1!r}" - message = template.format(type(ex).__name__, ex1.args) - print(message) - total_waits += 1 - batch_waits += 1 - print("This batch has failed {0} times and there have been {1} total failures".format( - batch_waits, total_waits)) - print('Waiting ', end='') - for step in range(wait_steps): - print('{} '.format(step), end='') - _time.sleep(wait_time) - print() - finally: - # Checkpoint calibration and job id data - if not self.disable_checkpointing: - self._write_checkpoint() - - def monitor(self): - """ - Queries IBM Q for the status of the jobs. - """ - assert len(self.qjobs) == len(self.job_ids), \ - "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." 
- - for counter, qjob in enumerate(self.qjobs): - status = qjob.status() - print(f"Batch {counter + 1}: {status}") - if status.name == 'QUEUED': - info = qjob.queue_info() - if info is not None: - print(f' - Queue position is {info.position}') - else: - print(' - Unable to retrieve queue position') - - # Print unsubmitted for any entries in qobj but not qjob - for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): - print(f"Batch {counter + 1}: NOT SUBMITTED") - - def retrieve_results(self): - """ - Gets the results of the completed jobs from IBM Q, and processes - them into a pyGSTi DataProtocol object (stored as the key 'data'), - which can then be used in pyGSTi data analysis routines (e.g., if this - was a GST experiment, it can input into a GST protocol object that will - analyze the data). - """ - assert len(self.qjobs) == len(self.job_ids), \ - "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." - - def reverse_dict_key_bits(counts_dict): - new_dict = {} - for key in counts_dict.keys(): - new_dict[key[::-1]] = counts_dict[key] - return new_dict - - # NOTE: This is probably duplicative of some other code in pyGSTi - def partial_trace(ordered_target_indices, input_dict): - output_dict = {} - for bitstring in input_dict.keys(): - new_string = '' - for index in ordered_target_indices: - new_string += bitstring[index] - try: - output_dict[new_string] += input_dict[bitstring] - except: - output_dict[new_string] = input_dict[bitstring] - return output_dict - - if len(self.batch_results): - print(f'Already retrieved results of {len(self.batch_results)}/{len(self.qiskit_circuit_batches)} circuit batches') - - #get results from backend jobs and add to dict - ds = _data.DataSet() - for exp_idx in range(len(self.batch_results), len(self.qjobs)): - qjob = self.qjobs[exp_idx] - print(f"Querying IBMQ for results objects for batch {exp_idx + 1}...") - batch_result = qjob.result() - 
self.batch_results.append(batch_result.to_dict()) - - if not self.disable_checkpointing: - self._write_checkpoint() - - for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): - ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] - counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result.get_counts(i))) - ds.add_count_dict(circ, counts_data) - - self.data = _ProtocolData(self.edesign, ds) - - if not self.disable_checkpointing: - self.data.write(self.checkpoint_path, edesign_already_written=True) - - def write(self, dirname=None): - """ - Writes to disk, storing both the pyGSTi DataProtocol object in pyGSTi's standard - format and saving all of the IBM Q submission information stored in this object, - written into the subdirectory 'ibmqexperiment'. - - Parameters - ---------- - dirname : str - The *root* directory to write into. This directory will have - an 'edesign' subdirectory, which will be created if needed and - overwritten if present. If None, then the path this object - was loaded from is used (if this object wasn't loaded from disk, - an error is raised). - - """ - if dirname is None: - dirname = self.checkpoint_path - if dirname is None: - raise ValueError("`dirname` must be given because there's no checkpoint or default edesign directory") - - dirname = _pathlib.Path(dirname) - - self.edesign.write(dirname) - - if self.data is not None: - self.data.write(dirname, edesign_already_written=True) - - self._write_checkpoint(dirname) - - @classmethod - def from_dir(cls, dirname, regen_qiskit_circs=False, - regen_runtime_jobs=False, provider=None): - """ - Initialize a new IBMQExperiment object from `dirname`. - - Parameters - ---------- - dirname : str - The directory name. - - regen_qiskit_circs: bool, optional - Whether to recreate the Qiskit circuits from the transpiled - OpenQASM strings. Defaults to False. You should set this to True - if you would like to call submit(). 
- - regen_runtime_jobs: bool, optional - Whether to recreate the RuntimeJobs from IBMQ based on the job ides. - Defaults to False. You should set this to True if you would like to - call monitor() or retrieve_results(). - - provider: IBMProvider - Provider used to retrieve RuntimeJobs from IBMQ based on job_ids - (if lazy_qiskit_load is False) - - Returns - ------- - IBMQExperiment - """ - p = _pathlib.Path(dirname) - edesign = _io.read_edesign_from_dir(dirname) - - try: - exp_dir = p / 'ibmqexperiment' - attributes_from_meta = _io.load_meta_based_dir(exp_dir) - - ret = cls(edesign, None) - ret.__dict__.update(attributes_from_meta) - ret.edesign = edesign - except KeyError: - _warnings.warn("Failed to load ibmqexperiment, falling back to old serialization format logic") - - ret = cls(edesign, None) - with open(p / 'ibmqexperiment/meta.json', 'r') as f: - from_json = _json.load(f) - ret.__dict__.update(from_json) - - # Old keys to new class members - key_attr_map = { - 'pspec': ('processor_spec', None), - 'pygsti_circuits': ('pygsti_circuit_batches', []), - 'pygsti_openqasm_circuits': ('qasm_circuit_batches', []), - 'submit_time_calibration_data': ('submit_time_calibration_data', []), - 'batch_result_object': ('batch_results', []) - } - - for key, (attr, def_val) in key_attr_map.items(): - with open(p / f'ibmqexperiment/{key}.pkl', 'rb') as f: - try: - setattr(ret, attr, _pickle.load(f)) - except: - _warnings.warn(f"Couldn't unpickle {key}, so setting {attr} to {def_val}.") - setattr(ret, attr, def_val) - - # Handle nonstandard serialization - try: - data = _ProtocolData.from_dir(p, preloaded_edesign=edesign) - ret.data = data - except: - pass - - # Regenerate Qiskit circuits - ret.qiskit_circuit_batches = [] - if regen_qiskit_circs: - for batch_strs in ret.qasm_circuit_batches: - batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] - ret.qiskit_circuit_batches.append(batch) - - # Regenerate Qiskit RuntimeJobs - ret.qjobs = [] - if 
regen_runtime_jobs: - if provider is None: - _warnings.warn("No provider specified, cannot retrieve IBM jobs") - else: - ret._retrieve_jobs(provider) - - return ret - - def _write_checkpoint(self, dirname=None): - """Write only the ibmqexperiment part of .write(). - - Parameters - ---------- - dirname : str - The *root* directory to write into. This directory will have - an 'edesign' subdirectory, which will be created if needed and - overwritten if present. If None, then the path this object - was loaded from is used (if this object wasn't loaded from disk, - an error is raised). - """ - dirname = dirname if dirname is not None else self.checkpoint_path - exp_dir = _pathlib.Path(dirname) / 'ibmqexperiment' - exp_dir.mkdir(parents=True, exist_ok=True) - _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') - - def _retrieve_jobs(self, provider): - """Retrieves RuntimeJobs from IBMQ based on job_ids. - - Parameters - ---------- - provider: IBMProvider - Provider used to retrieve RuntimeJobs from IBMQ based on job_ids - """ - for i, jid in enumerate(self.job_ids): - print(f"Loading job {i+1}/{len(self.job_ids)}...") - self.qjobs.append(provider.backend.retrieve_job(jid)) diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index a3731fcb0..74508fa72 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -125,7 +125,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p Provider used to retrieve RuntimeJobs from IBMQ based on job_ids (if lazy_qiskit_load is False) - checkpoint_path: str, optional + new_checkpoint_path: str, optional A string for the path to use for writing intermediate checkpoint files to disk. If None, this defaults to using the same checkpoint as the serialized IBMQExperiment object. 
If provided, this will be @@ -196,14 +196,8 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p ret._retrieve_jobs(provider) # Update checkpoint path if requested - if checkpoint_path is not None: - ret.checkpoint_path = checkpoint_path - if _pathlib.Path(ret.checkpoint_path).exists(): - msg = "Checkpoint path already exists! If trying to load an existing checkpoint," \ - +" use .from_dir({checkpoint_path}) instead. Otherwise, choose a different" \ - +" checkpoint path, remove the existing checkpoint, or move the existing" \ - +" checkpoint to a different directory." - raise RuntimeError(msg) + if new_checkpoint_path is not None: + ret.checkpoint_path = new_checkpoint_path if not ret.disable_checkpointing: ret.write(ret.checkpoint_path) @@ -211,7 +205,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False): - _TreeNode.__init__(self, edesign._dirs, edesign._vals) + _TreeNode.__init__(self, {}, {}) self.auxfile_types = {} _HasPSpec.__init__(self, pspec) @@ -256,12 +250,6 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['batch_results'] = 'pickle' if not self.disable_checkpointing: - if _pathlib.Path(self.checkpoint_path).exists(): - msg = "Checkpoint path already exists! If trying to load an existing checkpoint," \ - +" use .from_dir({checkpoint_path}) instead. Otherwise, choose a different" \ - +" checkpoint path, remove the existing checkpoint, or move the existing" \ - +" checkpoint to a different directory." 
- raise RuntimeError(msg) self.write(self.checkpoint_path) def monitor(self): diff --git a/test/performance/mpi_2D_scaling/mpi_test.py b/test/performance/mpi_2D_scaling/mpi_timings.py similarity index 100% rename from test/performance/mpi_2D_scaling/mpi_test.py rename to test/performance/mpi_2D_scaling/mpi_timings.py diff --git a/test/performance/mpi_2D_scaling/run.sh b/test/performance/mpi_2D_scaling/run.sh index b5c77dd13..08595c1c2 100755 --- a/test/performance/mpi_2D_scaling/run.sh +++ b/test/performance/mpi_2D_scaling/run.sh @@ -29,4 +29,4 @@ export MKL_NUM_THREADS=1 # Note: This flags are useful on Kahuna to avoid error messages # But the --mca flags are not necessary for performance mpirun -np ${NUM_PROCS} --mca pml ucx --mca btl '^openib' \ - python ./mpi_test.py &> ${PREFIX}.out + python ./mpi_timings.py &> ${PREFIX}.out diff --git a/test/unit/conftest.py b/test/unit/conftest.py new file mode 100644 index 000000000..d27d21835 --- /dev/null +++ b/test/unit/conftest.py @@ -0,0 +1,20 @@ +# pytest configuration + +# https://stackoverflow.com/a/75438209 for making pytest work with VSCode debugging better +import sys +import pytest + +def is_debugging(): + if 'debugpy' in sys.modules: + return True + return False + +# enable_stop_on_exceptions if the debugger is running during a test +if is_debugging(): + @pytest.hookimpl(tryfirst=True) + def pytest_exception_interact(call): + raise call.excinfo.value + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(excinfo): + raise excinfo.value \ No newline at end of file diff --git a/test/unit/extras/ibmq/__init__.py b/test/unit/extras/ibmq/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/test/unit/extras/ibmq/test_ibmqexperiment.py b/test/unit/extras/ibmq/test_ibmqexperiment.py new file mode 100644 index 000000000..6be1c62d4 --- /dev/null +++ b/test/unit/extras/ibmq/test_ibmqexperiment.py @@ -0,0 +1,40 @@ +import pytest +import shutil + +try: import qiskit as _qiskit +except: _qiskit = 
None + +import pygsti +from pygsti.extras.devices.experimentaldevice import ExperimentalDevice +from pygsti.extras import ibmq +from pygsti.processors import CliffordCompilationRules as CCR + +class IBMQExperimentTester(): + @classmethod + def setup_class(cls): + cls.device = ExperimentalDevice.from_legacy_device('ibmq_bogota') + cls.pspec = cls.device.create_processor_spec(['Gc{}'.format(i) for i in range(24)] + ['Gcnot']) + + compilations = {'absolute': CCR.create_standard(cls.pspec, 'absolute', ('paulis', '1Qcliffords'), verbosity=0)} + + mirror_design = pygsti.protocols.MirrorRBDesign(cls.pspec, [0, 2, 4], 10, qubit_labels=('Q0', 'Q1', 'Q2'), + clifford_compilations=compilations, sampler='edgegrab', samplerargs=[3/8,]) + cls.edesign = pygsti.protocols.CombinedExperimentDesign([mirror_design]) + + + def test_init(self): + exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, + disable_checkpointing=True) + + shutil.rmtree('ibmq_init_checkpoint', ignore_errors=True) + exp2 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, + checkpoint_path='ibmq_init_checkpoint') + + assert exp2.pygsti_circuit_batches == exp1.pygsti_circuit_batches + + exp3 = ibmq.IBMQExperiment.from_dir('ibmq_init_checkpoint') + assert exp3.pygsti_circuit_batches == exp1.pygsti_circuit_batches + + + + From e59ec4c0854f2e1a5b2c27dec60f09df3510dc1d Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 23 Jan 2024 13:17:16 -0800 Subject: [PATCH 18/32] First tests with mock Qiskit backend. Still needs some debugging, already fixed a few minor issues. 
--- pygsti/circuits/circuit.py | 5 +- pygsti/extras/ibmq/ibmqexperiment.py | 25 +++++---- test/unit/extras/ibmq/test_ibmqexperiment.py | 58 ++++++++++++++++++-- 3 files changed, 71 insertions(+), 17 deletions(-) diff --git a/pygsti/circuits/circuit.py b/pygsti/circuits/circuit.py index bf12f0e6c..47de65928 100644 --- a/pygsti/circuits/circuit.py +++ b/pygsti/circuits/circuit.py @@ -4021,7 +4021,7 @@ def convert_to_openqasm(self, num_qubits=None, # Init the openqasm string. openqasm = 'OPENQASM 2.0;\ninclude "qelib1.inc";\n\n' # Include a delay instruction - openqasm += 'opaque delay(t) q;\n\n' + #openqasm += 'opaque delay(t) q;\n\n' # SS 01-22-24: Is this necessary? CHeck with Megan/Tim/Kenny openqasm += 'qreg q[{0}];\n'.format(str(num_qubits)) # openqasm += 'creg cr[{0}];\n'.format(str(num_qubits)) @@ -4102,7 +4102,8 @@ def convert_to_openqasm(self, num_qubits=None, if q not in qubits_used: # Delay 0 works because of the barrier # In OpenQASM3, this should probably be a stretch instead - openqasm += 'delay(0)' + ' q[' + str(qubit_conversion[q]) + '];\n' + #openqasm += 'delay(0)' + ' q[' + str(qubit_conversion[q]) + '];\n' + pass # Add in a barrier after every circuit layer if block_between_layers==True. # Including barriers is critical for QCVV testing, circuits should usually diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 74508fa72..d99752719 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -328,7 +328,7 @@ def partial_trace(ordered_target_indices, input_dict): self.data.write(self.checkpoint_path, edesign_already_written=True) def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, - wait_time=1, wait_steps=10): + wait_time=5, max_attempts=10): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. 
@@ -397,12 +397,17 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, print(f"Submitting batch {batch_idx + 1}") submit_status = False batch_waits = 0 - while not submit_status: + while not submit_status and batch_waits < max_attempts: try: #If submitting to a real device, get calibration data - if not ibmq_backend.simulator: - backend_properties = ibmq_backend.properties() - self.submit_time_calibration_data.append(backend_properties.to_dict()) + try: + if not ibmq_backend.simulator: + backend_properties = ibmq_backend.properties() + self.submit_time_calibration_data.append(backend_properties.to_dict()) + except AttributeError: + # We can't get the properties or check if simulator + # Possible this is a fake backend, append empty submit data + self.submit_time_calibration_data.append({}) if backend_version == 1: # If using qiskit-ibmq-provider API, assemble into Qobj first @@ -453,15 +458,15 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, batch_waits += 1 print("This batch has failed {0} times and there have been {1} total failures".format( batch_waits, total_waits)) - print('Waiting ', end='') - for step in range(wait_steps): - print('{} '.format(step), end='') - _time.sleep(wait_time) - print() + print('Waiting', end='') + _time.sleep(wait_time) finally: # Checkpoint calibration and job id data if not self.disable_checkpointing: self._write_checkpoint() + + if submit_status is False: + raise RuntimeError("Ran out of max attempts and job was still not submitted successfully") def transpile(self): """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. 
diff --git a/test/unit/extras/ibmq/test_ibmqexperiment.py b/test/unit/extras/ibmq/test_ibmqexperiment.py index 6be1c62d4..e170c3e4d 100644 --- a/test/unit/extras/ibmq/test_ibmqexperiment.py +++ b/test/unit/extras/ibmq/test_ibmqexperiment.py @@ -1,8 +1,8 @@ import pytest import shutil -try: import qiskit as _qiskit -except: _qiskit = None +try: import qiskit +except: qiskit = None import pygsti from pygsti.extras.devices.experimentaldevice import ExperimentalDevice @@ -26,14 +26,62 @@ def test_init(self): exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, disable_checkpointing=True) - shutil.rmtree('ibmq_init_checkpoint', ignore_errors=True) + chkpt = 'test_ibmq_init_checkpoint' + shutil.rmtree(chkpt, ignore_errors=True) exp2 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, - checkpoint_path='ibmq_init_checkpoint') + checkpoint_path=chkpt) assert exp2.pygsti_circuit_batches == exp1.pygsti_circuit_batches - exp3 = ibmq.IBMQExperiment.from_dir('ibmq_init_checkpoint') + exp3 = ibmq.IBMQExperiment.from_dir(chkpt) assert exp3.pygsti_circuit_batches == exp1.pygsti_circuit_batches + + def test_transpile(self): + chkpt = 'test_ibmq_transpile_checkpoint' + shutil.rmtree(chkpt, ignore_errors=True) + exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, + checkpoint_path=chkpt) + exp1.transpile() + + # Test checkpoint load + exp2 = ibmq.IBMQExperiment.from_dir(chkpt, regen_qiskit_circs=True) + assert exp2.qiskit_circuit_batches == exp1.qiskit_circuit_batches + + # Test restart + del exp2.qiskit_circuit_batches[2:] + del exp2.qasm_circuit_batches[2:] + exp2.transpile() + assert exp2.qiskit_circuit_batches == exp1.qiskit_circuit_batches + + @pytest.mark.parametrize('backend_version', ['v1', 'v2']) + def test_submit(self, backend_version): + chkpt = 'test_ibmq_submit_checkpoint' + shutil.rmtree(chkpt, ignore_errors=True) + 
exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, + checkpoint_path=chkpt) + exp1.transpile() + + if backend_version == 'v1': + from qiskit.providers.fake_provider import FakeBogota, FakeProvider + backend = FakeBogota() + provider = FakeProvider() + elif backend_version == 'v2': + from qiskit.providers.fake_provider import FakeBogotaV2, FakeProviderForBackendV2 + backend = FakeBogotaV2() + provider = FakeProviderForBackendV2() + else: + raise RuntimeError("Unknown backend version for testing") + + exp1.submit(backend, stop=3, max_attempts=1) + # Submit first 3 jobs + assert len(exp1.qjobs) == 3 + + # Load from checkpoint + exp2 = ibmq.IBMQExperiment.from_dir(chkpt, regen_qiskit_circs=True, regen_runtime_jobs=True, provider=provider) + exp2.submit(max_attempts=1) + assert len(exp2.qjobs) == len(exp2.qasm_circuit_batches) + + From ca2d385e9be7c07a6b1cc0b7aa74b40437d08a62 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Mon, 8 Jul 2024 16:17:34 -0700 Subject: [PATCH 19/32] Add Gdelay instruction to openqasm conversion. 
--- pygsti/tools/internalgates.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pygsti/tools/internalgates.py b/pygsti/tools/internalgates.py index e3664f79c..a3e90b071 100644 --- a/pygsti/tools/internalgates.py +++ b/pygsti/tools/internalgates.py @@ -602,6 +602,7 @@ def standard_gatenames_openqasm_conversions(version='u3'): std_gatenames_to_argmap['Gczr'] = lambda gatearg: ['crz(' + str(gatearg[0]) + ')'] std_gatenames_to_argmap['Gu3'] = lambda gatearg: ['u3(' + str(gatearg[0]) + ', ' + str(gatearg[1]) + ', ' + str(gatearg[2]) + ')'] + std_gatenames_to_argmap['Gdelay'] = lambda gatearg: ['delay(' + str(gatearg[0]) + ')'] elif version == 'x-sx-rz': std_gatenames_to_qasm = {} @@ -656,6 +657,7 @@ def standard_gatenames_openqasm_conversions(version='u3'): std_gatenames_to_argmap['Gu3'] = lambda gatearg: ['rz(' + str(gatearg[2]) + ')', 'sx', 'rz(' + str(float(gatearg[0]) + _np.pi) + ')', 'sx', 'rz(' + str(float(gatearg[1]) + _np.pi) + ')'] + std_gatenames_to_argmap['Gdelay'] = lambda gatearg: ['delay(' + str(gatearg[0]) + ')'] else: raise ValueError("Unknown version!") From 0c2059ff1cc11a6047be9dc6babd68d4644d663f Mon Sep 17 00:00:00 2001 From: "Stefan K. 
Seritan" Date: Thu, 11 Jul 2024 09:15:50 -0700 Subject: [PATCH 20/32] Get IBMQExperiment tests working again with qiskit-ibm-runtime --- pygsti/extras/ibmq/ibmqexperiment.py | 153 +++++++------------ setup.py | 4 + test/unit/extras/ibmq/test_ibmqexperiment.py | 35 ++--- 3 files changed, 72 insertions(+), 120 deletions(-) diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index d99752719..225a497be 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -10,99 +10,50 @@ import json as _json import numpy as _np +import os as _os import pathlib as _pathlib import pickle as _pickle import time as _time import warnings as _warnings +# Try to load Qiskit try: import qiskit as _qiskit except: _qiskit = None +# Try to load IBM Runtime +try: + from qiskit_ibm_runtime import SamplerV2 as _Sampler + from qiskit_ibm_runtime import Session as _Session + from qiskit.transpiler.preset_passmanagers import generate_preset_pass_manager as _pass_manager +except: _Sampler = None + +# Most recent version of QisKit that this has been tested on: +#qiskit.__version__ = '1.1.1' +#qiskit_ibm_runtime.__version__ = '0.25.0' +# Note that qiskit<1.0 is going EOL in August 2024, +# and v1 backends are also being deprecated (we now support only v2) +# Also qiskit-ibm-provider is ALSO being deprecated, +# so I'm only supporting runtime here + try: from bson import json_util as _json_util except ImportError: _json_util = None -from ... 
import data as _data, io as _io -from ...protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec -from ...protocols.protocol import _TreeNode - -# Most recent version of QisKit that this has been tested on: -#qiskit.__qiskit_version__ = { -# 'qiskit-terra': '0.25.3', -# 'qiskit': '0.44.3', -# 'qiskit-aer': None, -# 'qiskit-ignis': None, -# 'qiskit-ibmq-provider': '0.20.2', -# 'qiskit-nature': None, -# 'qiskit-finance': None, -# 'qiskit-optimization': None, -# 'qiskit-machine-learning': None -#} -#qiskit_ibm_provider.__version__ = '0.7.2' +from pygsti import data as _data, io as _io +from pygsti.protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec +from pygsti.protocols.protocol import _TreeNode class IBMQExperiment(_TreeNode, _HasPSpec): """ A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these jobs to IBM Q and receives the results. - - Parameters - ---------- - edesign: ExperimentDesign - The ExperimentDesign to be run on IBM Q. This can be a combined experiment design (e.g., a GST - design combined with an RB design). - - pspec: QubitProcessorSpec - A QubitProcessorSpec that represents the IBM Q device being used. This can be created using the - extras.devices.create_processor_spec(). The ProcessorSpecs qubit ordering *must* correspond - to that of the IBM device (which will be the case if you create it using that function). - I.e., pspecs qubits should be labelled Q0 through Qn-1 and the labelling of the qubits - should agree with IBM's labelling. - - remove_duplicates: bool, optional - If true, each distinct circuit in `edesign` is run only once. If false, if a circuit is - repeated multiple times in `edesign` it is run multiple times. - - randomized_order: bool, optional - Whether or not to randomize the order of the circuits in `edesign` before turning them - into jobs to be submitted to IBM Q. 
- - circuits_per_batch: int, optional - The circuits in edesign are divded into batches, each containing at most this many - circuits. The default value of 75 is (or was) the maximal value allowed on the public - IBM Q devices. - - num_shots: int, optional - The number of samples from / repeats of each circuit. - - seed: int, optional - Seed for RNG during order randomization of circuits. - - checkpoint_path: str, optional - A string for the path to use for writing intermediate checkpoint - files to disk. Defaults to `ibmqexperiment_checkpoint`, but can also be - the desired {dirname} for an eventual `write({dirname})` call, i.e. the - serialized IBMQExperiment checkpoint after a successful `retrieve_results()` - is equivalent to the serialized IBMQExperiment after `write()`. - - disable_checkpointing : bool, optional (default False) - When set to True checkpoint objects will not be constructed and written - to disk during the course of this protocol. It is strongly recommended - that this be kept set to False without good reason to disable the checkpoints. - - Returns - ------- - IBMQExperiment - An object containing jobs to be submitted to IBM Q created by `.transpile()`, - which can then be submitted using the methods `.submit()` and whose results - can be grabbed from IBM Q using the method `.retrieve_results()`. - """ @classmethod - def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, provider=None, - new_checkpoint_path=None): + def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, + service=None, new_checkpoint_path=None): """ Initialize a new IBMQExperiment object from `dirname`. @@ -111,19 +62,19 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p dirname : str The directory name. - regen_qiskit_circs: bool, optional + regen_circs: bool, optional Whether to recreate the Qiskit circuits from the transpiled OpenQASM strings. Defaults to False. 
You should set this to True if you would like to call submit(). - regen_runtime_jobs: bool, optional + regen_jobs: bool, optional Whether to recreate the RuntimeJobs from IBMQ based on the job ides. Defaults to False. You should set this to True if you would like to call monitor() or retrieve_results(). - - provider: IBMProvider - Provider used to retrieve RuntimeJobs from IBMQ based on job_ids - (if lazy_qiskit_load is False) + + service: QiskitRuntimeService + Service used to retrieve RuntimeJobs from IBMQ based on job_ids + (if regen_runtime_jobs is True). new_checkpoint_path: str, optional A string for the path to use for writing intermediate checkpoint @@ -145,13 +96,15 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p exp_dir = p / 'ibmqexperiment' attributes_from_meta = _io.load_meta_based_dir(exp_dir) - ret = cls(edesign, None) + # Don't override checkpoint during this construction + ret = cls(edesign, None, disable_checkpointing=True) ret.__dict__.update(attributes_from_meta) ret.edesign = edesign except KeyError: _warnings.warn("Failed to load ibmqexperiment, falling back to old serialization format logic") - ret = cls(edesign, None) + # Don't override checkpoint during this construction + ret = cls(edesign, None, disable_checkpointing=True) with open(p / 'ibmqexperiment/meta.json', 'r') as f: from_json = _json.load(f) ret.__dict__.update(from_json) @@ -182,18 +135,20 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p # Regenerate Qiskit circuits ret.qiskit_circuit_batches = [] - if regen_qiskit_circs: + if regen_circs: + assert _qiskit is not None, "Could not import qiskit, needed for regen_circs=True" for batch_strs in ret.qasm_circuit_batches: batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] ret.qiskit_circuit_batches.append(batch) # Regenerate Qiskit RuntimeJobs ret.qjobs = [] - if regen_runtime_jobs: - if provider is None: - _warnings.warn("No provider 
specified, cannot retrieve IBM jobs") + if regen_jobs: + assert _Sampler is not None, "Could not import qiskit-ibm-runtime, needed for regen_jobs=True" + if service is None: + _warnings.warn("No service specified, cannot retrieve IBM jobs") else: - ret._retrieve_jobs(provider) + ret._retrieve_jobs(service=service) # Update checkpoint path if requested if new_checkpoint_path is not None: @@ -204,7 +159,7 @@ def from_dir(cls, dirname, regen_qiskit_circs=False, regen_runtime_jobs=False, p return ret def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, - num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False): + num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False, checkpoint_override=False): _TreeNode.__init__(self, {}, {}) self.auxfile_types = {} @@ -248,9 +203,15 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True # Fall back to pickles if we do not have bson to deal with datetime.datetime self.auxfile_types['submit_time_calibration_data'] = 'pickle' self.auxfile_types['batch_results'] = 'pickle' - + if not self.disable_checkpointing: - self.write(self.checkpoint_path) + chkpath = _pathlib.Path(self.checkpoint_path) + if chkpath.exists() and not checkpoint_override: + raise RuntimeError(f"Checkpoint {self.checkpoint_path} already exists. Either " + + "specify a different checkpoint_path, set checkpoint_override=True to clobber the current checkpoint," + + " or turn checkpointing off via disable_checkpointing=True (not recommended)." 
+ ) + self.write(chkpath) def monitor(self): """ @@ -366,12 +327,16 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, ------- None """ + assert _qiskit is not None, "Could not import qiskit, needed for submit()" + assert _Sampler is not None, "Could not import qiskit-ibm-runtime, needed for submit()" + assert len(self.qiskit_circuit_batches) == len(self.pygsti_circuit_batches), \ "Transpilation missing! Either run .transpile() first, or if loading from file, " + \ "use the regen_qiskit_circs=True option in from_dir()." #Get the backend version backend_version = ibmq_backend.version + assert backend_version == 2, "IBMQExperiment no longer supports v1 backends due to their deprecation by IBM" total_waits = 0 self.qjobs = [] if self.qjobs is None else self.qjobs @@ -401,6 +366,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, try: #If submitting to a real device, get calibration data try: + # TODO: I think Noah ran into issues here if not ibmq_backend.simulator: backend_properties = ibmq_backend.properties() self.submit_time_calibration_data.append(backend_properties.to_dict()) @@ -409,13 +375,8 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, # Possible this is a fake backend, append empty submit data self.submit_time_calibration_data.append({}) - if backend_version == 1: - # If using qiskit-ibmq-provider API, assemble into Qobj first - qobj = _qiskit.compiler.assemble(batch, shots=self.num_shots) - self.qjobs.append(ibmq_backend.run(qobj)) - else: - # Newer qiskit-ibm-provider can take list of Qiskit circuits directly - self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) + # Newer qiskit-ibm-provider can take list of Qiskit circuits directly + self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) status = self.qjobs[-1].status() initializing = True @@ -560,7 +521,7 @@ def _write_checkpoint(self, dirname=None): exp_dir.mkdir(parents=True, 
exist_ok=True) _io.metadir.write_obj_to_meta_based_dir(self, exp_dir, 'auxfile_types') - def _retrieve_jobs(self, provider): + def _retrieve_jobs(self, service): """Retrieves RuntimeJobs from IBMQ based on job_ids. Parameters @@ -570,4 +531,4 @@ def _retrieve_jobs(self, provider): """ for i, jid in enumerate(self.job_ids): print(f"Loading job {i+1}/{len(self.job_ids)}...") - self.qjobs.append(provider.backend.retrieve_job(jid)) + self.qjobs.append(service.job(jid)) diff --git a/setup.py b/setup.py index 8705895b4..ee17f0def 100644 --- a/setup.py +++ b/setup.py @@ -60,6 +60,10 @@ ], 'interpygate': ['csaps'], 'serialization': ['bson'], + 'ibmq': [ + 'qiskit>1', + 'qiskit-ibm-runtime>=0.17.1', + ], 'testing': [ 'pytest', 'pytest-xdist', diff --git a/test/unit/extras/ibmq/test_ibmqexperiment.py b/test/unit/extras/ibmq/test_ibmqexperiment.py index e170c3e4d..71d26f8b5 100644 --- a/test/unit/extras/ibmq/test_ibmqexperiment.py +++ b/test/unit/extras/ibmq/test_ibmqexperiment.py @@ -27,9 +27,8 @@ def test_init(self): disable_checkpointing=True) chkpt = 'test_ibmq_init_checkpoint' - shutil.rmtree(chkpt, ignore_errors=True) exp2 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, - checkpoint_path=chkpt) + checkpoint_path=chkpt, checkpoint_override=True) assert exp2.pygsti_circuit_batches == exp1.pygsti_circuit_batches @@ -38,13 +37,12 @@ def test_init(self): def test_transpile(self): chkpt = 'test_ibmq_transpile_checkpoint' - shutil.rmtree(chkpt, ignore_errors=True) exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, - checkpoint_path=chkpt) + checkpoint_path=chkpt, checkpoint_override=True) exp1.transpile() # Test checkpoint load - exp2 = ibmq.IBMQExperiment.from_dir(chkpt, regen_qiskit_circs=True) + exp2 = ibmq.IBMQExperiment.from_dir(chkpt, regen_circs=True) assert exp2.qiskit_circuit_batches == exp1.qiskit_circuit_batches # Test restart @@ -53,33 +51,22 @@ def 
test_transpile(self): exp2.transpile() assert exp2.qiskit_circuit_batches == exp1.qiskit_circuit_batches - @pytest.mark.parametrize('backend_version', ['v1', 'v2']) - def test_submit(self, backend_version): + def test_submit(self): chkpt = 'test_ibmq_submit_checkpoint' - shutil.rmtree(chkpt, ignore_errors=True) exp1 = ibmq.IBMQExperiment(self.edesign, self.pspec, circuits_per_batch=5, num_shots=1024, seed=20231201, - checkpoint_path=chkpt) + checkpoint_path=chkpt, checkpoint_override=True) exp1.transpile() - if backend_version == 'v1': - from qiskit.providers.fake_provider import FakeBogota, FakeProvider - backend = FakeBogota() - provider = FakeProvider() - elif backend_version == 'v2': - from qiskit.providers.fake_provider import FakeBogotaV2, FakeProviderForBackendV2 - backend = FakeBogotaV2() - provider = FakeProviderForBackendV2() - else: - raise RuntimeError("Unknown backend version for testing") + from qiskit_ibm_runtime.fake_provider import FakeBogotaV2 + backend = FakeBogotaV2() - exp1.submit(backend, stop=3, max_attempts=1) # Submit first 3 jobs + exp1.submit(backend, stop=3, max_attempts=1) assert len(exp1.qjobs) == 3 - # Load from checkpoint - exp2 = ibmq.IBMQExperiment.from_dir(chkpt, regen_qiskit_circs=True, regen_runtime_jobs=True, provider=provider) - exp2.submit(max_attempts=1) - assert len(exp2.qjobs) == len(exp2.qasm_circuit_batches) + # Submit rest of jobs + exp1.submit(backend, max_attempts=1) + assert len(exp1.qjobs) == len(exp1.qasm_circuit_batches) From aa1033f97b4cb73008d40cf7e2bce21fb142d8be Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Mon, 15 Jul 2024 15:37:00 -0700 Subject: [PATCH 21/32] Qiskit 1.0/Runtime changes almost complete. Still not fully tested due to CX/ECR conversion. 
--- .github/workflows/feature-branches.yml | 2 +- .../objects/advanced/IBMQExperiment.ipynb | 275 +++++++++++------- pygsti/extras/devices/experimentaldevice.py | 23 +- pygsti/extras/ibmq/ibmqexperiment.py | 49 ++-- test/unit/extras/ibmq/test_ibmqexperiment.py | 11 - 5 files changed, 207 insertions(+), 153 deletions(-) diff --git a/.github/workflows/feature-branches.yml b/.github/workflows/feature-branches.yml index b0570aa53..5b6712629 100644 --- a/.github/workflows/feature-branches.yml +++ b/.github/workflows/feature-branches.yml @@ -16,7 +16,7 @@ on: jobs: build: strategy: - # fail-fast: true is OK here + fail-fast: false matrix: os: [ubuntu-latest, windows-latest] # No Mac python-version: [3.8, '3.11'] # Only extremal Python versions diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 1019523d1..04dadbcbc 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -7,22 +7,26 @@ "# Running experiments on IBM Q Processors\n", "This tutorial will demonstrate how to run an experiment on IBM Q Processors. To do so you will need [QisKit](https://qiskit.org/) installed and an [IBM Q account](https://quantum-computing.ibm.com/).\n", "\n", + "There have been major changes to `IBMQExperiment` as of pygsti 0.9.13. This is due to Qiskit 1.0 and subsequent deprecations of V1 backends and `qiskit-ibm-provider`. 
The `IBMQExperiment` class only supports V2 backends and is based on `qiskit-ibm-runtime`.\n", + "\n", + "For details on how to migrate from `qiskit<1` or `qiskit-ibm-provider`, see [this blog post](https://www.ibm.com/quantum/blog/transition-to-1), [this Qiskit 1.0 migration guide](https://docs.quantum.ibm.com/api/migration-guides/qiskit-1.0-features), or [this Qiskit Runtime migration guide](https://docs.quantum.ibm.com/api/migration-guides/qiskit-runtime).\n", + "\n", "This was last run with QisKit versions:" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ - "#qiskit.__qiskit_version__ = {'qiskit-terra': '0.25.3', 'qiskit': '0.44.3', 'qiskit-aer': None, 'qiskit-ignis': None, 'qiskit-ibmq-provider': '0.20.2', 'qiskit-nature': None, 'qiskit-finance': None, 'qiskit-optimization': None, 'qiskit-machine-learning': None}\n", - "#qiskit_ibm_provider.__version__ = '0.7.2'" + "#qiskit.__version__ = '1.1.1'\n", + "#qiskit_ibm_runtime.__version__ = '0.25.0'" ] }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 2, "metadata": { "tags": [] }, @@ -36,7 +40,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 3, "metadata": { "tags": [ "nbval-skip" @@ -44,7 +48,8 @@ }, "outputs": [], "source": [ - "from qiskit_ibm_provider import IBMProvider" + "from qiskit_ibm_runtime import QiskitRuntimeService\n", + "from qiskit_ibm_runtime.fake_provider import FakeSherbrooke" ] }, { @@ -57,34 +62,60 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ "# If your first time, you may need to initialize your account with your IBMQ API token\n", - "#IBMProvider.save_account(token=\"\")" + "\n", + "# You can also specify instances (i.e. \"ibm-q/open/main\" is the default instance)\n", + "# You can also save/load named accounts for different instances, etc. 
See save_account docs for more information.\n", + "\n", + "#QiskitRuntimeService.save_account(channel=\"ibm_quantum\", token=\"\", overwrite=True, set_as_default=True)" ] }, { "cell_type": "code", - "execution_count": 4, - "metadata": { - "tags": [ - "nbval-skip" - ] - }, + "execution_count": 5, + "metadata": {}, "outputs": [], "source": [ - "# You can use your own instance if you have different credentials\n", - "#provider = IBMProvider(instance='ibm-q/open/main')\n", - "\n", - "# You can leave it blank to use the default for your account\n", - "provider = IBMProvider()" + "# Once credentials are saved, the service can be loaded each time:\n", + "service = QiskitRuntimeService(channel=\"ibm_quantum\")" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# You can list all the available backends to ensure your instance is running properly\n", + "service.backends()" + ] + }, + { + "cell_type": "code", + "execution_count": 7, "metadata": { "tags": [ "nbval-skip" @@ -92,14 +123,14 @@ }, "outputs": [], "source": [ - "# Use backends() to see what backends you have access to\n", - "#provider.backends()\n", - "\n", "# Can use a physical device...\n", - "backend = provider.get_backend('ibm_hanoi')\n", + "backend = service.backend('ibm_sherbrooke')\n", + "\n", + "# Can also ask for the least busy device\n", + "#backend = service.least_busy()\n", "\n", - "# ... or can use a simulator\n", - "sim_backend = provider.get_backend('ibmq_qasm_simulator')" + "# ... 
or can use a simulated fake backend\n", + "sim_backend = FakeSherbrooke()" ] }, { @@ -120,17 +151,7 @@ }, { "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [], - "source": [ - "# Using the configuration files in pygsti.extras.devices (legacy and may not be up-to-date)\n", - "device = ExperimentalDevice.from_legacy_device('ibmq_bogota')" - ] - }, - { - "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": { "tags": [ "nbval-skip" @@ -138,13 +159,16 @@ }, "outputs": [], "source": [ + "# Using the configuration files in pygsti.extras.devices (legacy and may not be up-to-date)\n", + "#device = ExperimentalDevice.from_legacy_device('ibmq_bogota')\n", + "\n", "# Using the active backend to pull current device specification\n", "device = ExperimentalDevice.from_qiskit_backend(sim_backend)" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 10, "metadata": { "tags": [] }, @@ -165,7 +189,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 11, "metadata": { "tags": [] }, @@ -209,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -219,7 +243,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 13, "metadata": { "tags": [] }, @@ -228,30 +252,30 @@ "name": "stdout", "output_type": "stream", "text": [ - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 375328\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 375348\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 375368\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 375388\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 375408\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 375428\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 981521\n", - "- Sampling 20 circuits at 
MRB length 2 (2 of 6 depths) with seed 981541\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 981561\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 981581\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 981601\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 981621\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 200349\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 200369\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 200389\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 200409\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 200429\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 200449\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 836421\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 836441\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 836461\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 836481\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 836501\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 836521\n" + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 488082\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 488102\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 488122\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 488142\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 488162\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 488182\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 937591\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 937611\n", + "- Sampling 20 circuits at MRB length 4 
(3 of 6 depths) with seed 937631\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 937651\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 937671\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 937691\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 551098\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 551118\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 551138\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 551158\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 551178\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 551198\n", + "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 754211\n", + "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 754231\n", + "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 754251\n", + "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 754271\n", + "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 754291\n", + "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 754311\n" ] } ], @@ -281,41 +305,13 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 14, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "ename": "ValueError", - "evalue": "_vals key : 1 key : Cannot serialize object of type '' to JSON!", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:720\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 719\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 720\u001b[0m 
\u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mv\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:724\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 724\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot serialize object of type \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m to JSON!\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28mtype\u001b[39m(x)))\n", - "\u001b[0;31mValueError\u001b[0m: Cannot serialize object of type '' to JSON!", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:720\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 719\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 720\u001b[0m \u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mv\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:722\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m--> 722\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m 
\u001b[38;5;167;01mValueError\u001b[39;00m((\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m key : \u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m k) \u001b[38;5;241m+\u001b[39m \u001b[38;5;28mstr\u001b[39m(e))\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", - "\u001b[0;31mValueError\u001b[0m: 1 key : Cannot serialize object of type '' to JSON!", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[12], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m exp \u001b[38;5;241m=\u001b[39m \u001b[43mibmq\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mIBMQExperiment\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcombined_edesign\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mpspec\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcircuits_per_batch\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m75\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnum_shots\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m1024\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mseed\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;241;43m20231201\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcheckpoint_path\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtest_ibmq\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:265\u001b[0m, in \u001b[0;36mIBMQExperiment.__init__\u001b[0;34m(self, edesign, pspec, remove_duplicates, randomized_order, circuits_per_batch, num_shots, seed, checkpoint_path, disable_checkpointing)\u001b[0m\n\u001b[1;32m 260\u001b[0m msg \u001b[38;5;241m=\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCheckpoint path already exists! 
If trying to load an existing checkpoint,\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 261\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m use .from_dir(\u001b[39m\u001b[38;5;132;01m{checkpoint_path}\u001b[39;00m\u001b[38;5;124m) instead. Otherwise, choose a different\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 262\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m checkpoint path, remove the existing checkpoint, or move the existing\u001b[39m\u001b[38;5;124m\"\u001b[39m \\\n\u001b[1;32m 263\u001b[0m \u001b[38;5;241m+\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m checkpoint to a different directory.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 264\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(msg)\n\u001b[0;32m--> 265\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwrite\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcheckpoint_path\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:551\u001b[0m, in \u001b[0;36mIBMQExperiment.write\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 548\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdata \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 549\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdata\u001b[38;5;241m.\u001b[39mwrite(dirname, edesign_already_written\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m--> 551\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_write_checkpoint\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdirname\u001b[49m\u001b[43m)\u001b[49m\n", - "File 
\u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:568\u001b[0m, in \u001b[0;36mIBMQExperiment._write_checkpoint\u001b[0;34m(self, dirname)\u001b[0m\n\u001b[1;32m 566\u001b[0m exp_dir \u001b[38;5;241m=\u001b[39m _pathlib\u001b[38;5;241m.\u001b[39mPath(dirname) \u001b[38;5;241m/\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mibmqexperiment\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 567\u001b[0m exp_dir\u001b[38;5;241m.\u001b[39mmkdir(parents\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, exist_ok\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m)\n\u001b[0;32m--> 568\u001b[0m \u001b[43m_io\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mmetadir\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwrite_obj_to_meta_based_dir\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mexp_dir\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mauxfile_types\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:598\u001b[0m, in \u001b[0;36mwrite_obj_to_meta_based_dir\u001b[0;34m(obj, dirname, auxfile_types_member, omit_attributes, include_attributes, additional_meta)\u001b[0m\n\u001b[1;32m 595\u001b[0m vals \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__dict__\u001b[39m\n\u001b[1;32m 596\u001b[0m auxtypes \u001b[38;5;241m=\u001b[39m obj\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__dict__\u001b[39m[auxfile_types_member]\n\u001b[0;32m--> 598\u001b[0m \u001b[43mwrite_meta_based_dir\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdirname\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mvals\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mauxtypes\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43minit_meta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmeta\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:402\u001b[0m, in \u001b[0;36mwrite_meta_based_dir\u001b[0;34m(root_dir, valuedict, auxfile_types, init_meta)\u001b[0m\n\u001b[1;32m 399\u001b[0m meta[auxnm] \u001b[38;5;241m=\u001b[39m auxmeta \u001b[38;5;66;03m# metadata about auxfile(s) for this auxnm\u001b[39;00m\n\u001b[1;32m 401\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mopen\u001b[39m(\u001b[38;5;28mstr\u001b[39m(root_dir \u001b[38;5;241m/\u001b[39m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mmeta.json\u001b[39m\u001b[38;5;124m'\u001b[39m), \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mw\u001b[39m\u001b[38;5;124m'\u001b[39m) \u001b[38;5;28;01mas\u001b[39;00m f:\n\u001b[0;32m--> 402\u001b[0m \u001b[43m_check_jsonable\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmeta\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 403\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m _json_util \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 404\u001b[0m _json\u001b[38;5;241m.\u001b[39mdump(meta, f, indent\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m4\u001b[39m, default\u001b[38;5;241m=\u001b[39m_json_util\u001b[38;5;241m.\u001b[39mdefault)\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/io/metadir.py:722\u001b[0m, in \u001b[0;36m_check_jsonable\u001b[0;34m(x)\u001b[0m\n\u001b[1;32m 720\u001b[0m _check_jsonable(v)\n\u001b[1;32m 721\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m e:\n\u001b[0;32m--> 722\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m((\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m key : \u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m k) \u001b[38;5;241m+\u001b[39m 
\u001b[38;5;28mstr\u001b[39m(e))\n\u001b[1;32m 723\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m 724\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mCannot serialize object of type \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;132;01m%s\u001b[39;00m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m to JSON!\u001b[39m\u001b[38;5;124m\"\u001b[39m \u001b[38;5;241m%\u001b[39m \u001b[38;5;28mstr\u001b[39m(\u001b[38;5;28mtype\u001b[39m(x)))\n", - "\u001b[0;31mValueError\u001b[0m: _vals key : 1 key : Cannot serialize object of type '' to JSON!" - ] - } - ], + "outputs": [], "source": [ "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201, checkpoint_path='test_ibmq')" ] @@ -329,9 +325,23 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 15, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transpiling circuit batch 1/7\n", + "Transpiling circuit batch 2/7\n", + "Transpiling circuit batch 3/7\n", + "Transpiling circuit batch 4/7\n", + "Transpiling circuit batch 5/7\n", + "Transpiling circuit batch 6/7\n", + "Transpiling circuit batch 7/7\n" + ] + } + ], "source": [ "# Provide the directory name to enable transpilation checkpointing\n", "exp.transpile()" @@ -339,9 +349,20 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 16, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Already completed transpilation of 4/7 circuit batches\n", + "Transpiling circuit batch 5/7\n", + "Transpiling circuit batch 6/7\n", + "Transpiling circuit batch 7/7\n" + ] + } + ], "source": [ "# We can simulate having been interrupted by removing the last few transpiled batches\n", "del exp.qasm_circuit_batches[4:]\n", @@ -357,16 +378,16 @@ "source": [ "If the `IBMQExperiment` 
object is lost and needs to be reloaded (i.e. notebook restarts), it can be loaded from file now.\n", "\n", - "However, the Qiskit circuits are not automatically regenerated from the transpiled QASM during loading for speed. They can (and need to be regenerated if calling `submit()`) by passing in the `regen_qiskit_circs=True` flag to `from_dir()`." + "However, the Qiskit circuits are not automatically regenerated from the transpiled QASM during loading for speed. They can (and need to be regenerated if calling `submit()`) by passing in the `regen_circs=True` flag to `from_dir()`." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": {}, "outputs": [], "source": [ - "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_qiskit_circs=True)" + "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_circs=True)" ] }, { @@ -378,13 +399,53 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 19, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Submitting batch 1\n", + "An exception of type IBMInputValueError occurred. Arguments:\n", + "('The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.',)\n", + "Failed to get machine status!\n", + "An exception of type IBMInputValueError occurred. Arguments:\n", + "(\"'FakeSherbrooke' object has no attribute 'status'\",)\n", + "This batch has failed 1 times and there have been 1 total failures\n", + "WaitingAn exception of type IBMInputValueError occurred. 
Arguments:\n", + "('The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.',)\n", + "Failed to get machine status!\n", + "An exception of type IBMInputValueError occurred. Arguments:\n", + "(\"'FakeSherbrooke' object has no attribute 'status'\",)\n", + "This batch has failed 2 times and there have been 2 total failures\n", + "Waiting" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mIBMInputValueError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:392\u001b[0m, in \u001b[0;36mIBMQExperiment.submit\u001b[0;34m(self, ibmq_backend, start, stop, ibm_opt_level, ignore_job_limit, wait_time, max_attempts)\u001b[0m\n\u001b[1;32m 391\u001b[0m \u001b[38;5;66;03m# Submit job\u001b[39;00m\n\u001b[0;32m--> 392\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs\u001b[38;5;241m.\u001b[39mappend(\u001b[43msampler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mshots\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_shots\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 394\u001b[0m status \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\u001b[38;5;241m.\u001b[39mstatus()\n", + "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/sampler.py:151\u001b[0m, in \u001b[0;36mSamplerV2.run\u001b[0;34m(self, pubs, shots)\u001b[0m\n\u001b[1;32m 149\u001b[0m validate_classical_registers(coerced_pubs)\n\u001b[0;32m--> 151\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcoerced_pubs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/base_primitive.py:135\u001b[0m, in \u001b[0;36mBasePrimitiveV2._run\u001b[0;34m(self, pubs)\u001b[0m\n\u001b[1;32m 134\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mgetattr\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtarget\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_simulator(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend):\n\u001b[0;32m--> 135\u001b[0m \u001b[43mvalidate_isa_circuits\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[43mpub\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcircuit\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_backend\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend, IBMBackend):\n", + "File 
\u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/utils/validations.py:89\u001b[0m, in \u001b[0;36mvalidate_isa_circuits\u001b[0;34m(circuits, target)\u001b[0m\n\u001b[1;32m 88\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m message:\n\u001b[0;32m---> 89\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m IBMInputValueError(\n\u001b[1;32m 90\u001b[0m message\n\u001b[1;32m 91\u001b[0m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Circuits that do not match the target hardware definition are no longer \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 92\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupported after March 4, 2024. See the transpilation documentation \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 93\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m(https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mthe primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 95\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mthis coupled with operator transformations.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 96\u001b[0m )\n", + "\u001b[0;31mIBMInputValueError\u001b[0m: 'The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. 
See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[19], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Again, we can checkpoint by passing in dirname\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mexp2\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msubmit\u001b[49m\u001b[43m(\u001b[49m\u001b[43msim_backend\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:436\u001b[0m, in \u001b[0;36mIBMQExperiment.submit\u001b[0;34m(self, ibmq_backend, start, stop, ibm_opt_level, ignore_job_limit, wait_time, max_attempts)\u001b[0m\n\u001b[1;32m 433\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThis batch has failed \u001b[39m\u001b[38;5;132;01m{0}\u001b[39;00m\u001b[38;5;124m times and there have been \u001b[39m\u001b[38;5;132;01m{1}\u001b[39;00m\u001b[38;5;124m total failures\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(\n\u001b[1;32m 434\u001b[0m batch_waits, total_waits))\n\u001b[1;32m 435\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mWaiting\u001b[39m\u001b[38;5;124m'\u001b[39m, end\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m--> 436\u001b[0m \u001b[43m_time\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msleep\u001b[49m\u001b[43m(\u001b[49m\u001b[43mwait_time\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 437\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 438\u001b[0m \u001b[38;5;66;03m# Checkpoint calibration and job id 
data\u001b[39;00m\n\u001b[1;32m 439\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdisable_checkpointing:\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "# Again, we can checkpoint by passing in dirname\n", "exp2.submit(sim_backend)" @@ -414,7 +475,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Again, the `IBMQExperiment` can be loaded from file if checkpointing is being used. The Qiskit RuntimeJobs are not serialized; however, they can be retrieved from the IBMQ service from their job ids. In order to do this, pass `regen_runtime_jobs=True` and a `provider` to the `from_dir()` call." + "Again, the `IBMQExperiment` can be loaded from file if checkpointing is being used. The Qiskit RuntimeJobs are not serialized; however, they can be retrieved from the IBMQ service from their job ids. In order to do this, pass `regen_jobs=True` and a `service` to the `from_dir()` call." 
] }, { @@ -427,7 +488,7 @@ }, "outputs": [], "source": [ - "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_runtime_jobs=True, provider=provider)" + "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_jobs=True, service=service)" ] }, { @@ -581,7 +642,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.11.5" + "version": "3.12.4" } }, "nbformat": 4, diff --git a/pygsti/extras/devices/experimentaldevice.py b/pygsti/extras/devices/experimentaldevice.py index 9b9bcdefc..3749fe92f 100644 --- a/pygsti/extras/devices/experimentaldevice.py +++ b/pygsti/extras/devices/experimentaldevice.py @@ -63,22 +63,13 @@ def from_qiskit_backend(cls, backend, gate_mapping=None): ------- Initialized ExperimentalDevice """ - try: - props = backend.properties().to_dict() - - qubits = [f'Q{i}' for i in range(len(props['qubits']))] - # Technically we could read all the gates off and create the actual native pspec - # This is not how devices functioned in the past, but maybe it is useful. Thoughts? 
- edges = [[f'Q{i}' for i in g['qubits']] for g in props['gates'] if g['gate'] == 'cx'] - graph = _QubitGraph(qubits, initial_edges=edges) - except AttributeError: - # Probably the simulator backend 32 qubits max with arbitrary connectivity - qubits = [f'Q{i}' for i in range(32)] - edges = [] - for i in range(32): - for j in range(i+1, 32): - edges.extend([(f'Q{i}', f'Q{j}'), (f'Q{j}', f'Q{i}')]) - graph = _QubitGraph(qubits, initial_edges=edges) + # Get qubits + num_qubits = backend.num_qubits + qubits = [f'Q{i}' for i in range(num_qubits)] + + # Get qubit connectivity + edges = [[qubits[edge[0]], qubits[edge[1]]] for edge in backend.coupling_map] + graph = _QubitGraph(qubits, initial_edges=edges) return cls(qubits, graph, gate_mapping) diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 225a497be..f7b15f838 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -105,7 +105,7 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, # Don't override checkpoint during this construction ret = cls(edesign, None, disable_checkpointing=True) - with open(p / 'ibmqexperiment/meta.json', 'r') as f: + with open(p / 'ibmqexperiment' / 'meta.json', 'r') as f: from_json = _json.load(f) ret.__dict__.update(from_json) @@ -119,7 +119,7 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, } for key, (attr, def_val) in key_attr_map.items(): - with open(p / f'ibmqexperiment/{key}.pkl', 'rb') as f: + with open(p / f'ibmqexperiment' / '{key}.pkl', 'rb') as f: try: setattr(ret, attr, _pickle.load(f)) except: @@ -142,6 +142,7 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, ret.qiskit_circuit_batches.append(batch) # Regenerate Qiskit RuntimeJobs + ret.qiskit_isa_circuit_batches = [] # TODO: How to regenerate? Maybe this should be what is saved? 
ret.qjobs = [] if regen_jobs: assert _Sampler is not None, "Could not import qiskit-ibm-runtime, needed for regen_jobs=True" @@ -160,7 +161,7 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True, circuits_per_batch=75, num_shots=1024, seed=None, checkpoint_path=None, disable_checkpointing=False, checkpoint_override=False): - _TreeNode.__init__(self, {}, {}) + _TreeNode.__init__(self, None, None) self.auxfile_types = {} _HasPSpec.__init__(self, pspec) @@ -175,9 +176,10 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.disable_checkpointing = disable_checkpointing # Populated with transpiling to IBMQ with .transpile() self.pygsti_circuit_batches = [] - self.qasm_circuit_batches = [] + self.qasm_circuit_batches = [] # TODO: To be deprecated with direct qiskit support? self.qiskit_circuit_batches = [] # Populated when submitting to IBM Q with .submit() + self.qiskit_isa_circuit_batches = [] self.qjobs = [] self.job_ids = [] self.submit_time_calibration_data = [] @@ -192,8 +194,9 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['data'] = 'reset' # self.processor_spec is handled by _HasPSpec base class self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' - self.auxfile_types['qasm_circuit_batches'] = 'list:json' + self.auxfile_types['qasm_circuit_batches'] = 'list:json' # TODO: To be deprecated with direct qiskit support? 
self.auxfile_types['qiskit_circuit_batches'] = 'none' + self.auxfile_types['qiskit_isa_circuit_batches'] = 'none' self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' if _json_util is not None: @@ -280,7 +283,7 @@ def partial_trace(ordered_target_indices, input_dict): for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] - counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result.get_counts(i))) + counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result[i].data.meas.get_counts())) ds.add_count_dict(circ, counts_data) self.data = _ProtocolData(self.edesign, ds) @@ -288,8 +291,8 @@ def partial_trace(ordered_target_indices, input_dict): if not self.disable_checkpointing: self.data.write(self.checkpoint_path, edesign_already_written=True) - def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, - wait_time=5, max_attempts=10): + def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, + ignore_job_limit=True, wait_time=5, max_attempts=10): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. @@ -310,6 +313,10 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, Batch index to stop submission (exclusive). Defaults to None, which will submit as many jobs as possible given the backend's maximum job limit. + + ibm_opt_level: int, optional + An optimization level to give to Qiskit's `generate_preset_pass_manager`. + Defaults to 0, which is no optimization. ignore_job_limit: bool, optional If True, then stop is set to submit all remaining jobs. 
This is set @@ -336,7 +343,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, #Get the backend version backend_version = ibmq_backend.version - assert backend_version == 2, "IBMQExperiment no longer supports v1 backends due to their deprecation by IBM" + assert backend_version >= 2, "IBMQExperiment no longer supports v1 backends due to their deprecation by IBM" total_waits = 0 self.qjobs = [] if self.qjobs is None else self.qjobs @@ -355,6 +362,10 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, stop = min(start + allowed_jobs, stop) + pm = _pass_manager(backend=ibmq_backend, optimization_level=ibm_opt_level) + ibmq_session = _Session(backend = ibmq_backend) + sampler = _Sampler(session=ibmq_session) + for batch_idx, batch in enumerate(self.qiskit_circuit_batches): if batch_idx < start or batch_idx >= stop: continue @@ -366,17 +377,19 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, try: #If submitting to a real device, get calibration data try: - # TODO: I think Noah ran into issues here - if not ibmq_backend.simulator: - backend_properties = ibmq_backend.properties() - self.submit_time_calibration_data.append(backend_properties.to_dict()) + backend_properties = ibmq_backend.properties() + self.submit_time_calibration_data.append(backend_properties.to_dict()) except AttributeError: - # We can't get the properties or check if simulator - # Possible this is a fake backend, append empty submit data + # We can't get the properties + # Likely this is a fake backend/simulator, append empty submit data self.submit_time_calibration_data.append({}) - # Newer qiskit-ibm-provider can take list of Qiskit circuits directly - self.qjobs.append(ibmq_backend.run(batch, shots = self.num_shots)) + # Run pass manager + isa_circs = pm.run(batch) + self.qiskit_isa_circuit_batches.append(isa_circs) + + # Submit job + self.qjobs.append(sampler.run(batch, shots = self.num_shots)) status = 
self.qjobs[-1].status() initializing = True @@ -404,7 +417,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, print(f' - Failed to get queue position for batch {batch_idx + 1}') submit_status = True - except Exception as ex: + except Exception as ex: # TODO: Revamp this template = "An exception of type {0} occurred. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) print(message) diff --git a/test/unit/extras/ibmq/test_ibmqexperiment.py b/test/unit/extras/ibmq/test_ibmqexperiment.py index 71d26f8b5..badbd39ff 100644 --- a/test/unit/extras/ibmq/test_ibmqexperiment.py +++ b/test/unit/extras/ibmq/test_ibmqexperiment.py @@ -1,9 +1,3 @@ -import pytest -import shutil - -try: import qiskit -except: qiskit = None - import pygsti from pygsti.extras.devices.experimentaldevice import ExperimentalDevice from pygsti.extras import ibmq @@ -68,8 +62,3 @@ def test_submit(self): exp1.submit(backend, max_attempts=1) assert len(exp1.qjobs) == len(exp1.qasm_circuit_batches) - - - - - From 752fd8c5e94b5751b5ad19c8277f8902e53a3b6a Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 16 Jul 2024 01:38:49 -0700 Subject: [PATCH 22/32] New submission works, testing retrieval. 
--- .../objects/advanced/IBMQExperiment.ipynb | 350 ++++++++++++------ pygsti/extras/ibmq/ibmqexperiment.py | 152 +++++--- 2 files changed, 342 insertions(+), 160 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 04dadbcbc..60e60fad8 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -124,15 +124,33 @@ "outputs": [], "source": [ "# Can use a physical device...\n", - "backend = service.backend('ibm_sherbrooke')\n", + "#backend = service.backend('ibm_sherbrooke')\n", "\n", - "# Can also ask for the least busy device\n", - "#backend = service.least_busy()\n", + "# Can also ask for the least busy physical device\n", + "backend = service.least_busy()\n", "\n", "# ... or can use a simulated fake backend\n", "sim_backend = FakeSherbrooke()" ] }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], + "source": [ + "# Let's see which backend is the least busy!\n", + "print(backend)" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -151,7 +169,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": { "tags": [ "nbval-skip" @@ -163,7 +181,7 @@ "#device = ExperimentalDevice.from_legacy_device('ibmq_bogota')\n", "\n", "# Using the active backend to pull current device specification\n", - "device = ExperimentalDevice.from_qiskit_backend(sim_backend)" + "device = ExperimentalDevice.from_qiskit_backend(backend)" ] }, { @@ -198,24 +216,43 @@ "name": "stdout", "output_type": "stream", "text": [ - "total circuits: 480\n", - "full total circuits: 480\n" + "Selected qubits ['Q18', 'Q0', 'Q14', 'Q1'] for device ibm_osaka\n", + "total circuits: 160\n", + "full total circuits: 160\n" ] } ], "source": [ "#circuit design 
parameters\n", - "depths = [0, 2, 4, 16, 32, 64]\n", - "circuits_per_shape = 20\n", + "depths = [0, 2, 4, 16]\n", + "circuits_per_shape = 10\n", "\n", "# dict setting the circuit widths (# qubits) you want to probe \n", "# and the qubits you want to use at each width\n", "# You can use device.graph.edges() to make sure these are connected components\n", + "def get_N_connected_qubits(device, N, starting_qubits = None):\n", + " if starting_qubits is None:\n", + " starting_qubits = []\n", + " qubits = set(starting_qubits)\n", + "\n", + " for edge in device.graph.edges():\n", + " # Check if connected, and add if so\n", + " if not len(qubits) or edge[0] in qubits or edge[1] in qubits:\n", + " qubits.update(edge)\n", + " \n", + " # Check if we can break\n", + " if len(qubits) >= N:\n", + " break\n", + " \n", + " return list(qubits)[:N]\n", + "\n", + "max_width = 4\n", + "selected_qubits = get_N_connected_qubits(device, max_width)\n", + "print(f\"Selected qubits {selected_qubits} for device {backend.name}\")\n", + "\n", "qubit_lists = {}\n", - "qubit_lists[1] = [('Q0',),]\n", - "qubit_lists[2] = [('Q0', 'Q1'),]\n", - "qubit_lists[3] = [('Q0', 'Q1', 'Q2'),]\n", - "qubit_lists[4] = [('Q0', 'Q1', 'Q2', 'Q3')]\n", + "for i in range(max_width):\n", + " qubit_lists[i] = [tuple(selected_qubits[:i+1])]\n", "\n", "widths = list(qubit_lists.keys())\n", "\n", @@ -252,30 +289,22 @@ "name": "stdout", "output_type": "stream", "text": [ - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 488082\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 488102\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 488122\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 488142\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 488162\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 488182\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 937591\n", - "- 
Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 937611\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 937631\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 937651\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 937671\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 937691\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 551098\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 551118\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 551138\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 551158\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 551178\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 551198\n", - "- Sampling 20 circuits at MRB length 0 (1 of 6 depths) with seed 754211\n", - "- Sampling 20 circuits at MRB length 2 (2 of 6 depths) with seed 754231\n", - "- Sampling 20 circuits at MRB length 4 (3 of 6 depths) with seed 754251\n", - "- Sampling 20 circuits at MRB length 16 (4 of 6 depths) with seed 754271\n", - "- Sampling 20 circuits at MRB length 32 (5 of 6 depths) with seed 754291\n", - "- Sampling 20 circuits at MRB length 64 (6 of 6 depths) with seed 754311\n" + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 311034\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 311044\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 311054\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 311064\n", + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 761560\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 761570\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 761580\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 761590\n", + "- Sampling 10 
circuits at MRB length 0 (1 of 4 depths) with seed 84715\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 84725\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 84735\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 84745\n", + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 117582\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 117592\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 117602\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 117612\n" ] } ], @@ -300,12 +329,12 @@ "## Running on IBM Q\n", "We're now ready to run on the IBM Q processor. We do this using an `IBMQExperiment` object.\n", "\n", - "We can enable checkpointing for `IBMQExperiment` objects by providing a path. This is the default and is recommended!" + "We can enable checkpointing for `IBMQExperiment` objects by providing a path. This is the default and is recommended! We are also overriding old checkpoints here to ensure we have a clean starting point." 
] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 19, "metadata": { "tags": [ "nbval-skip" @@ -313,7 +342,8 @@ }, "outputs": [], "source": [ - "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201, checkpoint_path='test_ibmq')" + "exp = ibmq.IBMQExperiment(combined_edesign, pspec, circuits_per_batch=75, num_shots=1024, seed=20231201,\n", + " checkpoint_path='test_ibmq', checkpoint_override=True)" ] }, { @@ -325,51 +355,57 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 20, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Transpiling circuit batch 1/7\n", - "Transpiling circuit batch 2/7\n", - "Transpiling circuit batch 3/7\n", - "Transpiling circuit batch 4/7\n", - "Transpiling circuit batch 5/7\n", - "Transpiling circuit batch 6/7\n", - "Transpiling circuit batch 7/7\n" + "Transpiling circuit batch 1/3\n", + "Transpiling circuit batch 2/3\n", + "Transpiling circuit batch 3/3\n" ] } ], "source": [ - "# Provide the directory name to enable transpilation checkpointing\n", - "exp.transpile()" + "# There is a Qiskit transpilation step than can be fully controlled with this argument\n", + "qiskit_pass_kwargs = {\n", + " 'optimization_level': 0, # Required (0 is default if not given)\n", + " 'seed_transpiler': 12345, # Can provide a seed for any stochastic part (provided by default if not given)\n", + " 'basis_gates': backend.operation_names # We want to make sure we compile to our backend gates\n", + "}\n", + "\n", + "# There is a pyGSTi -> QASM -> Qiskit step that can be fully controlled with this argument\n", + "qasm_convert_kwargs = {\n", + " 'num_qubits': pspec.num_qubits, # This is the default is not given. 
May want to change this if using a simulated backend (127 qubits not feasible)\n", + " 'standard_gates_version': 'x-sx-rz', # This is the default if not given\n", + "}\n", + "\n", + "exp.transpile(backend, qiskit_pass_kwargs, qasm_convert_kwargs)" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 21, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "Already completed transpilation of 4/7 circuit batches\n", - "Transpiling circuit batch 5/7\n", - "Transpiling circuit batch 6/7\n", - "Transpiling circuit batch 7/7\n" + "Already completed transpilation of 2/3 circuit batches\n", + "Transpiling circuit batch 3/3\n" ] } ], "source": [ "# We can simulate having been interrupted by removing the last few transpiled batches\n", - "del exp.qasm_circuit_batches[4:]\n", - "del exp.qiskit_circuit_batches[4:]\n", + "del exp.qiskit_isa_circuit_batches[2:]\n", "\n", "# And now transpilation should only redo the missing batches\n", - "exp.transpile()\n" + "# We don't need to reprovide the options as they are saved by the first transpile call\n", + "exp.transpile(backend)" ] }, { @@ -383,11 +419,21 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 23, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transpiling circuit batch 1/3\n", + "Transpiling circuit batch 2/3\n", + "Transpiling circuit batch 3/3\n" + ] + } + ], "source": [ - "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_circs=True)" + "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_circs=True, ibmq_backend=backend)" ] }, { @@ -399,7 +445,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 24, "metadata": { "tags": [ "nbval-skip" @@ -411,44 +457,22 @@ "output_type": "stream", "text": [ "Submitting batch 1\n", - "An exception of type IBMInputValueError occurred. 
Arguments:\n", - "('The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.',)\n", - "Failed to get machine status!\n", - "An exception of type IBMInputValueError occurred. Arguments:\n", - "(\"'FakeSherbrooke' object has no attribute 'status'\",)\n", - "This batch has failed 1 times and there have been 1 total failures\n", - "WaitingAn exception of type IBMInputValueError occurred. Arguments:\n", - "('The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.',)\n", - "Failed to get machine status!\n", - "An exception of type IBMInputValueError occurred. 
Arguments:\n", - "(\"'FakeSherbrooke' object has no attribute 'status'\",)\n", - "This batch has failed 2 times and there have been 2 total failures\n", - "Waiting" - ] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mIBMInputValueError\u001b[0m Traceback (most recent call last)", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:392\u001b[0m, in \u001b[0;36mIBMQExperiment.submit\u001b[0;34m(self, ibmq_backend, start, stop, ibm_opt_level, ignore_job_limit, wait_time, max_attempts)\u001b[0m\n\u001b[1;32m 391\u001b[0m \u001b[38;5;66;03m# Submit job\u001b[39;00m\n\u001b[0;32m--> 392\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs\u001b[38;5;241m.\u001b[39mappend(\u001b[43msampler\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43mbatch\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mshots\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum_shots\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 394\u001b[0m status \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m]\u001b[38;5;241m.\u001b[39mstatus()\n", - "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/sampler.py:151\u001b[0m, in \u001b[0;36mSamplerV2.run\u001b[0;34m(self, pubs, shots)\u001b[0m\n\u001b[1;32m 149\u001b[0m validate_classical_registers(coerced_pubs)\n\u001b[0;32m--> 151\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m 
\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\u001b[43mcoerced_pubs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/base_primitive.py:135\u001b[0m, in \u001b[0;36mBasePrimitiveV2._run\u001b[0;34m(self, pubs)\u001b[0m\n\u001b[1;32m 134\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mgetattr\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mtarget\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;28;01mNone\u001b[39;00m) \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m is_simulator(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend):\n\u001b[0;32m--> 135\u001b[0m \u001b[43mvalidate_isa_circuits\u001b[49m\u001b[43m(\u001b[49m\u001b[43m[\u001b[49m\u001b[43mpub\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcircuit\u001b[49m\u001b[43m]\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_backend\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtarget\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_backend, IBMBackend):\n", - "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/utils/validations.py:89\u001b[0m, in \u001b[0;36mvalidate_isa_circuits\u001b[0;34m(circuits, target)\u001b[0m\n\u001b[1;32m 88\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m message:\n\u001b[0;32m---> 89\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m IBMInputValueError(\n\u001b[1;32m 90\u001b[0m message\n\u001b[1;32m 91\u001b[0m \u001b[38;5;241m+\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m Circuits that do not match the target 
hardware definition are no longer \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 92\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msupported after March 4, 2024. See the transpilation documentation \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 93\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124m(https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 94\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mthe primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 95\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mthis coupled with operator transformations.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 96\u001b[0m )\n", - "\u001b[0;31mIBMInputValueError\u001b[0m: 'The instruction cx on qubits (1, 0) is not supported by the target system. Circuits that do not match the target hardware definition are no longer supported after March 4, 2024. 
See the transpilation documentation (https://docs.quantum.ibm.com/transpile) for instructions to transform circuits and the primitive examples (https://docs.quantum.ibm.com/run/primitives-examples) to see this coupled with operator transformations.'", - "\nDuring handling of the above exception, another exception occurred:\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[19], line 2\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[38;5;66;03m# Again, we can checkpoint by passing in dirname\u001b[39;00m\n\u001b[0;32m----> 2\u001b[0m \u001b[43mexp2\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msubmit\u001b[49m\u001b[43m(\u001b[49m\u001b[43msim_backend\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:436\u001b[0m, in \u001b[0;36mIBMQExperiment.submit\u001b[0;34m(self, ibmq_backend, start, stop, ibm_opt_level, ignore_job_limit, wait_time, max_attempts)\u001b[0m\n\u001b[1;32m 433\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mThis batch has failed \u001b[39m\u001b[38;5;132;01m{0}\u001b[39;00m\u001b[38;5;124m times and there have been \u001b[39m\u001b[38;5;132;01m{1}\u001b[39;00m\u001b[38;5;124m total failures\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;241m.\u001b[39mformat(\n\u001b[1;32m 434\u001b[0m batch_waits, total_waits))\n\u001b[1;32m 435\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mWaiting\u001b[39m\u001b[38;5;124m'\u001b[39m, end\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[0;32m--> 436\u001b[0m \u001b[43m_time\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msleep\u001b[49m\u001b[43m(\u001b[49m\u001b[43mwait_time\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 437\u001b[0m \u001b[38;5;28;01mfinally\u001b[39;00m:\n\u001b[1;32m 438\u001b[0m \u001b[38;5;66;03m# Checkpoint calibration and job id 
data\u001b[39;00m\n\u001b[1;32m 439\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdisable_checkpointing:\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + " - Job ID is ctb2we1c2sk0008c3r0g\n", + " - Failed to get queue position for batch 1\n", + " (because queue position not available in RuntimeJobV2)\n", + "Submitting batch 2\n", + " - Job ID is ctb2weh0jacg008w6b3g\n", + " - Failed to get queue position for batch 2\n", + " (because queue position not available in RuntimeJobV2)\n", + "Submitting batch 3\n", + " - Job ID is ctb2wf10jacg008w6b40\n", + " - Failed to get queue position for batch 3\n", + " (because queue position not available in RuntimeJobV2)\n" ] } ], "source": [ - "# Again, we can checkpoint by passing in dirname\n", - "exp2.submit(sim_backend)" + "exp2.submit(backend)" ] }, { @@ -460,13 +484,29 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 25, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Batch 1: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n", + "Batch 2: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n", + "Batch 3: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n" + ] + } + ], "source": [ "exp2.monitor()" ] @@ -480,26 +520,52 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 26, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Loading job 1/3...\n", + "Loading job 2/3...\n", + "Loading job 3/3...\n" + ] + } + ], "source": [ "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_jobs=True, service=service)" ] }, { 
"cell_type": "code", - "execution_count": null, + "execution_count": 29, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Batch 1: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n", + "Batch 2: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n", + "Batch 3: QUEUED\n", + " - Unable to retrieve queue position\n", + " (because queue position not available in RuntimeJobV2)\n" + ] + } + ], "source": [ "exp3.monitor()" ] @@ -513,13 +579,89 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 31, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "{'timestamps': {'created': '2024-07-16T08:24:24.760197Z',\n", + " 'finished': None,\n", + " 'running': None},\n", + " 'bss': {'seconds': 0},\n", + " 'usage': {'quantum_seconds': 0, 'seconds': 0},\n", + " 'qiskit_version': 'qiskit_ibm_runtime-0.25.0,qiskit-1.1.1*',\n", + " 'estimated_start_time': '2024-07-16T08:34:59.956Z',\n", + " 'estimated_completion_time': '2024-07-16T08:38:20.956Z',\n", + " 'caller': 'qiskit_ibm_runtime~sampler.py'}" + ] + }, + "execution_count": 31, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "exp3.qjobs[0].metrics()" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": {}, + "outputs": [], + "source": [ + "import datetime" + ] + }, + { + "cell_type": "code", + "execution_count": 35, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "2024-07-16 01:34:59.956000-07:00\n" + ] + } + ], + "source": [ + "print(datetime.datetime.fromisoformat('2024-07-16T08:34:59.956Z').astimezone())" + ] + }, + { + "cell_type": "code", + "execution_count": 28, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + 
"output_type": "stream", + "text": [ + "Querying IBMQ for results objects for batch 1...\n" + ] + }, + { + "ename": "KeyboardInterrupt", + "evalue": "", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[28], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mexp3\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mretrieve_results\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:281\u001b[0m, in \u001b[0;36mIBMQExperiment.retrieve_results\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 279\u001b[0m qjob \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs[exp_idx]\n\u001b[1;32m 280\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQuerying IBMQ for results objects for batch \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mexp_idx\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m...\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 281\u001b[0m batch_result \u001b[38;5;241m=\u001b[39m \u001b[43mqjob\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mresult\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 282\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbatch_results\u001b[38;5;241m.\u001b[39mappend(batch_result\u001b[38;5;241m.\u001b[39mto_dict())\n\u001b[1;32m 284\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdisable_checkpointing:\n", + "File 
\u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/runtime_job_v2.py:136\u001b[0m, in \u001b[0;36mRuntimeJobV2.result\u001b[0;34m(self, timeout, decoder)\u001b[0m\n\u001b[1;32m 121\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Return the results of the job.\u001b[39;00m\n\u001b[1;32m 122\u001b[0m \n\u001b[1;32m 123\u001b[0m \u001b[38;5;124;03mArgs:\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[38;5;124;03m RuntimeInvalidStateError: If the job was cancelled, and attempting to retrieve result.\u001b[39;00m\n\u001b[1;32m 134\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 135\u001b[0m _decoder \u001b[38;5;241m=\u001b[39m decoder \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_final_result_decoder\n\u001b[0;32m--> 136\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwait_for_final_state\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_status \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mERROR\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 138\u001b[0m error_message \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reason \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reason \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_error_message\n", + "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/runtime_job_v2.py:257\u001b[0m, in \u001b[0;36mRuntimeJobV2.wait_for_final_state\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 253\u001b[0m 
\u001b[38;5;28;01mif\u001b[39;00m timeout \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m elapsed_time \u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m timeout:\n\u001b[1;32m 254\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m RuntimeJobTimeoutError(\n\u001b[1;32m 255\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTimed out waiting for job to complete after \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mtimeout\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m secs.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 256\u001b[0m )\n\u001b[0;32m--> 257\u001b[0m \u001b[43mtime\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msleep\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0.1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 258\u001b[0m status \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstatus()\n\u001b[1;32m 259\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m futures\u001b[38;5;241m.\u001b[39mTimeoutError:\n", + "\u001b[0;31mKeyboardInterrupt\u001b[0m: " + ] + } + ], "source": [ "exp3.retrieve_results()" ] diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index f7b15f838..783db62c5 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -8,6 +8,7 @@ # http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory. 
#*************************************************************************************************** +from datetime import datetime as _datetime import json as _json import numpy as _np import os as _os @@ -17,13 +18,17 @@ import warnings as _warnings # Try to load Qiskit -try: import qiskit as _qiskit -except: _qiskit = None +try: + import qiskit as _qiskit + from qiskit.providers import JobStatus as _JobStatus +except: + _qiskit = None # Try to load IBM Runtime try: from qiskit_ibm_runtime import SamplerV2 as _Sampler from qiskit_ibm_runtime import Session as _Session + from qiskit_ibm_runtime import RuntimeJobV2 as _RuntimeJobV2 from qiskit.transpiler.preset_passmanagers import generate_preset_pass_manager as _pass_manager except: _Sampler = None @@ -52,8 +57,8 @@ class IBMQExperiment(_TreeNode, _HasPSpec): """ @classmethod - def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, - service=None, new_checkpoint_path=None): + def from_dir(cls, dirname, regen_circs=False, ibmq_backend=None, + regen_jobs=False, service=None, new_checkpoint_path=None): """ Initialize a new IBMQExperiment object from `dirname`. @@ -67,6 +72,9 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, OpenQASM strings. Defaults to False. You should set this to True if you would like to call submit(). + ibmq_backend: + IBMQ backend to use for transpilation (if regen_circs is True) + regen_jobs: bool, optional Whether to recreate the RuntimeJobs from IBMQ based on the job ides. Defaults to False. You should set this to True if you would like to @@ -74,7 +82,7 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, service: QiskitRuntimeService Service used to retrieve RuntimeJobs from IBMQ based on job_ids - (if regen_runtime_jobs is True). + (if regen_jobs is True). 
new_checkpoint_path: str, optional A string for the path to use for writing intermediate checkpoint @@ -134,22 +142,18 @@ def from_dir(cls, dirname, regen_circs=False, regen_jobs=False, pass # Regenerate Qiskit circuits - ret.qiskit_circuit_batches = [] + ret.qiskit_isa_circuit_batches = [] if regen_circs: assert _qiskit is not None, "Could not import qiskit, needed for regen_circs=True" - for batch_strs in ret.qasm_circuit_batches: - batch = [_qiskit.QuantumCircuit.from_qasm_str(bs) for bs in batch_strs] - ret.qiskit_circuit_batches.append(batch) + assert ibmq_backend is not None, "No backend specified, could not transpile circuits" + ret.transpile(ibmq_backend) # Regenerate Qiskit RuntimeJobs - ret.qiskit_isa_circuit_batches = [] # TODO: How to regenerate? Maybe this should be what is saved? ret.qjobs = [] if regen_jobs: assert _Sampler is not None, "Could not import qiskit-ibm-runtime, needed for regen_jobs=True" - if service is None: - _warnings.warn("No service specified, cannot retrieve IBM jobs") - else: - ret._retrieve_jobs(service=service) + assert service is not None, "No service specified, cannot retrieve IBM jobs" + ret._retrieve_jobs(service=service) # Update checkpoint path if requested if new_checkpoint_path is not None: @@ -175,11 +179,11 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.checkpoint_path = str(checkpoint_path) if checkpoint_path is not None else 'ibmqexperiment_checkpoint' self.disable_checkpointing = disable_checkpointing # Populated with transpiling to IBMQ with .transpile() + self.qasm_convert_kwargs = {} + self.qiskit_pass_kwargs = {} self.pygsti_circuit_batches = [] - self.qasm_circuit_batches = [] # TODO: To be deprecated with direct qiskit support? 
- self.qiskit_circuit_batches = [] - # Populated when submitting to IBM Q with .submit() self.qiskit_isa_circuit_batches = [] + # Populated when submitting to IBM Q with .submit() self.qjobs = [] self.job_ids = [] self.submit_time_calibration_data = [] @@ -194,8 +198,6 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['data'] = 'reset' # self.processor_spec is handled by _HasPSpec base class self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' - self.auxfile_types['qasm_circuit_batches'] = 'list:json' # TODO: To be deprecated with direct qiskit support? - self.auxfile_types['qiskit_circuit_batches'] = 'none' self.auxfile_types['qiskit_isa_circuit_batches'] = 'none' self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' @@ -220,21 +222,26 @@ def monitor(self): """ Queries IBM Q for the status of the jobs. """ + assert _qiskit is not None, "Could not import qiskit, needed for monitor()" assert len(self.qjobs) == len(self.job_ids), \ "Mismatch between jobs and job ids! If loading from file, use the regen_jobs=True option in from_dir()." 
for counter, qjob in enumerate(self.qjobs): status = qjob.status() print(f"Batch {counter + 1}: {status}") - if status.name == 'QUEUED': - info = qjob.queue_info() - if info is not None: - print(f' - Queue position is {info.position}') - else: + if status in [_JobStatus.QUEUED, 'QUEUED']: + try: + print(f' - Queue position is {qjob.queue_position(True)}') + except Exception: print(' - Unable to retrieve queue position') + if isinstance(self.qjobs[-1], _RuntimeJobV2): + print(' (because queue position not available in RuntimeJobV2)') + metrics = qjob.metrics() + start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) + print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') # Print unsubmitted for any entries in qobj but not qjob - for counter in range(len(self.qjobs), len(self.qiskit_circuit_batches)): + for counter in range(len(self.qjobs), len(self.qiskit_isa_circuit_batches)): print(f"Batch {counter + 1}: NOT SUBMITTED") def retrieve_results(self): @@ -291,8 +298,7 @@ def partial_trace(ordered_target_indices, input_dict): if not self.disable_checkpointing: self.data.write(self.checkpoint_path, edesign_already_written=True) - def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, - ignore_job_limit=True, wait_time=5, max_attempts=10): + def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wait_time=5, max_attempts=10): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. @@ -313,10 +319,6 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, Batch index to stop submission (exclusive). Defaults to None, which will submit as many jobs as possible given the backend's maximum job limit. - - ibm_opt_level: int, optional - An optimization level to give to Qiskit's `generate_preset_pass_manager`. - Defaults to 0, which is no optimization. 
ignore_job_limit: bool, optional If True, then stop is set to submit all remaining jobs. This is set @@ -337,7 +339,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, assert _qiskit is not None, "Could not import qiskit, needed for submit()" assert _Sampler is not None, "Could not import qiskit-ibm-runtime, needed for submit()" - assert len(self.qiskit_circuit_batches) == len(self.pygsti_circuit_batches), \ + assert len(self.qiskit_isa_circuit_batches) == len(self.pygsti_circuit_batches), \ "Transpilation missing! Either run .transpile() first, or if loading from file, " + \ "use the regen_qiskit_circs=True option in from_dir()." @@ -353,7 +355,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, if start is None: start = len(self.qjobs) - stop = len(self.qiskit_circuit_batches) if stop is None else min(stop, len(self.qiskit_circuit_batches)) + stop = len(self.qiskit_isa_circuit_batches) if stop is None else min(stop, len(self.qiskit_isa_circuit_batches)) if not ignore_job_limit: job_limit = ibmq_backend.job_limit() allowed_jobs = job_limit.maximum_jobs - job_limit.active_jobs @@ -362,11 +364,10 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, stop = min(start + allowed_jobs, stop) - pm = _pass_manager(backend=ibmq_backend, optimization_level=ibm_opt_level) ibmq_session = _Session(backend = ibmq_backend) sampler = _Sampler(session=ibmq_session) - for batch_idx, batch in enumerate(self.qiskit_circuit_batches): + for batch_idx, batch in enumerate(self.qiskit_isa_circuit_batches): if batch_idx < start or batch_idx >= stop: continue @@ -383,10 +384,6 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, # We can't get the properties # Likely this is a fake backend/simulator, append empty submit data self.submit_time_calibration_data.append({}) - - # Run pass manager - isa_circs = pm.run(batch) - self.qiskit_isa_circuit_batches.append(isa_circs) # Submit job 
self.qjobs.append(sampler.run(batch, shots = self.num_shots)) @@ -394,8 +391,8 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, status = self.qjobs[-1].status() initializing = True initializing_steps = 0 - while initializing: - if status.name == 'INITIALIZING' or status.name == 'VALIDATING': + while initializing and initializing_steps < max_attempts: + if status in [_JobStatus.INITIALIZING, "INITIALIZING", _JobStatus.VALIDATING, "VALIDATING"]: status = self.qjobs[-1].status() print(f' - {status} (query {initializing_steps})') _time.sleep(wait_time) @@ -407,17 +404,24 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, job_id = self.qjobs[-1].job_id() print(f' - Job ID is {job_id}') self.job_ids.append(job_id) - except: + except Exception: + print(' - Failed to get job_id.') self.job_ids.append(None) try: - print(f' - Queue position is {self.qjobs[-1].queue_info().position}') + print(f' - Queue position is {self.qjobs[-1].queue_position()}') except: print(f' - Failed to get queue position for batch {batch_idx + 1}') + if isinstance(self.qjobs[-1], _RuntimeJobV2): + print(' (because queue position not available in RuntimeJobV2)') + metrics = self.qjobs[-1].metrics() + start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) + print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') + submit_status = True - except Exception as ex: # TODO: Revamp this + except Exception as ex: template = "An exception of type {0} occurred. 
Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) print(message) @@ -442,14 +446,50 @@ def submit(self, ibmq_backend, start=None, stop=None, ibm_opt_level=0, if submit_status is False: raise RuntimeError("Ran out of max attempts and job was still not submitted successfully") - def transpile(self): + def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=None): """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. + + Parameters + ---------- + ibmq_backend: + IBM backend to use during Qiskit transpilation + + opt_level: int, optional + Optimization level for Qiskit `generate_preset_pass_manager`. + + qiskit_pass_kwargs: dict, optional + Additional kwargs to pass in to `generate_preset_pass_manager`. + If not defined, the default is {'seed_transpiler': self.seed, 'optimization_level': 0} + Note that "optimization_level" is a required argument to the pass manager. + + qasm_convert_kwargs: dict, optional + Additional kwargs to pass in to `Circuit.convert_to_openqasm`. 
+ If not defined, the default is {'num_qubits': self.processor_spec.num_qubits, + 'standard_gates_version': 'x-sx-rz'} """ circuits = self.edesign.all_circuits_needing_data.copy() num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) + + if qiskit_pass_kwargs is None: + qiskit_pass_kwargs = {} + for k,v in qiskit_pass_kwargs.items(): + if k in self.qiskit_pass_kwargs: + _warnings.warn(f"Overriding option {k} of qiskit_pass_kwargs") + self.qiskit_pass_kwargs[k] = v + self.qiskit_pass_kwargs['seed_transpiler'] = self.qiskit_pass_kwargs.get('seed_transpiler', self.seed) + self.qiskit_pass_kwargs['optimization_level'] = self.qiskit_pass_kwargs.get('optimization_level', 0) + if qasm_convert_kwargs is None: + qasm_convert_kwargs = {} + for k,v in qasm_convert_kwargs.items(): + if k in self.qasm_convert_kwargs: + _warnings.warn(f"Overriding option {k} of qasm_convert_kwargs") + self.qasm_convert_kwargs[k] = v + self.qasm_convert_kwargs['num_qubits'] = self.qasm_convert_kwargs.get('num_qubits', self.processor_spec.num_qubits) + self.qasm_convert_kwargs['standard_gates_version'] = self.qasm_convert_kwargs.get('standard_gates_version', 'x-sx-rz') + if not len(self.pygsti_circuit_batches): - rand_state = _np.random.RandomState(self.seed) + rand_state = _np.random.RandomState(self.seed) # TODO: Should this be a different seed as transpiler? 
if self.randomized_order: if self.remove_duplicates: @@ -466,23 +506,23 @@ def transpile(self): if not self.disable_checkpointing: self._write_checkpoint() - if len(self.qiskit_circuit_batches): - print(f'Already completed transpilation of {len(self.qiskit_circuit_batches)}/{num_batches} circuit batches') + if len(self.qiskit_isa_circuit_batches): + print(f'Already completed transpilation of {len(self.qiskit_isa_circuit_batches)}/{num_batches} circuit batches') - for batch_idx in range(len(self.qiskit_circuit_batches), num_batches): + pm = _pass_manager(backend=ibmq_backend, **self.qiskit_pass_kwargs) + + for batch_idx in range(len(self.qiskit_isa_circuit_batches), num_batches): print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") batch = [] - batch_strs = [] for circ in self.pygsti_circuit_batches[batch_idx]: - pygsti_openqasm_circ = circ.convert_to_openqasm(num_qubits=self.processor_spec.num_qubits, - standard_gates_version='x-sx-rz') - batch_strs.append(pygsti_openqasm_circ) - + # TODO: Replace this with direct to qiskit + pygsti_openqasm_circ = circ.convert_to_openqasm(**self.qasm_convert_kwargs) qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) batch.append(qiskit_qc) - self.qasm_circuit_batches.append(batch_strs) - self.qiskit_circuit_batches.append(batch) + # Run pass manager on batch + isa_circs = pm.run(batch) + self.qiskit_isa_circuit_batches.append(isa_circs) if not self.disable_checkpointing: self._write_checkpoint() From eafa2ab91a489ac1743467092e1fd292fa3d5e40 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 16 Jul 2024 09:43:53 -0700 Subject: [PATCH 23/32] New IBMQExperiment tutorial running. 
--- .../objects/advanced/IBMQExperiment.ipynb | 294 ++---------------- pygsti/extras/ibmq/ibmqexperiment.py | 11 +- 2 files changed, 39 insertions(+), 266 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 60e60fad8..1ccd90206 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -26,7 +26,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { "tags": [] }, @@ -40,7 +40,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -62,7 +62,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -86,28 +86,9 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "[,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# You can list all the available backends to ensure your instance is running properly\n", "service.backends()" @@ -115,7 +96,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -135,17 +116,9 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "\n" - ] - } - ], + 
"outputs": [], "source": [ "# Let's see which backend is the least busy!\n", "print(backend)" @@ -169,7 +142,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -186,7 +159,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": { "tags": [] }, @@ -207,21 +180,11 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selected qubits ['Q18', 'Q0', 'Q14', 'Q1'] for device ibm_osaka\n", - "total circuits: 160\n", - "full total circuits: 160\n" - ] - } - ], + "outputs": [], "source": [ "#circuit design parameters\n", "depths = [0, 2, 4, 16]\n", @@ -270,7 +233,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -280,34 +243,11 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 311034\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 311044\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 311054\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 311064\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 761560\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 761570\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 761580\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 761590\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 84715\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 84725\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 
84735\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 84745\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 117582\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 117592\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 117602\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 117612\n" - ] - } - ], + "outputs": [], "source": [ "edesigns_dict = {}\n", "edesign_index = 1\n", @@ -334,7 +274,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -355,19 +295,9 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Transpiling circuit batch 1/3\n", - "Transpiling circuit batch 2/3\n", - "Transpiling circuit batch 3/3\n" - ] - } - ], + "outputs": [], "source": [ "# There is a Qiskit transpilation step than can be fully controlled with this argument\n", "qiskit_pass_kwargs = {\n", @@ -387,18 +317,9 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Already completed transpilation of 2/3 circuit batches\n", - "Transpiling circuit batch 3/3\n" - ] - } - ], + "outputs": [], "source": [ "# We can simulate having been interrupted by removing the last few transpiled batches\n", "del exp.qiskit_isa_circuit_batches[2:]\n", @@ -419,19 +340,9 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": null, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Transpiling circuit batch 1/3\n", - "Transpiling circuit batch 2/3\n", - "Transpiling circuit batch 3/3\n" - ] - } - ], + "outputs": [], "source": [ "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_circs=True, ibmq_backend=backend)" ] 
@@ -445,32 +356,13 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Submitting batch 1\n", - " - Job ID is ctb2we1c2sk0008c3r0g\n", - " - Failed to get queue position for batch 1\n", - " (because queue position not available in RuntimeJobV2)\n", - "Submitting batch 2\n", - " - Job ID is ctb2weh0jacg008w6b3g\n", - " - Failed to get queue position for batch 2\n", - " (because queue position not available in RuntimeJobV2)\n", - "Submitting batch 3\n", - " - Job ID is ctb2wf10jacg008w6b40\n", - " - Failed to get queue position for batch 3\n", - " (because queue position not available in RuntimeJobV2)\n" - ] - } - ], + "outputs": [], "source": [ "exp2.submit(backend)" ] @@ -484,29 +376,13 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Batch 1: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n", - "Batch 2: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n", - "Batch 3: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n" - ] - } - ], + "outputs": [], "source": [ "exp2.monitor()" ] @@ -520,52 +396,26 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Loading job 1/3...\n", - "Loading job 2/3...\n", - "Loading job 3/3...\n" - ] - } - ], + "outputs": [], "source": [ "exp3 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_jobs=True, service=service)" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 
null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Batch 1: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n", - "Batch 2: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n", - "Batch 3: QUEUED\n", - " - Unable to retrieve queue position\n", - " (because queue position not available in RuntimeJobV2)\n" - ] - } - ], + "outputs": [], "source": [ "exp3.monitor()" ] @@ -579,89 +429,13 @@ }, { "cell_type": "code", - "execution_count": 31, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "{'timestamps': {'created': '2024-07-16T08:24:24.760197Z',\n", - " 'finished': None,\n", - " 'running': None},\n", - " 'bss': {'seconds': 0},\n", - " 'usage': {'quantum_seconds': 0, 'seconds': 0},\n", - " 'qiskit_version': 'qiskit_ibm_runtime-0.25.0,qiskit-1.1.1*',\n", - " 'estimated_start_time': '2024-07-16T08:34:59.956Z',\n", - " 'estimated_completion_time': '2024-07-16T08:38:20.956Z',\n", - " 'caller': 'qiskit_ibm_runtime~sampler.py'}" - ] - }, - "execution_count": 31, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "exp3.qjobs[0].metrics()" - ] - }, - { - "cell_type": "code", - "execution_count": 32, - "metadata": {}, - "outputs": [], - "source": [ - "import datetime" - ] - }, - { - "cell_type": "code", - "execution_count": 35, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2024-07-16 01:34:59.956000-07:00\n" - ] - } - ], - "source": [ - "print(datetime.datetime.fromisoformat('2024-07-16T08:34:59.956Z').astimezone())" - ] - }, - { - "cell_type": "code", - "execution_count": 28, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Querying IBMQ for results objects for batch 1...\n" - 
] - }, - { - "ename": "KeyboardInterrupt", - "evalue": "", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[28], line 1\u001b[0m\n\u001b[0;32m----> 1\u001b[0m \u001b[43mexp3\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mretrieve_results\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/Documents/repos/pyGSTi/pygsti/extras/ibmq/ibmqexperiment.py:281\u001b[0m, in \u001b[0;36mIBMQExperiment.retrieve_results\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 279\u001b[0m qjob \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mqjobs[exp_idx]\n\u001b[1;32m 280\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mQuerying IBMQ for results objects for batch \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mexp_idx\u001b[38;5;250m \u001b[39m\u001b[38;5;241m+\u001b[39m\u001b[38;5;250m \u001b[39m\u001b[38;5;241m1\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m...\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 281\u001b[0m batch_result \u001b[38;5;241m=\u001b[39m \u001b[43mqjob\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mresult\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 282\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbatch_results\u001b[38;5;241m.\u001b[39mappend(batch_result\u001b[38;5;241m.\u001b[39mto_dict())\n\u001b[1;32m 284\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdisable_checkpointing:\n", - "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/runtime_job_v2.py:136\u001b[0m, in \u001b[0;36mRuntimeJobV2.result\u001b[0;34m(self, timeout, 
decoder)\u001b[0m\n\u001b[1;32m 121\u001b[0m \u001b[38;5;250m\u001b[39m\u001b[38;5;124;03m\"\"\"Return the results of the job.\u001b[39;00m\n\u001b[1;32m 122\u001b[0m \n\u001b[1;32m 123\u001b[0m \u001b[38;5;124;03mArgs:\u001b[39;00m\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 133\u001b[0m \u001b[38;5;124;03m RuntimeInvalidStateError: If the job was cancelled, and attempting to retrieve result.\u001b[39;00m\n\u001b[1;32m 134\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m 135\u001b[0m _decoder \u001b[38;5;241m=\u001b[39m decoder \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_final_result_decoder\n\u001b[0;32m--> 136\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mwait_for_final_state\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimeout\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mtimeout\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 137\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_status \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mERROR\u001b[39m\u001b[38;5;124m\"\u001b[39m:\n\u001b[1;32m 138\u001b[0m error_message \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reason \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reason \u001b[38;5;28;01melse\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_error_message\n", - "File \u001b[0;32m/opt/homebrew/Caskroom/miniconda/base/envs/pygsti/lib/python3.12/site-packages/qiskit_ibm_runtime/runtime_job_v2.py:257\u001b[0m, in \u001b[0;36mRuntimeJobV2.wait_for_final_state\u001b[0;34m(self, timeout)\u001b[0m\n\u001b[1;32m 253\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m timeout \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m elapsed_time 
\u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m timeout:\n\u001b[1;32m 254\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m RuntimeJobTimeoutError(\n\u001b[1;32m 255\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mTimed out waiting for job to complete after \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mtimeout\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m secs.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 256\u001b[0m )\n\u001b[0;32m--> 257\u001b[0m \u001b[43mtime\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msleep\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m0.1\u001b[39;49m\u001b[43m)\u001b[49m\n\u001b[1;32m 258\u001b[0m status \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstatus()\n\u001b[1;32m 259\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m futures\u001b[38;5;241m.\u001b[39mTimeoutError:\n", - "\u001b[0;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "exp3.retrieve_results()" ] diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 783db62c5..77938c863 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -198,16 +198,15 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['data'] = 'reset' # self.processor_spec is handled by _HasPSpec base class self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' - self.auxfile_types['qiskit_isa_circuit_batches'] = 'none' + self.auxfile_types['qiskit_isa_circuit_batches'] = 'none' # TODO: Fix this self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' + self.auxfile_types['batch_results'] = 'pickle' # TODO: Fix this if _json_util is not None: self.auxfile_types['submit_time_calibration_data'] = 'list:json' - self.auxfile_types['batch_results'] = 'list:json' else: # Fall back to pickles if we do not have bson to deal with datetime.datetime 
self.auxfile_types['submit_time_calibration_data'] = 'pickle' - self.auxfile_types['batch_results'] = 'pickle' if not self.disable_checkpointing: chkpath = _pathlib.Path(self.checkpoint_path) @@ -275,7 +274,7 @@ def partial_trace(ordered_target_indices, input_dict): return output_dict if len(self.batch_results): - print(f'Already retrieved results of {len(self.batch_results)}/{len(self.qiskit_circuit_batches)} circuit batches') + print(f'Already retrieved results of {len(self.batch_results)}/{len(self.qiskit_isa_circuit_batches)} circuit batches') #get results from backend jobs and add to dict ds = _data.DataSet() @@ -283,14 +282,14 @@ def partial_trace(ordered_target_indices, input_dict): qjob = self.qjobs[exp_idx] print(f"Querying IBMQ for results objects for batch {exp_idx + 1}...") batch_result = qjob.result() - self.batch_results.append(batch_result.to_dict()) + self.batch_results.append(batch_result) if not self.disable_checkpointing: self._write_checkpoint() for i, circ in enumerate(self.pygsti_circuit_batches[exp_idx]): ordered_target_indices = [self.processor_spec.qubit_labels.index(q) for q in circ.line_labels] - counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result[i].data.meas.get_counts())) + counts_data = partial_trace(ordered_target_indices, reverse_dict_key_bits(batch_result[i].data.cr.get_counts())) ds.add_count_dict(circ, counts_data) self.data = _ProtocolData(self.edesign, ds) From 316fbb54ffe5f8f460f38465d332361ae96a4458 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 16 Jul 2024 16:04:05 -0700 Subject: [PATCH 24/32] Add QPY as a serialization format. Also used that to save transpilation in IBMQExperiment. 
--- .../objects/advanced/IBMQExperiment.ipynb | 210 +++++++++++++----- pygsti/extras/ibmq/ibmqexperiment.py | 78 +++---- pygsti/io/metadir.py | 18 ++ 3 files changed, 208 insertions(+), 98 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index 1ccd90206..febb1b7ff 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -26,7 +26,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 2, "metadata": { "tags": [] }, @@ -40,7 +40,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 3, "metadata": { "tags": [ "nbval-skip" @@ -62,7 +62,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "metadata": {}, "outputs": [], "source": [ @@ -76,8 +76,12 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, + "execution_count": 5, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, "outputs": [], "source": [ "# Once credentials are saved, the service can be loaded each time:\n", @@ -86,9 +90,32 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 6, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ,\n", + " ]" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# You can list all the available backends to ensure your instance is running properly\n", "service.backends()" @@ -96,7 +123,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 7, "metadata": { "tags": [ "nbval-skip" @@ 
-116,9 +143,21 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 8, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n" + ] + } + ], "source": [ "# Let's see which backend is the least busy!\n", "print(backend)" @@ -142,7 +181,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 9, "metadata": { "tags": [ "nbval-skip" @@ -159,7 +198,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": { "tags": [] }, @@ -180,11 +219,21 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 11, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Selected qubits ['Q2', 'Q0', 'Q1', 'Q14'] for device ibm_kyoto\n", + "total circuits: 160\n", + "full total circuits: 160\n" + ] + } + ], "source": [ "#circuit design parameters\n", "depths = [0, 2, 4, 16]\n", @@ -233,7 +282,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 12, "metadata": {}, "outputs": [], "source": [ @@ -243,11 +292,34 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": { "tags": [] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 183851\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 183861\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 183871\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 183881\n", + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 33439\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 33449\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 33459\n", + "- Sampling 10 circuits at 
MRB length 16 (4 of 4 depths) with seed 33469\n", + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 104322\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 104332\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 104342\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 104352\n", + "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 218927\n", + "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 218937\n", + "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 218947\n", + "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 218957\n" + ] + } + ], "source": [ "edesigns_dict = {}\n", "edesign_index = 1\n", @@ -274,7 +346,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": { "tags": [ "nbval-skip" @@ -295,31 +367,45 @@ }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 15, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Transpiling circuit batch 1/3\n", + "Transpiling circuit batch 2/3\n", + "Transpiling circuit batch 3/3\n" + ] + } + ], "source": [ - "# There is a Qiskit transpilation step than can be fully controlled with this argument\n", - "qiskit_pass_kwargs = {\n", - " 'optimization_level': 0, # Required (0 is default if not given)\n", - " 'seed_transpiler': 12345, # Can provide a seed for any stochastic part (provided by default if not given)\n", - " 'basis_gates': backend.operation_names # We want to make sure we compile to our backend gates\n", - "}\n", - "\n", - "# There is a pyGSTi -> QASM -> Qiskit step that can be fully controlled with this argument\n", - "qasm_convert_kwargs = {\n", - " 'num_qubits': pspec.num_qubits, # This is the default is not given. 
May want to change this if using a simulated backend (127 qubits not feasible)\n", - " 'standard_gates_version': 'x-sx-rz', # This is the default if not given\n", - "}\n", - "\n", - "exp.transpile(backend, qiskit_pass_kwargs, qasm_convert_kwargs)" + "exp.transpile(backend)" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], + "execution_count": 16, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Already completed transpilation of 2/3 circuit batches\n", + "Transpiling circuit batch 3/3\n" + ] + } + ], "source": [ "# We can simulate having been interrupted by removing the last few transpiled batches\n", "del exp.qiskit_isa_circuit_batches[2:]\n", @@ -333,18 +419,20 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "If the `IBMQExperiment` object is lost and needs to be reloaded (i.e. notebook restarts), it can be loaded from file now.\n", - "\n", - "However, the Qiskit circuits are not automatically regenerated from the transpiled QASM during loading for speed. They can (and need to be regenerated if calling `submit()`) by passing in the `regen_circs=True` flag to `from_dir()`." + "If the `IBMQExperiment` object is lost and needs to be reloaded (i.e. notebook restarts), it can be loaded from file now." 
] }, { "cell_type": "code", - "execution_count": null, - "metadata": {}, + "execution_count": 17, + "metadata": { + "tags": [ + "nbval-skip" + ] + }, "outputs": [], "source": [ - "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq', regen_circs=True, ibmq_backend=backend)" + "exp2 = ibmq.IBMQExperiment.from_dir('test_ibmq')" ] }, { @@ -356,13 +444,35 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 18, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Submitting batch 1\n", + " - Job ID is ctbfqy7wwtng0083ep3g\n", + " - Failed to get queue position for batch 1\n", + " (because queue position not available in RuntimeJobV2)\n", + " - Estimated start time: 2024-07-16 16:08:02.218000-07:00 (local timezone)\n", + "Submitting batch 2\n", + " - Job ID is ctbfqyz4cerg008wejp0\n", + " - Failed to get queue position for batch 2\n", + " (because queue position not available in RuntimeJobV2)\n", + " - Estimated start time: 2024-07-16 16:11:23.314000-07:00 (local timezone)\n", + "Submitting batch 3\n", + " - Job ID is ctbfqzqwwtng0083ep40\n", + " - Failed to get queue position for batch 3\n", + " (because queue position not available in RuntimeJobV2)\n", + " - Estimated start time: 2024-07-16 16:14:44.346000-07:00 (local timezone)\n" + ] + } + ], "source": [ "exp2.submit(backend)" ] @@ -558,7 +668,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.12.4" + "version": "3.11.9" } }, "nbformat": 4, diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 77938c863..57682f404 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -57,8 +57,7 @@ class IBMQExperiment(_TreeNode, _HasPSpec): """ @classmethod - def from_dir(cls, dirname, regen_circs=False, ibmq_backend=None, - regen_jobs=False, service=None, new_checkpoint_path=None): + def 
from_dir(cls, dirname, regen_jobs=False, service=None, new_checkpoint_path=None): """ Initialize a new IBMQExperiment object from `dirname`. @@ -67,14 +66,6 @@ def from_dir(cls, dirname, regen_circs=False, ibmq_backend=None, dirname : str The directory name. - regen_circs: bool, optional - Whether to recreate the Qiskit circuits from the transpiled - OpenQASM strings. Defaults to False. You should set this to True - if you would like to call submit(). - - ibmq_backend: - IBMQ backend to use for transpilation (if regen_circs is True) - regen_jobs: bool, optional Whether to recreate the RuntimeJobs from IBMQ based on the job ides. Defaults to False. You should set this to True if you would like to @@ -140,13 +131,6 @@ def from_dir(cls, dirname, regen_circs=False, ibmq_backend=None, ret.data = data except: pass - - # Regenerate Qiskit circuits - ret.qiskit_isa_circuit_batches = [] - if regen_circs: - assert _qiskit is not None, "Could not import qiskit, needed for regen_circs=True" - assert ibmq_backend is not None, "No backend specified, could not transpile circuits" - ret.transpile(ibmq_backend) # Regenerate Qiskit RuntimeJobs ret.qjobs = [] @@ -179,8 +163,6 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.checkpoint_path = str(checkpoint_path) if checkpoint_path is not None else 'ibmqexperiment_checkpoint' self.disable_checkpointing = disable_checkpointing # Populated with transpiling to IBMQ with .transpile() - self.qasm_convert_kwargs = {} - self.qiskit_pass_kwargs = {} self.pygsti_circuit_batches = [] self.qiskit_isa_circuit_batches = [] # Populated when submitting to IBM Q with .submit() @@ -198,7 +180,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['data'] = 'reset' # self.processor_spec is handled by _HasPSpec base class self.auxfile_types['pygsti_circuit_batches'] = 'list:text-circuit-list' - self.auxfile_types['qiskit_isa_circuit_batches'] = 'none' # TODO: Fix 
this + self.auxfile_types['qiskit_isa_circuit_batches'] = 'list:qpy' self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' self.auxfile_types['batch_results'] = 'pickle' # TODO: Fix this @@ -235,9 +217,12 @@ def monitor(self): print(' - Unable to retrieve queue position') if isinstance(self.qjobs[-1], _RuntimeJobV2): print(' (because queue position not available in RuntimeJobV2)') - metrics = qjob.metrics() - start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) - print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') + try: + metrics = qjob.metrics() + start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) + print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') + except Exception: + print(f' - Unable to retrieve estimated start time') # Print unsubmitted for any entries in qobj but not qjob for counter in range(len(self.qjobs), len(self.qiskit_isa_circuit_batches)): @@ -410,31 +395,33 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wai try: print(f' - Queue position is {self.qjobs[-1].queue_position()}') - except: + except Exception: print(f' - Failed to get queue position for batch {batch_idx + 1}') if isinstance(self.qjobs[-1], _RuntimeJobV2): print(' (because queue position not available in RuntimeJobV2)') - metrics = self.qjobs[-1].metrics() - start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) - print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') + try: + metrics = self.qjobs[-1].metrics() + start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) + print(f' - Estimated start time: {start_time.astimezone()} (local timezone)') + except Exception: + print(f' - Unable to retrieve estimated start time') submit_status = True except Exception as ex: - template = "An exception of type {0} occurred. Arguments:\n{1!r}" + template = " An exception of type {0} occurred. 
Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex.args) print(message) try: - print('Machine status is {}.'.format(ibmq_backend.status().status_msg)) + print(' Machine status is {}.'.format(ibmq_backend.status().status_msg)) except Exception as ex1: - print('Failed to get machine status!') - template = "An exception of type {0} occurred. Arguments:\n{1!r}" + print(' Failed to get machine status!') + template = " An exception of type {0} occurred. Arguments:\n{1!r}" message = template.format(type(ex).__name__, ex1.args) print(message) total_waits += 1 batch_waits += 1 - print("This batch has failed {0} times and there have been {1} total failures".format( - batch_waits, total_waits)) + print(f"This batch has failed {batch_waits} times and there have been {total_waits} total failures") print('Waiting', end='') _time.sleep(wait_time) finally: @@ -458,7 +445,8 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N qiskit_pass_kwargs: dict, optional Additional kwargs to pass in to `generate_preset_pass_manager`. - If not defined, the default is {'seed_transpiler': self.seed, 'optimization_level': 0} + If not defined, the default is {'seed_transpiler': self.seed, 'optimization_level': 0, + 'basis_gates': ibmq_backend.operation_names} Note that "optimization_level" is a required argument to the pass manager. 
qasm_convert_kwargs: dict, optional @@ -471,21 +459,14 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N if qiskit_pass_kwargs is None: qiskit_pass_kwargs = {} - for k,v in qiskit_pass_kwargs.items(): - if k in self.qiskit_pass_kwargs: - _warnings.warn(f"Overriding option {k} of qiskit_pass_kwargs") - self.qiskit_pass_kwargs[k] = v - self.qiskit_pass_kwargs['seed_transpiler'] = self.qiskit_pass_kwargs.get('seed_transpiler', self.seed) - self.qiskit_pass_kwargs['optimization_level'] = self.qiskit_pass_kwargs.get('optimization_level', 0) + qiskit_pass_kwargs['seed_transpiler'] = qiskit_pass_kwargs.get('seed_transpiler', self.seed) + qiskit_pass_kwargs['optimization_level'] = qiskit_pass_kwargs.get('optimization_level', 0) + qiskit_pass_kwargs['basis_gates'] = qiskit_pass_kwargs.get('basis_gates', ibmq_backend.operation_names) if qasm_convert_kwargs is None: qasm_convert_kwargs = {} - for k,v in qasm_convert_kwargs.items(): - if k in self.qasm_convert_kwargs: - _warnings.warn(f"Overriding option {k} of qasm_convert_kwargs") - self.qasm_convert_kwargs[k] = v - self.qasm_convert_kwargs['num_qubits'] = self.qasm_convert_kwargs.get('num_qubits', self.processor_spec.num_qubits) - self.qasm_convert_kwargs['standard_gates_version'] = self.qasm_convert_kwargs.get('standard_gates_version', 'x-sx-rz') + qasm_convert_kwargs['num_qubits'] = qasm_convert_kwargs.get('num_qubits', self.processor_spec.num_qubits) + qasm_convert_kwargs['standard_gates_version'] = qasm_convert_kwargs.get('standard_gates_version', 'x-sx-rz') if not len(self.pygsti_circuit_batches): rand_state = _np.random.RandomState(self.seed) # TODO: Should this be a different seed as transpiler? 
@@ -508,14 +489,15 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N if len(self.qiskit_isa_circuit_batches): print(f'Already completed transpilation of {len(self.qiskit_isa_circuit_batches)}/{num_batches} circuit batches') - pm = _pass_manager(backend=ibmq_backend, **self.qiskit_pass_kwargs) + pm = _pass_manager(backend=ibmq_backend, **qiskit_pass_kwargs) + # TODO: In parallel and with tqdm for batch_idx in range(len(self.qiskit_isa_circuit_batches), num_batches): print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") batch = [] for circ in self.pygsti_circuit_batches[batch_idx]: # TODO: Replace this with direct to qiskit - pygsti_openqasm_circ = circ.convert_to_openqasm(**self.qasm_convert_kwargs) + pygsti_openqasm_circ = circ.convert_to_openqasm(**qasm_convert_kwargs) qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) batch.append(qiskit_qc) diff --git a/pygsti/io/metadir.py b/pygsti/io/metadir.py index d9bfbdf2f..879c44702 100644 --- a/pygsti/io/metadir.py +++ b/pygsti/io/metadir.py @@ -92,6 +92,7 @@ def _get_auxfile_ext(typ): elif typ == 'pickle': ext = '.pkl' elif typ == 'none': ext = '.NA' elif typ == 'reset': ext = '.NA' + elif typ == 'qpy': ext = '.qpy' else: #DEPRECATED formats! 
REMOVE LATER if typ == 'text-circuit-lists': ext = '.txt' @@ -320,6 +321,14 @@ def should_skip_loading(path): elif typ == 'pickle': with open(str(pth), 'rb') as f: val = _pickle.load(f) + elif typ == 'qpy': + try: + import qiskit as _qiskit + + with open(str(pth), 'rb') as f: + val = _qiskit.qpy.load(f) + except Exception as e: + raise RuntimeError("QPY serialization format requested but failed") from e else: raise ValueError("Invalid aux-file type: %s" % typ) @@ -475,6 +484,15 @@ def _write_auxfile_member(root_dir, filenm, typ, val): elif typ == 'pickle': with open(str(pth), 'wb') as f: _pickle.dump(val, f) + elif typ == 'qpy': + try: + import qiskit as _qiskit + + with open(str(pth), 'wb') as f: + _qiskit.qpy.dump(val, f) + except Exception as e: + raise RuntimeError("QPY serialization format requested but failed") from e + else: raise ValueError("Invalid aux-file type: %s" % typ) From 30b74a9b8214da37bfc820bb4969d0bd73208ad9 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 16 Jul 2024 18:30:17 -0700 Subject: [PATCH 25/32] Make parallel transpilation with pathos. Pathos is needed to use dill in multiprocessing, which is needed because some Qiskit obj doesn't like to be pickled. 
--- .../objects/advanced/IBMQExperiment.ipynb | 161 ++++-------------- pygsti/extras/ibmq/ibmqexperiment.py | 55 ++++-- setup.py | 5 +- 3 files changed, 73 insertions(+), 148 deletions(-) diff --git a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb index febb1b7ff..28ca273b8 100644 --- a/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb +++ b/jupyter_notebooks/Tutorials/objects/advanced/IBMQExperiment.ipynb @@ -16,7 +16,7 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -26,7 +26,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": { "tags": [] }, @@ -40,7 +40,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -62,7 +62,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -76,7 +76,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -90,32 +90,13 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "data": { - "text/plain": [ - "[,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ,\n", - " ]" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# You can list all the available backends to ensure your instance is running properly\n", "service.backends()" @@ -123,7 +104,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -143,21 +124,13 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", 
- "output_type": "stream", - "text": [ - "\n" - ] - } - ], + "outputs": [], "source": [ "# Let's see which backend is the least busy!\n", "print(backend)" @@ -181,7 +154,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -198,7 +171,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": null, "metadata": { "tags": [] }, @@ -219,25 +192,15 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selected qubits ['Q2', 'Q0', 'Q1', 'Q14'] for device ibm_kyoto\n", - "total circuits: 160\n", - "full total circuits: 160\n" - ] - } - ], + "outputs": [], "source": [ "#circuit design parameters\n", "depths = [0, 2, 4, 16]\n", - "circuits_per_shape = 10\n", + "circuits_per_shape = 20\n", "\n", "# dict setting the circuit widths (# qubits) you want to probe \n", "# and the qubits you want to use at each width\n", @@ -282,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -292,34 +255,11 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": null, "metadata": { "tags": [] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 183851\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 183861\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 183871\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 183881\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 33439\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 33449\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 33459\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) 
with seed 33469\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 104322\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 104332\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 104342\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 104352\n", - "- Sampling 10 circuits at MRB length 0 (1 of 4 depths) with seed 218927\n", - "- Sampling 10 circuits at MRB length 2 (2 of 4 depths) with seed 218937\n", - "- Sampling 10 circuits at MRB length 4 (3 of 4 depths) with seed 218947\n", - "- Sampling 10 circuits at MRB length 16 (4 of 4 depths) with seed 218957\n" - ] - } - ], + "outputs": [], "source": [ "edesigns_dict = {}\n", "edesign_index = 1\n", @@ -346,7 +286,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -362,53 +302,36 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "First we convert pyGSTi circuits into jobs that can be submitted to IBM Q. **This step includes transpiling of the pyGSTi circuits into OpenQASM** (and then into QisKit objects)." + "First we convert pyGSTi circuits into jobs that can be submitted to IBM Q. **This step includes transpiling of the pyGSTi circuits into OpenQASM** (and then into QisKit objects).\n", + "\n", + "This can now be done in parallel (with progress bars) using the `max_workers` kwarg!" 
] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Transpiling circuit batch 1/3\n", - "Transpiling circuit batch 2/3\n", - "Transpiling circuit batch 3/3\n" - ] - } - ], + "outputs": [], "source": [ - "exp.transpile(backend)" + "exp.transpile(backend, num_workers=4)" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Already completed transpilation of 2/3 circuit batches\n", - "Transpiling circuit batch 3/3\n" - ] - } - ], + "outputs": [], "source": [ "# We can simulate having been interrupted by removing the last few transpiled batches\n", - "del exp.qiskit_isa_circuit_batches[2:]\n", + "del exp.qiskit_isa_circuit_batches[3:]\n", "\n", "# And now transpilation should only redo the missing batches\n", "# We don't need to reprovide the options as they are saved by the first transpile call\n", @@ -424,7 +347,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" @@ -444,35 +367,13 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": null, "metadata": { "tags": [ "nbval-skip" ] }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Submitting batch 1\n", - " - Job ID is ctbfqy7wwtng0083ep3g\n", - " - Failed to get queue position for batch 1\n", - " (because queue position not available in RuntimeJobV2)\n", - " - Estimated start time: 2024-07-16 16:08:02.218000-07:00 (local timezone)\n", - "Submitting batch 2\n", - " - Job ID is ctbfqyz4cerg008wejp0\n", - " - Failed to get queue position for batch 2\n", - " (because queue position not available in RuntimeJobV2)\n", - " - Estimated start time: 2024-07-16 16:11:23.314000-07:00 (local timezone)\n", - 
"Submitting batch 3\n", - " - Job ID is ctbfqzqwwtng0083ep40\n", - " - Failed to get queue position for batch 3\n", - " (because queue position not available in RuntimeJobV2)\n", - " - Estimated start time: 2024-07-16 16:14:44.346000-07:00 (local timezone)\n" - ] - } - ], + "outputs": [], "source": [ "exp2.submit(backend)" ] diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 57682f404..7f6a99fcd 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -9,12 +9,15 @@ #*************************************************************************************************** from datetime import datetime as _datetime +from functools import partial as _partial import json as _json import numpy as _np import os as _os +from pathos import multiprocessing as _mp import pathlib as _pathlib import pickle as _pickle import time as _time +import tqdm as _tqdm import warnings as _warnings # Try to load Qiskit @@ -50,6 +53,19 @@ from pygsti.protocols.protocol import _TreeNode +# Needs to be defined first for multiprocessing reasons +def _transpile_batch(circs, pass_manager, qasm_convert_kwargs): + batch = [] + for circ in circs: + # TODO: Replace this with direct to qiskit + pygsti_openqasm_circ = circ.convert_to_openqasm(**qasm_convert_kwargs) + qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) + batch.append(qiskit_qc) + + # Run pass manager on batch + return pass_manager.run(batch) + + class IBMQExperiment(_TreeNode, _HasPSpec): """ A object that converts pyGSTi ExperimentDesigns into jobs to be submitted to IBM Q, submits these @@ -432,7 +448,7 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wai if submit_status is False: raise RuntimeError("Ran out of max attempts and job was still not submitted successfully") - def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=None): + def transpile(self, ibmq_backend, 
qiskit_pass_kwargs=None, qasm_convert_kwargs=None, num_workers=1): """Transpile pyGSTi circuits into Qiskit circuits for submission to IBMQ. Parameters @@ -453,6 +469,9 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N Additional kwargs to pass in to `Circuit.convert_to_openqasm`. If not defined, the default is {'num_qubits': self.processor_spec.num_qubits, 'standard_gates_version': 'x-sx-rz'} + + num_workers: int, optional + Number of workers to use for parallel (by batch) transpilation """ circuits = self.edesign.all_circuits_needing_data.copy() num_batches = int(_np.ceil(len(circuits) / self.circuits_per_batch)) @@ -491,22 +510,22 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N pm = _pass_manager(backend=ibmq_backend, **qiskit_pass_kwargs) - # TODO: In parallel and with tqdm - for batch_idx in range(len(self.qiskit_isa_circuit_batches), num_batches): - print(f"Transpiling circuit batch {batch_idx+1}/{num_batches}") - batch = [] - for circ in self.pygsti_circuit_batches[batch_idx]: - # TODO: Replace this with direct to qiskit - pygsti_openqasm_circ = circ.convert_to_openqasm(**qasm_convert_kwargs) - qiskit_qc = _qiskit.QuantumCircuit.from_qasm_str(pygsti_openqasm_circ) - batch.append(qiskit_qc) - - # Run pass manager on batch - isa_circs = pm.run(batch) - self.qiskit_isa_circuit_batches.append(isa_circs) - - if not self.disable_checkpointing: - self._write_checkpoint() + # Set up parallel tasks + tasks = [self.pygsti_circuit_batches[i] for i in range(len(self.qiskit_isa_circuit_batches), num_batches)] + + # We want to use transpile_batch and it's the same pm/convert kwargs, so create a new function with partially applied kwargs + # This function now only takes circs as an argument (which are our task elements above) + task_fn = _partial(_transpile_batch, pass_manager=pm, qasm_convert_kwargs=qasm_convert_kwargs) + + # Run in parallel (p.imap) with progress bars (tqdm) + with 
_mp.Pool(num_workers) as p: + isa_circuits = list(_tqdm.tqdm(p.imap(task_fn, tasks), total=len(tasks))) + + # Save all of our circuits + self.qiskit_isa_circuit_batches.extend(isa_circuits) + + if not self.disable_checkpointing: + self._write_checkpoint() def write(self, dirname=None): """ @@ -566,3 +585,5 @@ def _retrieve_jobs(self, service): for i, jid in enumerate(self.job_ids): print(f"Loading job {i+1}/{len(self.job_ids)}...") self.qjobs.append(service.job(jid)) + + diff --git a/setup.py b/setup.py index ee17f0def..5a15341e3 100644 --- a/setup.py +++ b/setup.py @@ -63,6 +63,9 @@ 'ibmq': [ 'qiskit>1', 'qiskit-ibm-runtime>=0.17.1', + 'tqdm>=4.42.0', + 'dill', + 'pathos' ], 'testing': [ 'pytest', @@ -285,7 +288,7 @@ def setup_with_extensions(extensions=None): 'pandas' ], extras_require=extras, - python_requires='>=3.5', + python_requires='>=3.8', platforms=["any"], url='http://www.pygsti.info', download_url='https://github.com/pyGSTio/pyGSTi/tarball/master', From 8ecc4312c5b684caf595fd9b8607590f2b4ed8fc Mon Sep 17 00:00:00 2001 From: Stefan Seritan Date: Wed, 17 Jul 2024 10:50:52 -0700 Subject: [PATCH 26/32] Minor IBMQExperiment serialization fixes --- pygsti/extras/ibmq/ibmqexperiment.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 7f6a99fcd..aeaad7a95 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -148,6 +148,9 @@ def from_dir(cls, dirname, regen_jobs=False, service=None, new_checkpoint_path=N except: pass + if ret.qiskit_isa_circuit_batches is None: + ret.qiskit_isa_circuit_batches = [] + # Regenerate Qiskit RuntimeJobs ret.qjobs = [] if regen_jobs: @@ -199,7 +202,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True self.auxfile_types['qiskit_isa_circuit_batches'] = 'list:qpy' self.auxfile_types['qjobs'] = 'none' self.auxfile_types['job_ids'] = 'json' - 
self.auxfile_types['batch_results'] = 'pickle' # TODO: Fix this + self.auxfile_types['batch_results'] = 'none' # TODO: Fix this if _json_util is not None: self.auxfile_types['submit_time_calibration_data'] = 'list:json' else: From 4603a275e1357bc079e61d66f9674f8a8a6f5f82 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Mon, 22 Jul 2024 13:07:39 -0700 Subject: [PATCH 27/32] Undo comment out for mock IBM testing. --- pygsti/circuits/circuit.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pygsti/circuits/circuit.py b/pygsti/circuits/circuit.py index 21d2e4ce2..3776bd968 100644 --- a/pygsti/circuits/circuit.py +++ b/pygsti/circuits/circuit.py @@ -4329,7 +4329,7 @@ def convert_to_openqasm(self, num_qubits=None, if q not in qubits_used: # Delay 0 works because of the barrier # In OpenQASM3, this should probably be a stretch instead - #openqasm += 'delay(0)' + ' q[' + str(qubit_conversion[q]) + '];\n' + openqasm += 'delay(0)' + ' q[' + str(qubit_conversion[q]) + '];\n' pass # Add in a barrier after every circuit layer if block_between_layers==True. From 0bcb99fe2c59991b8325d56014c4e5780a7cf0a0 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Mon, 5 Aug 2024 14:27:17 -0700 Subject: [PATCH 28/32] Do partial checkpointing in IBMQExperiment. --- pygsti/circuits/circuit.py | 6 +-- pygsti/extras/ibmq/ibmqexperiment.py | 66 +++++++++++++++++++++++----- 2 files changed, 58 insertions(+), 14 deletions(-) diff --git a/pygsti/circuits/circuit.py b/pygsti/circuits/circuit.py index 3776bd968..b2a4e69f0 100644 --- a/pygsti/circuits/circuit.py +++ b/pygsti/circuits/circuit.py @@ -4146,7 +4146,7 @@ def convert_to_openqasm(self, num_qubits=None, gatename_conversion=None, qubit_conversion=None, block_between_layers=True, block_between_gates=False, - include_delay_on_idle=True, + include_delay_on_idle=False, gateargs_map=None): # TODO """ Converts this circuit to an openqasm string. 
@@ -4191,9 +4191,9 @@ def convert_to_openqasm(self, num_qubits=None, include_delay_on_idle: bool, optional When `True`, includes a delay operation on implicit idles in each layer, as per Qiskit's OpenQASM 2.0 convention after the deprecation of the id operation. - Defaults to True, which is commensurate with legacy usage of this function. - However, this can now be set to False to avoid this behaviour if generating + Defaults to False, to avoid this behaviour if generating actually valid OpenQASM (with no opaque delay instruction) is desired. + Can be set to True, which is commensurate with legacy usage of this function. gateargs_map : dict, optional If not None, a dict that maps strings (representing pyGSTi standard gate names) to diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index aeaad7a95..27996265f 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -51,6 +51,7 @@ from pygsti import data as _data, io as _io from pygsti.protocols import ProtocolData as _ProtocolData, HasProcessorSpec as _HasPSpec from pygsti.protocols.protocol import _TreeNode +from pygsti.io import metadir as _metadir # Needs to be defined first for multiprocessing reasons @@ -195,6 +196,7 @@ def __init__(self, edesign, pspec, remove_duplicates=True, randomized_order=True # If not in this list, will be automatically dumped to meta.json # 'none' means it will not be read in, 'reset' means it will come back in as None # Several of these could be stored in the meta.json but are kept external for easy chkpts + # DEV NOTE: If any of these change, make sure to update the checkpointing code appropriately self.auxfile_types['edesign'] = 'none' self.auxfile_types['data'] = 'reset' # self.processor_spec is handled by _HasPSpec base class @@ -239,9 +241,15 @@ def monitor(self): try: metrics = qjob.metrics() start_time = _datetime.fromisoformat(metrics["estimated_start_time"]) - print(f' - Estimated start time: 
{start_time.astimezone()} (local timezone)') + local_time = start_time.astimezone() + print(f' - Estimated start time: {local_time.strftime("%Y-%m-%d %H:%M:%S")} (local timezone)') except Exception: print(f' - Unable to retrieve estimated start time') + elif status in [_JobStatus.ERROR, 'ERROR']: + try: + print(f' - Error logs: {qjob.logs()}') + except Exception: + print(f' - Unable to access error logs') # Print unsubmitted for any entries in qobj but not qjob for counter in range(len(self.qjobs), len(self.qiskit_isa_circuit_batches)): @@ -446,7 +454,23 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wai finally: # Checkpoint calibration and job id data if not self.disable_checkpointing: - self._write_checkpoint() + chkpt_path = _pathlib.Path(self.checkpoint_path) / "ibmqexperiment" + with open(chkpt_path / 'meta.json', 'r') as f: + metadata = _json.load(f) + + _metadir._write_auxfile_member(chkpt_path, 'job_ids', self.auxfile_types['job_ids'], self.job_ids) + + if self.auxfile_types['submit_time_calibration_data'] == 'list:json': + # We only need to write the last calibration data + filenm = f"submit_time_calibration_data{len(self.submit_time_calibration_data)-1}" + _metadir._write_auxfile_member(chkpt_path, filenm, 'json', self.submit_time_calibration_data[-1]) + metadata['submit_time_calibration_data'].append(None) + else: + # We are pickling the whole thing, no option to do incremental + _metadir._write_auxfile_member(chkpt_path, 'submit_time_calibration_data', 'pickle', self.submit_time_calibration_data) + + with open(chkpt_path / 'meta.json', 'w') as f: + _json.dump(metadata, f, indent=4) if submit_status is False: raise RuntimeError("Ran out of max attempts and job was still not submitted successfully") @@ -506,10 +530,21 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N self.pygsti_circuit_batches.append(circuits[start:end]) if not self.disable_checkpointing: - self._write_checkpoint() + 
chkpt_path = _pathlib.Path(self.checkpoint_path) / "ibmqexperiment" + with open(chkpt_path / 'meta.json', 'r') as f: + metadata = _json.load(f) + + pcbdata = _metadir._write_auxfile_member(chkpt_path, 'pygsti_circuit_batches', self.auxfile_types['pygsti_circuit_batches'], self.pygsti_circuit_batches) + if 'pygsti_circuit_batches' in metadata: + metadata['pygsti_circuit_batches'] = pcbdata + + with open(chkpt_path / 'meta.json', 'w') as f: + _json.dump(metadata, f) if len(self.qiskit_isa_circuit_batches): print(f'Already completed transpilation of {len(self.qiskit_isa_circuit_batches)}/{num_batches} circuit batches') + if len(self.qiskit_isa_circuit_batches) == num_batches: + return pm = _pass_manager(backend=ibmq_backend, **qiskit_pass_kwargs) @@ -521,14 +556,23 @@ def transpile(self, ibmq_backend, qiskit_pass_kwargs=None, qasm_convert_kwargs=N task_fn = _partial(_transpile_batch, pass_manager=pm, qasm_convert_kwargs=qasm_convert_kwargs) # Run in parallel (p.imap) with progress bars (tqdm) - with _mp.Pool(num_workers) as p: - isa_circuits = list(_tqdm.tqdm(p.imap(task_fn, tasks), total=len(tasks))) - - # Save all of our circuits - self.qiskit_isa_circuit_batches.extend(isa_circuits) - - if not self.disable_checkpointing: - self._write_checkpoint() + #with _mp.Pool(num_workers) as p: + # isa_circuits = list(_tqdm.tqdm(p.imap(task_fn, tasks), total=len(tasks))) + for task in _tqdm.tqdm(tasks): + self.qiskit_isa_circuit_batches.append(task_fn(task)) + + # Save single batch + chkpt_path = _pathlib.Path(self.checkpoint_path) / "ibmqexperiment" + with open(chkpt_path / 'meta.json', 'r') as f: + metadata = _json.load(f) + + filenm = f"qiskit_isa_circuit_batches{len(self.qiskit_isa_circuit_batches)-1}" + _metadir._write_auxfile_member(chkpt_path, filenm, 'qpy', self.qiskit_isa_circuit_batches[-1]) + if 'qiskit_isa_circuit_batches' in metadata: + metadata['qiskit_isa_circuit_batches'].append(None) + + with open(chkpt_path / 'meta.json', 'w') as f: + _json.dump(metadata, f) 
def write(self, dirname=None): """ From e504581d2e8e349eca7f54a704d45bf9c4606f43 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Fri, 6 Sep 2024 16:52:46 -0700 Subject: [PATCH 29/32] Shift interleave into create_all_circuits_needing_data for merge resolution. --- pygsti/protocols/protocol.py | 36 ++++++++++++++---------------------- 1 file changed, 14 insertions(+), 22 deletions(-) diff --git a/pygsti/protocols/protocol.py b/pygsti/protocols/protocol.py index 4af70510d..4523c946a 100644 --- a/pygsti/protocols/protocol.py +++ b/pygsti/protocols/protocol.py @@ -1335,7 +1335,7 @@ class CombinedExperimentDesign(CanCreateAllCircuitsDesign): # for multiple desi form the circuit ordering of this experiment design. DEPRECATED """ - def _create_all_circuits_needing_data(self, subdesigns=None): + def _create_all_circuits_needing_data(self, sub_designs=None, interleave=False): """Create all_circuits_needing_data for other information. This interface is needed to ensure that all_circuits_needing_data @@ -1353,11 +1353,19 @@ def _create_all_circuits_needing_data(self, subdesigns=None): all_circuits: list of Circuits Union of all_circuits_needing_data from subdesigns without duplicates """ - subdesigns = self._vals if subdesigns is None else subdesigns + sub_designs = self._vals if sub_designs is None else sub_designs all_circuits = [] - for des in subdesigns.values(): - all_circuits.extend(des.all_circuits_needing_data) - _lt.remove_duplicates_in_place(all_circuits) # Maybe don't always do this? + if interleave: + subdesign_circuit_lists = [sub_design.all_circuits_needing_data for sub_design in sub_designs.values()] + #zip_longest is like zip, but if the iterables are of different lengths it returns a specified fill value + #(default None) in place of the missing elements once an iterable has been exhausted. 
+ for circuits in _itertools.zip_longest(*subdesign_circuit_lists): + for circuit in circuits: + if circuit is not None: + all_circuits.append(circuit) + else: + for des in sub_designs.values(): + all_circuits.extend(des.all_circuits_needing_data) return all_circuits @classmethod @@ -1430,24 +1438,8 @@ def __init__(self, sub_designs, all_circuits=None, qubit_labels=None, sub_design if not isinstance(sub_designs, dict): sub_designs = {("**%d" % i): des for i, des in enumerate(sub_designs)} - self.interleave = interleave - all_circuits = self._create_all_circuits_needing_data(sub_designs) if all_circuits is None \ - else all_circuits - if all_circuits is None: - all_circuits = [] - if interleave: - subdesign_circuit_lists = [sub_design.all_circuits_needing_data for sub_design in sub_designs.values()] - #zip_longest is like zip, but if the iterables are of different lengths it returns a specified fill value - #(default None) in place of the missing elements once an iterable has been exhausted. - for circuits in _itertools.zip_longest(*subdesign_circuit_lists): - for circuit in circuits: - if circuit is not None: - all_circuits.append(circuit) - else: - for des in sub_designs.values(): - all_circuits.extend(des.all_circuits_needing_data) - _lt.remove_duplicates_in_place(all_circuits) # Maybe don't always do this? + all_circuits = self._create_all_circuits_needing_data(sub_designs, interleave) if qubit_labels is None and len(sub_designs) > 0: first = sub_designs[list(sub_designs.keys())[0]].qubit_labels From aae7362ce9ce803385d209840fe7b05fec9d7641 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Thu, 19 Sep 2024 14:52:22 -0700 Subject: [PATCH 30/32] Update and fix tests. 
--- setup.py | 29 +++++++++++++++-------------- test/unit/objects/test_circuit.py | 2 +- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/setup.py b/setup.py index 5a15341e3..fcc5cd2f7 100644 --- a/setup.py +++ b/setup.py @@ -46,6 +46,11 @@ 'report_pickling': ['pandas'], 'report_pdf_figures': ['matplotlib'], 'html_reports': ['jinja2', 'MarkupSafe'], + 'reports':[ + 'pygsti[report_pickling]', + 'pygsti[report_pdf_figures]', + 'pygsti[html_reports]' + ], 'notebooks': [ 'ipython', 'notebook', @@ -71,29 +76,25 @@ 'pytest', 'pytest-xdist', 'pytest-cov', + 'cython', # Don't call this pygsti[extensions] for testing_no_cython logic below + 'mpi4py', # Don't call this pygsti[multiprocessor] for no_mpi logic below 'nbval', - 'bson', - 'csaps', - 'cvxopt', - 'cvxpy', - 'cython', - 'matplotlib', - 'mpi4py', - 'msgpack', 'packaging', - 'pandas', 'psutil', 'zmq', - 'jinja2', 'seaborn', 'scipy', 'ply', 'qibo<=0.1.7', 'cirq-core', - 'notebook', - 'ipython', - 'jupyter_server', - 'torch' + 'pygsti[diamond_norm]', + 'pygsti[ibmq]', + 'pygsti[interpygate]', + 'pygsti[msgpack]', + 'pygsti[notebooks]', + 'pygsti[pytorch]', + 'pygsti[reports]', + 'pygsti[serialization]' ] } diff --git a/test/unit/objects/test_circuit.py b/test/unit/objects/test_circuit.py index 49b0daa4e..ba657af41 100644 --- a/test/unit/objects/test_circuit.py +++ b/test/unit/objects/test_circuit.py @@ -497,7 +497,7 @@ def test_convert_to_openqasm(self): ckt = circuit.Circuit([Label('Gxpi2',0), Label(()), Label([Label('Gh',0), Label('Gtdag',1)]), Label('Gcnot', (0,1))], line_labels=(0,1)) - converted_qasm = ckt.convert_to_openqasm() + converted_qasm = ckt.convert_to_openqasm(include_delay_on_idle=True) #this is really just doing a check if anything has changed. I.e. an integration test. 
expected_qasm = 'OPENQASM 2.0;\ninclude "qelib1.inc";\n\nopaque delay(t) q;\n\nqreg q[2];'\ +'\ncreg cr[2];\n\nu3(1.570796326794897, 4.71238898038469, 1.570796326794897) q[0];\ndelay(0) q[1];'\ From ee3b66b56585f96ed9e47bd7433bc4202084cb69 Mon Sep 17 00:00:00 2001 From: pcwysoc <144378483+pcwysoc@users.noreply.github.com> Date: Fri, 14 Mar 2025 12:13:55 -0400 Subject: [PATCH 31/32] Add option to use prior session --- pygsti/extras/ibmq/ibmqexperiment.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/pygsti/extras/ibmq/ibmqexperiment.py b/pygsti/extras/ibmq/ibmqexperiment.py index 27996265f..549ef2509 100644 --- a/pygsti/extras/ibmq/ibmqexperiment.py +++ b/pygsti/extras/ibmq/ibmqexperiment.py @@ -309,7 +309,7 @@ def partial_trace(ordered_target_indices, input_dict): if not self.disable_checkpointing: self.data.write(self.checkpoint_path, edesign_already_written=True) - def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wait_time=5, max_attempts=10): + def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wait_time=5, max_attempts=10, ibmq_session=None): """ Submits the jobs to IBM Q, that implements the experiment specified by the ExperimentDesign used to create this object. @@ -343,6 +343,9 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wai wait_steps: int Number of steps to take before retrying job submission. 
+ ibmq_session: IBMQuantumRuntimeSession + IBMQuantumRuntime Session to use + Returns ------- None @@ -375,7 +378,9 @@ def submit(self, ibmq_backend, start=None, stop=None, ignore_job_limit=True, wai stop = min(start + allowed_jobs, stop) - ibmq_session = _Session(backend = ibmq_backend) + if ibmq_session is None: + ibmq_session = _Session(backend = ibmq_backend) + sampler = _Sampler(session=ibmq_session) for batch_idx, batch in enumerate(self.qiskit_isa_circuit_batches): From c7764e7c9f85d47eef9c5f0882ef0def8e74f666 Mon Sep 17 00:00:00 2001 From: "Stefan K. Seritan" Date: Tue, 18 Mar 2025 15:11:05 -0700 Subject: [PATCH 32/32] Finish merge for pyproject.toml --- pyproject.toml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 8eb61fce6..494ebf52e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,13 @@ diamond_norm = [ evolutionary_optimization = ['deap'] extensions = ['cython'] html_reports = ['jinja2', 'MarkupSafe'] +ibmq = [ + 'qiskit>1', + 'qiskit-ibm-runtime>=0.17.1', + 'tqdm>=4.42.0', + 'dill', + 'pathos' +] interpygate = ['csaps'] linting = [ 'autopep8', @@ -84,7 +91,7 @@ testing_no_cython_mpi = [ 'seaborn', 'ply', 'cirq-core', - 'pygsti[diamond_norm,interpygate,memory_profiling,msgpack,notebooks,pytorch,reports]' + 'pygsti[diamond_norm,ibmq,interpygate,memory_profiling,msgpack,notebooks,pytorch,reports]' ] testing_no_cython = ['pygsti[multiprocessor,testing_no_cython_mpi]'] testing = ['pygsti[extensions,testing_no_cython]']