Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
30 changes: 26 additions & 4 deletions src/tqecd/construction.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,9 @@ def _shift_time_instruction(number_of_spatial_coordinates: int) -> stim.Circuit:
return circuit


def annotate_detectors_automatically(circuit: stim.Circuit) -> stim.Circuit:
def annotate_detectors_automatically(
circuit: stim.Circuit, reuse_flows_for_anticommuting_cover: bool = False
) -> stim.Circuit:
"""Insert detectors into the provided circuit instance.

This is the main user-facing function to automatically insert detectors into
Expand All @@ -55,6 +57,15 @@ def annotate_detectors_automatically(circuit: stim.Circuit) -> stim.Circuit:

Args:
circuit: circuit to insert detectors in.
reuse_flows_for_anticommuting_cover: if True, the flows that are used
to form a commuting stabilizer are not all removed from the list of
flows. Instead, only one of them is removed, and the others are kept
to be potentially reused to form other commuting stabilizers. This
might lead to detectors involving more measurements than necessary,
but it makes it possible to find more detectors in some cases.
WARNING: Enabling this feature might significantly increase the
runtime of the detector finding algorithm, as it increases the
number of flows to consider at each step. Defaults to False.

Returns:
A new ``stim.Circuit`` instance with automatically computed detectors.
Expand All @@ -67,7 +78,11 @@ def annotate_detectors_automatically(circuit: stim.Circuit) -> stim.Circuit:
qubit_coords_map: dict[int, tuple[float, ...]] = {
q: tuple(coords) for q, coords in circuit.get_final_qubit_coordinates().items()
}
return compile_fragments_to_circuit_with_detectors(fragments, qubit_coords_map)
return compile_fragments_to_circuit_with_detectors(
fragments,
qubit_coords_map,
reuse_flows_for_anticommuting_cover=reuse_flows_for_anticommuting_cover,
)


def compile_fragments_to_circuit(
Expand Down Expand Up @@ -100,9 +115,14 @@ def _insert_before_last_tick_instruction(
def compile_fragments_to_circuit_with_detectors(
fragments: list[Fragment | FragmentLoop],
qubit_coords_map: dict[int, tuple[float, ...]],
reuse_flows_for_anticommuting_cover: bool = False,
) -> stim.Circuit:
flows = build_flows_from_fragments(fragments)
detectors_from_flows = match_detectors_from_flows_shallow(flows, qubit_coords_map)
detectors_from_flows = match_detectors_from_flows_shallow(
flows,
qubit_coords_map,
reuse_flows_for_anticommuting_cover=reuse_flows_for_anticommuting_cover,
)

circuit = stim.Circuit()
number_of_spatial_coordinates = len(
Expand All @@ -117,7 +137,9 @@ def compile_fragments_to_circuit_with_detectors(
else: # isinstance(fragment, FragmentLoop):
shift_circuit = _shift_time_instruction(number_of_spatial_coordinates)
loop_body = compile_fragments_to_circuit_with_detectors(
fragment.fragments, qubit_coords_map
fragment.fragments,
qubit_coords_map,
reuse_flows_for_anticommuting_cover=reuse_flows_for_anticommuting_cover,
)
circuit += (
_insert_before_last_tick_instruction(
Expand Down
24 changes: 24 additions & 0 deletions src/tqecd/construction_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -85,3 +85,27 @@ def test_invalid_circuits(name: str, circuit: stim.Circuit, error_message: str)
)
with pytest.raises(TQECDException, match=rf"^{error_message}$"):
annotate_detectors_automatically(circuit_without_detectors)


def test_reuse_flows_for_anticommuting_cover() -> None:
    """Check that reusing flows for the anti-commuting cover finds more detectors.

    On the small 3-qubit circuit below, the default algorithm annotates a
    single detector, while enabling ``reuse_flows_for_anticommuting_cover``
    makes a second detector reachable.
    """
    circuit = stim.Circuit("""
QUBIT_COORDS(0, 0) 0
QUBIT_COORDS(1, 0) 1
QUBIT_COORDS(2, 0) 2
R 0 1 2
TICK
CX 1 0
TICK
CX 1 2
TICK
MX 1
TICK
M 0 2
""")
    default_count = annotate_detectors_automatically(circuit).num_detectors
    reuse_count = annotate_detectors_automatically(
        circuit, reuse_flows_for_anticommuting_cover=True
    ).num_detectors
    assert default_count == 1
    assert reuse_count == 2
45 changes: 34 additions & 11 deletions src/tqecd/flow.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,9 @@ def _anti_commuting_stabilizers_indices(flows: list[BoundaryStabilizer]) -> list
return [i for i in range(len(flows)) if flows[i].has_anticommuting_operations]


def _try_merge_anticommuting_flows_inplace(flows: list[BoundaryStabilizer]) -> None:
def _try_merge_anticommuting_flows_inplace(
flows: list[BoundaryStabilizer], reuse_flows: bool = False
) -> None:
"""Merge as much anti-commuting flows as possible from the provided flows.

This function tries to merge together several :class:`BoundaryStabilizer`
Expand All @@ -27,6 +29,12 @@ def _try_merge_anticommuting_flows_inplace(flows: list[BoundaryStabilizer]) -> N
Args:
flows: a list of flows that might or might not contain flows that
anti-commute with its collapsing operations.
reuse_flows: if True, the flows that are used to form a commuting
stabilizer are not all removed from the list of flows. Instead,
only one of them is removed, and the others are kept to be potentially
reused to form other commuting stabilizers. This might lead to
detectors involving more measurements than necessary, but it makes it
possible to find more detectors in some cases. Defaults to False.

Raises:
TQECDException: if the provided flows have different collapsing
Expand Down Expand Up @@ -87,10 +95,21 @@ def _try_merge_anticommuting_flows_inplace(flows: list[BoundaryStabilizer]) -> N
stabilizers_to_merge: list[BoundaryStabilizer] = [
flows[i] for i in flows_indices_of_stabilizers_to_merge
]
# Update the flows by removing the entries related to stabilizers that
# will be merged and re-compute the anti-commuting stabilizers and map.
for i in sorted(flows_indices_of_stabilizers_to_merge, reverse=True):
flows.pop(i)
# Update flows: remove one entry per merged stabilizer.
# This ensures:
# 1. A stabilizer isn't merged twice.
# 2. Flows remain valid for finding subsequent merging opportunities,
# as the removed flow's anti-commuting boundary stabilizer is covered
# by the remaining flows.
# Note that we have the risk that the reused flows may have unnecessarily
# many measurements included in the formed detector. However, we never
# guarantee minimality of detector structures, so this is not an issue.
if reuse_flows:
flows.pop(flows_indices_of_stabilizers_to_merge[-1])
else:
for i in sorted(flows_indices_of_stabilizers_to_merge, reverse=True):
flows.pop(i)

anti_commuting_index_to_flows_index = _anti_commuting_stabilizers_indices(flows)
anticommuting_stabilizers = [
flows[fi].before_collapse for fi in anti_commuting_index_to_flows_index
Expand Down Expand Up @@ -158,9 +177,11 @@ def without_trivial_flows(self) -> FragmentFlows:
total_number_of_measurements=self.total_number_of_measurements,
)

def try_merge_anticommuting_flows(self) -> None:
_try_merge_anticommuting_flows_inplace(self.creation)
_try_merge_anticommuting_flows_inplace(self.destruction)
def try_merge_anticommuting_flows(self, reuse_flows: bool = False) -> None:
    """Merge anti-commuting boundary stabilizers on both boundaries, in-place.

    Args:
        reuse_flows: forwarded to the merging routine. When True, flows used
            to build a commuting cover are kept for potential reuse instead
            of all being removed. Defaults to False.
    """
    for boundary_flows in (self.creation, self.destruction):
        _try_merge_anticommuting_flows_inplace(
            boundary_flows, reuse_flows=reuse_flows
        )


@dataclass
Expand Down Expand Up @@ -207,9 +228,11 @@ def remove_destructions(self, indices: ty.Iterable[int]) -> None:
for i in sorted(indices, reverse=True):
self.remove_destruction(i)

def try_merge_anticommuting_flows(self) -> None:
_try_merge_anticommuting_flows_inplace(self.creation)
_try_merge_anticommuting_flows_inplace(self.destruction)
def try_merge_anticommuting_flows(self, reuse_flows: bool = False) -> None:
    """Try to merge anti-commuting flows on the creation and destruction boundaries.

    Args:
        reuse_flows: passed through to the in-place merging helper; when True,
            only one of the merged flows is removed so the others may be
            reused in later merges. Defaults to False.
    """
    for flow_list in (self.creation, self.destruction):
        _try_merge_anticommuting_flows_inplace(flow_list, reuse_flows=reuse_flows)


def build_flows_from_fragments(
Expand Down
30 changes: 27 additions & 3 deletions src/tqecd/match.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,6 +72,7 @@ def _get_detectors_with_time_coordinate(
def match_detectors_from_flows_shallow(
flows: list[FragmentFlows | FragmentLoopFlows],
qubit_coordinates: dict[int, tuple[float, ...]],
reuse_flows_for_anticommuting_cover: bool = False,
) -> list[list[MatchedDetector]]:
"""Match detectors in the provided fragments.

Expand All @@ -97,6 +98,12 @@ def match_detectors_from_flows_shallow(
qubit_coordinates: a mapping from qubit indices to coordinates. Used to annotate
the matched detectors with the coordinates from the qubits involved in the
measurement forming the detector.
reuse_flows_for_anticommuting_cover: if True, the flows that are used
to form a commuting stabilizer are not all removed from the list of
flows. Instead, only one of them is removed, and the others are kept
to be potentially reused to form other commuting stabilizers. This
might lead to detectors involving more measurements than necessary,
but it makes it possible to find more detectors in some cases. Defaults to False.

Returns:
the list of all the detectors found. These detectors are only valid if inserted
Expand All @@ -110,7 +117,12 @@ def match_detectors_from_flows_shallow(
]
for i in range(1, len(flows)):
detectors[i].extend(
match_boundary_stabilizers(flows[i - 1], flows[i], qubit_coordinates)
match_boundary_stabilizers(
flows[i - 1],
flows[i],
qubit_coordinates,
reuse_flows_for_anticommuting_cover=reuse_flows_for_anticommuting_cover,
)
)

return _get_detectors_with_time_coordinate(flows, detectors)
Expand Down Expand Up @@ -251,6 +263,7 @@ def match_boundary_stabilizers(
right_flows: FragmentFlows | FragmentLoopFlows,
qubit_coordinates: Mapping[int, tuple[float, ...]],
perform_sanity_check: bool = True,
reuse_flows_for_anticommuting_cover: bool = False,
) -> list[MatchedDetector]:
"""Match detectors using the boundary stabilizers between the two given
flows.
Expand All @@ -275,6 +288,12 @@ def match_boundary_stabilizers(
and `right_flows` are compared to the detectors found between the
last and first fragments of the body of `right_flows`. If the two
sets are not equal, an exception is raised. Defaults to True.
reuse_flows_for_anticommuting_cover: if True, the flows that are used
to form a commuting stabilizer are not all removed from the list of
flows. Instead, only one of them is removed, and the others are kept
to be potentially reused to form other commuting stabilizers. This
might lead to detectors involving more measurements than necessary,
but it makes it possible to find more detectors in some cases. Defaults to False.

Raises:
TQECDException: if the sanity check does not pass.
Expand Down Expand Up @@ -302,6 +321,7 @@ def match_boundary_stabilizers(
deepcopy(right_flows.fragment_flows[-1]), # type: ignore
deepcopy(right_flows.fragment_flows[0]), # type: ignore
qubit_coordinates,
reuse_flows_for_anticommuting_cover=reuse_flows_for_anticommuting_cover,
)

# 0. Combining anti-commuting stabilizers
Expand All @@ -317,8 +337,12 @@ def match_boundary_stabilizers(
# be interesting to perform this step AFTER a first round of commuting stabilizer
# matching, and repeating the matching steps after to match newly added commuting
# stabilizers into detectors.
left_flows.try_merge_anticommuting_flows()
right_flows.try_merge_anticommuting_flows()
left_flows.try_merge_anticommuting_flows(
reuse_flows=reuse_flows_for_anticommuting_cover
)
right_flows.try_merge_anticommuting_flows(
reuse_flows=reuse_flows_for_anticommuting_cover
)

# 1. Match stabilizers 1-to-1 without anti-commuting collapses
matched_detectors.extend(
Expand Down
Loading