Skip to content

Commit

Permalink
Feat: Include utility-scale estimates in the examples (#136)
Browse files Browse the repository at this point in the history
Co-authored-by: Athena Caesura <[email protected]>
  • Loading branch information
SebastianMorawiec and AthenaCaesura authored Oct 17, 2023
1 parent bc8d0aa commit bc9db64
Show file tree
Hide file tree
Showing 4 changed files with 221 additions and 82 deletions.
156 changes: 156 additions & 0 deletions examples/ex_11_utility_scale.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import datetime
import json
import os
import typing
import warnings
from pathlib import Path
from typing import Literal

from benchq.algorithms.time_evolution import qsp_time_evolution_algorithm
from benchq.compilation import get_ruby_slippers_compiler
from benchq.data_structures import DETAILED_ION_TRAP_ARCHITECTURE_MODEL, DecoderModel
from benchq.problem_ingestion.hamiltonian_generation import (
generate_cubic_hamiltonian,
generate_kitaev_hamiltonian,
generate_triangular_hamiltonian,
)
from benchq.resource_estimation.graph import (
ExtrapolationResourceEstimator,
create_big_graph_from_subcircuits,
remove_isolated_nodes,
run_custom_extrapolation_pipeline,
transpile_to_native_gates,
)
from benchq.resource_estimation.openfermion_re import get_physical_cost


def get_resources(lattice_type: str, size: int, decoder_data_file: str):
    """Estimate fault-tolerant resources for time evolution of a lattice model.

    Produces two independent estimates: one via graph state compilation with
    extrapolation from small problem sizes, and a footprint-style estimate
    derived from the total T-gate count.

    Args:
        lattice_type: One of "triangular", "kitaev", or "cubic".
        size: Linear size of the lattice.
        decoder_data_file: Path to a CSV file with decoder model data.

    Returns:
        A tuple ``(gsc_resources, footprint_resources)``.

    Raises:
        ValueError: If ``lattice_type`` is not one of the supported lattices.
    """
    # Single lookup table replaces the two duplicated if/elif chains the
    # original had (one for the Hamiltonian generator, one for the
    # teleportation threshold). The threshold tunes the number of logical
    # qubits produced by the ruby-slippers compiler.
    lattice_config = {
        "triangular": (generate_triangular_hamiltonian, 70),
        "kitaev": (generate_kitaev_hamiltonian, 60),
        "cubic": (generate_cubic_hamiltonian, 70),
    }

    print(f"Getting operator for size {size} {lattice_type} lattice...")
    if lattice_type not in lattice_config:
        raise ValueError(f"Lattice type {lattice_type} not supported")
    generate_hamiltonian, teleportation_threshold = lattice_config[lattice_type]
    operator = generate_hamiltonian(size)

    architecture_model = DETAILED_ION_TRAP_ARCHITECTURE_MODEL

    print("Getting algorithm implementation...")
    evolution_time = 1
    failure_tolerance = 1e-4
    algorithm_implementation = qsp_time_evolution_algorithm(
        operator, evolution_time, failure_tolerance
    )

    print("Setting resource estimation parameters...")
    decoder_model = DecoderModel.from_csv(decoder_data_file)
    my_estimator = ExtrapolationResourceEstimator(
        architecture_model,
        [2, 4, 6, 8, 10],  # small problem sizes used to fit the extrapolation
        n_measurement_steps_fit_type="logarithmic",
        optimization="space",
        decoder_model=decoder_model,
    )

    # select teleportation threshold to tune number of logical qubits
    gpm = get_ruby_slippers_compiler(
        teleportation_threshold=teleportation_threshold
    )

    print("Estimating resources via graph state compilation...")
    gsc_resources = run_custom_extrapolation_pipeline(
        algorithm_implementation,
        my_estimator,
        transformers=[
            transpile_to_native_gates,
            create_big_graph_from_subcircuits(gpm),
            remove_isolated_nodes,
        ],
    )

    total_t_gates = my_estimator.get_n_total_t_gates(
        gsc_resources.extra.n_t_gates,
        gsc_resources.extra.n_rotation_gates,
        algorithm_implementation.error_budget.transpilation_failure_tolerance,
    )

    footprint_resources = get_physical_cost(
        algorithm_implementation.program.num_data_qubits,
        num_t=total_t_gates,
        architecture_model=my_estimator.hw_model,
        hardware_failure_tolerance=algorithm_implementation.error_budget.hardware_failure_tolerance,
        decoder_model=decoder_model,
    )
    return gsc_resources, footprint_resources


def save_to_file(gsc_resources, footprint_resources, lattice_type, path: str):
    """Dump both resource estimates as JSON files inside ``path``.

    Writes ``<lattice_type>_gsc_re_data.json`` and
    ``<lattice_type>_footprint_re_data.json``.

    Bug fix: the original concatenated ``path`` and the file name with
    ``+``, so any ``path`` not ending in a separator silently produced a
    mangled file name (e.g. ``"." -> ".triangular_gsc_re_data.json"``).
    ``Path`` joining inserts the separator correctly.
    """
    results_folder = Path(path)

    gsc_file = results_folder / f"{lattice_type}_gsc_re_data.json"
    with open(gsc_file, "w") as outfile:
        # default=str so non-JSON-native fields (e.g. datetimes) serialize.
        json.dump(gsc_resources, outfile, indent=4, sort_keys=True, default=str)

    footprint_file = results_folder / f"{lattice_type}_footprint_re_data.json"
    with open(footprint_file, "w") as outfile:
        json.dump(footprint_resources, outfile, indent=4, sort_keys=True, default=str)


def main(
    decoder_data_file: str,
    save_results: bool,
    lattice_type: Literal["triangular", "kitaev", "cubic"],
    size: int,
    path_to_save_results: typing.Optional[str] = None,
):
    """Run both resource estimates and optionally persist them as JSON.

    Returns the pair ``(gsc_estimates, footprint_estimates)``. Results are
    only written to disk when ``save_results`` is true AND a target path is
    provided; a missing path triggers a warning instead of an error.
    """
    estimates = get_resources(lattice_type, size, decoder_data_file)
    gsc_estimates, footprint_estimates = estimates

    if save_results:
        if path_to_save_results is not None:
            save_to_file(
                gsc_estimates, footprint_estimates, lattice_type, path_to_save_results
            )
        else:
            warnings.warn("Path is required to save the results.")

    return gsc_estimates, footprint_estimates


if __name__ == "__main__":
    # Fixed: the two adjacent string literals previously concatenated with
    # no separating space ("...calculate.It can take..."), and the second
    # sentence was missing the article "a".
    warnings.warn(
        "These utility scale estimates take a lot of time to calculate. "
        "It can take up to a day for a single example to finish calculation."
    )

    decoder_data = "data/sample_decoder_data.csv"
    save_results = False
    path_to_save_results = "."

    # Utility-scale problem size for each supported lattice type.
    # (Renamed from the misspelled "utiliy_scale_problems".)
    utility_scale_problems: typing.Dict[
        Literal["triangular", "kitaev", "cubic"], int
    ] = {"triangular": 30, "kitaev": 22, "cubic": 10}

    lattice_type: Literal["triangular", "kitaev", "cubic"]

    # Uncomment exactly one of the following to pick the lattice to run.
    lattice_type = "triangular"
    # lattice_type = "kitaev"
    # lattice_type = "cubic"

    gsc_estimates, footprint_estimates = main(
        decoder_data,
        save_results,
        lattice_type,
        utility_scale_problems[lattice_type],
        path_to_save_results,
    )

    print(gsc_estimates)
    print(footprint_estimates)
80 changes: 0 additions & 80 deletions examples/ex_4_extrapolation.py

This file was deleted.

55 changes: 55 additions & 0 deletions examples/ex_4_fast_graph_estimates.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
################################################################################
# © Copyright 2022-2023 Zapata Computing Inc.
################################################################################
"""
In this example we show how to deal with the case where the problem is too large to be
compiled to a graph. We use the extrapolation technique to estimate resources
for running time evolution for H2 molecule.
Number of block encodings needed to run the algorithm is too high, so we
estimate resources need for running similar circuit with 1, 2 and 3 block encodings
and then we extrapolate the results to estimate resources for full problem.
WARNING: This example requires the pyscf extra. run `pip install benchq[pyscf]`
to install the extra.
"""

from pathlib import Path
from pprint import pprint

from benchq.algorithms.time_evolution import qsp_time_evolution_algorithm
from benchq.data_structures import DETAILED_ION_TRAP_ARCHITECTURE_MODEL, DecoderModel
from benchq.problem_ingestion.hamiltonian_generation import (
generate_triangular_hamiltonian,
)
from benchq.resource_estimation.default_pipelines import run_fast_graph_estimate
from benchq.timing import measure_time


def main():
    """Run a fast graph-state resource estimate for a small triangular
    lattice, timing and printing each stage as it completes."""
    hw_model = DETAILED_ION_TRAP_ARCHITECTURE_MODEL

    # Stage 1: build the lattice Hamiltonian.
    with measure_time() as timer:
        lattice_size = 3
        operator = generate_triangular_hamiltonian(lattice_size)

    print("Operator generation time:", timer.total)

    # Stage 2: construct the QSP time-evolution algorithm.
    with measure_time() as timer:
        evolution_time: float = 1.0
        failure_tolerance: float = 1e-3
        algorithm = qsp_time_evolution_algorithm(
            operator, evolution_time, failure_tolerance
        )

    print("Circuit generation time:", timer.total)

    # Stage 3: run the fast graph-state-compilation estimator.
    with measure_time() as timer:
        fast_gsc_resources = run_fast_graph_estimate(algorithm, hw_model)

    print("Resource estimation time with GSC:", timer.total)
    pprint(fast_gsc_resources)


# Script entry point: run the timed fast-graph estimate.
if __name__ == "__main__":
    main()
12 changes: 10 additions & 2 deletions tests/benchq/examples/test_examples.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@
from examples.ex_3_packages_comparison import ( # noqa: E402
main as packages_comparison_main,
)
from examples.ex_4_extrapolation import main as extrapolation_main # noqa: E402
from examples.ex_4_fast_graph_estimates import main as fast_graph # noqa: E402
from examples.ex_11_utility_scale import main as utility_scale # noqa: E402

SKIP_AZURE = pytest.mark.skipif(
os.getenv("BENCHQ_TEST_AZURE") is None,
Expand Down Expand Up @@ -78,7 +79,14 @@ def test_packages_comparison_example():


def test_extrapolation_example():
    # Smoke test: runs the example end-to-end; passing means it did not raise.
    # NOTE(review): this now exercises ex_4_fast_graph_estimates (the old
    # extrapolation example was deleted) — consider renaming this test to
    # test_fast_graph_example to match.
    fast_graph()


def test_utility_scale_example():
    """Smoke-test the utility-scale example on a tiny triangular lattice."""
    decoder_csv = os.path.join("examples", "data", "sample_decoder_data.csv")
    # save_results=False so the test leaves no files behind; size 3 keeps
    # the run fast compared to the real utility-scale sizes.
    gsc_resources, footprint_resources = utility_scale(
        decoder_csv, False, "triangular", 3
    )
    assert gsc_resources
    assert footprint_resources


def test_toy_example_notebook():
Expand Down

0 comments on commit bc9db64

Please sign in to comment.