# evaluation.py — Run shared sweep once; all plots aggregate from cache (Py3.8-safe)
import math
import os
import pickle
import time
import shutil
import json
import hashlib

import matplotlib.pyplot as plt
import numpy as np
from cycler import cycler

# metrics / viz live in separate modules (UNIX-style separation of concerns)
from metrics.widths import (
    ci_radius_hoeffding,
    sum_weighted_widths_all_links,
    sum_weighted_min_widths_perpair,
    sum_widths_all_links,
    sum_minwidths_perpair,
)
from viz.plots import mean_ci95, plot_with_ci_band
from network import QuantumNetwork
from schedulers import run_scheduler  # scheduler dispatch

# ---- Matplotlib style (compatibility first: hex colors & safe marker/linestyle symbols) ----
plt.rc("font", family="Times New Roman")
plt.rc("font", size=20)
default_cycler = (
    cycler(color=["#4daf4a", "#377eb8", "#e41a1c", "#984ea3", "#ff7f00", "#a65628"])
    + cycler(marker=["s", "v", "o", "x", "*", "+"])
    + cycler(linestyle=[":", "--", "-", "-.", "--", ":"])
)
plt.rc("axes", prop_cycle=default_cycler)
# =========================
# Fidelity generators
# =========================
def generate_fidelity_list_avg_gap(path_num):
    result = []
    fidelity_max = 1
    fidelity_min = 0.9
    gap = (fidelity_max - fidelity_min) / path_num
    fidelity = fidelity_max
    for _ in range(path_num):
        result.append(fidelity)
        fidelity -= gap
    assert len(result) == path_num
    return result


def generate_fidelity_list_fix_gap(path_num, gap, fidelity_max=1):
    result = []
    fidelity = fidelity_max
    for _ in range(path_num):
        result.append(fidelity)
        fidelity -= gap
    assert len(result) == path_num
    return result
def generate_fidelity_list_random(path_num, alpha=0.95, beta=0.85, variance=0.1):
    """Generate `path_num` links with a guaranteed top-1 gap (requires path_num >= 2)."""
    while True:
        mean = [alpha] + [beta] * (path_num - 1)
        result = []
        for i in range(path_num):
            mu = mean[i]
            # Resample until the draw falls inside [0.8, 1.0]
            while True:
                r = np.random.normal(mu, variance)
                if 0.8 <= r <= 1.0:
                    break
            result.append(r)
        assert len(result) == path_num
        sorted_res = sorted(result, reverse=True)
        if sorted_res[0] - sorted_res[1] > 0.02:
            return result
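# Illustrative usage (the values below are made up; actual draws are random):
#   generate_fidelity_list_random(3) -> e.g. [0.97, 0.84, 0.81]
# The first link, drawn around `alpha`, ends up beating the runner-up by more than 0.02.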
# =========================
# Progress helpers
# =========================
def _start_timer():
    return {"t0": time.time(), "last": time.time()}


def _tick(timer):
    now = time.time()
    dt_total = now - timer["t0"]
    dt_step = now - timer["last"]
    timer["last"] = now
    return dt_total, dt_step


def _log(msg):
    print(msg, flush=True)
# =========================
# Shared sweep (cache) helpers with file lock
# =========================
def _sweep_signature(budget_list, scheduler_names, noise_model,
                     node_path_list, importance_list, bounces, repeat):
    payload = {
        "budget_list": list(budget_list),
        "scheduler_names": list(scheduler_names),
        "noise_model": str(noise_model),
        "node_path_list": list(node_path_list),
        "importance_list": list(importance_list),
        "bounces": list(bounces),
        "repeat": int(repeat),
        "version": 1,
    }
    sig = hashlib.md5(json.dumps(payload, sort_keys=True).encode("utf-8")).hexdigest()[:10]
    return payload, sig


def _shared_sweep_path(noise_model, sig):
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    return os.path.join(outdir, f"shared_sweep_{noise_model}_{sig}.pickle")
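# Example of the resulting cache path (the noise-model name and hash digits are illustrative):
#   outputs/shared_sweep_depolarizing_3f2a9c81de.pickle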
def _run_or_load_shared_sweep(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10,
    verbose=True, print_every=1,
):
    config, sig = _sweep_signature(budget_list, scheduler_names, noise_model,
                                   node_path_list, importance_list, bounces, repeat)
    cache_path = _shared_sweep_path(noise_model, sig)
    lock_path = cache_path + ".lock"
    STALE_LOCK_SECS = 6 * 60 * 60  # reclaim the lock if it has not been touched for 6 hours
    HEARTBEAT_EVERY = 5.0          # how often the producer refreshes the lock mtime (seconds)

    # If the cache already exists, load it immediately.
    if os.path.exists(cache_path):
        if verbose:
            _log(f"[shared] Load cached sweep: {os.path.basename(cache_path)}")
        with open(cache_path, "rb") as f:
            return pickle.load(f)

    # --- Acquire the lock (only one process performs the initial sweep) ---
    got_lock = False
    while True:
        try:
            fd = os.open(lock_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY)
            os.close(fd)
            got_lock = True
            break
        except FileExistsError:
            # Another process is generating the cache: wait for it to finish (no timeout).
            if os.path.exists(cache_path):
                with open(cache_path, "rb") as f:
                    return pickle.load(f)
            # Stale-lock detection: reclaim the lock if its mtime has not changed for a long time.
            try:
                age = time.time() - os.path.getmtime(lock_path)
            except OSError:
                age = 0
            if age > STALE_LOCK_SECS:
                if verbose:
                    _log("[shared] Stale lock detected. Removing...")
                try:
                    os.remove(lock_path)
                except FileNotFoundError:
                    pass
                continue
            # Keep waiting for the producer to finish.
            if verbose:
                _log("[shared] Waiting for cache to be ready...")
            time.sleep(1.0)

    try:
        if verbose:
            _log(f"[shared] Run sweep and cache to: {os.path.basename(cache_path)}")
        data = {name: {k: [] for k in range(len(budget_list))} for name in scheduler_names}
        last_hb = time.time()
        for k, C_total in enumerate(budget_list):
            if verbose:
                _log(f"=== [SHARED {noise_model}] Budget={C_total} ({k+1}/{len(budget_list)}) ===")
            for r in range(repeat):
                if verbose and ((r + 1) % print_every == 0 or r == 0):
                    _log(f"  [repeat {r+1}/{repeat}]")
                # Heartbeat: touch the lock's mtime so waiting processes know the producer is alive.
                now = time.time()
                if now - last_hb >= HEARTBEAT_EVERY:
                    try:
                        os.utime(lock_path, None)
                    except FileNotFoundError:
                        pass
                    last_hb = now
                # One repeat = one sampled topology, shared by all schedulers.
                fidelity_bank = [generate_fidelity_list_random(n) for n in node_path_list]

                def network_generator(path_num, pair_idx):
                    return QuantumNetwork(path_num, fidelity_bank[pair_idx], noise_model)

                for name in scheduler_names:
                    per_pair_results, total_cost, per_pair_details = run_scheduler(
                        node_path_list=node_path_list,
                        importance_list=importance_list,
                        scheduler_name=name,
                        bounces=list(bounces),
                        C_total=int(C_total),
                        network_generator=network_generator,
                        return_details=True,
                    )
                    data[name][k].append({
                        "per_pair_results": per_pair_results,
                        "per_pair_details": per_pair_details,
                        "total_cost": total_cost,
                    })
        # payload["data"][scheduler_name][budget_index] is a list of `repeat` run dicts.
        payload = {"config": config, "budget_list": list(budget_list), "data": data}
        # Atomic write: dump to a temp file, then rename over the final path.
        tmp = cache_path + ".tmp"
        with open(tmp, "wb") as f:
            pickle.dump(payload, f, protocol=pickle.HIGHEST_PROTOCOL)
        os.replace(tmp, cache_path)
        return payload
    finally:
        if got_lock:
            try:
                os.remove(lock_path)
            except FileNotFoundError:
                pass
# =========================
# 1) Accuracy: mean only (no CI band)
# =========================
def plot_accuracy_vs_budget(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10,
    verbose=True, print_every=1,
):
    file_name = f"plot_accuracy_vs_budget_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"accs": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_results = run["per_pair_results"]
                acc = float(np.mean([1.0 if c else 0.0 for (c, _cost, _bf) in per_pair_results])) if per_pair_results else 0.0
                results[name]["accs"][k].append(acc)
    # plot
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, data in results.items():
        avg_accs = [float(np.mean(v)) if v else 0.0 for v in data["accs"]]
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, avg_accs, linewidth=2.0, label=label)
    ax.set_xlabel("Total Budget (C)")
    ax.set_ylabel("Average Correctness")
    ax.grid(True)
    ax.legend(title="Scheduler", fontsize=14, title_fontsize=18)
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 2) Value vs Used (x = mean of actually used cost)
# =========================
def plot_value_vs_used(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10,
    verbose=True, print_every=1,
):
    file_name = f"plot_value_vs_used_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"values": [[] for _ in budget_list], "costs": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                total_cost = int(run["total_cost"])
                # value = Σ_d I_d Σ_l est(d,l) * alloc(d,l)
                value = 0.0
                for d, det in enumerate(per_pair_details):
                    alloc = det.get("alloc_by_path", {})
                    est = det.get("est_fid_by_path", {})
                    inner = sum(float(est.get(l, 0.0)) * int(b) for l, b in alloc.items())
                    I = float(importance_list[d]) if d < len(importance_list) else 1.0
                    value += I * inner
                results[name]["values"][k].append(float(value))
                results[name]["costs"][k].append(total_cost)
    # plot
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    for name, dat in results.items():
        xs = [float(np.mean(v)) if v else 0.0 for v in dat["costs"]]
        ys = [float(np.mean(v)) if v else 0.0 for v in dat["values"]]
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, ys, linewidth=2.0, marker="o", label=label)
    ax.set_xlabel("Total Measured Cost (used)")
    ax.set_ylabel("Total Value (Σ I_d Σ f̂_{d,l}·B_{d,l})")
    ax.grid(True)
    ax.legend(title="Scheduler")
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 3) Value vs Budget target (x = target budget)
# =========================
def plot_value_vs_budget_target(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10,
    verbose=True, print_every=1,
):
    file_name = f"plot_value_vs_budget_target_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"values": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                value = 0.0
                for d, det in enumerate(per_pair_details):
                    alloc = det.get("alloc_by_path", {})
                    est = det.get("est_fid_by_path", {})
                    inner = sum(float(est.get(l, 0.0)) * int(b) for l, b in alloc.items())
                    I = float(importance_list[d]) if d < len(importance_list) else 1.0
                    value += I * inner
                results[name]["values"][k].append(float(value))
    # plot
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, dat in results.items():
        ys = [float(np.mean(v)) if v else 0.0 for v in dat["values"]]
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, ys, linewidth=2.0, marker="o", label=label)
    ax.set_xlabel("Budget (target)")
    ax.set_ylabel("Total Value (Σ I_d Σ f̂_{d,l}·B_{d,l})")
    ax.grid(True)
    ax.legend(title="Scheduler")
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 4) Width (UB - LB), unweighted: sum over all links
# =========================
def plot_widthsum_alllinks_vs_budget(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10, delta=0.1,
    verbose=True, print_every=1,
):
    file_name = f"plot_widthsum_alllinks_vs_budget_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"sums": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                v = sum_widths_all_links(per_pair_details, delta=delta)
                results[name]["sums"][k].append(v)
    # plot (mean ± 95% CI)
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, dat in results.items():
        means, halfs = [], []
        for vals in dat["sums"]:
            m, h = mean_ci95(vals)
            means.append(m)
            halfs.append(h)
        means = np.asarray(means)
        halfs = np.asarray(halfs)
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, means, linewidth=2.0, marker="o", label=label)
        ax.fill_between(xs, means - halfs, means + halfs, alpha=0.25)
    ax.set_xlabel("Budget (target)")
    ax.set_ylabel("Sum of (UB - LB) over all links")
    ax.grid(True)
    ax.legend(title="Scheduler")
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 5) Width (UB - LB), unweighted: sum over pairs of the per-pair minimum width
# =========================
def plot_minwidthsum_perpair_vs_budget(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10, delta=0.1,
    verbose=True, print_every=1,
):
    file_name = f"plot_minwidthsum_perpair_vs_budget_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"sums": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                v = sum_minwidths_perpair(per_pair_details, delta=delta)
                results[name]["sums"][k].append(v)
    # plot (mean ± 95% CI)
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, dat in results.items():
        means, halfs = [], []
        for vals in dat["sums"]:
            m, h = mean_ci95(vals)
            means.append(m)
            halfs.append(h)
        means = np.asarray(means)
        halfs = np.asarray(halfs)
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, means, linewidth=2.0, marker="o", label=label)
        ax.fill_between(xs, means - halfs, means + halfs, alpha=0.25)
    ax.set_xlabel("Budget (target)")
    ax.set_ylabel("Sum over pairs of min (UB - LB)")
    ax.grid(True)
    ax.legend(title="Scheduler")
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 6) Width (UB - LB), weighted: sum of I_d · width over all links
# =========================
def plot_widthsum_alllinks_weighted_vs_budget(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10, delta=0.1,
    verbose=True, print_every=1,
):
    file_name = f"plot_widthsum_alllinks_weighted_vs_budget_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"sums": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                v = sum_weighted_widths_all_links(per_pair_details, importance_list, delta=delta)
                results[name]["sums"][k].append(v)
    # plot (mean ± 95% CI)
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, dat in results.items():
        means, halfs = [], []
        for vals in dat["sums"]:
            m, h = mean_ci95(vals)
            means.append(m)
            halfs.append(h)
        means = np.asarray(means)
        halfs = np.asarray(halfs)
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, means, linewidth=2.0, marker="o", label=label)
        ax.fill_between(xs, means - halfs, means + halfs, alpha=0.25)
    ax.set_xlabel("Budget (target)")
    ax.set_ylabel("Weighted Sum of Widths Σ_d Σ_l I_d (UB - LB)")
    ax.grid(True)
    ax.legend(title="Scheduler", fontsize=14, title_fontsize=18)
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")
# =========================
# 7) Width (UB - LB), weighted: sum over pairs of I_d · (per-pair minimum width)
# =========================
def plot_minwidthsum_perpair_weighted_vs_budget(
    budget_list, scheduler_names, noise_model,
    node_path_list, importance_list,
    bounces=(1, 2, 3, 4), repeat=10, delta=0.1,
    verbose=True, print_every=1,
):
    file_name = f"plot_minwidthsum_perpair_weighted_vs_budget_{noise_model}"
    root_dir = os.path.dirname(os.path.abspath(__file__))
    outdir = os.path.join(root_dir, "outputs")
    os.makedirs(outdir, exist_ok=True)
    payload = _run_or_load_shared_sweep(
        budget_list, scheduler_names, noise_model,
        node_path_list, importance_list,
        bounces=bounces, repeat=repeat,
        verbose=verbose, print_every=print_every,
    )
    results = {name: {"sums": [[] for _ in budget_list]} for name in scheduler_names}
    for name in scheduler_names:
        for k in range(len(budget_list)):
            for run in payload["data"][name][k]:
                per_pair_details = run["per_pair_details"]
                v = sum_weighted_min_widths_perpair(per_pair_details, importance_list, delta=delta)
                results[name]["sums"][k].append(v)
    # plot (mean ± 95% CI)
    plt.rc("axes", prop_cycle=default_cycler)
    fig, ax = plt.subplots()
    xs = list(budget_list)
    for name, dat in results.items():
        means, halfs = [], []
        for vals in dat["sums"]:
            m, h = mean_ci95(vals)
            means.append(m)
            halfs.append(h)
        means = np.asarray(means)
        halfs = np.asarray(halfs)
        label = name.replace("Vanilla NB", "VanillaNB").replace("Succ. Elim. NB", "SuccElimNB")
        ax.plot(xs, means, linewidth=2.0, marker="o", label=label)
        ax.fill_between(xs, means - halfs, means + halfs, alpha=0.25)
    ax.set_xlabel("Budget (target)")
    ax.set_ylabel("Weighted sum over pairs of min (UB - LB) (× I_d)")
    ax.grid(True)
    ax.legend(title="Scheduler")
    plt.tight_layout()
    pdf = os.path.join(outdir, f"{file_name}.pdf")
    plt.savefig(pdf)
    if shutil.which("pdfcrop"):
        os.system(f"pdfcrop {pdf} {pdf}")
    _log(f"Saved: {pdf}")