1. Backend
Load IBM Cloud credentials and select the backend 'ibm_torino'.
2. Calibration-Based Qubit Selection
Load the latest calibration CSV.
Rank physical qubits by ascending single‑qubit √X error, then descending T_1 and T_2.
Select the best 60 qubits:
Q_60 = argmax_(∣S∣=60) ∑_(q∈S) [−α ϵ_X^(q) + β T_1^(q) + γ T_2^(q)], α, β, γ > 0.
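The script below realizes this criterion with a lexicographic sort (ascending √X error, then descending T_1 and T_2) rather than a tuned weighted sum. Purely as an illustration, an explicit weighted score with hypothetical weights might look like the sketch below; α must rescale the ~10⁻⁴ error values toward the ~10² µs scale of T_1 and T_2, or coherence times dominate the score.
import pandas as pd

def weighted_selection(df: pd.DataFrame, n: int = 60,
                       alpha: float = 1e6, beta: float = 1.0, gamma: float = 1.0) -> list[int]:
    # Hypothetical weights, for illustration only: alpha lifts the ~1e-4
    # sqrt(X) error onto the ~1e2 us scale of T1/T2.
    score = (-alpha * df["√x (sx) error"]
             + beta * df["T1 (us)"]
             + gamma * df["T2 (us)"])
    return df.assign(score=score).nlargest(n, "score")["Qubit"].tolist()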
3. Registers
Quantum register:
Q = {q_0, q_1, …, q_59} (data qubits form a 1‑D chain)
Classical register:
C = {c_0, c_1, …, c_59} (stores measurement outcomes)
4. Vacuum Preparation
Start in the ground state:
∣vac⟩ = ∣0⟩^(⊗60)
5. Mirror‑Motion Schedule (Four Sudden Quenches)
Model the cavity Hamiltonian:
H(t) = 1/2 ∑_(i=0)^(59) Δ_i Z_i + 1/2 ∑_(i=0)^(58) J_i(t) Z_i Z_(i+1),
where a moving mirror modulates the effective couplings J_i(t). Here, Δ_i = 0 for all qubits to isolate the effect of the time-dependent couplings J_i(t).
Encode the time profile with four step values:
Θ = {θ_0, θ_1, θ_2, θ_3} = {0, 0.3π, 0.6π, 0.9π}
For each step k apply a ZZ‑layer of two‑qubit unitaries:
U_(ZZ)(θ_k) = exp[−i (θ_k/2) Z ⊗ Z], applied to every neighbouring pair (q_i, q_(i+1)), i = 0…58
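As a quick standalone sanity check (not part of the experiment script), Qiskit's RZZGate implements exactly this unitary:
import numpy as np
from scipy.linalg import expm
from qiskit.circuit.library import RZZGate
from qiskit.quantum_info import Operator

theta = 0.3 * np.pi
ZZ = np.kron(np.diag([1, -1]), np.diag([1, -1]))  # Z ⊗ Z
assert np.allclose(Operator(RZZGate(theta)).data, expm(-1j * (theta / 2) * ZZ))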
6. Global Measurement
Measure every qubit, mapping q_i → c_i.
Each shot yields a 60‑bit string b_59…b_0.
7. Detecting Virtual Photon Pairs
Interpret mirror‑symmetric qubit positions q_i and q_(59−i) as the two ends of a putative photon pair.
A shot creates a pair if:
∃i ∈ {0, …, 29} : b_i = b_(59 − i) = 1
Count such events over all shots:
N_pairs = ∑_(s∈shots) 1[pair_created(s)]
The pair‑creation rate is:
R = N_pairs/N_shots
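A toy example with hypothetical 6-qubit counts (b_5…b_0) shows the bookkeeping; only the first string contains a symmetric |11⟩ pair, so R = 5/8:
def toy_pair_created(bits: str) -> bool:
    # True if any mirror-symmetric pair (i, n-1-i) reads |11⟩.
    n = len(bits)
    return any(bits[i] == "1" and bits[n - 1 - i] == "1" for i in range(n // 2))

toy_counts = {"100001": 5, "000001": 2, "010000": 1}  # hypothetical shots
rate = sum(f for b, f in toy_counts.items()
           if toy_pair_created(b)) / sum(toy_counts.values())
print(rate)  # 0.625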
8. Execution
Compile the circuit and run with N_shots = 8192.
9. Data and JSON
Retrieve the counts and save the raw data, together with the derived observables, to a JSON file.
Code:
# Main circuit
# Imports
import json, logging, pandas as pd
from pathlib import Path
from math import pi
from collections import Counter
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister, transpile
from qiskit.circuit.library import RZZGate
from qiskit_ibm_runtime import QiskitRuntimeService, SamplerV2
from qiskit.visualization import plot_histogram
import matplotlib.pyplot as plt
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
# IBMQ
TOKEN = "IBMQ_KEY"  # placeholder, supply your IBM Cloud API key
INSTANCE = "IBMQ_CRN"  # placeholder, supply your service instance CRN
BACKEND_NAME = "ibm_torino"
service = QiskitRuntimeService(
channel="ibm_cloud",
token=TOKEN,
instance=INSTANCE,
)
backend = service.backend(BACKEND_NAME)
# Qubit quality based device mapping
def best_qubits(csv_path: str, n: int) -> list[int]:
"""Return a list of the `n` best physical qubits by √X‑error → T1 → T2."""
    df = pd.read_csv(csv_path)
df.columns = df.columns.str.strip()
order = df.sort_values(
["√x (sx) error", "T1 (us)", "T2 (us)"],
ascending=[True, False, False],
)
winners = order["Qubit"].head(n).tolist()
log .info("Selected %s best qubits: %s", n, winners)
return winners
# Most recent calibration data
CAL_CSV = Path("/Users/steventippeconnic/Downloads/ibm_torino_calibrations_2025-05-10T17_59_43Z.csv")
PHYSICAL = best_qubits(CAL_CSV, 60)
# Circuit
q_reg = QuantumRegister(60, "q")
c_reg = ClassicalRegister(60, "c")
qc = QuantumCircuit(q_reg, c_reg, name="DynamicCasimirChain")
# Prepare the vacuum |0…0⟩ (the all-zeros initial state needs no gates).
# Four-step 'mirror' schedule: sudden changes in nearest-neighbour coupling θ_k.
theta_schedule = [0.0, 0.3 * pi, 0.6 * pi, 0.9 * pi]
for k, theta in enumerate(theta_schedule):
log.debug("Applying ZZ layer %d with θ = %.3f rad", k, theta)
for i in range(59):
qc.append(RZZGate(theta), [q_reg[i], q_reg[i + 1]])
qc.barrier(label=f"mirror@step_{k}")
# Measure every qubit
qc.measure(q_reg, c_reg)
# Transpilation
trans = transpile(
qc,
backend=backend,
initial_layout=PHYSICAL,
optimization_level=3,
)
# Execution
SHOTS = 8192
sampler = SamplerV2(mode=backend)
job = sampler.run([trans], shots=SHOTS)
result = job.result()
creg_name = trans.cregs[0].name  # "c"
counts = getattr(result[0].data, creg_name).get_counts()
# Derived observable (symmetric‑pair excitations)
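# Note: Qiskit bitstrings are returned with classical bit c_59 leftmost
# (b_59…b_0). The symmetric-pair test on positions (i, 59-i) is invariant
# under string reversal, so the counts keys need no reordering.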
def pair_created(bitstr: str, length: int = 60) -> bool:
"""Return True if *any* symmetric pair (i, L‑1‑i) is in state |11⟩."""
return any(bitstr[i] == "1" and bitstr[length - 1 - i] == "1" for i in range(length // 2))
pair_events = sum(freq for bits, freq in counts.items() if pair_created(bits))
pair_creation_rate = pair_events / SHOTS
log .info("Vacuum‑pair creation rate: %.5f", pair_creation_rate)
# Correlation count per individual pair
def pairwise_counts(raw: Counter, length: int = 60):
pc = Counter()
for bits, freq in raw.items():
for i in range(length // 2):
pair_label = f"{i}-{length-1-i}"
if bits[i] == bits[length - 1 - i] == "1":
pc[pair_label] += freq
return dict(pc)
pairwise = pairwise_counts(counts)
# Json
out = {
"experiment_name": "Dynamic Casimir Radiation in Tunable Cavity Chain",
"theta_schedule_rad": theta_schedule,
"raw_counts": dict(counts),
"pair_creation_rate": pair_creation_rate,
"pairwise_pair_counts": pairwise,
}
JSON_PATH = Path("/Users/steventippeconnic/Documents/Dynamic_Casimir_Cavity_Chain_0.json")
with JSON_PATH.open("w") as fp:
json.dump(out, fp, indent=4)
log .info("Results saved → %s", JSON_PATH)
# Visual
plot_histogram(counts, title="Dynamic Casimir Chain – Measured Bitstrings")
plt.show()
# End
/////////////////////////////////////////////////////////////////
# Code for all visuals from experiment JSON
# Imports
import json
from pathlib import Path
from collections import Counter
import numpy as np
import matplotlib.pyplot as plt
# Load data
file_path = Path("/Users/steventippeconnic/Documents/Dynamic_Casimir_Cavity_Chain_0.json")
data = json.loads(file_path.read_text())
counts_raw = Counter(data["raw_counts"])
chain_len = 60
shots = sum(counts_raw.values())
pairs = [(i, chain_len - 1 - i) for i in range(chain_len // 2)]
L = chain_len  # alias for the chain length, used by the analysis below
# Prepare histograms and tallies
pairs_per_shot = Counter()
hamming_hist = Counter()
pair_counts = Counter()
pair_corr = np.zeros((chain_len // 2, chain_len // 2))
for bits, freq in counts_raw.items():
active_pairs = []
for idx, (i, j) in enumerate(pairs):
if bits[i] == bits[j] == "1":
pair_counts[idx] += freq
active_pairs.append(idx)
pairs_per_shot[len(active_pairs)] += freq
for a in active_pairs:
pair_corr[a, a] += freq
for b in active_pairs:
if a != b:
pair_corr[a, b] += freq
hamming_hist[bits.count("1")] += freq
pair_corr /= shots
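# Diagonal entries of pair_corr are marginal pair probabilities; off-diagonal
# entries are joint probabilities for two distinct pairs firing in one shot.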
# Pair multiplicity histogram
plt.figure()
plt.bar(pairs_per_shot.keys(), pairs_per_shot.values())
plt.xlabel("Symmetric pair count per shot")
plt.ylabel("Frequency")
plt.title("Pair multiplicity distribution")
plt.show()
# Spatial pair distribution
plt.figure()
indices, values = zip(*sorted(pair_counts.items()))
plt.bar(indices, values)
plt.xlabel("Pair index i and L minus 1 minus i")
plt.ylabel("Total events")
plt.title("Spatial distribution of created pairs")
plt.show()
# Pair correlation heat map
plt.figure()
plt.imshow(pair_corr, aspect="auto", interpolation="nearest")
plt.colorbar(label="Joint probability")
plt.xlabel("Pair j")
plt.ylabel("Pair i")
plt.title("Pair to pair correlation matrix")
plt.show()
# Hamming weight histogram
plt.figure()
plt.bar(hamming_hist.keys(), hamming_hist.values(), width=0.8)
plt.xlabel("Total excited qubits in shot")
plt.ylabel("Frequency")
plt.title("Excitation energy distribution")
plt.show()
# Helper structures
excitation_prob = np.zeros(L)
longest_cluster_hist = Counter()
side_diff_hist = Counter()
pair_joint = np.zeros((L // 2, L // 2))
pair_single = np.zeros(L // 2)
pairs = [(i, L - 1 - i) for i in range(L // 2)]
# Analyse each shot
for bitstring, freq in counts_raw.items():
bits = np.array(list(bitstring), dtype=int)
excitation_prob += freq * bits
# Longest consecutive cluster of ones
    runs = np.diff(np.where(np.concatenate(([bits[0]],
                                            bits[:-1] ^ bits[1:],
                                            [1])))[0])[::2]
    longest_cluster_hist[int(runs.max()) if runs.size else 0] += freq
# Left‑minus‑right excitation difference
left_ones = bits[:L // 2].sum()
right_ones = bits[L // 2:].sum()
side_diff_hist[left_ones - right_ones] += freq
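    # Caveat: the string's left half is classical bits c_59…c_30, i.e. the
    # high-index half of the chain, because of Qiskit's bit ordering.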
# Pair statistics
active_pairs = []
for idx, (i, j) in enumerate(pairs):
if bits[i] == bits[j] == 1:
pair_single[idx] += freq
active_pairs.append(idx)
for i in active_pairs:
for j in active_pairs:
pair_joint[i, j] += freq
excitation_prob /= shots
pair_single /= shots
pair_conditional = np.divide(pair_joint / shots,
pair_single[:, None],
out=np.zeros_like(pair_joint),
where=pair_single[:, None] > 0)
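# pair_conditional[i, j] estimates P(pair j | pair i); rows where pair i
# never fires (pair_single[i] == 0) are left as zeros by the `where` mask.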
# Per‑qubit excitation probability
plt.figure()
plt.plot(range(L), excitation_prob, marker="o")
plt.xlabel("Qubit index")
plt.ylabel("Probability qubit is 1")
plt.title("Per‑qubit excitation profile")
plt.show()
# Histogram of longest 1‑run per shot
plt.figure()
plt.bar(longest_cluster_hist.keys(), longest_cluster_hist.values())
plt.xlabel("Longest consecutive 1‑run length")
plt.ylabel("Frequency")
plt.title("Cluster size distribution")
plt.show()
# Conditional pair activation heat map
plt.figure()
plt.imshow(pair_conditional, aspect="auto", interpolation="nearest")
plt.colorbar(label="P(pair j | pair i)")
plt.xlabel("Pair j")
plt.ylabel("Pair i")
plt.title("Conditional activation probability matrix")
plt.show()
# Left‑minus‑right excitation balance
plt.figure()
plt.bar(side_diff_hist.keys(), side_diff_hist.values(), width=0.8)
plt.xlabel("Left half minus right half excitations")
plt.ylabel("Frequency")
plt.title("Spatial asymmetry of excitations")
plt.show()
# Accumulators
pair_single = np.zeros(L // 2)
pair_joint = np.zeros((L // 2, L // 2))
dist_accum = np.zeros(L // 2)
dist_counts = np.zeros(L // 2)
com_hist = Counter()
pair_energy_hist = Counter()
# Main sweep
for bits, freq in counts_raw.items():
    bit_arr = np.array([ch == "1" for ch in bits], dtype=int)
    # Symmetric pair data, computed once per shot
    active_pairs = []
    for p_idx, (i, j) in enumerate(pairs):
        if bit_arr[i] and bit_arr[j]:
            pair_single[p_idx] += freq
            active_pairs.append(p_idx)
    # Joint accumulator, one increment per ordered pair of active pairs
    for a in active_pairs:
        for b in active_pairs:
            pair_joint[a, b] += freq
            dist_accum[abs(a - b)] += freq
    # Denominator, one increment per *distance* present in this shot
    for d in {abs(a - b) for a in active_pairs for b in active_pairs}:
        dist_counts[d] += freq
    # Centre of mass of the excited qubits
    idx_excited = np.flatnonzero(bit_arr)
    if idx_excited.size:
        com_hist[int(round(idx_excited.mean()))] += freq
    # Energy (total excitations) vs pair multiplicity
    pair_energy_hist[(len(active_pairs), int(bit_arr.sum()))] += freq
pair_single /= shots
pair_joint /= shots
cov = pair_joint - np.outer(pair_single, pair_single)
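# cov[i, j] > 0 means pairs i and j fire together more often than independent
# marginals would predict; cov[i, j] < 0 signals mutual suppression.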
# Centre‑of‑mass histogram
plt.figure()
if com_hist:
x, y = zip(*sorted(com_hist.items()))
plt.bar(x, y)
plt.xlabel("Excitation centre of mass")
plt.ylabel("Frequency")
plt.title("Centre of mass of excitations")
plt.show()
# Correlation vs separation
sep = np.arange(1, L // 2)
corr_vs_d = np.where(dist_counts[1:] > 0,
dist_accum[1:] / dist_counts[1:],
0.0)
plt.figure()
plt.plot(sep, corr_vs_d, marker="o")
plt.xlabel("Pair‑pair separation")
plt.ylabel("Average joint probability")
plt.title("Correlation decay with separation")
plt.show()
# Covariance heat map
plt.figure()
plt.imshow(cov, cmap="seismic", aspect="auto", interpolation="nearest")
plt.colorbar(label="Covariance")
plt.xlabel("Pair j")
plt.ylabel("Pair i")
plt.title("Pair‑pair covariance")
plt.show()
# 2‑D histogram pair count vs energy
max_pairs = max(pc for pc, _ in pair_energy_hist.keys())
max_energy = max(en for _, en in pair_energy_hist.keys())
grid = np.zeros((max_pairs + 1, max_energy + 1))
for (pc, en), f in pair_energy_hist.items():
grid[pc, en] = f
plt.figure()
plt.imshow(grid, origin="lower", aspect="auto", interpolation="nearest")
plt.colorbar(label="Frequency")
plt.xlabel("Total excited qubits")
plt.ylabel("Number of symmetric pairs")
plt.title("Energy vs pair multiplicity")
plt.show()
# End