Fix marginalize problems
nkanazawa1989 committed Jan 9, 2024
1 parent 288d18f commit a3abf4d
Showing 3 changed files with 135 additions and 341 deletions.
qiskit_experiments/framework/base_analysis.py (2 changes: 1 addition & 1 deletion)
@@ -167,7 +167,7 @@ def run_analysis(expdata: ExperimentData):
             # Clearing previous analysis data
             experiment_data._clear_results()
 
-            if not expdata.data():
+            if not expdata.data() and not expdata.child_data():
                 warnings.warn("ExperimentData object data is empty.\n")
 
             # Making new analysis
qiskit_experiments/framework/composite/composite_analysis.py (209 changes: 16 additions & 193 deletions)
@@ -120,216 +120,39 @@ def copy(self):
         ret._analyses = [analysis.copy() for analysis in ret._analyses]
         return ret
 
-    def run(
-        self,
-        experiment_data: ExperimentData,
-        replace_results: bool = False,
-        **options,
-    ) -> ExperimentData:
-        # Make a new copy of experiment data if not updating results
-        if not replace_results and _requires_copy(experiment_data):
-            experiment_data = experiment_data.copy()
-
-        if not self._flatten_results:
-            # Initialize child components if they are not initialized
-            # This only needs to be done if results are not being flattened
-            self._add_child_data(experiment_data)
-
-        # Run analysis with replace_results = True since we have already
-        # created the copy if it was required
-        return super().run(experiment_data, replace_results=True, **options)
-
     def _run_analysis(self, experiment_data: ExperimentData):
-        # Return list of experiment data containers for each component experiment
-        # containing the marginalized data from the composite experiment
-        component_expdata = []
-        if not self._flatten_results:
-            # Retrieve child data for component experiments for updating
-            component_index = experiment_data.metadata.get("component_child_index", [])
-            if not component_index:
-                raise AnalysisError("Unable to extract component child experiment data")
-            component_expdata = [experiment_data.child_data(i) for i in component_index]
-        else:
-            # Initialize temporary ExperimentData containers for
-            # each component experiment to analysis on. These will
-            # not be saved but results and figures will be collected
-            # from them
-            component_expdata = self._initialize_component_experiment_data(experiment_data)
-
-        marginalized_data = self._marginalized_component_data(experiment_data.data())
-
-        for sub_expdata, sub_data in zip(component_expdata, marginalized_data):
-            # Clear any previously stored data and add marginalized data
-            sub_expdata._result_data.clear()
-            sub_expdata.add_data(sub_data)
-
-        # Run the component analysis on each component data
-        for i, sub_expdata in enumerate(component_expdata):
+        child_data = experiment_data.child_data()
+
+        if len(self._analyses) != len(child_data):
+            # Child data is automatically created when composite result data is added.
+            # Validate that child data size matches with number of analysis entries.
+            raise RuntimeError(
+                "Number of sub-analysis and child data don't match: "
+                f"{len(self._analyses)} != {len(child_data)}. "
+                "Please check if the composite experiment and analysis are properly instantiated."
+            )
+
+        for sub_analysis, sub_data in zip(self._analyses, child_data):
             # Since copy for replace result is handled at the parent level
             # we always run with replace result on component analysis
-            self._analyses[i].run(sub_expdata, replace_results=True)
+            sub_analysis.run(sub_data, replace_results=True)
 
         # Analysis is running in parallel so we add loop to wait
         # for all component analysis to finish before returning
         # the parent experiment analysis results
-        for sub_expdata in component_expdata:
-            sub_expdata.block_for_results()
+        for sub_data in child_data:
+            sub_data.block_for_results()
 
         # Optionally flatten results from all component experiments
         # for adding to the main experiment data container
         if self._flatten_results:
-            analysis_results, figures = self._combine_results(component_expdata)
+            analysis_results, figures = self._combine_results(child_data)
             for res in analysis_results:
                 # Override experiment ID because entries are flattened
                 res.experiment_id = experiment_data.experiment_id
             return analysis_results, figures
 
         return [], []
 
-    def _marginalized_component_data(self, composite_data: List[Dict]) -> List[List[Dict]]:
-        """Return marginalized data for component experiments.
-
-        Args:
-            composite_data: a list of composite experiment circuit data.
-
-        Returns:
-            A List of lists of marginalized circuit data for each component
-            experiment in the composite experiment.
-        """
-        # Marginalize data
-        marginalized_data = {}
-        for datum in composite_data:
-            metadata = datum.get("metadata", {})
-
-            # Add marginalized data to sub experiments
-            if "composite_clbits" in metadata:
-                composite_clbits = metadata["composite_clbits"]
-            else:
-                composite_clbits = None
-
-            # Pre-process the memory if any to avoid redundant calls to format_counts_memory
-            f_memory = self._format_memory(datum, composite_clbits)
-
-            for i, index in enumerate(metadata["composite_index"]):
-                if index not in marginalized_data:
-                    # Initialize data list for marginalized
-                    marginalized_data[index] = []
-                sub_data = {"metadata": metadata["composite_metadata"][i]}
-                if "counts" in datum:
-                    if composite_clbits is not None:
-                        sub_data["counts"] = marginal_distribution(
-                            counts=datum["counts"],
-                            indices=composite_clbits[i],
-                        )
-                    else:
-                        sub_data["counts"] = datum["counts"]
-                if "memory" in datum:
-                    if composite_clbits is not None:
-                        # level 2
-                        if f_memory is not None:
-                            idx = slice(
-                                -1 - composite_clbits[i][-1], -composite_clbits[i][0] or None
-                            )
-                            sub_data["memory"] = [shot[idx] for shot in f_memory]
-                        # level 1
-                        else:
-                            mem = np.array(datum["memory"])
-
-                            # Averaged level 1 data
-                            if len(mem.shape) == 2:
-                                sub_data["memory"] = mem[composite_clbits[i]].tolist()
-                            # Single-shot level 1 data
-                            if len(mem.shape) == 3:
-                                sub_data["memory"] = mem[:, composite_clbits[i]].tolist()
-                    else:
-                        sub_data["memory"] = datum["memory"]
-                marginalized_data[index].append(sub_data)
-
-        # Sort by index
-        return [marginalized_data[i] for i in sorted(marginalized_data.keys())]
-
-    @staticmethod
-    def _format_memory(datum: Dict, composite_clbits: List):
-        """A helper method to convert level 2 memory (if it exists) to bit-string format."""
-        f_memory = None
-        if (
-            "memory" in datum
-            and composite_clbits is not None
-            and isinstance(datum["memory"][0], str)
-        ):
-            num_cbits = 1 + max(cbit for cbit_list in composite_clbits for cbit in cbit_list)
-            header = {"memory_slots": num_cbits}
-            f_memory = list(format_counts_memory(shot, header) for shot in datum["memory"])
-
-        return f_memory
-
-    def _add_child_data(self, experiment_data: ExperimentData):
-        """Save empty component experiment data as child data.
-
-        This will initialize empty ExperimentData objects for each component
-        experiment and add them as child data to the main composite experiment
-        ExperimentData container container for saving.
-
-        Args:
-            experiment_data: a composite experiment experiment data container.
-        """
-        component_index = experiment_data.metadata.get("component_child_index", [])
-        if component_index:
-            # Child components are already initialized
-            return
-
-        # Initialize the component experiment data containers and add them
-        # as child data to the current experiment data
-        child_components = self._initialize_component_experiment_data(experiment_data)
-        start_index = len(experiment_data.child_data())
-        for i, subdata in enumerate(child_components):
-            experiment_data.add_child_data(subdata)
-            component_index.append(start_index + i)
-
-        # Store the indices of the added child data in metadata
-        experiment_data.metadata["component_child_index"] = component_index
-
-    def _initialize_component_experiment_data(
-        self, experiment_data: ExperimentData
-    ) -> List[ExperimentData]:
-        """Initialize empty experiment data containers for component experiments.
-
-        Args:
-            experiment_data: a composite experiment experiment data container.
-
-        Returns:
-            The list of experiment data containers for each component experiment
-            containing the component metadata, and tags, share level, and
-            auto save settings of the composite experiment.
-        """
-        # Extract component experiment types and metadata so they can be
-        # added to the component experiment data containers
-        metadata = experiment_data.metadata
-        num_components = len(self._analyses)
-        experiment_types = metadata.get("component_types", [None] * num_components)
-        component_metadata = metadata.get("component_metadata", [{}] * num_components)
-
-        # Create component experiments and set the backend and
-        # metadata for the components
-        component_expdata = []
-        for i, _ in enumerate(self._analyses):
-            subdata = ExperimentData(backend=experiment_data.backend)
-            subdata.experiment_type = experiment_types[i]
-            subdata.metadata.update(component_metadata[i])
-
-            if self._flatten_results:
-                # Explicitly set auto_save to false so the temporary
-                # data can't accidentally be saved
-                subdata.auto_save = False
-            else:
-                # Copy tags, share_level and auto_save from the parent
-                # experiment data if results are not being flattened.
-                subdata.tags = experiment_data.tags
-                subdata.share_level = experiment_data.share_level
-                subdata.auto_save = experiment_data.auto_save
-
-            component_expdata.append(subdata)
-
-        return component_expdata
-
     def _set_flatten_results(self):
         """Recursively set flatten_results to True for all composite components."""
         self._flatten_results = True
(Diff of the third changed file did not load and is not shown.)
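For context on the commit title: the counts marginalization that the deleted _marginalized_component_data helper performed, and which (as the added comment in _run_analysis notes) now happens when composite result data is added and child data is created, reduces to applying qiskit.result.marginal_distribution to each component's clbits. The sketch below is illustrative only; the counts and metadata values are invented and are not taken from this commit.

from qiskit.result import marginal_distribution

# Invented composite result for two component experiments measured on clbit 0 and clbit 1.
datum = {
    "counts": {"00": 48, "01": 52, "10": 47, "11": 53},
    "metadata": {
        "composite_index": [0, 1],
        "composite_clbits": [[0], [1]],
        "composite_metadata": [{"xval": 0.1}, {"xval": 0.2}],
    },
}

# Mirrors the loop in the removed helper: split the joint counts onto each
# component's classical bits and pair them with that component's metadata.
for i, index in enumerate(datum["metadata"]["composite_index"]):
    sub_data = {
        "metadata": datum["metadata"]["composite_metadata"][i],
        "counts": marginal_distribution(
            counts=datum["counts"],
            indices=datum["metadata"]["composite_clbits"][i],
        ),
    }
    print(index, sub_data["counts"])
# 0 {'0': 95, '1': 105}   marginal over clbit 0
# 1 {'0': 100, '1': 100}  marginal over clbit 1

Each component experiment thus receives only the counts over its own classical bits, which is what the per-component analyses consume.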
