diff --git a/src/auspex/data_format.py b/src/auspex/data_format.py
index 5b750fcc0..f0bfdbf9c 100644
--- a/src/auspex/data_format.py
+++ b/src/auspex/data_format.py
@@ -10,6 +10,9 @@
 import numpy as np
 import os, os.path
 import json
+import datetime
+
+AUSPEX_CONTAINER_VERSION = 1.1
 
 class AuspexDataContainer(object):
     """A container for Auspex data. Data is stored as `datasets` which may be of any dimension. These are in turn
@@ -27,7 +30,7 @@ class AuspexDataContainer(object):
                 | - DemodulatedData
     """
 
-    def __init__(self, base_path, mode='a', open_all=True):
+    def __init__(self, base_path, mode='a', open_all=True, metadata=None):
        """Initialize the data container.

        Args:
@@ -40,7 +43,8 @@ def __init__(self, base_path, mode='a', open_all=True):
         self.open_mmaps = []
         self.mode = mode
         self._create()
-
+        self.metadata = metadata
+
         if open_all:
             self.open_all()
@@ -58,6 +62,22 @@ def _create(self):
         os.makedirs(self.base_path, exist_ok=True)
         self.groups = {}
 
+    @property
+    def metadata(self):
+        filename = os.path.join(self.base_path,'meta.json')
+        assert os.path.exists(filename), "Container metadata does not exist; this is probably an old-format .auspex file, which is no longer supported."
+        with open(filename, 'r') as f:
+            a = json.load(f)
+            a['date'] = datetime.datetime.strptime(a['date'], '%Y-%m-%d %H:%M:%S.%f')
+            return a
+
+    @metadata.setter
+    def metadata(self, value):
+        filename = os.path.join(self.base_path,'meta.json')
+        meta = {'version': AUSPEX_CONTAINER_VERSION, 'date': str(datetime.datetime.now()), 'metadata': value}
+        with open(filename, 'w') as f:
+            json.dump(meta, f)
+
     def new_group(self, groupname):
         """Add a group to the data container.
@@ -94,14 +114,7 @@ def _create_meta(self, groupname, datasetname, descriptor):
         filename = os.path.join(self.base_path,groupname,datasetname+'_meta.json')
         assert not os.path.exists(filename), "Existing dataset metafile found. Did you want to open instead?"
         meta = {'shape': tuple(descriptor.dims()), 'dtype': np.dtype(descriptor.dtype).str}
-        meta['axes'] = {a.name: a.points.tolist() for a in descriptor.axes}
-        meta['units'] = {a.name: a.unit for a in descriptor.axes}
-        meta['meta_data'] = {}
-        for a in descriptor.axes:
-            if a.metadata is not None:
-                meta['meta_data'][a.name] = a.metadata
-            else:
-                meta['meta_data'][a.name] = None
+        meta['axes'] = [{'name': a.name, 'unit': a.unit, 'points': a.points.tolist(), 'metadata': a.metadata} for a in descriptor.axes]
         meta['filename'] = os.path.join(self.base_path,groupname,datasetname)
         with open(filename, 'w') as f:
             json.dump(meta, f)
@@ -132,14 +145,16 @@ def open_all(self):
         """
         ret = {}
         for groupname in os.listdir(self.base_path):
-            ret[groupname] = {}
-            self.groups[groupname] = {}
-            for datasetname in os.listdir(os.path.join(self.base_path,groupname)):
-                if datasetname[-4:] == '.dat':
-                    dat = self.open_dataset(groupname, datasetname[:-4])
-                    self.groups[groupname][datasetname[:-4]] = dat
-                    ret[groupname][datasetname[:-4]] = dat
+            if os.path.isdir(os.path.join(self.base_path,groupname)):
+                ret[groupname] = {}
+                self.groups[groupname] = {}
+                for datasetname in os.listdir(os.path.join(self.base_path,groupname)):
+                    if datasetname[-4:] == '.dat':
+                        dat = self.open_dataset(groupname, datasetname[:-4])
+                        self.groups[groupname][datasetname[:-4]] = dat
+                        ret[groupname][datasetname[:-4]] = dat
         return ret
+
     def open_dataset(self, groupname, datasetname):
         """Open a particular dataset stored in this DataContainer.
@@ -164,8 +179,8 @@ def open_dataset(self, groupname, datasetname):
         del mm
 
         desc = DataStreamDescriptor(meta['dtype'])
-        for name, points in meta['axes'].items():
-            ax = DataAxis(name, points, unit=meta['units'][name])
-            ax.metadata = meta['meta_data'][name]
+        for a in meta['axes'][::-1]:
+            ax = DataAxis(a['name'], a['points'], unit=a['unit'])
+            ax.metadata = a['metadata']
             desc.add_axis(ax)
 
         return data, desc, self.base_path.replace('.auspex', '')
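The new `meta.json` sidecar makes container-level metadata a first-class property. A sketch of the round trip (the file name is illustrative); note that `__init__` always invokes the setter, so merely re-opening an existing container rewrites its `meta.json`, and the getter's `strptime` format assumes the timestamp carries microseconds:

```python
from auspex.data_format import AuspexDataContainer, AUSPEX_CONTAINER_VERSION

# Creating (or re-opening) a container stamps version, date, and user metadata.
container = AuspexDataContainer("demo-0000.auspex", metadata={'sample': 'A17'})

m = container.metadata          # reads meta.json back
assert m['version'] == AUSPEX_CONTAINER_VERSION
print(m['date'])                # parsed into a datetime.datetime
print(m['metadata'])            # {'sample': 'A17'}
```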
diff --git a/src/auspex/experiment.py b/src/auspex/experiment.py
index 9a68a5458..23931e26c 100644
--- a/src/auspex/experiment.py
+++ b/src/auspex/experiment.py
@@ -350,33 +350,9 @@ def sweep(self):
                     else:
                         self.progressbars[axis].goto(axis.step)
 
-            if self.sweeper.is_adaptive():
-                # Add the new tuples to the stream descriptors
-                for oc in self.output_connectors.values():
-                    # Obtain the lists of values for any fixed
-                    # DataAxes and append them to them to the sweep_values
-                    # in preperation for finding all combinations.
-                    vals = [a for a in oc.descriptor.data_axis_values()]
-                    if sweep_values:
-                        vals = [[v] for v in sweep_values] + vals
-                    # Find all coordinate tuples and update the list of
-                    # tuples that the experiment has probed.
-                    nested_list = list(itertools.product(*vals))
-                    flattened_list = [tuple((val for sublist in line for val in sublist)) for line in nested_list]
-                    oc.descriptor.visited_tuples = oc.descriptor.visited_tuples + flattened_list
-
-                    # Since the filters are in separate processes, pass them the same
-                    # information so that they may perform the same operations.
-                    oc.push_event("new_tuples", (axis_names, sweep_values,))
-
             # Run the procedure
             self.run()
 
-            # See if the axes want to extend themselves. They will push updates
-            # directly to the output_connecters as messages that will be passed
-            # through the filter pipeline.
-            self.sweeper.check_for_refinement(self.output_connectors)
-
             # Finish up, checking to see whether we've received all of our data
             if self.sweeper.done():
                 self.declare_done()
@@ -670,8 +646,8 @@ def add_axis(self, axis, position=0):
                 logger.debug("Adding axis %s to connector %s.", axis, oc.name)
                 oc.descriptor.add_axis(axis, position=position)
 
-    def add_sweep(self, parameters, sweep_list, refine_func=None, callback_func=None, metadata=None):
-        ax = SweepAxis(parameters, sweep_list, refine_func=refine_func, callback_func=callback_func, metadata=metadata)
+    def add_sweep(self, parameters, sweep_list, callback_func=None, metadata=None):
+        ax = SweepAxis(parameters, sweep_list, callback_func=callback_func, metadata=metadata)
         ax.experiment = self
         self.sweeper.add_sweep(ax)
         self.add_axis(ax)
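With `refine_func` gone from the public API, `add_sweep` keeps only the per-point `callback_func`. As the `SweepAxis.update` change in `stream.py` below shows, the callback now receives the upcoming sweep value rather than the axis object. A minimal sketch against the test fixture from `test_sweeps.py`:

```python
import numpy as np

def log_point(value, experiment):
    # Invoked once per sweep point, just before the value is pushed.
    print(f"field -> {value}")

exp = SweptTestExperiment()
exp.add_sweep(exp.field, np.linspace(0, 100.0, 11), callback_func=log_point)
exp.run_sweeps()
```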
diff --git a/src/auspex/filters/average.py b/src/auspex/filters/average.py
index 1a34ea5cb..f2db90e91 100644
--- a/src/auspex/filters/average.py
+++ b/src/auspex/filters/average.py
@@ -131,17 +131,6 @@ def update_descriptors(self):
         self.source.descriptor = descriptor
         self.excited_counts = np.zeros(self.data_dims, dtype=np.int64)
 
-        # We can update the visited_tuples upfront if none
-        # of the sweeps are adaptive...
-        desc_out_dtype = descriptor_in.axis_data_type(with_metadata=True, excluding_axis=self.axis.value)
-        if not descriptor_in.is_adaptive():
-            vals = [a.points_with_metadata() for a in descriptor_in.axes if a.name != self.axis.value]
-            nested_list = list(itertools.product(*vals))
-            flattened_list = [tuple((val for sublist in line for val in sublist)) for line in nested_list]
-            descriptor.visited_tuples = np.core.records.fromrecords(flattened_list, dtype=desc_out_dtype)
-        else:
-            descriptor.visited_tuples = np.empty((0), dtype=desc_out_dtype)
-
         for stream in self.partial_average.output_streams:
             stream.set_descriptor(descriptor)
             stream.descriptor.buffer_mult_factor = 20
@@ -171,11 +160,6 @@ def update_descriptors(self):
         descriptor_count.metadata["num_counts"] = self.num_averages
         self.final_counts.descriptor = descriptor_count
 
-        if not descriptor_in.is_adaptive():
-            descriptor_var.visited_tuples = np.core.records.fromrecords(flattened_list, dtype=desc_out_dtype)
-        else:
-            descriptor_var.visited_tuples = np.empty((0), dtype=desc_out_dtype)
-
         for stream in self.final_variance.output_streams:
             stream.set_descriptor(descriptor_var)
             stream.end_connector.update_descriptors()
@@ -233,18 +217,6 @@ def process_data(self, data):
             excited_states = (np.real(reshaped) > self.threshold.value).sum(axis=self.mean_axis)
             ground_states = self.num_averages - excited_states
 
-        if self.sink.descriptor.is_adaptive():
-            new_tuples = self.sink.descriptor.tuples()[self.idx_global:self.idx_global + new_points]
-            new_tuples_stripped = remove_fields(new_tuples, self.axis.value)
-            take_axis = -1 if self.axis_num > 0 else 0
-            reduced_tuples = new_tuples_stripped.reshape(self.reshape_dims).take((0,), axis=take_axis)
-            self.idx_global += new_points
-
-        # Add to Visited tuples
-        if self.sink.descriptor.is_adaptive():
-            for os in self.source.output_streams + self.final_variance.output_streams + self.partial_average.output_streams:
-                os.descriptor.visited_tuples = np.append(os.descriptor.visited_tuples, reduced_tuples)
-
         for os in self.source.output_streams:
             os.push(averaged)
diff --git a/src/auspex/filters/elementwise.py b/src/auspex/filters/elementwise.py
index f21c28675..a1ea2f031 100644
--- a/src/auspex/filters/elementwise.py
+++ b/src/auspex/filters/elementwise.py
@@ -97,8 +97,6 @@ def main(self):
                 if message_type == 'event':
                     if message['event_type'] == 'done':
                         streams_done[stream] = True
-                    elif message['event_type'] == 'refine':
-                        logger.warning("ElementwiseFilter doesn't handle refinement yet!")
                 elif message_type == 'data':
                     # Add any old data...
                     message_data = stream.pop()
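The averager no longer precomputes `visited_tuples`: for non-adaptive sweeps the coordinate grid was always just the cartesian product of the axis points, which `DataStreamDescriptor.expected_tuples` (see `stream.py` below) still provides. The deleted bookkeeping boiled down to:

```python
import itertools

# Cartesian product of axis points, as the deleted code computed it:
field = [0.0, 1.0]
freq = [10.0, 20.0, 30.0]
grid = list(itertools.product(field, freq))
# [(0.0, 10.0), (0.0, 20.0), (0.0, 30.0), (1.0, 10.0), ...]
assert len(grid) == len(field) * len(freq)
```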
diff --git a/src/auspex/filters/filter.py b/src/auspex/filters/filter.py
index 9e0e1188e..c93d1731a 100644
--- a/src/auspex/filters/filter.py
+++ b/src/auspex/filters/filter.py
@@ -234,7 +234,7 @@ def main(self):
             for message in messages:
                 message_type = message['type']
                 if message['type'] == 'event':
-                    logger.debug('%s "%s" received event with type "%s"', self.__class__.__name__, message_type)
+                    logger.debug('%s received event with type "%s"', self.__class__.__name__, message_type)
 
                     # Check to see if we're done
                     if message['event_type'] == 'done':
diff --git a/src/auspex/filters/integrator.py b/src/auspex/filters/integrator.py
index 0c46eb8e8..b3d480a8e 100644
--- a/src/auspex/filters/integrator.py
+++ b/src/auspex/filters/integrator.py
@@ -10,7 +10,7 @@
 import os
 import numpy as np
-from scipy.signal import chebwin, blackman, slepian, convolve
+from scipy.signal import chebwin, blackman, convolve
 
 from .filter import Filter
 from auspex.parameter import Parameter, FloatParameter, IntParameter, BoolParameter
diff --git a/src/auspex/filters/io.py b/src/auspex/filters/io.py
index fa9593652..46f1e2137 100644
--- a/src/auspex/filters/io.py
+++ b/src/auspex/filters/io.py
@@ -50,7 +50,7 @@ class WriteToFile(Filter):
     groupname = Parameter(default='main')
     datasetname = Parameter(default='data')
 
-    def __init__(self, filename=None, groupname=None, datasetname=None, **kwargs):
+    def __init__(self, filename=None, groupname=None, datasetname=None, metadata=None, **kwargs):
         super(WriteToFile, self).__init__(**kwargs)
         if filename:
             self.filename.value = filename
@@ -59,6 +59,7 @@ def __init__(self, filename=None, groupname=None, datasetname=None, **kwargs):
         if datasetname:
             self.datasetname.value = datasetname
 
+        self.metadata = metadata
         self.ret_queue = None # MP queue For returning data
 
     def final_init(self):
@@ -67,7 +68,7 @@ def final_init(self):
         assert self.datasetname.value, "Dataset name never supplied to writer."
 
         self.descriptor = self.sink.input_streams[0].descriptor
-        self.container = AuspexDataContainer(self.filename.value)
+        self.container = AuspexDataContainer(self.filename.value, metadata=self.metadata)
         self.group = self.container.new_group(self.groupname.value)
         self.mmap = self.container.new_dataset(self.groupname.value, self.datasetname.value, self.descriptor)
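`WriteToFile` now forwards `metadata` to the container it creates, so annotations can ride along with the data. A sketch using the sweep fixtures from `test_sweeps.py` (the path is illustrative):

```python
import numpy as np
from auspex.filters.io import WriteToFile

exp = SweptTestExperiment()
wri = WriteToFile("/tmp/demo.auspex", metadata={'sample': 'A17', 'temp_mK': 12})
exp.set_graph([(exp.voltage, wri.sink)])
exp.add_sweep(exp.field, np.linspace(0, 100.0, 11))
exp.run_sweeps()
# The annotations land in the container's meta.json and can be read back
# later via AuspexDataContainer(...).metadata.
```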
diff --git a/src/auspex/instruments/X6.py b/src/auspex/instruments/X6.py
index 232d17fba..3cf413289 100644
--- a/src/auspex/instruments/X6.py
+++ b/src/auspex/instruments/X6.py
@@ -107,6 +107,7 @@ def __init__(self, resource_name=None, name="Unlabeled X6", gen_fake_data=False)
         self.last_timestamp = Value('d', datetime.datetime.now().timestamp())
 
         self.gen_fake_data = gen_fake_data
+        self.fake_data_random_mag = 0.1
         self.increment_ideal_data = False
         self.ideal_counter = 0
         self.ideal_data = None
@@ -233,7 +234,7 @@ def add_channel(self, channel):
         # todo: other checking here
         self._channels.append(channel)
 
-    def spew_fake_data(self, counter, ideal_data, random_mag=0.1, random_seed=12345):
+    def spew_fake_data(self, counter, ideal_data, random_seed=12345):
         """
         Generate fake data on the stream. For unittest usage.
         ideal_data: array or list giving means of the expected signal for each segment
@@ -242,8 +243,9 @@ def spew_fake_data(self, counter, ideal_data, random_seed=12345)
            keep track of how many we expect to receive, when we're doing the test with fake data
         """
+        # logger.info(f"In Spew: got {ideal_data}")
+        random_mag = self.fake_data_random_mag
         total = 0
-        # import ipdb; ipdb.set_trace();
         segs = self._lib.nbr_segments
         for chan, wsock in self._chan_to_wsocket.items():
             if chan.stream_type == "integrated":
@@ -253,24 +255,21 @@ def spew_fake_data(self, counter, ideal_data, random_seed=12345)
             else: #Raw
                 length = int(self._lib.record_length/4)
                 buff = np.zeros((segs, length), dtype=chan.dtype)
-        # for chan, wsock in self._chan_to_wsocket.items():
             for i in range(segs):
                 if chan.stream_type == "integrated":
-                    # random_mag*(np.random.random(length).astype(chan.dtype) + 1j*random_mag*np.random.random(length).astype(chan.dtype))
+                    buff[i,:] = ideal_data[i]
                 elif chan.stream_type == "demodulated":
                     buff[i, int(length/4):int(3*length/4)] = 1.0 if ideal_data[i] == 0 else ideal_data[i]
                 else: #Raw
                     signal = np.sin(np.linspace(0,10.0*np.pi,int(length/2)))
                     buff[i, int(length/4):int(length/4)+len(signal)] = signal * (1.0 if ideal_data[i] == 0 else ideal_data[i])
-        # import ipdb; ipdb.set_trace();
             if chan.stream_type == "raw":
                 buff += random_mag*np.random.random((segs, length))
             else:
                 buff = buff.astype(np.complex128) + random_mag*np.random.random((segs, length))+ 1j*random_mag*np.random.random((segs, length))
 
             total += length*segs
-            # logger.info(f"In Spew: {buff.dtype} {chan.dtype} {buff.size}")
+            # logger.info(f"In Spew: {counter}: {buff}")
             wsock.send(struct.pack('n', segs*length*buff.dtype.itemsize) + buff.flatten().tostring())
             counter[chan] += length*segs
@@ -348,19 +347,25 @@ def wait_for_acquisition(self, dig_run, timeout=15, ocs=None, progressbars=None)
                 if hasattr(self, 'exp_step') and self.increment_ideal_data:
                     raise Exception("Cannot use both exp_step and increment_ideal_data")
                 elif hasattr(self, 'exp_step'):
+                    if self.exp_step >= len(self.ideal_data):
+                        # logger.info("Exp Step longer than ideal data... keeping at last element")
+                        self.exp_step = len(self.ideal_data)-1
                     total_spewed += self.spew_fake_data(counter, self.ideal_data[self.exp_step])
                 elif self.increment_ideal_data:
+                    if self.ideal_counter >= len(self.ideal_data):
+                        # logger.info("Ideal data counter longer than ideal data... keeping at last element")
+                        self.ideal_counter = len(self.ideal_data)-1
                     total_spewed += self.spew_fake_data(counter, self.ideal_data[self.ideal_counter])
                 else:
                     total_spewed += self.spew_fake_data(counter, self.ideal_data)
             else:
                 total_spewed += self.spew_fake_data(counter, [0.0 for i in range(self.number_segments)])
-            # logger.info(f"Spewed {total_spewed}")
+            logger.debug("Spewed %d", total_spewed)
             time.sleep(0.0001)
             self.ideal_counter += 1
-        # logger.info("Counter: %s", str(counter))
-        # logger.info('TOTAL fake data generated %d', total_spewed)
+        logger.debug("Counter: %s", str(counter))
+        logger.debug('TOTAL fake data generated %d', total_spewed)
 
         if ocs:
             while True:
                 total_taken = 0
@@ -368,10 +373,10 @@ def wait_for_acquisition(self, dig_run, timeout=15, ocs=None, progressbars=None)
                     total_taken += oc.points_taken.value - initial_points[oc]
                     if progressbars:
                         progress_updaters[oc](ocs[0].points_taken.value)
-                    # logger.info('TOTAL fake data received %d', oc.points_taken.value - initial_points[oc])
+                    logger.debug('TOTAL fake data received %d', oc.points_taken.value - initial_points[oc])
                 if total_taken == total_spewed:
                     break
-                # logger.info('WAITING for acquisition to finish %d < %d', total_taken, total_spewed)
+                logger.debug('WAITING for acquisition to finish %d < %d', total_taken, total_spewed)
                 time.sleep(0.025)
         for oc in ocs:
             if progressbars:
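Moving the noise amplitude from a `spew_fake_data` argument to an instrument attribute lets tests set it once up front. `QubitExperiment.set_fake_data(..., random_mag=0.0)` in `test_sweeps.py` below presumably routes to this attribute; setting it directly would look like:

```python
from auspex.instruments.X6 import X6

x6 = X6("1", gen_fake_data=True)
x6.fake_data_random_mag = 0.0  # noiseless: buffers carry exactly ideal_data
```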
keeping at last element") + self.ideal_counter = len(self.ideal_data)-1 total_spewed += self.spew_fake_data(counter, self.ideal_data[self.ideal_counter]) else: total_spewed += self.spew_fake_data(counter, self.ideal_data) else: total_spewed += self.spew_fake_data(counter, [0.0 for i in range(self.number_segments)]) - # logger.info(f"Spewed {total_spewed}") + logger.debug("Spewed %d", total_spewed) time.sleep(0.0001) self.ideal_counter += 1 - # logger.info("Counter: %s", str(counter)) - # logger.info('TOTAL fake data generated %d', total_spewed) + logger.debug("Counter: %s", str(counter)) + logger.debug('TOTAL fake data generated %d', total_spewed) if ocs: while True: total_taken = 0 @@ -368,10 +373,10 @@ def wait_for_acquisition(self, dig_run, timeout=15, ocs=None, progressbars=None) total_taken += oc.points_taken.value - initial_points[oc] if progressbars: progress_updaters[oc](ocs[0].points_taken.value) - # logger.info('TOTAL fake data received %d', oc.points_taken.value - initial_points[oc]) + logger.debug('TOTAL fake data received %d', oc.points_taken.value - initial_points[oc]) if total_taken == total_spewed: break - # logger.info('WAITING for acquisition to finish %d < %d', total_taken, total_spewed) + logger.debug('WAITING for acquisition to finish %d < %d', total_taken, total_spewed) time.sleep(0.025) for oc in ocs: if progressbars: diff --git a/src/auspex/qubit/pulse_calibration.py b/src/auspex/qubit/pulse_calibration.py index ca1491b4f..b47073f86 100644 --- a/src/auspex/qubit/pulse_calibration.py +++ b/src/auspex/qubit/pulse_calibration.py @@ -16,7 +16,6 @@ import auspex.config as config from auspex.log import logger from copy import copy, deepcopy -# from adapt.refine import refine_1D import os import uuid import pandas as pd @@ -382,9 +381,7 @@ def _calibrate(self): self.frequencies = np.empty(0, dtype=np.complex128) self.group_delays = np.empty(0, dtype=np.complex128) self.datas = np.empty(0, dtype=np.complex128) - # orig_avg = self.kwargs['averages'] - # Adaptive refinement to find cavity feature - # for i in range(self.iterations + 1): + self.data, _ = self.run_sweeps() self.datas = np.append(self.datas, self.data) self.frequencies = np.append(self.frequencies, self.new_frequencies[:-1]) @@ -466,58 +463,6 @@ def _calibrate(self): shifted_cav = np.real(self.datas) - np.mean(np.real(self.datas)) guess = np.abs(self.frequencies[np.argmax(np.abs(shifted_cav))]) - # self.kwargs['averages'] = 2000 - - # import pdb; pdb.set_trace() - # - # self.new_frequencies = refine_1D(self.frequencies, subtracted, all_points=False, - # criterion="difference", threshold = "one_sigma") - # logger.info(f"new_frequencies {self.new_frequencies}") - - # n, bins = sp.histogram(np.abs(self.frequencies), bins="auto") - # f_start = bins[np.argmax(n)] - # f_stop = bins[np.argmax(n)+1] - # logger.info(f"Looking in bin from {f_start} to {f_stop}") - - # # self.kwargs['averages'] = orig_avg - # self.new_frequencies = np.arange(f_start, f_stop, 2e6) - # self.frequencies = np.empty(0, dtype=np.complex128) - # self.group_delays = np.empty(0, dtype=np.complex128) - # self.datas = np.empty(0, dtype=np.complex128) - # - # for i in range(self.iterations + 3): - # self.data, _ = self.run_sweeps() - # self.datas = np.append(self.datas, self.data) - # self.frequencies = np.append(self.frequencies, self.new_frequencies[:-1]) - # - # ord = np.argsort(self.frequencies) - # self.datas = self.datas[ord] - # self.frequencies = self.frequencies[ord] - # - # self.group_delays = 
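With the commented-out refinement experiments deleted, the cavity search keeps only the single-pass flow: run the sweep, sort by frequency, and estimate group delay as the negated finite-difference derivative of the unwrapped phase, the same expression that survives in `_calibrate`:

```python
import numpy as np

def group_delay(frequencies, s21):
    # tau_g = -d(phase)/d(frequency), via unwrapped phase differences
    return -np.diff(np.unwrap(np.angle(s21))) / np.diff(frequencies)
```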
diff --git a/src/auspex/qubit/qubit_exp.py b/src/auspex/qubit/qubit_exp.py
index 327a5102b..cfc4e3f94 100644
--- a/src/auspex/qubit/qubit_exp.py
+++ b/src/auspex/qubit/qubit_exp.py
@@ -1,6 +1,6 @@
 from auspex.log import logger
 from auspex.config import isnotebook
-from auspex.experiment import Experiment, FloatParameter
+from auspex.experiment import Experiment, FloatParameter, Parameter
 from auspex.stream import DataStream, DataAxis, SweepAxis, DataStreamDescriptor, InputConnector, OutputConnector
 from auspex.instruments import instrument_map
 import auspex.filters
@@ -505,6 +505,31 @@ def method(value):
             param.assign_method(method)
         self.add_sweep(param, values) # Create the requested sweep on this parameter
 
+    def add_sweep(self, parameters, sweep_list, metafile_func=None, callback_func=None, metadata=None):
+        if (metafile_func is not None) and (callback_func is not None):
+            raise Exception("Cannot specify a sweep with both a metafile function and a callback function")
+
+        if metafile_func is not None:
+            def callback_func(sweep_value, exp, metafile_func=metafile_func):
+                mf = metafile_func(sweep_value)
+                with open(mf, 'r') as FID:
+                    meta_info = json.load(FID)
+
+                output_chans = exp.transmitters + exp.transceivers + exp.phys_chans + exp.trig_chans
+                for xmit, fname in meta_info['instruments'].items():
+                    awg = [c for c in output_chans if c.label==xmit][0]
+                    awg.sequence_file = fname
+
+                awgs = [v for _, v in exp._instruments.items() if "AWG" in v.instrument_type]
+                for awg in awgs:
+                    awg.configure_with_proxy(awg.proxy_obj)
+
+        # Allow a dummy default parameter when a bare string is passed
+        if isinstance(parameters, str):
+            parameters = Parameter(parameters)
+
+        super().add_sweep(parameters, sweep_list, callback_func=callback_func, metadata=metadata)
+
     def add_qubit_sweep(self, qubit, measure_or_control, attribute, values):
         """
         Add a *ParameterSweep* to the experiment. Users specify a qubit property that auspex
@@ -634,7 +659,7 @@ def run(self):
         # Set flag to enable acquisition process
         self.dig_run.set()
-        time.sleep(1)
+        time.sleep(0)
         # Start the AWGs
         if not self.cw_mode:
             for awg in self.awgs:
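The new `metafile_func` hook recompiles the QGL sequence at every sweep point and repoints the AWGs at the resulting metafile; `test_qubit_metafile_sweep` below exercises it end to end. In sketch form (with `q1` and the pipeline configured as in that test):

```python
import numpy as np
from QGL import RabiAmp

def mf(sigma):
    # Regenerate the Rabi sequence at the new pulse width and return
    # the path of the compiled metafile.
    q1.pulse_params["sigma"] = sigma
    return RabiAmp(q1, np.linspace(-1, 1, 21))

exp = QubitExperiment(mf(5e-9), averages=5)
exp.add_sweep("q1_sigma", np.linspace(1e-9, 10e-9, 10), metafile_func=mf)
exp.run_sweeps()
```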
diff --git a/src/auspex/stream.py b/src/auspex/stream.py
index 4af5a876f..69958106a 100644
--- a/src/auspex/stream.py
+++ b/src/auspex/stream.py
@@ -57,21 +57,13 @@ def __init__(self, name, points=[], unit=None, metadata=None, dtype=np.float32):
         self.unstructured = False
         self.name = str(name)
 
-        # self.points holds the CURRENT set of points. During adaptive sweeps
-        # this will hold the most recently added points of the axis.
         self.points = np.array(points)
         self.unit = unit
-        self.refine_func = None
         self.metadata = metadata
 
         # By definition data axes will be done after every experiment.run() call
         self.done = True
-
-        # For adaptive sweeps, etc., keep a record of the original points that we had around
-        self.original_points = self.points
-        self.has_been_extended = False
-        self.num_new_points = 0
-        self.dtype = dtype
+        self.dtype = dtype
 
         if self.unstructured:
             if unit is not None and len(name) != len(unit):
@@ -95,11 +87,11 @@ def data_type(self, with_metadata=False):
     def points_with_metadata(self):
         if self.metadata is not None:
             if self.unstructured:
-                return [list(self.original_points[i]) + [self.metadata[i]] for i in range(len(self.original_points))]
-            return [(self.original_points[i], self.metadata[i], ) for i in range(len(self.original_points))]
+                return [list(self.points[i]) + [self.metadata[i]] for i in range(len(self.points))]
+            return [(self.points[i], self.metadata[i], ) for i in range(len(self.points))]
         if self.unstructured:
-            return [tuple(self.original_points[i]) for i in range(len(self.original_points))]
-        return [(self.original_points[i],) for i in range(len(self.original_points))]
+            return [tuple(self.points[i]) for i in range(len(self.points))]
+        return [(self.points[i],) for i in range(len(self.points))]
 
     def tuple_width(self):
         if self.unstructured:
@@ -111,29 +103,10 @@ def tuple_width(self):
         return width
 
     def num_points(self):
-        if self.has_been_extended:
-            return len(self.points)
-        else:
-            return len(self.original_points)
-
-    def add_points(self, points):
-        if self.unstructured and len(self.parameter) != len(points[0]):
-            raise ValueError("Parameter value tuples must be the same length as the number of parameters.")
-
-        if type(points) in [list, np.ndarray]:
-            points = np.array(points)
-        else:
-            # Somebody gave one point to the "add_points" method...
-            points = np.array([points])
-
-        self.num_new_points = len(points)
-        self.points = np.append(self.points, points, axis=0)
-        self.has_been_extended = True
+        return len(self.points)
 
     def reset(self):
-        self.points = self.original_points
-        self.has_been_extended = False
-        self.num_new_points = 0
+        pass
 
     def __repr__(self):
         return "".format(
@@ -146,7 +119,7 @@ def __str__(self):
 class SweepAxis(DataAxis):
     """ Structure for sweep axis, separate from DataAxis.
     Can be an unstructured axis, in which case 'parameter' is actually a list of parameters.
""" - def __init__(self, parameter, points = [], metadata=None, refine_func=None, callback_func=None): + def __init__(self, parameter, points = [], metadata=None, callback_func=None): self.unstructured = hasattr(parameter, '__iter__') self.parameter = parameter @@ -162,10 +135,6 @@ def __init__(self, parameter, points = [], metadata=None, refine_func=None, call if self.metadata is not None: self.metadata_value = self.metadata[0] - # This is run at the end of this sweep axis - # Refine_func receives the sweep axis and the experiment as arguments - self.refine_func = refine_func - # This is run before each point in the sweep axis is executed # Callback_func receives the sweep axis and the experiment as arguments self.callback_func = callback_func @@ -183,47 +152,21 @@ def update(self): """ Update value after each run. """ if self.step < self.num_points(): - if self.callback_func: - self.callback_func(self, self.experiment) self.value = self.points[self.step] if self.metadata is not None: self.metadata_value = self.metadata[self.step] + if self.callback_func: + self.callback_func(self.value, self.experiment) logger.debug("Sweep Axis '{}' at step {} takes value: {}.".format(self.name, - self.step,self.value)) + self.step+1,self.value)) self.push() self.step += 1 self.done = False - - def check_for_refinement(self, output_connectors_dict): - """Check to see if we need to perform any refinements. If there is a refine_func - and it returns a list of points, then we need to extend the axes. Otherwise, if the - refine_func returns None or false, then we reset the axis to its original set of points. If - there is no refine_func then we don't do anything at all.""" - - if not self.done and self.step==self.num_points(): - logger.debug("Refining on axis {}".format(self.name)) - if self.refine_func: - points = self.refine_func(self, self.experiment) - if points is None or points is False: - # Returns false if no refinements needed, otherwise adds points to list - self.step = 0 - self.done = True - self.reset() - logger.debug("Sweep Axis '{}' complete.".format(self.name)) - # Push to ocs, which should push to processes - for oc in output_connectors_dict.values(): - oc.push_event("refined", (self.name, True, self.original_points)) # axis name, reset, points - return False - self.add_points(points) - self.done = False - for oc in output_connectors_dict.values(): - oc.push_event("refined", (self.name, False, points)) # axis name, reset, points - return True - else: - self.step = 0 - self.done = True - logger.debug("Sweep Axis '{}' complete.".format(self.name)) - return False + + if self.step == self.num_points(): + logger.debug("Sweep Axis '{}' claims to be done.".format(self.name)) + self.step = 0 + self.done = True def push(self): """ Push parameter value(s) """ @@ -262,9 +205,6 @@ def __init__(self, dtype=np.float32): # Keep track of the parameter permutations we have actually used... 
@@ -262,9 +205,6 @@ def __init__(self, dtype=np.float32):
         # Keep track of the parameter permutations we have actually used...
         self.visited_tuples = []
 
-    def is_adaptive(self):
-        return True in [a.refine_func is not None for a in self.axes]
-
     def add_axis(self, axis, position=0):
         # Check if axis is DataAxis or SweepAxis (which inherits from DataAxis)
         if isinstance(axis, DataAxis):
@@ -325,7 +265,7 @@ def num_points(self):
 
     def expected_num_points(self):
         if len(self.axes)>0:
-            return reduce(lambda x,y: x*y, [len(a.original_points) for a in self.axes])
+            return reduce(lambda x,y: x*y, [len(a.points) for a in self.axes])
         else:
             return 0
@@ -345,24 +285,8 @@ def axis_data_type(self, with_metadata=False, excluding_axis=None):
             dtype.extend(a.data_type(with_metadata=with_metadata))
         return dtype
 
-    def tuples(self, as_structured_array=True):
-        """Returns a list of all tuples visited by the sweeper. Should only
-        be used with adaptive sweeps."""
-        if len(self.visited_tuples) == 0:
-            self.visited_tuples = self.expected_tuples(with_metadata=True)
-
-        if as_structured_array:
-            # If we already have a structured array
-            if type(self.visited_tuples) is np.ndarray and type(self.visited_tuples.dtype.names) is tuple:
-                return self.visited_tuples
-            elif type(self.visited_tuples) is np.ndarray:
-                return np.rec.fromarrays(self.visited_tuples.T, dtype=self.axis_data_type(with_metadata=True))
-            return np.core.records.fromrecords(self.visited_tuples, dtype=self.axis_data_type(with_metadata=True))
-        return self.visited_tuples
-
     def expected_tuples(self, with_metadata=False, as_structured_array=True):
-        """Returns a list of tuples representing the cartesian product of the axis values. Should only
-        be used with non-adaptive sweeps."""
+        """Returns a list of tuples representing the cartesian product of the axis values."""
         vals = [a.points_with_metadata() for a in self.axes]
 #
@@ -446,9 +370,6 @@ def num_points_through_axis(self, axis_name):
         else:
             axis_num = self.axis_num(axis_name)
 
-        # if False in [a.refine_func is None for a in self.axes[axis_num:]]:
-        #     raise Exception("Cannot call num_points_through_axis with interior adaptive sweeps.")
-
         if axis_num >= len(self.axes):
             return 0
         elif len(self.axes) == 1:
@@ -657,9 +578,6 @@ def __init__(self, name="", data_name=None, unit=None, parent=None, dtype=np.flo
             self.descriptor.unit = self.unit
             self.add_axis = self.descriptor.add_axis
 
-        # Determine whether we need to deal with adaptive sweeps
-        self.has_adaptive_sweeps = False
-
     def __len__(self):
         with self.points_taken_lock:
             return self.points_taken.value
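With `tuples()` and its `visited_tuples` fallback gone, consumers ask the descriptor for the cartesian product directly:

```python
# desc: a DataStreamDescriptor with its axes already added
recs = desc.expected_tuples(with_metadata=True, as_structured_array=True)
print(recs.dtype.names)  # one field per axis (plus metadata fields)
```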
diff --git a/src/auspex/sweep.py b/src/auspex/sweep.py
index 757ba5686..305b8f67b 100644
--- a/src/auspex/sweep.py
+++ b/src/auspex/sweep.py
@@ -65,18 +65,6 @@ def update(self):
             values.append((a.value,))
         return values, names
 
-    def is_adaptive(self):
-        return True in [a.refine_func is not None for a in self.axes]
-
-    def check_for_refinement(self, output_connectors_dict):
-        refined_axes = []
-        for a in self.axes:
-            if a.check_for_refinement(output_connectors_dict):
-                refined_axes.append(a.name)
-                break
-        if len(refined_axes) > 1:
-            raise Exception("More than one axis trying to refine simultaneously. This cannot be tolerated.")
-
     def done(self):
         return np.all([a.done for a in self.axes])
diff --git a/test/plotting_test_arbitrary.py b/test/plotting_test_arbitrary.py
index f2a31311a..1899cdab8 100644
--- a/test/plotting_test_arbitrary.py
+++ b/test/plotting_test_arbitrary.py
@@ -20,8 +20,7 @@
 from auspex.filters.plot import Plotter, ManualPlotter
 from auspex.filters.io import DataBuffer
 from auspex.log import logger, logging
-# import auspex.analysis.switching as sw
-# from adapt import refine
+
 class TestExperiment(Experiment):
     """Here the run loop merely spews data until it fills up the stream.
     """
diff --git a/test/plotting_test_mesh.py b/test/plotting_test_mesh.py
index 376b3c579..f5e214562 100644
--- a/test/plotting_test_mesh.py
+++ b/test/plotting_test_mesh.py
@@ -14,7 +14,6 @@
 import itertools
 import numpy as np
-# import h5py
 import matplotlib.pyplot as plt
 
 from auspex.experiment import Experiment, FloatParameter
@@ -23,8 +22,5 @@
 from auspex.filters.io import WriteToHDF5
 from auspex.log import logger, logging
-from auspex.refine import delaunay_refine_from_file
-# import auspex.analysis.switching as sw
-# from adapt import refine
 
 class TestExperiment(Experiment):
     """Here the run loop merely spews data until it fills up the stream.
     """
@@ -66,5 +63,3 @@ def run(self):
-    refine_func = delaunay_refine_from_file(wr, 'duration', 'amplitude', 'voltage', max_points=1000, plotter=fig1)
-    exp.add_sweep([exp.duration, exp.amplitude], points, refine_func=refine_func)
+    exp.add_sweep([exp.duration, exp.amplitude], points)
     exp.run_sweeps()
diff --git a/test/test_pipeline.py b/test/test_pipeline.py
index fff0b39ea..730d23ac3 100644
--- a/test/test_pipeline.py
+++ b/test/test_pipeline.py
@@ -24,12 +24,6 @@
 from auspex.qubit import *
 import bbndb
 
-def clear_test_data():
-    for file in glob.glob("test_*.h5"):
-        os.remove(file)
-    for direc in glob.glob("test_writehdf5*"):
-        shutil.rmtree(direc)
-
 class PipelineTestCase(unittest.TestCase):
 
     qubits = ["q1"]
diff --git a/test/test_sweeps.py b/test/test_sweeps.py
index 8299228bc..21abd44bc 100644
--- a/test/test_sweeps.py
+++ b/test/test_sweeps.py
@@ -9,6 +9,7 @@
 import unittest
 import time
 import os
+import tempfile
 import numpy as np
 
 import auspex.config as config
@@ -18,9 +19,24 @@
 from auspex.parameter import FloatParameter
 from auspex.stream import DataStream, DataAxis, DataStreamDescriptor, OutputConnector
 from auspex.filters.debug import Print
-from auspex.filters.io import WriteToFile
+from auspex.filters.io import WriteToFile, DataBuffer
 from auspex.log import logger
 
+
+pl = None
+cl = None
+
+# Set temporary output directories
+awg_dir = tempfile.TemporaryDirectory()
+kern_dir = tempfile.TemporaryDirectory()
+import QGL
+config.AWGDir = QGL.config.AWGDir = awg_dir.name
+config.KernelDir = kern_dir.name
+
+from QGL import *
+from auspex.qubit import *
+import bbndb
+
 class SweptTestExperiment(Experiment):
     """Here the run loop merely spews data until it fills up the stream.
""" @@ -56,10 +72,17 @@ def run(self): self.time_val += time_step self.voltage.push(data_row) logger.debug("Stream pushed points {}.".format(data_row)) - logger.debug("Stream has filled {} of {} points".format(self.voltage.points_taken, self.voltage.num_points() )) + logger.debug("Stream has filled {} of {} points".format(self.voltage.points_taken.value, self.voltage.num_points() )) class SweepTestCase(unittest.TestCase): + @classmethod + def setUpClass(cls): + global cl, pl + + cl = ChannelLibrary(":memory:") + pl = PipelineManager() + def test_add_sweep(self): exp = SweptTestExperiment() self.assertTrue(len(exp.voltage.descriptor.axes) == 1) @@ -95,25 +118,6 @@ def test_run_sweep(self): exp.add_sweep(exp.freq, np.linspace(0,10.0,3)) exp.run_sweeps() - def test_run_adaptive_sweep(self): - exp = SweptTestExperiment() - pri = Print(name="Printer") - - edges = [(exp.voltage, pri.sink)] - exp.set_graph(edges) - - def rf(sweep_axis, exp): - logger.debug("Running refinement function.") - if sweep_axis.num_points() >= 5: - return False - sweep_axis.add_points(sweep_axis.points[-1]*2) - return True - - exp.add_sweep(exp.field, np.linspace(0,100.0,11)) - exp.add_sweep(exp.freq, [1.0, 2.0], refine_func=rf) - exp.run_sweeps() - # self.assertTrue(pri.sink.output_streams[0].points_taken.value == 5*11*5) - def test_unstructured_sweep(self): exp = SweptTestExperiment() pri = Print() @@ -135,5 +139,76 @@ def test_unstructured_sweep(self): exp.run_sweeps() self.assertTrue(pri.sink.input_streams[0].points_taken.value == exp.voltage.num_points()) + def test_unstructured_sweep_io(self): + + with tempfile.TemporaryDirectory() as tmpdirname: + exp = SweptTestExperiment() + pri = Print() + buf = DataBuffer() + wri = WriteToFile(tmpdirname+"/test.auspex") + + edges = [(exp.voltage, pri.sink), (exp.voltage, buf.sink), (exp.voltage, wri.sink)] + exp.set_graph(edges) + + coords = [[ 0, 0.1], + [10, 4.0], + [15, 2.5], + [40, 4.4], + [50, 2.5], + [60, 1.4], + [65, 3.6], + [66, 3.5], + [67, 3.6], + [68, 1.2]] + exp.add_sweep([exp.field, exp.freq], coords) + exp.run_sweeps() + + self.assertTrue(pri.sink.input_streams[0].points_taken.value == exp.voltage.num_points()) + + data, desc, _ = wri.get_data() + self.assertTrue(np.allclose(desc.axes[0].points, coords)) + + data, desc = buf.get_data() + self.assertTrue(np.allclose(desc.axes[0].points, coords)) + + def test_qubit_metafile_sweep(self): + cl.clear() + q1 = cl.new_qubit("q1") + aps1 = cl.new_APS2("BBNAPS1", address="192.168.5.102") + aps2 = cl.new_APS2("BBNAPS2", address="192.168.5.103") + x6_1 = cl.new_X6("X6_1", address="1", record_length=512) + holz1 = cl.new_source("Holz_1", "HolzworthHS9000", "HS9004A-009-1", power=-30) + holz2 = cl.new_source("Holz_2", "HolzworthHS9000", "HS9004A-009-2", power=-30) + cl.set_control(q1, aps1, generator=holz1) + cl.set_measure(q1, aps2, x6_1[1], generator=holz2) + cl.set_master(aps1, aps1.ch("m2")) + cl.commit() + pl.create_default_pipeline() + pl.reset_pipelines() + pl["q1"].clear_pipeline() + pl["q1"].stream_type = "integrated" + pl["q1"].create_default_pipeline(buffers=True) + + def mf(sigma): + q1.pulse_params["sigma"] = sigma + mf = RabiAmp(q1, np.linspace(-1,1,21)) + return mf + + exp = QubitExperiment(mf(5e-9), averages=5) + exp.set_fake_data(x6_1, np.linspace(-1, 1, 21), random_mag=0.0) + exp.add_sweep("q1_sigma", np.linspace(1e-9, 10e-9, 10), metafile_func=mf) + exp.run_sweeps() + + buf = list(exp.qubits_by_output.keys())[0] + ax = buf.input_connectors["sink"].descriptor.axes[0] + + self.assertTrue(buf.done.is_set()) + 
+    def test_qubit_metafile_sweep(self):
+        cl.clear()
+        q1 = cl.new_qubit("q1")
+        aps1 = cl.new_APS2("BBNAPS1", address="192.168.5.102")
+        aps2 = cl.new_APS2("BBNAPS2", address="192.168.5.103")
+        x6_1 = cl.new_X6("X6_1", address="1", record_length=512)
+        holz1 = cl.new_source("Holz_1", "HolzworthHS9000", "HS9004A-009-1", power=-30)
+        holz2 = cl.new_source("Holz_2", "HolzworthHS9000", "HS9004A-009-2", power=-30)
+        cl.set_control(q1, aps1, generator=holz1)
+        cl.set_measure(q1, aps2, x6_1[1], generator=holz2)
+        cl.set_master(aps1, aps1.ch("m2"))
+        cl.commit()
+        pl.create_default_pipeline()
+        pl.reset_pipelines()
+        pl["q1"].clear_pipeline()
+        pl["q1"].stream_type = "integrated"
+        pl["q1"].create_default_pipeline(buffers=True)
+
+        def mf(sigma):
+            q1.pulse_params["sigma"] = sigma
+            mf = RabiAmp(q1, np.linspace(-1,1,21))
+            return mf
+
+        exp = QubitExperiment(mf(5e-9), averages=5)
+        exp.set_fake_data(x6_1, np.linspace(-1, 1, 21), random_mag=0.0)
+        exp.add_sweep("q1_sigma", np.linspace(1e-9, 10e-9, 10), metafile_func=mf)
+        exp.run_sweeps()
+
+        buf = list(exp.qubits_by_output.keys())[0]
+        ax = buf.input_connectors["sink"].descriptor.axes[0]
+
+        self.assertTrue(buf.done.is_set())
+
+        data, desc = buf.get_data()
+        self.assertTrue(np.allclose(np.linspace(1e-9, 10e-9, 10), desc.axes[0].points))
+        target_dat = np.vstack([np.linspace(-1.0, 1.0, 21)]*10)
+        self.assertTrue(np.allclose(target_dat, data.real))
+
+
 if __name__ == '__main__':
     unittest.main()
diff --git a/test/test_write.py b/test/test_write.py
index ed12bb60e..493dcf2a3 100644
--- a/test/test_write.py
+++ b/test/test_write.py
@@ -11,6 +11,7 @@
 import tempfile
 import os, shutil
 import glob
+import datetime
 import time
 import numpy as np
@@ -24,7 +25,7 @@
 from auspex.filters.debug import Print
 from auspex.filters.io import WriteToFile
 from auspex.log import logger
-from auspex.data_format import AuspexDataContainer
+from auspex.data_format import AuspexDataContainer, AUSPEX_CONTAINER_VERSION
 
 class SweptTestExperiment(Experiment):
     """Here the run loop merely spews data until it fills up the stream.
     """
@@ -162,8 +163,18 @@ def test_write(self):
             exp.run_sweeps()
             self.assertTrue(os.path.exists(tmpdirname+"/test_write-0000.auspex"))
             container = AuspexDataContainer(tmpdirname+"/test_write-0000.auspex")
+
+            container.metadata = {'ping': 'pong'}
+
             data, desc, _ = container.open_dataset('main', 'data')
+
+            m = container.metadata
+            self.assertTrue(m['version'] == AUSPEX_CONTAINER_VERSION)
+            self.assertTrue(m['metadata']['ping'] == 'pong')
+
+            time_delta = datetime.datetime.now() - m['date']
+            self.assertTrue(time_delta.total_seconds() < 5.0)
+
             self.assertTrue(0.0 not in data)
             self.assertTrue(np.all(desc['field'] == np.linspace(0,100.0,4)))
             self.assertTrue(np.all(desc['freq'] == np.linspace(0,10.0,3)))
@@ -219,7 +230,6 @@ def test_write_metadata(self):
             self.assertTrue(np.all(np.isnan(desc['samples'][3:])))
             self.assertTrue(np.all(desc.axis('samples').metadata == ["data", "data", "data", "0", "1"]))
 
-    @unittest.skip("Need to update tests for new auspex data writer")
     def test_write_metadata_unstructured(self):
         with tempfile.TemporaryDirectory() as tmpdirname:
             exp = SweptTestExperimentMetadata()
@@ -243,22 +253,11 @@ def test_write_metadata_unstructured(self):
             exp.add_sweep([exp.field, exp.freq], coords, metadata=md)
             exp.run_sweeps()
             self.assertTrue(os.path.exists(tmpdirname+"/test_write_metadata_unstructured-0000.auspex"))
-            with h5py.File(tmpdirname+"/test_write_metadata_unstructured-0000.auspex", 'r') as f:
-                self.assertTrue(0.0 not in f['main/data/voltage'])
-                self.assertTrue(np.sum(np.isnan(f['main/data/field'])) == 3*5 )
-                self.assertTrue(np.sum(np.isnan(f['main/data/freq'])) == 3*5 )
-                self.assertTrue(np.sum(np.isnan(f['main/data/samples'])) == 3*4*2 )
-
-                md_enum = f['main/field+freq_metadata_enum'][:]
-                md = f['main/data/field+freq_metadata'][:]
-                md = md_enum[md]
-
-                self.assertTrue(np.sum(md == b'a') == 5)
-                self.assertTrue(np.sum(md == b'b') == 5)
-                self.assertTrue(np.sum(md == b'c') == 5)
-                self.assertTrue("Here the run loop merely spews" in f.attrs['exp_src'])
-                self.assertTrue(f['main/data'].attrs['time_val'] == 0)
-                self.assertTrue(f['main/data'].attrs['unit_freq'] == "Hz")
+            data, desc, _ = wr.get_data()
+            self.assertTrue(0.0 not in data)
+            self.assertTrue(desc.axes[0].metadata == md)
+            self.assertTrue(np.allclose(desc.axes[0].points[:9], coords[:9]))
+            self.assertTrue(np.sum(np.isnan(desc.axes[0].points)) == 6 )
 
     @unittest.skip("need to add metadata to adaptive sweeps")
     def test_write_metadata_unstructured_adaptive(self):
@@ -382,7 +381,6 @@ def rf(sweep_axis, exp):
             self.assertTrue(len(f['main/data/freq'][:]) == 5*11*5)
             self.assertTrue(f['main/data/freq'][:].sum() == (55*(1+2+4+8+16)))
 
-    @unittest.skip("Need to update tests for new auspex data writer")
     def test_write_unstructured_sweep(self):
         with tempfile.TemporaryDirectory() as tmpdirname:
             exp = SweptTestExperiment()
@@ -405,13 +403,8 @@ def test_write_unstructured_sweep(self):
             exp.run_sweeps()
 
             self.assertTrue(os.path.exists(tmpdirname+"/test_write_unstructured-0000.auspex"))
-            with h5py.File(tmpdirname+"/test_write_unstructured-0000.auspex", 'r') as f:
-                self.assertTrue(len(f['main/data/voltage']) == 5*10)
-                self.assertTrue(f[f['main/field+freq'][0]] == f['main/field'])
-                self.assertTrue(f[f['main/field+freq'][1]] == f['main/freq'])
-
-            data, desc = load_from_HDF5(tmpdirname+"/test_write_unstructured-0000.auspex", reshape=False)
-            self.assertTrue(data['main']['field'][-5:].sum() == 5*68)
+            data, desc, _ = wr.get_data()
+            self.assertTrue(np.allclose(desc.axes[0].points, coords))
 
     @unittest.skip("Need to update tests for new auspex data writer")
     def test_write_adaptive_unstructured_sweep(self):
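Finally, since the metadata getter asserts that `meta.json` exists and the constructor rewrites it, code that must probe possibly-legacy containers can inspect the sidecar directly. A hedged sketch (`container_version` is a hypothetical helper, not part of auspex):

```python
import json, os

def container_version(path):
    # Read meta.json directly rather than instantiating AuspexDataContainer,
    # whose __init__ would overwrite the sidecar via the metadata setter.
    meta_path = os.path.join(path, 'meta.json')
    if not os.path.exists(meta_path):
        return None  # legacy .auspex tree written before versioning existed
    with open(meta_path, 'r') as f:
        return json.load(f).get('version')
```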