SPT-3G export tune up #588

Open
wants to merge 11 commits into toast3

29 changes: 25 additions & 4 deletions src/toast/intervals.py
@@ -262,17 +262,38 @@ def __and__(self, other):
result = list()
curself = 0
curother = 0
lenself = len(self.data)
lenother = len(other)

# Find the first entry in ivals whose .last is not smaller than val,
# searching within the index range [ileft, iright)
def lbound(ivals, val, ileft, iright):
# Width of the remaining search range
count = iright - ileft
while count > 0:
i = ileft + count // 2
if ivals[i].last < val:
ileft = i + 1
count -= 1 + count // 2
else:
count = count // 2
return ileft

# Walk both sequences, building up the intersection.
while (curself < len(self.data)) and (curother < len(other)):
while (curself < lenself) and (curother < lenother):
low = max(self.data[curself].first, other[curother].first)
high = min(self.data[curself].last, other[curother].last)
if low <= high:
result.append((self.timestamps[low], self.timestamps[high], low, high))
if self.data[curself].last < other[curother].last:
curself += 1
if self.data[curself].last < other[curother].last:
curself += 1
else:
curother += 1
else:
curother += 1
if self.data[curself].last < other[curother].last:
curself = lbound(self.data, other[curother].first, curself, lenself)
else:
curother = lbound(other, self.data[curself].first, curother, lenother)

return IntervalList(
self.timestamps,
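
The lbound helper added above is a lower-bound binary search over the interval .last fields: when the current pair of intervals cannot overlap, the cursor jumps straight to the first interval that still might, instead of advancing one step at a time. On a toy list it matches the standard library's bisect_left with a key function (Python 3.10+); the Ival namedtuple below is a hypothetical stand-in for TOAST's interval records, not part of this patch.

import bisect
from collections import namedtuple

# Hypothetical stand-in for TOAST's interval records.
Ival = namedtuple("Ival", ["first", "last"])

ivals = [Ival(0, 4), Ival(6, 9), Ival(12, 20), Ival(25, 30)]

# First entry whose .last is not smaller than 10, i.e. the first interval
# that could still overlap a span starting at sample 10.
idx = bisect.bisect_left(ivals, 10, key=lambda iv: iv.last)  # Python 3.10+
print(idx, ivals[idx])  # 2 Ival(first=12, last=20)
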
2 changes: 1 addition & 1 deletion src/toast/ops/sim_ground.py
@@ -796,7 +796,7 @@ def _exec(self, data, detectors=None, **kwargs):

if self.det_data is not None:
exists_data = ob.detdata.ensure(
self.det_data, dtype=np.float64, detectors=dets
self.det_data, dtype=np.float64, detectors=dets, units=u.Kelvin
)

if self.det_flags is not None:
2 changes: 1 addition & 1 deletion src/toast/ops/sim_tod_noise.py
@@ -277,7 +277,7 @@ def _exec(self, data, detectors=None, **kwargs):
# detectors within the observation...

# Make sure correct output exists
exists = ob.detdata.ensure(self.det_data, detectors=dets)
exists = ob.detdata.ensure(self.det_data, detectors=dets, units=u.Kelvin)

# Get the sample rate from the data. We also have nominal sample rates
# from the noise model and also from the focalplane. Perhaps we should add
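
Both hunks above now tag the detector data buffers with explicit astropy units (u.Kelvin) when they are created, so downstream operators can convert or check units instead of assuming them. For reference, a small sketch of how such unit-tagged arrays behave in astropy itself, independent of the detdata.ensure API:

import numpy as np
import astropy.units as u

# Detector samples tagged as a temperature in Kelvin.
samples = np.zeros(4, dtype=np.float64) * u.Kelvin

# Unit conversions are explicit; incompatible units raise an error.
samples = samples + (2.5 * u.mK).to(u.Kelvin)
print(samples)  # [0.0025 0.0025 0.0025 0.0025] K
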
112 changes: 94 additions & 18 deletions src/toast/spt3g/spt3g_export.py
@@ -13,6 +13,7 @@
from ..intervals import IntervalList
from ..timing import function_timer
from ..utils import Environment, Logger, object_fullname
from .. import qarray as qa
from .spt3g_utils import (
available,
compress_timestream,
@@ -26,6 +27,7 @@

if available:
from spt3g import core as c3g
from spt3g import calibration as spt_cal


@function_timer
@@ -63,6 +65,29 @@ def export_shared(obs, name, view_name=None, view_index=0, g3t=None):
return g3t(sview.flatten().tolist())


def export_focalplane(fplane):
out_props = spt_cal.BolometerPropertiesMap()
zaxis = np.array([0.0, 0.0, 1.0], dtype=np.float64)
for idx, det_name in enumerate(fplane.keys()):
bolo = spt_cal.BolometerProperties()

quat = fplane.detector_data["quat"][idx]
rdir = qa.rotate(quat, zaxis).flatten()
ang = np.arctan2(rdir[1], rdir[0])

mag = np.arccos(rdir[2]) * c3g.G3Units.rad

bolo.physical_name = det_name # is this a 'physical' name?
bolo.x_offset = mag * np.cos(ang)
bolo.y_offset = mag * np.sin(ang)
bolo.band = (fplane.detector_data["bandcenter"][idx] / u.GHz).value * c3g.G3Units.GHz
bolo.pol_angle = (fplane.detector_data["pol_angle"][idx] / u.rad).value * c3g.G3Units.rad
bolo.pol_efficiency = fplane.detector_data["pol_efficiency"][idx]

out_props[det_name] = bolo
return out_props
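
export_focalplane converts each detector quaternion into the polar offsets that BolometerProperties stores: rotate the boresight z axis by the quaternion, take the azimuthal angle of the rotated direction, and use its angular distance from the z axis as the offset magnitude. A sketch of that geometry, using scipy's Rotation as a stand-in for toast.qarray and assuming the scalar-last (x, y, z, w) quaternion convention; the 1 degree offset is invented for the example.

import numpy as np
from scipy.spatial.transform import Rotation  # stand-in for toast.qarray here

# A detector pointing 1 degree off boresight along the focal plane x axis.
quat = Rotation.from_euler("y", 1.0, degrees=True).as_quat()

zaxis = np.array([0.0, 0.0, 1.0])
rdir = Rotation.from_quat(quat).apply(zaxis)

ang = np.arctan2(rdir[1], rdir[0])  # azimuthal angle of the offset
mag = np.arccos(rdir[2])            # angular distance from boresight, radians

x_offset = mag * np.cos(ang)
y_offset = mag * np.sin(ang)
print(np.degrees([x_offset, y_offset]))  # approximately [1.0, 0.0]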


@function_timer
def export_detdata(
obs, name, view_name=None, view_index=0, g3t=None, times=None, compress=None
@@ -290,6 +315,9 @@ def __call__(self, obs):
# Construct calibration frame
cal = c3g.G3Frame(c3g.G3FrameType.Calibration)

# Export the focalplane using the native SPT-3G calibration structure
cal["BolometerProperties"] = export_focalplane(obs.telescope.focalplane)

# Serialize focalplane to HDF5 bytes and write to frame.
byte_writer = io.BytesIO()
with h5py.File(byte_writer, "w") as f:
@@ -302,8 +330,12 @@
byte_writer = io.BytesIO()
with h5py.File(byte_writer, "w") as f:
obs[m_in].save_hdf5(f, comm=None, force_serial=True)
if m_out in cal:
del cal[m_out]
cal[m_out] = c3g.G3VectorUnsignedChar(byte_writer.getvalue())
del byte_writer
if f"{m_out}_class" in cal:
del cal[f"{m_out}_class"]
cal[f"{m_out}_class"] = c3g.G3String(object_fullname(obs[m_in].__class__))

return ob, cal
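
The calibration frame carries the focalplane and each additional model as raw HDF5 bytes wrapped in a G3VectorUnsignedChar. A minimal sketch of that serialize-to-memory pattern using h5py alone; the dataset name "psd" and its contents are invented for illustration.

import io

import h5py
import numpy as np

# Serialize a small HDF5 file into an in-memory byte buffer.
byte_writer = io.BytesIO()
with h5py.File(byte_writer, "w") as f:
    f.create_dataset("psd", data=np.arange(4, dtype=np.float64))
blob = byte_writer.getvalue()  # raw bytes, as wrapped in G3VectorUnsignedChar above

# The same bytes can be opened again later without touching the filesystem.
with h5py.File(io.BytesIO(blob), "r") as f:
    print(f["psd"][:])  # [0. 1. 2. 3.]
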
@@ -357,13 +389,15 @@ def __init__(
frame_intervals=None,
shared_names=list(),
det_names=list(),
split_interval_names=list(),
interval_names=list(),
compress=False,
):
self._timestamp_names = timestamp_names
self._frame_intervals = frame_intervals
self._shared_names = shared_names
self._det_names = det_names
self._split_interval_names = split_interval_names
self._interval_names = interval_names
self._compress = compress

@@ -376,26 +410,63 @@ def __call__(self, obs):
log = Logger.get()
frame_intervals = self._frame_intervals
if frame_intervals is None:
# We are using the sample set distribution for our frame boundaries.
frame_intervals = "frames"
timespans = list()
offset = 0
n_frames = 0
first_set = obs.dist.samp_sets[obs.comm.group_rank].offset
n_set = obs.dist.samp_sets[obs.comm.group_rank].n_elem
for sset in range(first_set, first_set + n_set):
for chunk in obs.dist.sample_sets[sset]:
timespans.append(
(
obs.shared[self._timestamp_names[0]][offset],
obs.shared[self._timestamp_names[0]][offset + chunk - 1],

if len(self._split_interval_names) > 0:
# Split the data into frames by interval type, as is normal for observations.
# There may be several types of intervals listed in _split_interval_names,
# and we need to pull them out in time order. So, treat each
# obs.intervals[ivl_key] as a queue of intervals and repeatedly take the
# earliest remaining interval across all of the queues to build the list
# of frame intervals.
def earlier(i1, i2):
if i2 is None:
return True
return i1.start < i2.start
queue_indices = [0] * len(self._split_interval_names)
done = False
while not done:
done = True
next = None
nextIdx = -1
idx = 0
for ivl_key in self._split_interval_names:
if queue_indices[idx] < len(obs.intervals[ivl_key]):
done = False # if any queue has at least one more interval, we must continue
if earlier(obs.intervals[ivl_key][queue_indices[idx]], next):
next = obs.intervals[ivl_key][queue_indices[idx]]
nextIdx = idx
idx += 1
if not done:
timespans.append(
(
obs.intervals[self._split_interval_names[nextIdx]][queue_indices[nextIdx]].start,
obs.intervals[self._split_interval_names[nextIdx]][queue_indices[nextIdx]].stop,
)
)
)
n_frames += 1
offset += chunk
obs.intervals.create_col(
frame_intervals, timespans, obs.shared[self._timestamp_names[0]]
)
queue_indices[nextIdx] += 1
obs.intervals.create_col(frame_intervals, timespans, obs.shared[self._timestamp_names[0]])
else:
# No split intervals were specified, so divide the data into frames using
# the sample set distribution. These frame boundaries are arbitrary and may
# be unsuitable for further processing.
offset = 0
n_frames = 0
first_set = obs.dist.samp_sets[obs.comm.group_rank].offset
n_set = obs.dist.samp_sets[obs.comm.group_rank].n_elem
for sset in range(first_set, first_set + n_set):
for chunk in obs.dist.sample_sets[sset]:
timespans.append(
(
obs.shared[self._timestamp_names[0]][offset],
obs.shared[self._timestamp_names[0]][offset + chunk - 1],
)
)
n_frames += 1
offset += chunk
obs.intervals.create_col(
frame_intervals, timespans, obs.shared[self._timestamp_names[0]]
)

output = list()
frame_view = obs.view[frame_intervals]
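
When split interval names are given, the loop above repeatedly takes the earliest remaining interval across all of the named interval lists to define the frame boundaries. For illustration only, the same earliest-first merge can be written with heapq.merge, assuming each input list is already sorted by start time; the Ival namedtuple and the values are invented for the sketch.

import heapq
from collections import namedtuple

# Hypothetical stand-in for TOAST interval records.
Ival = namedtuple("Ival", ["start", "stop"])

scans = [Ival(0.0, 10.0), Ival(25.0, 40.0)]
turnarounds = [Ival(10.0, 25.0), Ival(40.0, 55.0)]

# Take intervals from all lists in order of start time, as frame boundaries.
timespans = [
    (ivl.start, ivl.stop)
    for ivl in heapq.merge(scans, turnarounds, key=lambda ivl: ivl.start)
]
print(timespans)
# [(0.0, 10.0), (10.0, 25.0), (25.0, 40.0), (40.0, 55.0)]
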
@@ -456,7 +527,7 @@ def __call__(self, obs):
if len(self._interval_names) > 0:
tview = obs.view[frame_intervals].shared[self._timestamp_names[0]][ivw]
iframe = IntervalList(
obs.shared[self._timestamp_names[0]],
np.array(obs.shared[self._timestamp_names[0]]),
timespans=[(tview[0], tview[-1])],
)
for ivl_key, ivl_val in self._interval_names:
@@ -465,6 +536,11 @@
ivl_key,
iframe,
)
# Tag frames which contain a period where the telescope is turning around.
# This may not be useful if the frame splitting was not based on suitable
# intervals.
if ivl_val == "intervals_turnaround" and len(frame[ivl_val]) > 0:
frame["Turnaround"] = c3g.G3Bool(True)
output.append(frame)
# Delete our temporary frame interval if we created it
if self._frame_intervals is None: