Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docs/source/tutorials/maximum-likelihood-mapper.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -107,8 +107,8 @@
"from maria.mappers import MaximumLikelihoodMapper\n",
"\n",
"ml_mapper = MaximumLikelihoodMapper(tods=tods, \n",
" width=0.8*input_map.width.deg,\n",
" height=0.8*input_map.height.deg,\n",
" width=0.8 * input_map.width.deg,\n",
" height=0.8 * input_map.height.deg,\n",
" units=\"Jy/pixel\")\n",
"print(f\"{ml_mapper.loss() = }\")"
]
Expand Down
63 changes: 37 additions & 26 deletions maria/atmosphere/atmosphere.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,35 +77,35 @@ def __init__(

self._initialized = False

def initialize(self, instrument, boresight, site):
def initialize(self, obs):
"""
Simulate a realization of PWV.
"""

self.layers = generate_layers(
instrument=instrument,
boresight=boresight,
instrument=obs.instrument,
boresight=obs.boresight,
weather=self.weather,
site=site,
site=obs.site,
mode=self.model,
angular=self.angular,
max_height=self.max_height,
)

if self.timestep is None:
min_fwhm = instrument.dets.angular_fwhm(z=self.max_height).min()
min_fwhm = obs.instrument.dets.angular_fwhm(z=self.max_height).min()
max_wind = Quantity((self.layers.wind_speed / self.layers.h).values, "rad/s").max()
self.timestep = (min_fwhm / max_wind).s
self.timestep = max(1e-1, (min_fwhm / max_wind).s)

self.boresight = boresight.downsample(timestep=self.timestep)
self.boresight = obs.boresight.downsample(timestep=self.timestep)
self.coords = self.boresight.broadcast(
instrument.dets.offsets,
obs.instrument.dets.offsets,
frame="az/el",
)

# this is a smaller version of the sim coords
outer_coords = self.boresight.broadcast(
instrument.dets.outer().offsets,
obs.instrument.dets.outer().offsets,
frame="az/el",
)

Expand Down Expand Up @@ -206,23 +206,34 @@ def initialize(self, instrument, boresight, site):
self.layers.loc[in_process].iterrows(),
):
# this algorithm probably works
res = layer.res
wide_lp_x_dense = np.linspace(min_ty, max_ty, 10000)
wide_lp_z_dense = layer.h * np.ones(len(wide_lp_x_dense))
wide_lp_dense = np.c_[wide_lp_x_dense, wide_lp_z_dense]
interior = triangulation.find_simplex(wide_lp_dense) > -1
# interior = interior if interior.any() else np.ones_like(interior, dtype=bool)
lp_dense_x = wide_lp_x_dense[interior]
lp_x_min = lp_dense_x.min() - 2 * res
lp_x_max = lp_dense_x.max() + 2 * res
n_lp = np.maximum(3, int((lp_x_max - lp_x_min) / res))
lp_x = np.linspace(lp_x_min, lp_x_max, n_lp)
lp_z = layer.h * np.ones(len(lp_x))
lp = np.c_[lp_x, lp_z]
n_lp = len(lp)

layer_labels.extend(n_lp * [layer_index])
cross_section_points_list.append(lp)
n_cross_section = int(np.maximum(2, (np.ptp(tp[:, 1]) + 2 * layer.res) / layer.res))
cross_section_points = np.stack(
[
np.linspace(tp[:, 1].min() - layer.res, tp[:, 1].max() + layer.res, n_cross_section),
layer.h * np.ones(n_cross_section),
]
).T

layer_labels.extend(n_cross_section * [layer_index])
cross_section_points_list.append(cross_section_points)

# res = layer.res
# wide_lp_x_dense = np.linspace(min_ty, max_ty, 10000)
# wide_lp_z_dense = layer.h * np.ones(len(wide_lp_x_dense))
# wide_lp_dense = np.c_[wide_lp_x_dense, wide_lp_z_dense]
# interior = triangulation.find_simplex(wide_lp_dense) > -1
# # interior = interior if interior.any() else np.ones_like(interior, dtype=bool)
# lp_dense_x = wide_lp_x_dense[interior]
# lp_x_min = lp_dense_x.min() - 2 * res
# lp_x_max = lp_dense_x.max() + 2 * res
# n_lp = np.maximum(3, int((lp_x_max - lp_x_min) / res))
# lp_x = np.linspace(lp_x_min, lp_x_max, n_lp)
# lp_z = layer.h * np.ones(len(lp_x))
# lp = np.c_[lp_x, lp_z]
# n_lp = len(lp)

# layer_labels.extend(n_lp * [layer_index])
# cross_section_points_list.append(lp)

cross_section_points = np.concatenate(cross_section_points_list, axis=0)

Expand Down
20 changes: 10 additions & 10 deletions maria/map/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -131,9 +131,7 @@ def __init__(
if np.shape(beam)[-1] != 3:
raise ValueError("'beam' must be either a number or a tuple of (major, minor, angle)")

beam = beam * np.ones((1, 3) if "nu" in self.dims else 3)

self.beam = Quantity(beam, "deg" if degrees else "rad")
self.beam = Quantity(beam * np.ones((*self.slice_dims.values(), 3)), "deg" if degrees else "rad")

if self.u["quantity"] == "spectral_flux_density_per_beam":
if not np.all(self.beam_area > 0):
Expand Down Expand Up @@ -174,12 +172,11 @@ def t_bins(self):
"""
This might break in the year 3000.
"""
t_min = arrow.get("0001-01-01").timestamp()
t_max = arrow.get("3000-01-01").timestamp()

return np.array([t_min, *(self.t[1:] + self.t[:-1]) / 2, t_max])
# t_min = arrow.get("0001-01-01").timestamp()
# t_max = arrow.get("3000-01-01").timestamp()
# return np.array([-np.inf, *(self.t[1:] + self.t[:-1]) / 2, np.inf])

# return np.array([-np.inf, *(self.t[1:] + self.t[:-1])/2, np.inf])
return np.array([-np.inf, *(self.t[1:] + self.t[:-1]) / 2, np.inf])

@property
def t_side(self):
Expand Down Expand Up @@ -226,7 +223,7 @@ def unsqueeze(self, dim, value=None):
raise Exception(f"{self.__class__.__name__} already has dimension '{dim}'")

new_dim_index = 0
for d in ["stokes", "nu", "t"]:
for d in ["stokes", "nu", "t", "z"]:
if d == dim:
break
if d in self.dims:
Expand All @@ -235,6 +232,8 @@ def unsqueeze(self, dim, value=None):
package = self.package()
package["data"] = np.expand_dims(package["data"], new_dim_index)
package["weight"] = np.expand_dims(package["weight"], new_dim_index)
package["beam"] = np.expand_dims(package["beam"], new_dim_index)

if value is None:
value = SLICE_DIMS[dim]["default"]
package[dim] = value
Expand All @@ -247,7 +246,8 @@ def beam_area(self):
Returns the beam area in steradians
"""
area = (np.pi / 4) * self.beam[..., 0].radians * self.beam[..., 1].radians
return Quantity(area * np.ones(tuple(self.slice_dims.values())), "sr")
area = np.expand_dims(area, axis=(-1, -2)[: len(self.map_dims)])
return Quantity(area, "sr")

def to(self, units: str, **calibration_kwargs: Mapping):
if units == self.units:
Expand Down
38 changes: 20 additions & 18 deletions maria/map/projection.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,6 @@ def __init__(
raise ValueError("'center' must be a two-tuple of numbers")

self.frame = Frame(frame)
beam = beam if beam is not None else 0.0

super().__init__(
data=data,
Expand Down Expand Up @@ -105,9 +104,13 @@ def __init__(
y_res = height / self.n_y
else:
y_res = x_res
else:
elif height is not None:
# here height must not be None
x_res = y_res = height / self.n_y
else:
raise ValueError(
f"{self.__class__.__name__} needs one of ['width', 'height', 'resolution', 'x_res', 'y_res']."
)

self.x_res = Quantity(x_res, "deg" if degrees else "rad")
self.y_res = Quantity(y_res, "deg" if degrees else "rad")
Expand Down Expand Up @@ -218,33 +221,32 @@ def __getattr__(self, attr):
raise AttributeError(f"'ProjectionMap' object has no attribute '{attr}'")

def __getitem__(self, key):
if not hasattr(key, "__iter__"):
key = (key,)
key = key if isinstance(key, tuple) else (key,)

explicit_slices = unpack_implicit_slice(key, ndims=self.ndim)

_, dimensions = get_factor_and_base_units_vector(self.units)
if all([dimensions[dim] == 0 for dim in ["pixel", "rad"]]):
# don't convert if in resolution-neutral units (like temperature)
package = self.package()
else:
# convert if in non-resolution-neutral units (like spectral flux)
package = self.to("K_RJ").package()
package = self.package()

package["data"] = package["data"][key]
package["weight"] = package["weight"][key]
package["beam"] = package["beam"][explicit_slices[: -len(self.map_dims)]]

for axis, (dim, naxis) in enumerate(self.dims.items()):
if dim not in "xy":
package[dim] = package[dim][explicit_slices[axis]]
if isinstance(explicit_slices[axis], int):
package.pop(dim)
else:
package[dim] = package[dim][explicit_slices[axis]]

if dim in "nu":
package["beam"] = package["beam"][explicit_slices[axis]]
x_res_factor = explicit_slices[-1].step or 1.0
y_res_factor = explicit_slices[-2].step or 1.0

_, dimensions = get_factor_and_base_units_vector(self.units)

package["y_res"] *= explicit_slices[-2].step or 1
package["x_res"] *= explicit_slices[-1].step or 1
package["y_res"] *= y_res_factor
package["x_res"] *= x_res_factor
package["data"] *= (x_res_factor * y_res_factor) ** dimensions.pixel

return ProjectionMap(**package).to(self.units)
return ProjectionMap(**package)

def downsample(self, reduce: tuple, func: Callable = np.mean):
if reduce:
Expand Down
11 changes: 5 additions & 6 deletions maria/mappers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,21 +70,20 @@ def __init__(
else:
self.timestep = timestep
time_bins = np.arange(min_time, max_time + timestep, timestep)
time_bins += mean_time - self.time_bins.mean()
self.t = (time_bins[1:] + time_bins[:-1]) / 2

self.bands = BandList([])

self.tods = []
self.add_tods(tods)

beam_sum = np.zeros((len(self.bands), 1, 3))
beam_wgt = np.zeros((len(self.bands), 1, 3))
beam_sum = np.zeros((len(self.nu), 1, 3))
beam_wgt = np.zeros((len(self.nu), 1, 3))

for band_index, band in enumerate(self.bands):
for nu_index, nu in enumerate(self.nu):
for tod in self.tods:
beam_sum[band_index] += tod.duration * tod.dets.beams[tod.dets.band_name == band.name].mean(axis=0)
beam_wgt[band_index] += tod.duration
beam_sum[nu_index] += tod.duration * tod.dets.beams[tod.dets.band_center == nu].mean(axis=0)
beam_wgt[nu_index] += tod.duration

self.beam = beam_sum / beam_wgt

Expand Down
19 changes: 13 additions & 6 deletions maria/sim/map.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,10 +84,6 @@ def _sample_maps(self, obs):
band_coords = obs.coords[band_mask]
band_dets = obs.instrument.dets[band_mask]

pointing_s = ttime.monotonic()
P = self.map.pointing_matrix(coords=band_coords, dets=band_dets)
logger.debug(f"Computed pointing matrix for band {band.name} in {humanize_time(ttime.monotonic() - pointing_s)}")

band_fwhm = Quantity(
compute_angular_fwhm(
fwhm_0=obs.instrument.dets.primary_size.mean(),
Expand All @@ -102,10 +98,13 @@ def _sample_maps(self, obs):

for channel_index, (nu_min, nu_max) in enumerate(self.map.nu_bin_bounds):
channel_s = ttime.monotonic()
channel_map = smoothed_map.to("K_RJ", band=band).data[:, channel_index]
channel_map = smoothed_map.to("K_RJ", band=band)[:, [channel_index]]
qchannel = (nu_min, nu_max)
channel_string = f"{qchannel}"

if (band.nu.Hz.max() < nu_min.Hz) or (nu_max.Hz < band.nu.Hz.min()):
continue

spectrum_kwargs = (
{
"spectrum": obs.atmosphere.spectrum,
Expand All @@ -128,6 +127,8 @@ def _sample_maps(self, obs):
f"{humanize_time(ttime.monotonic() - calibration_s)}"
)

bands_pbar.set_postfix(band=band.name, channel=channel_string)

# for stokes_index, stokes in enumerate(getattr(self.map, "stokes", "I")):
# bands_pbar.set_postfix(band=band.name, channel=channel_string, stokes=stokes)

Expand All @@ -150,7 +151,13 @@ def _sample_maps(self, obs):
# flat_padded_map = np.pad(channel_stokes_map, pad_width=((1, 1)), mode="edge").ravel()
# flat_padded_map = np.pad(channel_stokes_map, pad_width=((1, 1)), mode="edge").ravel()

pW = pW_per_K_RJ * (P @ channel_map.ravel()).reshape(band_coords.shape)
pointing_s = ttime.monotonic()
P = channel_map.pointing_matrix(coords=band_coords, dets=band_dets)
logger.debug(
f"Computed pointing matrix for band {band.name} in {humanize_time(ttime.monotonic() - pointing_s)}"
)

pW = pW_per_K_RJ * (P @ channel_map.data.ravel()).reshape(band_coords.shape)

map_loading[band_mask] += pW

Expand Down
2 changes: 1 addition & 1 deletion maria/sim/simulation.py
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ def run_obs(self, obs: Observation) -> TOD:

if hasattr(obs, "atmosphere"):
atmosphere_sim_start_s = ttime.monotonic()
obs.atmosphere.initialize(instrument=obs.instrument, boresight=obs.boresight, site=obs.site)
obs.atmosphere.initialize(obs)
self._simulate_atmosphere(obs)
obs.loading["atmosphere"] = self._compute_atmospheric_loading(obs)
logger.debug(f"Ran atmosphere simulation in {humanize_time(ttime.monotonic() - atmosphere_sim_start_s)}.")
Expand Down
8 changes: 0 additions & 8 deletions maria/tests/io/test_io.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,3 @@ def test_change_cache_dir():
del os.environ["MARIA_CACHE_DIR"]
maria.set_cache_dir("/tmp/maria-data")
raise Exception()


@pytest.mark.parametrize("filename", all_maps)
def test_all_maps(filename):
m = maria.map.load(filename=fetch(filename), width=0.1, center=(150, 10)) # noqa
m.plot()

plt.close("all")
46 changes: 46 additions & 0 deletions maria/tests/map/test_map_io.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
from __future__ import annotations

import maria
import matplotlib.pyplot as plt
import numpy as np
import pytest
from maria.instrument import Band
from maria.io import fetch
from maria.map import all_maps
from maria.mappers import BinMapper

plt.close("all")


@pytest.mark.parametrize("filename", all_maps)
def test_all_maps(filename):
    """Smoke-test that every bundled map file can be fetched, loaded, and plotted."""
    loaded_map = maria.map.load(filename=fetch(filename), width=0.1, center=(150, 10))  # noqa
    loaded_map.plot()

    # close all figures so parametrized runs don't accumulate open windows
    plt.close("all")


@pytest.mark.parametrize("map_path", all_maps)  # noqa
def test_map_io_units(map_path, tmp_path):
    """Round-trip unit conversion and serialization for every bundled map.

    Checks that:
      * converting K_RJ -> Jy/pixel -> original units leaves the data unchanged;
      * writing to HDF5 (and FITS, when the source map was FITS) preserves
        both the pixel data and the map resolution.

    Parameters
    ----------
    map_path : str
        Name of a bundled map file (parametrized over ``all_maps``).
    tmp_path : pathlib.Path
        pytest-provided per-test temporary directory; replaces the previous
        hard-coded ``/tmp`` paths so the test is portable and isolated
        between parallel runs.
    """
    m = maria.map.load(fetch(map_path))
    if "nu" not in m.dims:
        # the unit conversions below need a frequency axis; add one at 150 GHz
        m = m.unsqueeze("nu", 150e9)

    # unit round-trip should be numerically lossless
    # (.compute() — the map data is presumably a lazy/dask array; TODO confirm)
    assert np.allclose(m.to("K_RJ").to("Jy/pixel").to(m.units).data, m.data).compute()

    hdf_path = str(tmp_path / "test_write_map.h5")
    m.to("cK_RJ").to_hdf(hdf_path)
    new_m_hdf = maria.map.load(hdf_path).to(m.units)  # noqa

    assert np.allclose(new_m_hdf.data, m.data)
    assert np.allclose(new_m_hdf.resolution.arcsec, m.resolution.arcsec)

    if "fits" in map_path:
        fits_path = str(tmp_path / "test_write_map.fits")
        m.to("cK_RJ").to_fits(fits_path)
        new_m_fits = maria.map.load(fits_path).to(m.units)  # noqa

        assert np.allclose(new_m_fits.data, m.data)
        assert np.allclose(new_m_fits.resolution.arcsec, m.resolution.arcsec)

    m.plot()
Loading
Loading