3 changes: 2 additions & 1 deletion imap_processing/tests/ultra/unit/test_helio_pset.py
@@ -16,7 +16,7 @@

@pytest.mark.skip(reason="Long running test for validation purposes.")
def test_validate_exposure_time_and_sensitivities(
ancillary_files, rates_dataset, imap_ena_sim_metakernel
ancillary_files, rates_dataset, imap_ena_sim_metakernel, aux_dataset
):
"""Validates exposure time and sensitivities for ebin 0."""
sens_filename = "SENS-IMAP_ULTRA_90-IMAP_DPS-HELIO-nside32-ebin0.csv"
@@ -91,6 +91,7 @@ def test_validate_exposure_time_and_sensitivities(
l1b_de,
dataset,
rates_dataset,
aux_dataset,
"imap_ultra_l1c_90sensor-heliopset",
ancillary_files,
90,
27 changes: 18 additions & 9 deletions imap_processing/tests/ultra/unit/test_spacecraft_pset.py
@@ -31,6 +31,7 @@
@pytest.mark.external_test_data
@pytest.mark.external_kernel
def test_calculate_spacecraft_pset(
aux_dataset,
rates_dataset,
imap_ena_sim_metakernel,
use_fake_spin_data_for_time,
@@ -40,10 +41,11 @@ def test_calculate_spacecraft_pset(
"""Tests calculate_spacecraft_pset function."""
# Simulate a spin table from MET = 0 to MET = 141 * 15 seconds
use_fake_spin_data_for_time(start_met=0, end_met=141 * 15)
# Ensure rate dataset has correct time range
rates_dataset.shcoarse.data = np.linspace(
0, 141 * 15, len(rates_dataset.shcoarse.data)
)
# Ensure rate and aux data have the correct time range
t_rates = np.linspace(0, 141 * 15, len(rates_dataset.shcoarse.data))
rates_dataset.shcoarse.data = t_rates
aux_dataset.timespinstart.data = t_rates[: len(aux_dataset.timespinstart.data)]
aux_dataset.timespinstart.data[-1] = t_rates[-1]
# This is just setting up the data so that it is in the format of l1b_de_dataset.
test_path = TEST_PATH / "ultra-90_raw_event_data_shortened.csv"
df = pd.read_csv(test_path)
@@ -102,6 +104,7 @@ def test_calculate_spacecraft_pset(
test_l1b_de_dataset,
test_l1b_de_dataset, # placeholder for goodtimes_dataset
rates_dataset,
aux_dataset,
"imap_ultra_l1c_45sensor-spacecraftpset",
ancillary_files,
45,
@@ -116,6 +119,7 @@
@pytest.mark.external_kernel
def test_calculate_spacecraft_pset_with_cdf(
ancillary_files,
aux_dataset,
rates_dataset,
imap_ena_sim_metakernel,
use_fake_spin_data_for_time,
@@ -134,10 +138,11 @@ def test_calculate_spacecraft_pset_with_cdf(

de_dict["epoch"] = df_subset["epoch"].values
species_bin = np.full(len(df_subset), 1, dtype=np.uint8)
# Ensure rate dataset has correct time range
rates_dataset.shcoarse.data = np.linspace(
0, 141 * 15, len(rates_dataset.shcoarse.data)
)
# Ensure rate and aux data have the correct time range
t_rates = np.linspace(0, 141 * 15, len(rates_dataset.shcoarse.data))
rates_dataset.shcoarse.data = t_rates
aux_dataset.timespinstart.data = t_rates[: len(aux_dataset.timespinstart.data)]
aux_dataset.timespinstart.data[-1] = t_rates[-1]
# PosYSlit is True for left (start_type = 1)
# PosYSlit is False for right (start_type = 2)
start_type = np.where(df_subset["PosYSlit"].values, 1, 2)
@@ -186,6 +191,7 @@ def test_calculate_spacecraft_pset_with_cdf(
dataset,
dataset, # placeholder for goodtimes_dataset
rates_dataset,
aux_dataset,
"imap_ultra_l1c_45sensor-spacecraftpset",
ancillary_files,
45,
@@ -199,7 +205,9 @@ def test_calculate_spacecraft_pset_with_cdf(


@pytest.mark.skip(reason="Long running test for validation purposes.")
def test_validate_exposure_time_and_sensitivities(ancillary_files, rates_dataset):
def test_validate_exposure_time_and_sensitivities(
ancillary_files, rates_dataset, aux_dataset
):
"""Validates exposure time and sensitivities for ebin 0."""
test_data = [
(
@@ -296,6 +304,7 @@ def test_validate_exposure_time_and_sensitivities(ancillary_files, rates_dataset
l1b_de,
dataset,
rates_dataset,
aux_dataset,
"imap_ultra_l1c_90sensor-spacecraftpset",
ancillary_files,
90,
16 changes: 8 additions & 8 deletions imap_processing/tests/ultra/unit/test_ultra_l1b.py
@@ -190,7 +190,7 @@ def test_cdf_de_flags(

@pytest.mark.external_test_data
def test_ultra_l1b_extendedspin(
use_fake_spin_data_for_time, faux_aux_dataset, rates_dataset
use_fake_spin_data_for_time, aux_dataset, rates_dataset
):
"""Tests that L1b data is created."""
use_fake_spin_data_for_time(0, 141 * 15)
@@ -205,7 +205,7 @@ def test_ultra_l1b_extendedspin(
"imap_ultra_l1a_45sensor-params",
]
}
data_dict["imap_ultra_l1a_45sensor-aux"] = faux_aux_dataset
data_dict["imap_ultra_l1a_45sensor-aux"] = aux_dataset
data_dict["imap_ultra_l1a_45sensor-rates"] = rates_dataset

ancillary_files = {}
@@ -219,7 +219,7 @@


@pytest.mark.external_test_data
def test_cdf_extendedspin(use_fake_spin_data_for_time, faux_aux_dataset, rates_dataset):
def test_cdf_extendedspin(use_fake_spin_data_for_time, aux_dataset, rates_dataset):
use_fake_spin_data_for_time(0, 141 * 15)
l1b_de_dataset_path = (
TEST_PATH / "imap_ultra_l1b_45sensor-de_20240207-repoint99999_v999.cdf"
@@ -233,7 +233,7 @@ def test_cdf_extendedspin(use_fake_spin_data_for_time, faux_aux_dataset, rates_d
"imap_ultra_l1a_45sensor-params",
]
}
data_dict["imap_ultra_l1a_45sensor-aux"] = faux_aux_dataset
data_dict["imap_ultra_l1a_45sensor-aux"] = aux_dataset
data_dict["imap_ultra_l1a_45sensor-rates"] = rates_dataset

ancillary_files = {}
@@ -251,7 +251,7 @@ def test_cdf_extendedspin(use_fake_spin_data_for_time, faux_aux_dataset, rates_d


@pytest.mark.external_test_data
def test_cdf_goodtimes(use_fake_spin_data_for_time, faux_aux_dataset, rates_dataset):
def test_cdf_goodtimes(use_fake_spin_data_for_time, aux_dataset, rates_dataset):
"""Tests that CDF file is created and contains same attributes as xarray."""
use_fake_spin_data_for_time(0, 141 * 15)
l1b_de_dataset_path = (
@@ -266,7 +266,7 @@ def test_cdf_goodtimes(use_fake_spin_data_for_time, faux_aux_dataset, rates_data
"imap_ultra_l1a_45sensor-params",
]
}
data_dict["imap_ultra_l1a_45sensor-aux"] = faux_aux_dataset
data_dict["imap_ultra_l1a_45sensor-aux"] = aux_dataset
data_dict["imap_ultra_l1a_45sensor-rates"] = rates_dataset

ancillary_files = {}
@@ -288,7 +288,7 @@ def test_cdf_goodtimes(use_fake_spin_data_for_time, faux_aux_dataset, rates_data


@pytest.mark.external_test_data
def test_cdf_badtimes(use_fake_spin_data_for_time, faux_aux_dataset, rates_dataset):
def test_cdf_badtimes(use_fake_spin_data_for_time, aux_dataset, rates_dataset):
"""Tests that CDF file is created and contains same attributes as xarray."""
use_fake_spin_data_for_time(0, 141 * 15)
l1b_de_dataset_path = (
@@ -303,7 +303,7 @@ def test_cdf_badtimes(use_fake_spin_data_for_time, faux_aux_dataset, rates_datas
"imap_ultra_l1a_45sensor-params",
]
}
data_dict["imap_ultra_l1a_45sensor-aux"] = faux_aux_dataset
data_dict["imap_ultra_l1a_45sensor-aux"] = aux_dataset
data_dict["imap_ultra_l1a_45sensor-rates"] = rates_dataset

ancillary_files = {}
24 changes: 19 additions & 5 deletions imap_processing/tests/ultra/unit/test_ultra_l1b_culling.py
@@ -3,6 +3,7 @@
import numpy as np
import pandas as pd
import pytest
import xarray as xr

from imap_processing import imap_module_directory
from imap_processing.quality_flags import (
@@ -173,18 +174,31 @@ def test_compare_aux_univ_spin_table(use_fake_spin_data_for_time, faux_aux_datas
def test_get_duration(rates_l1_test_path, use_fake_spin_data_for_time):
"""Tests get_duration function."""
use_fake_spin_data_for_time(start_met=0, end_met=141 * 15)

df = pd.read_csv(rates_l1_test_path)

# Should be evenly spaced spins of 15 seconds each except the first one has 14.
num_spins = 15
spin_start_times = np.concatenate([[0], np.arange(14, 222, num_spins)])
spin_numbers = np.arange(127, 142)
num_spins = len(spin_numbers)

aux_ds = xr.Dataset(
data_vars={
"timespinstart": ("epoch", spin_start_times),
"duration": ("epoch", np.full(num_spins, 15)),
"spinnumber": ("epoch", spin_numbers),
},
coords={"epoch": ("epoch", np.arange(num_spins))},
)

met = df["TimeTag"] - df["TimeTag"].values[0]
spin = df["Spin"]
spin_number, duration = get_spin_and_duration(met, spin)

spin_number, duration = get_spin_and_duration(aux_ds, met)
assert np.array_equal(spin, spin_number)
assert np.all(duration == 15)


def test_get_pulses(rates_l1_test_path, use_fake_spin_data_for_time):
def test_get_pulses(rates_l1_test_path, use_fake_spin_data_for_time, aux_dataset):
"""Tests get_pulses_per_spin function."""
df = pd.read_csv(rates_l1_test_path)

@@ -214,7 +228,7 @@ def test_get_pulses(rates_l1_test_path, use_fake_spin_data_for_time):
"spin": df["Spin"],
}

pulses = get_pulses_per_spin(pulse_dict)
pulses = get_pulses_per_spin(aux_dataset, pulse_dict)
unique_spins = np.unique(pulse_dict["spin"])

start_pulses_total = pulse_dict["start_rf"] + pulse_dict["start_lf"]
28 changes: 23 additions & 5 deletions imap_processing/tests/ultra/unit/test_ultra_l1b_extended.py
@@ -31,6 +31,7 @@
get_path_length,
get_ph_tof_and_back_positions,
get_phi_theta,
get_spin_and_duration,
get_ssd_back_position_and_tof_offset,
get_ssd_tof,
interpolate_fwhm,
@@ -521,18 +522,15 @@ def test_get_eventtimes(test_fixture, aux_dataset):
"""Tests get_eventtimes function."""
df_filt, _, _, de_dataset = test_fixture

event_times, spin_start_times, spin_numbers = get_event_times(
event_times, spin_start_times = get_event_times(
aux_dataset,
de_dataset["phase_angle"].values,
de_dataset["shcoarse"].values,
)

# Check shapes
assert (
event_times.shape
== spin_start_times.shape
== spin_numbers.shape
== de_dataset["phase_angle"].shape
event_times.shape == spin_start_times.shape == de_dataset["phase_angle"].shape
)

t1_start_sec = aux_dataset["timespinstart"].values[0]
@@ -561,6 +559,26 @@ def test_get_eventtimes(test_fixture, aux_dataset):
assert start_time <= int(event_times[i]) <= end_time


@pytest.mark.external_test_data
def test_get_spin_and_duration(test_fixture, aux_dataset):
"""Tests get_spin_and_duration function."""
df_filt, _, _, de_dataset = test_fixture

spin_number, spin_duration = get_spin_and_duration(
aux_dataset,
de_dataset["shcoarse"].values,
)

# Check shapes
assert spin_number.shape == spin_duration.shape == de_dataset["shcoarse"].shape

t1_spin_number = aux_dataset["spinnumber"].values[0]
t1_start_dur = aux_dataset["duration"].values[0]
# Check the first event spin number and duration
assert spin_number[0] == t1_spin_number
assert spin_duration[0] == t1_start_dur


@pytest.mark.external_test_data
def test_get_event_times_out_of_range(test_fixture, aux_dataset):
"""Tests get_event_times with out of range values."""
19 changes: 13 additions & 6 deletions imap_processing/tests/ultra/unit/test_ultra_l1c.py
@@ -123,6 +123,7 @@ def test_ultra_l1c_error(mock_data_l1b_dict):
def test_calculate_spacecraft_pset_with_cdf(
ancillary_files,
rates_dataset,
aux_dataset,
imap_ena_sim_metakernel,
use_fake_spin_data_for_time,
mock_spacecraft_pointing_lookups,
@@ -137,10 +138,11 @@ def test_calculate_spacecraft_pset_with_cdf(
df_subset = df[df["pointing_number"] == pointing].copy()

de_dict = {}
# Ensure rate dataset has correct time range
rates_dataset.shcoarse.data = np.linspace(
0, 141 * 15, len(rates_dataset.shcoarse.data)
)
# Ensure rate and aux data are in the correct time window
t_rates = np.linspace(0, 141 * 15, len(rates_dataset.shcoarse.data))
rates_dataset.shcoarse.data = t_rates
aux_dataset.timespinstart.data = t_rates[: len(aux_dataset.timespinstart.data)]
aux_dataset.timespinstart.data[-1] = t_rates[-1]
de_dict["epoch"] = df_subset["epoch"].values
species_bin = np.full(len(df_subset), 1, dtype=np.uint8)

@@ -187,6 +189,7 @@ def test_calculate_spacecraft_pset_with_cdf(
"imap_ultra_l1b_45sensor-extendedspin": dataset, # placeholder
"imap_ultra_l1b_45sensor-goodtimes": dataset, # placeholder
"imap_ultra_l1a_45sensor-rates": rates_dataset,
"imap_ultra_l1a_45sensor-aux": aux_dataset,
}
with (
mock.patch(
@@ -215,6 +218,7 @@ def test_calculate_helio_pset_with_cdf(
ancillary_files,
imap_ena_sim_metakernel,
mock_helio_pointing_lookups,
aux_dataset,
rates_dataset,
use_fake_spin_data_for_time,
):
@@ -226,8 +230,10 @@ def test_calculate_helio_pset_with_cdf(
)
df = pd.read_csv(TEST_PATH / "IMAP-Ultra45_r1_L1_V0_shortened.csv")

# Ensure rate dataset has correct time range
rates_dataset.shcoarse.data += 56970124
# Ensure rate and aux data are in the correct time window
t_off = 56970125
rates_dataset.shcoarse.data += t_off
aux_dataset.timespinstart.data += t_off
# Select a single pointing number
pointing = 0
df_subset = df[df["pointing_number"] == pointing].copy()
@@ -281,6 +287,7 @@ def test_calculate_helio_pset_with_cdf(
}
), # placeholder
"imap_ultra_l1a_45sensor-rates": rates_dataset,
"imap_ultra_l1a_45sensor-aux": aux_dataset,
}
n_pix = hp.nside2npix(32)
mock_eff = np.ones((46, n_pix))
23 changes: 22 additions & 1 deletion imap_processing/tests/ultra/unit/test_ultra_l1c_pset_bins.py
@@ -436,6 +436,22 @@ def test_get_spacecraft_background_rates(
"Tests calculate_background_rates function."
# Simulate a spin table from MET = 0 to MET = 141 * 15 seconds
use_fake_spin_data_for_time(start_met=0, end_met=141 * 15)

# Should be evenly spaced spins of 15 seconds each except the first one has 14.
num_spins = 15
spin_start_times = np.concatenate([[0], np.arange(14, 222, num_spins)]) + 445015651
spin_numbers = np.arange(127, 142)
num_spins = len(spin_numbers)

aux_ds = xr.Dataset(
data_vars={
"timespinstart": ("epoch", spin_start_times),
"duration": ("epoch", np.full(num_spins, 15)),
"spinnumber": ("epoch", spin_numbers),
},
coords={"epoch": ("epoch", np.arange(num_spins))},
)

df = pd.read_csv(rates_l1_test_path)

rates = {
@@ -464,7 +480,12 @@ def test_get_spacecraft_background_rates(
goodtimes_spin_number = np.array([130, 131])

background_rates = get_spacecraft_background_rates(
rates, "ultra45", ancillary_files, energy_bin_edges, goodtimes_spin_number
rates,
aux_ds,
"ultra45",
ancillary_files,
energy_bin_edges,
goodtimes_spin_number,
)

assert background_rates.shape == (len(energy_bin_edges), hp.nside2npix(128))
2 changes: 1 addition & 1 deletion imap_processing/ultra/l1b/badtimes.py
@@ -34,7 +34,7 @@ def calculate_badtimes(
badtimes_dataset : xarray.Dataset
Dataset containing the extendedspin data that has been culled.
"""
n_bins = extendedspin_dataset.dims["energy_bin_geometric_mean"]
n_bins = extendedspin_dataset.sizes["energy_bin_geometric_mean"]
culled_spins = np.setdiff1d(
extendedspin_dataset["spin_number"].values, goodtimes_spins
)