diff --git a/.github/workflows/conda-build-lint-test.yml b/.github/workflows/conda-build-lint-test.yml
index d3ee6085..0d239e81 100644
--- a/.github/workflows/conda-build-lint-test.yml
+++ b/.github/workflows/conda-build-lint-test.yml
@@ -2,9 +2,9 @@ name: conda - Build Lint and Test
 on:
   push:
-    branches: [ main, "v*"]
+    branches: [ main ]
   pull_request:
-    branches: [ main, "v*"]
+    branches: [ main ]
 
 jobs:
   build-lint-test:
diff --git a/.github/workflows/docker-build-lint-test.yml b/.github/workflows/docker-build-lint-test.yml
index 41d21a20..0f376ae3 100644
--- a/.github/workflows/docker-build-lint-test.yml
+++ b/.github/workflows/docker-build-lint-test.yml
@@ -3,9 +3,9 @@ name: docker - Build Lint and Test
 on:
   push:
-    branches: [main, "v*"]
+    branches: [ main ]
   pull_request:
-    branches: [main, "v*"]
+    branches: [ main ]
 
 jobs:
   build-lint-test:
diff --git a/.github/workflows/pip-build-lint-test-coverage.yml b/.github/workflows/pip-build-lint-test-coverage.yml
index dc7be5d5..6c2f473c 100644
--- a/.github/workflows/pip-build-lint-test-coverage.yml
+++ b/.github/workflows/pip-build-lint-test-coverage.yml
@@ -2,9 +2,9 @@ name: pip - Build Lint Test and Coverage
 on:
   push:
-    branches: [ main, "v*"]
+    branches: [ main ]
   pull_request:
-    branches: [ main, "v*"]
+    branches: [ main ]
 
 jobs:
   build-lint-test-coverage:
diff --git a/recipe/meta.yaml b/recipe/meta.yaml
index c45fc3c8..3bee55f0 100644
--- a/recipe/meta.yaml
+++ b/recipe/meta.yaml
@@ -17,6 +17,7 @@ requirements:
   run:
     - python {{ python }}
     - numpy
+    - pyarrow >= 13.0.0
     - numba
     - pandas
     - difi
diff --git a/requirements.txt b/requirements.txt
index 9785669e..ebefa0ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,6 @@
 adam-core @ git+https://github.com/B612-Asteroid-Institute/adam_core@main
 numpy
+pyarrow>=13.0.0
 numba
 pandas
 difi
diff --git a/setup.cfg b/setup.cfg
index 3e53d233..6d7bcd6c 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -33,13 +33,14 @@ setup_requires =
 install_requires =
     adam-core @ git+https://github.com/B612-Asteroid-Institute/adam_core@main
     numpy
+    pyarrow >= 13.0.0
     numba
     pandas
     difi
     astropy >= 5.3.1
     astroquery
     jax
-    quivr
+    quivr>=0.7.1
     scipy
     scikit-learn
     healpy
diff --git a/thor/__init__.py b/thor/__init__.py
index 0d4f9033..9aaaabcd 100644
--- a/thor/__init__.py
+++ b/thor/__init__.py
@@ -12,6 +12,7 @@ from .orbit_selection import *
 from .filter_orbits import *
 from .main import *
+from .main_2 import *  # TODO: remove when main is replaced
 from .analysis import *
 
 logger = setupLogger(__name__)
diff --git a/thor/main_2.py b/thor/main_2.py
new file mode 100644
index 00000000..5054c81c
--- /dev/null
+++ b/thor/main_2.py
@@ -0,0 +1,147 @@
+from typing import List, Optional
+
+import pyarrow.compute as pc
+import quivr as qv
+from adam_core.coordinates import (
+    CartesianCoordinates,
+    OriginCodes,
+    transform_coordinates,
+)
+from adam_core.propagator import PYOORB, Propagator
+
+from .observations import Observations
+from .observations.filters import ObservationFilter, TestOrbitRadiusObservationFilter
+from .orbit import TestOrbit
+from .projections import GnomonicCoordinates
+
+
+class TransformedDetections(qv.Table):
+    id = qv.StringColumn()
+    coordinates = GnomonicCoordinates.as_column()
+    state_id = qv.Int64Column()
+
+
+def range_and_transform(
+    test_orbit: TestOrbit,
+    observations: Observations,
+    filters: Optional[List[ObservationFilter]] = None,
+    propagator: Propagator = PYOORB(),
+    max_processes: int = 1,
+) -> TransformedDetections:
+    """
+    Gather observations for a single test orbit, and transform them into a
+    gnomonic projection centered on the motion of the test orbit (co-rotating
+    frame).
+
+    Parameters
+    ----------
+    test_orbit : `~thor.orbit.TestOrbit`
+        Test orbit to use to gather and transform observations.
+    observations : `~thor.observations.observations.Observations`
+        Observations to filter, range, and transform.
+    filters : list of `~thor.observations.filters.ObservationFilter`, optional
+        Filters to apply to the observations before ranging and transforming
+        them. Defaults to None, in which case no filtering is done.
+    propagator : `~adam_core.propagator.propagator.Propagator`
+        Propagator to use to propagate the test orbit and generate
+        ephemerides.
+    max_processes : int, optional
+        Maximum number of processes to use for parallelization.
+
+    Returns
+    -------
+    transformed_detections : `~thor.main.TransformedDetections`
+        The transformed detections as gnomonic coordinates
+        of the observations in the co-rotating frame.
+    """
+    # Compute the ephemeris of the test orbit (this will be cached)
+    ephemeris = test_orbit.generate_ephemeris_from_observations(
+        observations,
+        propagator=propagator,
+        max_processes=max_processes,
+    )
+
+    # Apply filters to the observations
+    filtered_observations = observations
+    if filters is not None:
+        for filter_i in filters:
+            filtered_observations = filter_i.apply(filtered_observations, test_orbit)
+
+    # Assume that the heliocentric distance of all point sources in
+    # the observations is the same as that of the test orbit
+    ranged_detections_spherical = test_orbit.range_observations(
+        filtered_observations,
+        propagator=propagator,
+        max_processes=max_processes,
+    )
+
+    # Transform from spherical topocentric to cartesian heliocentric coordinates
+    ranged_detections_cartesian = transform_coordinates(
+        ranged_detections_spherical.coordinates,
+        representation_out=CartesianCoordinates,
+        frame_out="ecliptic",
+        origin_out=OriginCodes.SUN,
+    )
+
+    # Link the ephemeris and observations by state ID
+    link = qv.Linkage(
+        ephemeris,
+        filtered_observations,
+        left_keys=ephemeris.id,
+        right_keys=filtered_observations.state_id,
+    )
+
+    # Transform the detections into the co-rotating frame
+    transformed_detection_list = []
+    state_ids = filtered_observations.state_id.unique().sort()
+    for state_id in state_ids:
+        # Select the detections and ephemeris for this state ID
+        mask = pc.equal(state_id, filtered_observations.state_id)
+        ranged_detections_cartesian_i = ranged_detections_cartesian.apply_mask(mask)
+        ephemeris_i = link.select_left(state_id)
+        observations_i = link.select_right(state_id)
+
+        # Transform the detections into the co-rotating frame
+        transformed_detections_i = TransformedDetections.from_kwargs(
+            id=observations_i.detections.id,
+            coordinates=GnomonicCoordinates.from_cartesian(
+                ranged_detections_cartesian_i,
+                center_cartesian=ephemeris_i.ephemeris.aberrated_coordinates,
+            ),
+            state_id=observations_i.state_id,
+        )
+
+        transformed_detection_list.append(transformed_detections_i)
+
+    transformed_detections = qv.concatenate(transformed_detection_list)
+    return transformed_detections
+
+
+def link_test_orbit(
+    test_orbit: TestOrbit,
+    observations: Observations,
+    filters: Optional[List[ObservationFilter]] = [
+        TestOrbitRadiusObservationFilter(radius=10.0)
+    ],
+    propagator: Propagator = PYOORB(),
+    max_processes: int = 1,
+):
+    """
+    Find all linkages for a single test orbit.
+    """
+    # Range and transform the observations
+    transformed_detections = range_and_transform(
+        test_orbit,
+        observations,
+        filters=filters,
+        propagator=propagator,
+        max_processes=max_processes,
+    )
+
+    # TODO: Find objects which move in straight-ish lines in the gnomonic frame.
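+    #
+    # One sketch of the shape that step could take (illustrative only, not
+    # implemented in this PR; the velocity grid, `eps`, and `min_samples`
+    # are hypothetical parameters): shift the gnomonic positions by trial
+    # velocities and look for dense clusters, e.g.
+    #
+    #   import numpy as np
+    #   from sklearn.cluster import DBSCAN
+    #
+    #   dt = transformed_detections.coordinates.time.mjd().to_numpy()
+    #   dt = dt - dt.min()
+    #   x = transformed_detections.coordinates.theta_x.to_numpy()
+    #   y = transformed_detections.coordinates.theta_y.to_numpy()
+    #   for vx, vy in velocity_grid:  # degrees per day
+    #       points = np.stack([x - vx * dt, y - vy * dt], axis=1)
+    #       labels = DBSCAN(eps=eps, min_samples=min_samples).fit(points).labels_
+    #       # every label >= 0 is a candidate straight-ish line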
+    #
+    # TODO: Run IOD against each of the discovered straight lines, and
+    # filter down to plausible orbits.
+    #
+    # TODO: Run OD against the plausible orbits, and filter down to really good orbits.
+    #
+    # TODO: Perform arc extension on the really good orbits.
+
+    return transformed_detections
diff --git a/thor/observation_filters.py b/thor/observation_filters.py
deleted file mode 100644
index 0bcada9c..00000000
--- a/thor/observation_filters.py
+++ /dev/null
@@ -1,131 +0,0 @@
-import abc
-from typing import TypeAlias
-
-import numpy as np
-import quivr as qv
-from adam_core.observations import detections, exposures
-
-from . import orbit
-
-
-class Observations:
-    """Observations represents a collection of exposures and the
-    detections they contain.
-
-    The detections may be a filtered subset of the detections in the
-    exposures.
-    """
-
-    detections: detections.PointSourceDetections
-    exposures: exposures.Exposures
-    linkage: qv.Linkage[detections.PointSourceDetections, exposures.Exposures]
-
-    def __init__(
-        self,
-        detections: detections.PointSourceDetections,
-        exposures: exposures.Exposures,
-    ):
-        self.detections = detections
-        self.exposures = exposures
-        self.linkage = qv.Linkage(
-            detections,
-            exposures,
-            left_keys=detections.exposure_id,
-            right_keys=exposures.id,
-        )
-
-
-class ObservationFilter(abc.ABC):
-    """An ObservationFilter is reduces a collection of observations to
-    a subset of those observations.
-
-    """
-    @abc.abstractmethod
-    def apply(self, observations: Observations) -> Observations:
-        ...
-
-
-class TestOrbitRadiusObservationFilter(ObservationFilter):
-    """A TestOrbitRadiusObservationFilter is an ObservationFilter that
-    gathers observations within a fixed radius of the test orbit's
-    ephemeris at each exposure time within a collection of exposures.
-
-    """
-
-    def __init__(self, radius: float, test_orbit: orbit.TestOrbit):
-        """
-        radius: The radius of the cell in degrees
-        """
-        self.radius = radius
-        self.test_orbit = test_orbit
-
-    def apply(self, observations: Observations) -> Observations:
-        # Generate an ephemeris for every observer time/location in the dataset
-        observers = observations.exposures.observers()
-        ephems_linkage = self.test_orbit.generate_ephemeris(
-            observers=observers,
-        )
-
-        matching_detections = detections.PointSourceDetections.empty()
-        matching_exposures = exposures.Exposures.empty()
-
-        # Build a mapping of exposure_id to ephemeris ra and dec
-        for exposure in observations.exposures:
-            key = ephems_linkage.key(
-                code=exposure.observatory_code[0].as_py(),
-                mjd=exposure.midpoint().mjd()[0].as_py(),
-            )
-            ephem = ephems_linkage.select_left(key)
-            assert len(ephem) == 1, "there should be exactly one ephemeris per exposure"
-
-            ephem_ra = ephem.coordinates.lon[0].as_py()
-            ephem_dec = ephem.coordinates.lat[0].as_py()
-
-            exp_dets = observations.linkage.select_left(exposure.id[0])
-
-            nearby_dets = _within_radius(exp_dets, ephem_ra, ephem_dec, self.radius)
-            if len(nearby_dets) > 0:
-                matching_exposures = qv.concatenate([matching_exposures, exposure])
-                matching_detections = qv.concatenate([matching_detections, nearby_dets])
-
-        return Observations(matching_detections, matching_exposures)
-
-
-def _within_radius(
-    detections: detections.PointSourceDetections,
-    ra: float,
-    dec: float,
-    radius: float,
-) -> detections.PointSourceDetections:
-    """
-    Return the detections within a given radius of a given ra and dec.
-
-    ra, dec, and radius should be in degrees.
- """ - det_ra = np.deg2rad(detections.ra.to_numpy()) - det_dec = np.deg2rad(detections.dec.to_numpy()) - - center_ra = np.deg2rad(ra) - center_dec = np.deg2rad(dec) - - dist_lon = det_ra - center_ra - sin_dist_lon = np.sin(dist_lon) - cos_dist_lon = np.cos(dist_lon) - - sin_center_lat = np.sin(center_dec) - sin_det_lat = np.sin(det_dec) - cos_center_lat = np.cos(center_dec) - cos_det_lat = np.cos(det_dec) - - num1 = cos_det_lat * sin_dist_lon - num2 = cos_center_lat * sin_det_lat - sin_center_lat * cos_det_lat * cos_dist_lon - denominator = ( - sin_center_lat * sin_det_lat + cos_center_lat * cos_det_lat * cos_dist_lon - ) - - distances = np.arctan2(np.hypot(num1, num2), denominator) - - mask = distances <= np.deg2rad(radius) - return detections.apply_mask(mask) - - diff --git a/thor/observations/__init__.py b/thor/observations/__init__.py new file mode 100644 index 00000000..3ecc64de --- /dev/null +++ b/thor/observations/__init__.py @@ -0,0 +1,2 @@ +# flake8: noqa: F401 +from .observations import Observations diff --git a/thor/observations/filters.py b/thor/observations/filters.py new file mode 100644 index 00000000..ea346638 --- /dev/null +++ b/thor/observations/filters.py @@ -0,0 +1,168 @@ +import abc +from typing import TYPE_CHECKING + +import numpy as np +import quivr as qv +from adam_core.observations import PointSourceDetections + +from ..orbit import TestOrbit + +if TYPE_CHECKING: + from .observations import Observations + + +class ObservationFilter(abc.ABC): + """An ObservationFilter is reduces a collection of observations to + a subset of those observations. + + """ + + @abc.abstractmethod + def apply( + self, observations: "Observations", test_orbit: TestOrbit + ) -> "Observations": + """ + Apply the filter to a collection of observations. + + Parameters + ---------- + observations : `~thor.observations.Observations` + The observations to filter. + test_orbit : `~thor.orbit.TestOrbit` + The test orbit to use for filtering. + + Returns + ------- + filtered_observations : `~thor.observations.Observations` + The filtered observations. + """ + ... + + +class TestOrbitRadiusObservationFilter(ObservationFilter): + """A TestOrbitRadiusObservationFilter is an ObservationFilter that + gathers observations within a fixed radius of the test orbit's + ephemeris at each exposure time within a collection of exposures. + + """ + + def __init__(self, radius: float): + """ + Parameters + ---------- + radius : float + The radius in degrees. + """ + self.radius = radius + + def apply( + self, observations: "Observations", test_orbit: TestOrbit + ) -> "Observations": + """ + Apply the filter to a collection of observations. + + Parameters + ---------- + observations : `~thor.observations.Observations` + The observations to filter. + test_orbit : `~thor.orbit.TestOrbit` + The test orbit to use for filtering. + + Returns + ------- + filtered_observations : `~thor.observations.Observations` + The filtered observations. 
+ """ + # Generate an ephemeris for every observer time/location in the dataset + test_orbit_ephemeris = test_orbit.generate_ephemeris_from_observations( + observations + ) + + # Link the ephemeris to the observations + link = qv.Linkage( + test_orbit_ephemeris, + observations, + left_keys=test_orbit_ephemeris.id, + right_keys=observations.state_id, + ) + + # Loop over states and build a mask of detections within the radius + state_ids = observations.state_id.to_numpy(zero_copy_only=False) + mask = np.zeros(len(observations), dtype=bool) + for state_id in np.unique(state_ids): + # Compute the indices for observations belonging to this state + idx_state = np.where(state_ids == state_id)[0] + + # Select the ephemeris and observations for this state + ephemeris_i = link.select_left(state_id) + observations_i = link.select_right(state_id) + detections_i = observations_i.detections + + assert ( + len(ephemeris_i) == 1 + ), "there should be exactly one ephemeris per exposure" + + ephem_ra = ephemeris_i.ephemeris.coordinates.lon[0].as_py() + ephem_dec = ephemeris_i.ephemeris.coordinates.lat[0].as_py() + + # Compute indices for the detections within the radius + idx_within = idx_state[ + _within_radius(detections_i, ephem_ra, ephem_dec, self.radius) + ] + + # Update the mask + mask[idx_within] = True + + return observations.apply_mask(mask) + + +def _within_radius( + detections: PointSourceDetections, + ra: float, + dec: float, + radius: float, +) -> np.array: + """ + Return a boolean mask that identifies which of + the detections are within a given radius of a given ra and dec. + + Parameters + ---------- + detections : `~adam_core.observations.detections.PointSourceDetections` + The detections to filter. + ra : float + The right ascension of the center of the radius in degrees. + dec : float + The declination of the center of the radius in degrees. + radius : float + The radius in degrees. + + Returns + ------- + mask : `~numpy.ndarray` + A boolean mask that identifies which of the detections are within + the radius. + """ + det_ra = np.deg2rad(detections.ra.to_numpy()) + det_dec = np.deg2rad(detections.dec.to_numpy()) + + center_ra = np.deg2rad(ra) + center_dec = np.deg2rad(dec) + + dist_lon = det_ra - center_ra + sin_dist_lon = np.sin(dist_lon) + cos_dist_lon = np.cos(dist_lon) + + sin_center_lat = np.sin(center_dec) + sin_det_lat = np.sin(det_dec) + cos_center_lat = np.cos(center_dec) + cos_det_lat = np.cos(det_dec) + + num1 = cos_det_lat * sin_dist_lon + num2 = cos_center_lat * sin_det_lat - sin_center_lat * cos_det_lat * cos_dist_lon + denominator = ( + sin_center_lat * sin_det_lat + cos_center_lat * cos_det_lat * cos_dist_lon + ) + + distances = np.arctan2(np.hypot(num1, num2), denominator) + return distances <= np.deg2rad(radius) diff --git a/thor/observations/observations.py b/thor/observations/observations.py new file mode 100644 index 00000000..390f594b --- /dev/null +++ b/thor/observations/observations.py @@ -0,0 +1,233 @@ +from typing import Iterator, Tuple + +import numpy as np +import pyarrow as pa +import pyarrow.compute as pc +import quivr as qv +from adam_core.observations import Exposures, PointSourceDetections +from adam_core.observers import Observers +from adam_core.time import Timestamp + + +class Observations(qv.Table): + """ + The Observations table stored invidual point source detections and a state ID for each unique + combination of detection time and observatory code. The state ID is used as reference to a specific + observing geometry. 
+
+    Columns
+    -------
+    detections : `~adam_core.observations.detections.PointSourceDetections`
+        A table of point source detections.
+    observatory_code : `~qv.StringColumn`
+        The observatory code for each detection.
+    state_id : `~qv.Int64Column`
+        The state ID for each detection.
+    """
+
+    detections = PointSourceDetections.as_column()
+    observatory_code = qv.StringColumn()
+    state_id = qv.Int64Column()
+
+    @classmethod
+    def from_detections_and_exposures(
+        cls, detections: PointSourceDetections, exposures: Exposures
+    ) -> "Observations":
+        """
+        Create a THOR observations table from a PointSourceDetections table and an Exposures table.
+
+        Note: This function will sort the detections by time and observatory code.
+
+        Parameters
+        ----------
+        detections : `~adam_core.observations.detections.PointSourceDetections`
+            A table of point source detections.
+        exposures : `~adam_core.observations.exposures.Exposures`
+            A table of exposures.
+
+        Returns
+        -------
+        observations : `~Observations`
+            A table of observations.
+        """
+        # TODO: One thing we could try in the future is truncating observation times to ~1ms and using
+        # those to group into individual states (i.e. if two observations are within 1ms of each other,
+        # they are in the same state). This only matters for detections whose times differ from the
+        # midpoint time of the exposure (LSST).
+
+        # If the detection times are not in UTC, convert them to UTC
+        if detections.time.scale != "utc":
+            detections = detections.set_column("time", detections.time.rescale("utc"))
+
+        # Flatten the detections table (i.e. remove the nested columns). Unfortunately, joins on
+        # tables with nested columns are not fully supported in pyarrow
+        detections_flattened = pa.table(
+            [
+                detections.id,
+                detections.exposure_id,
+                detections.time.days,
+                detections.time.nanos,
+                detections.ra,
+                detections.ra_sigma,
+                detections.dec,
+                detections.dec_sigma,
+                detections.mag,
+                detections.mag_sigma,
+            ],
+            names=[
+                "id",
+                "exposure_id",
+                "days",
+                "nanos",
+                "ra",
+                "ra_sigma",
+                "dec",
+                "dec_sigma",
+                "mag",
+                "mag_sigma",
+            ],
+        )
+
+        # Extract the exposure IDs and the observatory codes from the exposures table
+        exposure_obscodes = pa.table(
+            [exposures.id, exposures.observatory_code],
+            names=["exposure_id", "observatory_code"],
+        )
+
+        # Join the detection times and the exposure IDs so that each detection has an observatory code
+        obscode_times = detections_flattened.join(exposure_obscodes, ["exposure_id"])
+
+        # Group the detections by the observatory code and the detection times and then grab the unique ones
+        unique_obscode_times = obscode_times.group_by(
+            ["days", "nanos", "observatory_code"]
+        ).aggregate([])
+
+        # Now sort the unique detections by the observatory code and the detection time
+        unique_obscode_times = unique_obscode_times.sort_by(
+            [
+                ("days", "ascending"),
+                ("nanos", "ascending"),
+                ("observatory_code", "ascending"),
+            ]
+        )
+
+        # For each unique detection time and observatory code assign a unique state ID
+        unique_obscode_times = unique_obscode_times.add_column(
+            0,
+            pa.field("state_id", pa.int64()),
+            pa.array(np.arange(0, len(unique_obscode_times))),
+        )
+
+        # Join the unique observatory codes and detection times back to the original detections
+        detections_with_states = obscode_times.join(
+            unique_obscode_times, ["days", "nanos", "observatory_code"]
+        )
+
+        # Now sort the detections one final time by state ID
+        detections_with_states = detections_with_states.sort_by(
+            [("state_id", "ascending")]
+        )
+
+        return cls.from_kwargs(
+            detections=PointSourceDetections.from_kwargs(
+                id=detections_with_states["id"],
+                exposure_id=detections_with_states["exposure_id"],
+                time=Timestamp.from_kwargs(
+                    days=detections_with_states["days"],
+                    nanos=detections_with_states["nanos"],
+                    scale="utc",
+                ),
+                ra=detections_with_states["ra"],
+                ra_sigma=detections_with_states["ra_sigma"],
+                dec=detections_with_states["dec"],
+                dec_sigma=detections_with_states["dec_sigma"],
+                mag=detections_with_states["mag"],
+                mag_sigma=detections_with_states["mag_sigma"],
+            ),
+            observatory_code=detections_with_states["observatory_code"],
+            state_id=detections_with_states["state_id"],
+        )
+
+    def get_observers(self):
+        """
+        Get the observers table for these observations. The observers table
+        will have one row for each unique state ID in the observations table.
+
+        Returns
+        -------
+        observers : `~adam_core.observers.observers.Observers`
+            The observers table for these observations.
+        """
+        observers = []
+        for code, observations_i in self.group_by_observatory_code():
+            unique_times = observations_i.detections.time.unique()
+
+            # Get observers table for this observatory
+            observers_i = Observers.from_code(code, unique_times)
+            observers.append(observers_i)
+
+        observers = qv.concatenate(observers)
+        observers = observers.sort_by(
+            ["coordinates.time.days", "coordinates.time.nanos", "code"]
+        )
+        return observers
+
+    def select_exposure(self, exposure_id: str) -> "Observations":
+        """
+        Select observations from a single exposure.
+
+        Parameters
+        ----------
+        exposure_id : str
+            The exposure ID.
+
+        Returns
+        -------
+        observations : `~Observations`
+            Observations from the specified exposure.
+        """
+        return self.apply_mask(pc.equal(self.detections.exposure_id, exposure_id))
+
+    def group_by_exposure(self) -> Iterator[Tuple[str, "Observations"]]:
+        """
+        Group observations by exposure ID. Note that if an exposure has observations
+        with varying observation times, then that exposure will span multiple unique state IDs.
+
+        Returns
+        -------
+        observations : Iterator[`~thor.observations.observations.Observations`]
+            Observations belonging to individual exposures.
+        """
+        exposure_ids = self.detections.exposure_id
+        for exposure_id in exposure_ids.unique().sort():
+            yield exposure_id.as_py(), self.select_exposure(exposure_id)
+
+    def select_observatory_code(self, observatory_code: str) -> "Observations":
+        """
+        Select observations from a single observatory.
+
+        Parameters
+        ----------
+        observatory_code : str
+            The observatory code.
+
+        Returns
+        -------
+        observations : `~Observations`
+            Observations from the specified observatory.
+        """
+        return self.apply_mask(pc.equal(self.observatory_code, observatory_code))
+
+    def group_by_observatory_code(self) -> Iterator[Tuple[str, "Observations"]]:
+        """
+        Group observations by observatory code.
+
+        Returns
+        -------
+        observations : Iterator[`~thor.observations.observations.Observations`]
+            Observations belonging to individual observatories.
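+
+        Examples
+        --------
+        Illustrative usage, assuming an existing ``observations`` table:
+
+        >>> for code, observations_i in observations.group_by_observatory_code():  # doctest: +SKIP
+        ...     print(code, len(observations_i))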
+ """ + observatory_codes = self.observatory_code + for observatory_code in observatory_codes.unique().sort(): + yield observatory_code.as_py(), self.select_observatory_code( + observatory_code + ) diff --git a/thor/observations/tests/__init__.py b/thor/observations/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/thor/tests/test_observation_filter.py b/thor/observations/tests/test_filters.py similarity index 63% rename from thor/tests/test_observation_filter.py rename to thor/observations/tests/test_filters.py index 07b4dd4e..bdab2b95 100644 --- a/thor/tests/test_observation_filter.py +++ b/thor/observations/tests/test_filters.py @@ -1,30 +1,36 @@ import astropy.time import numpy as np import pyarrow as pa +import pyarrow.compute as pc import pytest import quivr as qv -from adam_core import coordinates, observers, propagator -from adam_core.observations import detections, exposures +from adam_core.coordinates import CartesianCoordinates, Origin +from adam_core.observations import Exposures, PointSourceDetections +from adam_core.observers import Observers +from adam_core.propagator import PYOORB +from adam_core.time import Timestamp -from .. import observation_filters, orbit +from ...orbit import TestOrbit +from ..filters import TestOrbitRadiusObservationFilter +from ..observations import Observations @pytest.fixture def fixed_test_orbit(): # An orbit at 1AU going around at (about) 1 degree per day - coords = coordinates.CartesianCoordinates.from_kwargs( + coords = CartesianCoordinates.from_kwargs( x=[1], y=[0], z=[0], vx=[0], vy=[2 * np.pi / 365.25], vz=[0], - time=coordinates.Times.from_astropy(astropy.time.Time("2020-01-01T00:00:00")), - origin=coordinates.Origin.from_kwargs(code=["SUN"]), + time=Timestamp.from_iso8601(["2020-01-01T00:00:00"]), + origin=Origin.from_kwargs(code=["SUN"]), frame="ecliptic", ) - return orbit.TestOrbit( + return TestOrbit( coordinates=coords, orbit_id="test_orbit", ) @@ -32,7 +38,7 @@ def fixed_test_orbit(): @pytest.fixture def fixed_observers(): - times = astropy.time.Time( + times = Timestamp.from_iso8601( [ "2020-01-01T00:00:00", "2020-01-02T00:00:00", @@ -41,18 +47,18 @@ def fixed_observers(): "2020-01-05T00:00:00", ] ) - return observers.Observers.from_code("I11", times) + return Observers.from_code("I11", times) @pytest.fixture def fixed_ephems(fixed_test_orbit, fixed_observers): - prop = propagator.PYOORB() + prop = PYOORB() return prop.generate_ephemeris(fixed_test_orbit.orbit, fixed_observers).left_table @pytest.fixture def fixed_exposures(fixed_observers): - return exposures.Exposures.from_kwargs( + return Exposures.from_kwargs( id=[str(i) for i in range(len(fixed_observers))], start_time=fixed_observers.coordinates.time, duration=[30 for i in range(len(fixed_observers))], @@ -79,11 +85,17 @@ def fixed_detections(fixed_ephems, fixed_exposures): ids = [str(i) for i in range(N)] exposure_ids = pa.concat_arrays([exposure.id] * N) magnitudes = [20] * N + times_mjd_utc = np.full( + N, + exposure.start_time.rescale("utc").mjd().to_numpy(zero_copy_only=False)[0] + + exposure.duration.to_numpy(zero_copy_only=False)[0] / 2 / 86400, + ) detection_tables.append( - detections.PointSourceDetections.from_kwargs( + PointSourceDetections.from_kwargs( id=ids, exposure_id=exposure_ids, + time=Timestamp.from_mjd(times_mjd_utc, scale="utc"), ra=ra_decs[0].flatten(), dec=ra_decs[1].flatten(), mag=magnitudes, @@ -94,23 +106,27 @@ def fixed_detections(fixed_ephems, fixed_exposures): @pytest.fixture def fixed_observations(fixed_detections, 
-    return observation_filters.Observations(fixed_detections, fixed_exposures)
+    return Observations.from_detections_and_exposures(fixed_detections, fixed_exposures)
 
 
 def test_observation_fixtures(fixed_test_orbit, fixed_observations):
     assert len(fixed_test_orbit.orbit) == 1
-    assert len(fixed_observations.exposures) == 5
+    assert len(pc.unique(fixed_observations.detections.exposure_id)) == 5
     assert len(fixed_observations.detections) == 100 * 100 * 5
 
 
 def test_orbit_radius_observation_filter(fixed_test_orbit, fixed_observations):
-    fos = observation_filters.TestOrbitRadiusObservationFilter(
+    fos = TestOrbitRadiusObservationFilter(
         radius=0.5,
-        test_orbit=fixed_test_orbit,
     )
-    have = fos.apply(fixed_observations)
-    assert len(have.exposures) == 5
-    assert have.exposures == fixed_observations.exposures
+    have = fos.apply(fixed_observations, fixed_test_orbit)
+    assert len(pc.unique(have.detections.exposure_id)) == 5
+    assert pc.all(
+        pc.equal(
+            pc.unique(have.detections.exposure_id),
+            pc.unique(fixed_observations.detections.exposure_id),
+        )
+    )
 
     # Should be about pi/4 fraction of the detections (0.785)
     assert len(have.detections) < 0.80 * len(fixed_observations.detections)
     assert len(have.detections) > 0.76 * len(fixed_observations.detections)
diff --git a/thor/orbit.py b/thor/orbit.py
index 0e34e724..7a217c43 100644
--- a/thor/orbit.py
+++ b/thor/orbit.py
@@ -1,8 +1,10 @@
+import logging
 import uuid
 from typing import Optional, TypeVar, Union
 
 import numpy as np
 import pyarrow as pa
+import pyarrow.compute as pc
 import quivr as qv
 from adam_core.coordinates import (
     CartesianCoordinates,
@@ -14,12 +16,10 @@
     SphericalCoordinates,
     transform_coordinates,
 )
-from adam_core.observations.detections import PointSourceDetections
-from adam_core.observations.exposures import Exposures
 from adam_core.observers import Observers
 from adam_core.orbits import Ephemeris, Orbits
 from adam_core.propagator import PYOORB, Propagator
-from astropy.time import Time
+from adam_core.time import Timestamp
 
 CoordinateType = TypeVar(
     "CoordinateType",
@@ -31,12 +31,24 @@
     ],
 )
 
+from .observations import Observations
+
+logger = logging.getLogger(__name__)
+
 
 class RangedPointSourceDetections(qv.Table):
     id = qv.StringColumn()
     exposure_id = qv.StringColumn()
     coordinates = SphericalCoordinates.as_column()
+    state_id = qv.Int64Column()
+
+
+class TestOrbitEphemeris(qv.Table):
+
+    id = qv.Int64Column()
+    ephemeris = Ephemeris.as_column()
+    observer = Observers.as_column()
 
 
 class TestOrbit:
@@ -86,6 +98,9 @@ def __init__(
             coordinates=cartesian_coordinates,
         )
 
+        self._cached_ephemeris: Optional[TestOrbitEphemeris] = None
+        self._cached_observation_ids: Optional[pa.Array] = None
+
     @classmethod
     def from_orbits(cls, orbits):
         assert len(orbits) == 1
@@ -97,9 +112,56 @@ def from_orbits(cls, orbits):
     def orbit(self):
         return self._orbit
 
+    def _is_cache_fresh(self, observations: Observations) -> bool:
+        """
+        Check if the cached ephemeris is fresh. If the observation IDs are contained within the
+        cached observation IDs, then the cache is fresh. Otherwise, it is stale. This permits
+        observations to be filtered out without having to regenerate the ephemeris.
+
+        Parameters
+        ----------
+        observations : `~thor.observations.observations.Observations`
+            Observations to check against the cached ephemerides.
+
+        Returns
+        -------
+        is_fresh : bool
+            True if the cache is fresh, False otherwise.
+ """ + if self._cached_ephemeris is None or self._cached_observation_ids is None: + return False + elif pc.all( + pc.is_in( + observations.detections.id.sort(), self._cached_observation_ids.sort() + ) + ).as_py(): + return True + else: + return False + + def _cache_ephemeris( + self, ephemeris: TestOrbitEphemeris, observations: Observations + ): + """ + Cache the ephemeris and observation IDs. + + Parameters + ---------- + ephemeris : `~thor.orbit.TestOrbitEphemeris` + States to cache. + observations : `~thor.observations.observations.Observations` + Observations to cache. Only observation IDs will be cached. + + Returns + ------- + None + """ + self._cached_ephemeris = ephemeris + self._cached_observation_ids = observations.detections.id + def propagate( self, - times: Time, + times: Timestamp, propagator: Propagator = PYOORB(), max_processes: Optional[int] = 1, ) -> Orbits: @@ -108,7 +170,7 @@ def propagate( Parameters ---------- - times : `~astropy.time.core.Time` + times : `~adam_core.time.time.Timestamp` Times to which to propagate the orbit. propagator : `~adam_core.propagator.propagator.Propagator`, optional Propagator to use to propagate the orbit. Defaults to PYOORB. @@ -153,9 +215,71 @@ def generate_ephemeris( self.orbit, observers, max_processes=max_processes, chunk_size=1 ) + def generate_ephemeris_from_observations( + self, + observations: Observations, + propagator: Propagator = PYOORB(), + max_processes: Optional[int] = 1, + ): + """ + For each unique time and code in the observations (a state), generate an ephemeris for + that state and store them in a TestOrbitStates table. The observer's coordinates will also be + stored in the table and can be referenced through out the THOR pipeline. + + These ephemerides will be cached. If the cache is fresh, the cached ephemerides will be + returned instead of regenerating them. + + Parameters + ---------- + observations : `~thor.observations.observations.Observations` + Observations to compute test orbit ephemerides for. + propagator : `~adam_core.propagator.propagator.Propagator`, optional + Propagator to use to propagate the orbit. Defaults to PYOORB. + num_processes : int, optional + Number of processes to use to propagate the orbit. Defaults to 1. + + + Returns + ------- + states : `~thor.orbit.TestOrbitEphemeris` + Table containing the ephemeris of the test orbit, its aberrated state vector, and the + observer coordinates at each unique time of the observations. + """ + if self._is_cache_fresh(observations): + logger.debug( + "Test orbit ephemeris cache is fresh. Returning cached states." + ) + return self._cached_ephemeris + + logger.debug("Test orbit ephemeris cache is stale. 
Regenerating.") + + state_ids = observations.state_id.unique() + observers = observations.get_observers() + + # Generate ephemerides for each unique state and then sort by time and code + ephemeris = self.generate_ephemeris( + observers, propagator=propagator, max_processes=max_processes + ).left_table + ephemeris = ephemeris.sort_by( + by=[ + "coordinates.time.days", + "coordinates.time.nanos", + "coordinates.origin.code", + ] + ) + + test_orbit_ephemeris = TestOrbitEphemeris.from_kwargs( + id=state_ids, + ephemeris=ephemeris, + observer=observers, + ) + self._cache_ephemeris(test_orbit_ephemeris, observations) + return test_orbit_ephemeris + def range_observations( self, - observations: qv.Linkage[PointSourceDetections, Exposures], + observations: Observations, + propagator: Propagator = PYOORB(), max_processes: Optional[int] = 1, ) -> RangedPointSourceDetections: """ @@ -164,8 +288,10 @@ def range_observations( Parameters ---------- - observations : qv.Linkage[PointSourceDetections, Exposures] + observations : `~thor.observations.observations.Observations` Observations to range. + propagator : `~adam_core.propagator.propagator.Propagator`, optional + Propagator to use to propagate the orbit. Defaults to PYOORB. max_processes : int, optional Number of processes to use to propagate the orbit. Defaults to 1. @@ -174,41 +300,44 @@ def range_observations( ranged_point_source_detections : `~thor.orbit.RangedPointSourceDetections` The ranged detections. """ - exposures = observations.right_table - ephemeris = self.generate_ephemeris( - exposures.observers(), max_processes=max_processes + # Generate an ephemeris for each unique observation time and observatory + # code combination + ephemeris = self.generate_ephemeris_from_observations( + observations, propagator=propagator, max_processes=max_processes ) - # Get the light-time corrected state vector: the state vector - # at the time where the light reflected/emitted from the object - # would have reached the observer. - # We will use this state vector to get the heliocentric distance of - # the object at the time of the exposure. - # TODO: We could use other adam_core functionality to calculate this if need - # be and we may need to if we plan on mapping covariances. 
-        propagated_orbit = ephemeris.left_table.aberrated_coordinates
-
-        # TODO: We could investigate using concurrent futures here to parallelize
-        # this loop
+
+        # Link the ephemeris to the observations
+        link = qv.Linkage(
+            ephemeris,
+            observations,
+            left_keys=ephemeris.id,
+            right_keys=observations.state_id,
+        )
+
+        # Do a sorted iteration over the unique state IDs
         rpsds = []
-        for propagated_orbit_i, exposure_i in zip(propagated_orbit, exposures):
+        state_ids = observations.state_id.unique().sort()
+        for state_id in state_ids:
 
-            # Select the detections that belong to this exposure
-            detections_i = observations.select_left(exposure_i.id[0])
+            # Select the ephemeris and observations for this state
+            ephemeris_i = link.select_left(state_id)
+            observations_i = link.select_right(state_id)
+            detections_i = observations_i.detections
 
             # Get the heliocentric distance of the object at the time of the exposure
-            r_mag = propagated_orbit_i.r_mag[0]
+            r_mag = ephemeris_i.ephemeris.aberrated_coordinates.r_mag[0]
 
             # Get the observer's heliocentric coordinates
-            observer_i = exposure_i.observers()
+            observer_i = ephemeris_i.observer
 
             # Create an array of observatory codes for the detections
-            num_detections = len(detections_i)
+            num_detections = len(observations_i)
             observatory_codes = np.repeat(
-                exposure_i.observatory_code[0].as_py(), num_detections
+                observations_i.observatory_code[0].as_py(), num_detections
             )
 
             # The following can be replaced with:
-            # coords = detections_i.to_spherical(observatory_codes)
+            # coords = observations_i.to_spherical(observatory_codes)
             # Start replacement:
             sigma_data = np.vstack(
                 [
@@ -238,10 +367,12 @@ def range_observations(
                 coordinates=assume_heliocentric_distance(
                     r_mag, coords, observer_i.coordinates
                 ),
+                state_id=observations_i.state_id,
             )
         )
 
-        return qv.concatenate(rpsds)
+        ranged_detections = qv.concatenate(rpsds)
+        return ranged_detections
 
 
 def assume_heliocentric_distance(
diff --git a/thor/projections/covariances.py b/thor/projections/covariances.py
index 07847a76..50edc7b9 100644
--- a/thor/projections/covariances.py
+++ b/thor/projections/covariances.py
@@ -1,16 +1,9 @@
 from typing import List
 
 import numpy as np
-import pandas as pd
 import pyarrow as pa
 import quivr as qv
-from adam_core.coordinates.covariances import (
-    covariances_from_df,
-    covariances_to_df,
-    sigmas_from_df,
-    sigmas_to_covariances,
-    sigmas_to_df,
-)
+from adam_core.coordinates.covariances import sigmas_to_covariances
 from typing_extensions import Self
 
 
@@ -112,63 +105,6 @@ def from_sigmas(cls, sigmas: np.ndarray) -> Self:
         """
         return cls.from_matrix(sigmas_to_covariances(sigmas))
 
-    def to_dataframe(
-        self,
-        coord_names: List[str] = ["theta_x", "theta_y", "vtheta_x", "vtheta_y"],
-        sigmas: bool = False,
-    ) -> pd.DataFrame:
-        """
-        Return the covariance matrices represented as lower triangular columns in a pandas DataFrame.
-
-        Parameters
-        ----------
-        coord_names : `list` of `str`, optional
-            Names of the coordinate axes. Default is ["theta_x", "theta_y", "vtheta_x", "vtheta_y"].
-        sigmas : `bool`, optional
-            If True, the standard deviations are added as columns to the DataFrame. Default is False.
-
-        Returns
-        -------
-        df : `pandas.DataFrame`
-            Covariance matrices (lower triangular) for N coordinates in 4 dimensions.
- """ - df = covariances_to_df(self.to_matrix(), coord_names=coord_names, kind="lower") - if sigmas: - df_sigmas = sigmas_to_df(self.sigmas, coord_names=coord_names) - df = df_sigmas.join(df) - - return df - - @classmethod - def from_dataframe( - cls, df, coord_names: List[str] = ["theta_x", "theta_y", "vtheta_x", "vtheta_y"] - ) -> Self: - """ - Create a Covariances object from a pandas DataFrame. - - Parameters - ---------- - df : `pandas.DataFrame` - Covariance matrices (lower triangular) for N coordinates in 4 dimensions. - coord_names : `list` of `str`, optional - Names of the coordinate axes. Default is ["theta_x", "theta_y", "vtheta_x", "vtheta_y"]. - - Returns - ------- - covariances : `CoordinateCovariances` - Covariance matrices for N coordinates in 4 dimensions. - """ - try: - covariances = covariances_from_df(df, coord_names=coord_names, kind="lower") - except KeyError: - sigmas = sigmas_from_df(df, coord_names=coord_names) - covariances = sigmas_to_covariances(sigmas) - - if np.all(np.isnan(covariances)): - return cls.from_kwargs(values=[None for i in range(len(covariances))]) - else: - return cls.from_matrix(covariances) - def is_all_nan(self) -> bool: """ Check if all covariance matrix values are NaN. diff --git a/thor/projections/gnomonic.py b/thor/projections/gnomonic.py index 129903f9..32ddbddd 100644 --- a/thor/projections/gnomonic.py +++ b/thor/projections/gnomonic.py @@ -8,24 +8,23 @@ from adam_core.coordinates import ( CartesianCoordinates, Origin, - Times, - coords_from_dataframe, - coords_to_dataframe, transform_covariances_jacobian, ) +from adam_core.time import Timestamp from typing_extensions import Self -from . import covariances, transforms +from .covariances import ProjectionCovariances +from .transforms import _cartesian_to_gnomonic, cartesian_to_gnomonic class GnomonicCoordinates(qv.Table): - time = Times.as_column(nullable=True) + time = Timestamp.as_column(nullable=True) theta_x = qv.Float64Column() theta_y = qv.Float64Column() vtheta_x = qv.Float64Column(nullable=True) vtheta_y = qv.Float64Column(nullable=True) - covariance = covariances.ProjectionCovariances.as_column(nullable=True) + covariance = ProjectionCovariances.as_column(nullable=True) origin = Origin.as_column() frame = qv.StringAttribute("testorbit?") @@ -33,7 +32,7 @@ class GnomonicCoordinates(qv.Table): def values(self) -> np.ndarray: return np.array( self.table.select(["theta_x", "theta_y", "vtheta_x", "vtheta_y"]) - ).T + ) @property def sigma_theta_x(self) -> np.ndarray: @@ -108,6 +107,15 @@ def from_cartesian( left_key = pa.array(np.zeros(len(cartesian)), type=pa.int64()) right_key = pa.array(np.zeros(len(center_cartesian)), type=pa.int64()) + # Create a linkage between the cartesian coordinates and the center cartesian + # coordinates on time + link = qv.Linkage( + cartesian, + center_cartesian, + left_keys=left_key, + right_keys=right_key, + ) + elif center_cartesian is None and cartesian.time is not None: # Create a center cartesian coordinate for each unique time in the # input cartesian coordinates @@ -126,22 +134,12 @@ def from_cartesian( [cartesian[0].origin for _ in range(num_unique_times)] ), ) - left_key = cartesian.time.mjd() - right_key = center_cartesian.time.mjd() + link = cartesian.time.link(center_cartesian.time, precision="us") elif center_cartesian is not None and cartesian.time is not None: assert cartesian.time.scale == center_cartesian.time.scale - times = cartesian.time.mjd().unique().sort() - center_times = cartesian.time.mjd().unique().sort() - if not 
-                raise ValueError(
-                    "Input cartesian coordinates and projection center cartesian coordinates "
-                    "must have the same times."
-                )
-
-            left_key = cartesian.time.mjd()
-            right_key = center_cartesian.time.mjd()
+            link = cartesian.time.link(center_cartesian.time, precision="us")
 
         else:
             raise ValueError(
@@ -149,20 +147,24 @@ def from_cartesian(
                 "must have the same times."
             )
 
-        # Create a linkage between the cartesian coordinates and the center cartesian
-        # coordinates on time
-        link = qv.Linkage(
-            cartesian,
-            center_cartesian,
-            left_keys=left_key,
-            right_keys=right_key,
-        )
+        # Round the times to the nearest microsecond and use those to select
+        # the cartesian coordinates and center cartesian coordinates
+        rounded_cartesian_times = cartesian.time.rounded(precision="us")  # type: ignore
+        rounded_center_cartesian_times = center_cartesian.time.rounded(precision="us")  # type: ignore
 
         gnomonic_coords = []
-        for key, cartesian_i, center_cartesian_i in link.iterate():
-            assert len(center_cartesian_i) == 1
+        for key, time_i, center_time_i in link.iterate():
+
+            cartesian_i = cartesian.apply_mask(
+                rounded_cartesian_times.equals_scalar(key[0], key[1], precision="us")
+            )
+            center_cartesian_i = center_cartesian.apply_mask(
+                rounded_center_cartesian_times.equals_scalar(
+                    key[0], key[1], precision="us"
+                )
+            )
 
-            coords_gnomonic, M = transforms.cartesian_to_gnomonic(
+            coords_gnomonic, M = cartesian_to_gnomonic(
                 cartesian_i.values,
                 center_cartesian=center_cartesian_i.values[0],
             )
@@ -173,7 +175,7 @@ def from_cartesian(
             covariances_gnomonic = transform_covariances_jacobian(
                 cartesian_i.values,
                 cartesian_covariances,
-                transforms._cartesian_to_gnomonic,
+                _cartesian_to_gnomonic,
                 in_axes=(0, None),
                 center_cartesian=center_cartesian_i.values[0],
             )
@@ -190,64 +192,10 @@ def from_cartesian(
                     vtheta_x=coords_gnomonic[:, 2],
                     vtheta_y=coords_gnomonic[:, 3],
                     time=cartesian_i.time,
-                    covariance=covariances.ProjectionCovariances.from_matrix(
-                        covariances_gnomonic
-                    ),
+                    covariance=ProjectionCovariances.from_matrix(covariances_gnomonic),
                     origin=cartesian_i.origin,
                     frame=cartesian_i.frame,
                 )
             )
 
         return qv.concatenate(gnomonic_coords)
-
-    def to_dataframe(
-        self, sigmas: bool = False, covariances: bool = True
-    ) -> pd.DataFrame:
-        """
-        Convert coordinates to a pandas DataFrame.
-
-        Parameters
-        ----------
-        sigmas : bool, optional
-            If True, include 1-sigma uncertainties in the DataFrame.
-        covariances : bool, optional
-            If True, include covariance matrices in the DataFrame. Covariance matrices
-            will be split into 21 columns, with the lower triangular elements stored.
-
-        Returns
-        -------
-        df : `~pandas.Dataframe`
-            DataFrame containing coordinates.
-        """
-        return coords_to_dataframe(
-            self,
-            ["theta_x", "theta_y", "vtheta_x", "vtheta_y"],
-            sigmas=sigmas,
-            covariances=covariances,
-        )
-
-    @classmethod
-    def from_dataframe(
-        cls, df: pd.DataFrame, frame: Literal["ecliptic", "equatorial"]
-    ) -> Self:
-        """
-        Create coordinates from a pandas DataFrame.
-
-        Parameters
-        ----------
-        df : `~pandas.Dataframe`
-            DataFrame containing coordinates.
-        frame : {"ecliptic", "equatorial"}
-            Frame in which coordinates are defined.
-
-        Returns
-        -------
-        coords : `~thor.projections.gnomonic.GnomonicCoordinates`
-            Gnomomic projection coordinates.
- """ - return coords_from_dataframe( - cls, - df, - coord_names=["theta_x", "theta_y", "vtheta_x", "vtheta_y"], - frame=frame, - ) diff --git a/thor/tests/test_main.py b/thor/tests/test_main.py new file mode 100644 index 00000000..4ec0ce33 --- /dev/null +++ b/thor/tests/test_main.py @@ -0,0 +1,89 @@ +import pyarrow.compute as pc +import pytest +from adam_core.utils.helpers import make_observations, make_real_orbits + +from ..main_2 import range_and_transform +from ..observations import Observations +from ..observations.filters import TestOrbitRadiusObservationFilter +from ..orbit import TestOrbit as THORbit + +OBJECT_IDS = [ + "594913 'Aylo'chaxnim (2020 AV2)", + "163693 Atira (2003 CP20)", + "(2010 TK7)", + "3753 Cruithne (1986 TO)", + "54509 YORP (2000 PH5)", + "2063 Bacchus (1977 HB)", + "1221 Amor (1932 EA1)", + "433 Eros (A898 PA)", + "3908 Nyx (1980 PA)", + "434 Hungaria (A898 RB)", + "1876 Napolitania (1970 BA)", + "2001 Einstein (1973 EB)", + "2 Pallas (A802 FA)", + "6 Hebe (A847 NA)", + "6522 Aci (1991 NQ)", + "10297 Lynnejones (1988 RJ13)", + "17032 Edlu (1999 FM9)", + "202930 Ivezic (1998 SG172)", + "911 Agamemnon (A919 FB)", + "1143 Odysseus (1930 BH)", + "1172 Aneas (1930 UA)", + "3317 Paris (1984 KF)", + "5145 Pholus (1992 AD)", + "5335 Damocles (1991 DA)", + "15760 Albion (1992 QB1)", + "15788 (1993 SB)", + "15789 (1993 SC)", + "1I/'Oumuamua (A/2017 U1)", +] + + +@pytest.fixture +def observations(): + return make_observations() + + +@pytest.fixture +def orbits(): + return make_real_orbits() + + +@pytest.mark.parametrize("object_id", OBJECT_IDS) +def test_range_and_transform(object_id, orbits, observations): + + orbit = orbits.select("object_id", object_id) + exposures, detections, associations = observations + + # Make THOR observations from the detections and exposures + observations = Observations.from_detections_and_exposures(detections, exposures) + + # Select the associations that match this object ID + associations_i = associations.select("object_id", object_id) + assert len(associations_i) == 90 + + # Extract the observations that match this object ID + obs_ids_expected = associations_i.detection_id.unique().sort() + + # Filter the observations to include only those that match this object + observations = observations.apply_mask( + pc.is_in(observations.detections.id, obs_ids_expected) + ) + + # Set a filter to include observations within 1 arcsecond of the predicted position + # of the test orbit + # filters = [TestOrbitRadiusObservationFilter(radius=1/3600)] + + # Create a test orbit for this object + test_orbit = THORbit.from_orbits(orbit) + + # Run range and transform and make sure we get the correct observations back + transformed_detections = range_and_transform( + test_orbit, + observations, # filters=filters + ) + assert len(transformed_detections) == 90 + + # Ensure we get all the object IDs back that we expect + obs_ids_actual = transformed_detections.id.unique().sort() + assert pc.all(pc.equal(obs_ids_actual, obs_ids_expected)) diff --git a/thor/tests/test_orbit.py b/thor/tests/test_orbit.py index caaf72ba..0551078d 100644 --- a/thor/tests/test_orbit.py +++ b/thor/tests/test_orbit.py @@ -1,10 +1,6 @@ import numpy as np -from adam_core.coordinates import ( - CartesianCoordinates, - Origin, - SphericalCoordinates, - Times, -) +from adam_core.coordinates import CartesianCoordinates, Origin, SphericalCoordinates +from adam_core.time import Timestamp from ..orbit import assume_heliocentric_distance @@ -21,7 +17,7 @@ def 
         vx=[0.0],
         vy=[0.0],
         vz=[0.0],
-        time=Times.from_mjd([59000.0], scale="tdb"),
+        time=Timestamp.from_mjd([59000.0], scale="tdb"),
         origin=Origin.from_kwargs(code=["SUN"]),
         frame="ecliptic",
     )
@@ -34,7 +30,7 @@ def test_assume_heliocentric_distance_missing_rho():
         rho=rho,
         lon=lon,
         lat=lat,
-        time=Times.from_mjd([59000.0] * num_detections, scale="tdb"),
+        time=Timestamp.from_mjd([59000.0] * num_detections, scale="tdb"),
         origin=Origin.from_kwargs(code=["Observatory"] * num_detections),
         frame="equatorial",
     )
@@ -59,7 +55,7 @@ def test_assume_heliocentric_distance_zero_origin():
         vx=[0.0],
         vy=[0.0],
         vz=[0.0],
-        time=Times.from_mjd([59000.0], scale="tdb"),
+        time=Timestamp.from_mjd([59000.0], scale="tdb"),
         origin=Origin.from_kwargs(code=["SUN"]),
         frame="ecliptic",
     )
@@ -75,7 +71,7 @@ def test_assume_heliocentric_distance_zero_origin():
     coords = SphericalCoordinates.from_kwargs(
         lon=lon,
         lat=lat,
-        time=Times.from_mjd([59000.0] * num_detections, scale="tdb"),
+        time=Timestamp.from_mjd([59000.0] * num_detections, scale="tdb"),
         origin=Origin.from_kwargs(code=["Observatory"] * num_detections),
         frame="equatorial",
     )
@@ -100,7 +96,7 @@ def test_assume_heliocentric_distance():
         vx=[0.0],
         vy=[0.0],
         vz=[0.0],
-        time=Times.from_mjd([59000.0], scale="tdb"),
+        time=Timestamp.from_mjd([59000.0], scale="tdb"),
         origin=Origin.from_kwargs(code=["SUN"]),
         frame="ecliptic",
     )
@@ -114,7 +110,7 @@ def test_assume_heliocentric_distance():
     coords = SphericalCoordinates.from_kwargs(
         lon=lon,
         lat=lat,
-        time=Times.from_mjd([59000.0] * num_detections, scale="tdb"),
+        time=Timestamp.from_mjd([59000.0] * num_detections, scale="tdb"),
         origin=Origin.from_kwargs(code=["Observatory"] * num_detections),
         frame="equatorial",
     )