diff --git a/COPYRIGHT b/COPYRIGHT
new file mode 100644
index 00000000..9108de11
--- /dev/null
+++ b/COPYRIGHT
@@ -0,0 +1 @@
+Copyright 2022 Fermi Research Alliance, LLC.
diff --git a/README.md b/README.md
index 77e74cc7..aa14b48a 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,86 @@
# schedview
-Tools for monitoring the Rubin Observatory scheduler and related LSST survey progress and strategy data.
+Tools for visualizing Rubin Observatory scheduler behaviour and LSST survey status
+
+## Development state
+
+The current code is in an early stage of development. The architecture will
+be documented in [RTN-037](https://rtn-037.lsst.io/), which is currently little
+more than an outline.
+
+## Applications
+
+There are presently two different applications in this project:
+
+- `metric_maps`, a tool for visualizing MAF metric output that takes the form
+of a sky map.
+- `sched_maps`, a tool for examining elements of the state of objects used by
+the scheduler, particularly those that take the form of a sky map, although
+some other elements are also shown.
+
+The project contains example data for each. At present, the two applications
+require different versions of their dependencies, so the installation
+instructions differ slightly in each case. (The reason is that the pickles
+containing the sample objects to be viewed with `sched_maps` were created with
+an older version of `rubin_sim`, and this older version needs to be installed
+for them to be loaded.)
+
+## Installation
+
+First, get the code by cloning the github project:
+
+ $ git clone git@github.com:ehneilsen/schedview.git
+
+Go to the development directory, and download and decompress a data file used
+by the automated tests.
+
+ $ cd schedview
+ $ wget -O schedview/data/bsc5.dat.gz http://tdc-www.harvard.edu/catalogs/bsc5.dat.gz
+ $ gunzip schedview/data/bsc5.dat.gz
+
+Create a `conda` environment with the appropriate dependencies, and activate it.
+If you are running the `metric_maps` application, use the `conda` environment
+file that includes a recent version of `rubin_sim`:
+
+ $ # ONLY IF RUNNING metric_maps
+ $ conda env create -f environment.yaml
+ $ conda activate schedview
+
+If you are running `sched_maps`, get the one with the older version:
+
+ $ # ONLY IF RUNNING sched_maps
+ $ conda env create -f environment_080a2.yaml
+ $ conda activate schedview080a2
+
+Install `schedview` in your new environment in editable (development) mode:
+
+ $ pip install -e .
+
+Run the tests:
+
+ $ pytest
+
+## Running `metric_maps`
+
+Activate the environment, and start the `bokeh` app. If `SCHEDVIEW_DIR` is the
+directory into which you cloned the `schedview` github repository, then:
+
+ $ conda activate schedview
+ $ bokeh serve ${SCHEDVIEW_DIR}/schedview/app/metric_maps.py
+
+The command will report the URL at which the app can be found. With the
+instructions above, the data displayed will be the sample metric map included
+in the project itself.
+
+If you want to use a different data file, set the `METRIC_FNAME` environment
+variable to its path before running the `bokeh` app. This is only a short-term
+solution, and will be replaced soon.
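+
+For example (the file name below is only an illustration; substitute the path
+to your own MAF metric data file):
+
+    $ export METRIC_FNAME=${HOME}/my_metrics/CoaddM5_r_HEAL.npz
+    $ bokeh serve ${SCHEDVIEW_DIR}/schedview/app/metric_maps.py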
+
+## Running `sched_maps`
+
+Activate the environment, and start the `bokeh` app. If `SCHEDVIEW_DIR` is the
+directory into which you cloned the `schedview` github repository, then:
+
+ $ conda activate schedview080a2
+ $ bokeh serve ${SCHEDVIEW_DIR}/schedview/app/sched_maps.py
+
+The command will report the URL at which the app can be found.
\ No newline at end of file
diff --git a/environment.yaml b/environment.yaml
new file mode 100644
index 00000000..4e299493
--- /dev/null
+++ b/environment.yaml
@@ -0,0 +1,12 @@
+name: schedview
+channels:
+ - conda-forge
+dependencies:
+ - rubin-sim
+ - bokeh
+ - pytest-flake8
+ - pytest-black
+ - selenium
+ - firefox
+ - geckodriver
+ - build
diff --git a/pyproject.toml b/pyproject.toml
new file mode 100644
index 00000000..bd5e2495
--- /dev/null
+++ b/pyproject.toml
@@ -0,0 +1,36 @@
+[build-system]
+requires = [ "setuptools", "setuptools_scm" ]
+build-backend = "setuptools.build_meta"
+
+[project]
+name = "schedview"
+description = "Tools for visualization of Rubin Observatory's scheduler status, strategy and progress on LSST."
+classifiers = [
+ "Intended Audience :: Science/Research",
+ "Operating System :: OS Independent",
+ "Programming Language :: Python :: 3",
+ "Programming Language :: Python :: 3.10",
+ "Topic :: Scientific/Engineering :: Astronomy",
+ ]
+dynamic = [ "version" ]
+
+[tool.setuptools.dynamic]
+version = { attr = "setuptools_scm.get_version" }
+
+[tool.setuptools.packages.find]
+where = [ "" ]
+
+[tool.setuptools_scm]
+write_to = "schedview/version.py"
+write_to_template = """
+# Generated by setuptools_scm
+__all__ = ["__version__"]
+__version__ = "{version}"
+"""
+
+[tool.pytest.ini_options]
+addopts = "--flake8 --black --ignore-glob=*/version.py"
+flake8-ignore = ["E133", "E203", "E226", "E228", "N802", "N803", "N806", "N812", "N813", "N815", "N816", "W503", "**/*/__init__.py ALL", "**/*/version.py ALL"]
+flake8-max-line-length = 110
+flake8-max-doc-length = 79
+testpaths = "."
diff --git a/schedview/__init__.py b/schedview/__init__.py
new file mode 100644
index 00000000..5891aa0a
--- /dev/null
+++ b/schedview/__init__.py
@@ -0,0 +1 @@
+from .sphere import *
diff --git a/schedview/app/__init__.py b/schedview/app/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/schedview/app/metric_maps.py b/schedview/app/metric_maps.py
new file mode 100644
index 00000000..e7bab019
--- /dev/null
+++ b/schedview/app/metric_maps.py
@@ -0,0 +1,105 @@
+import bokeh.layouts
+import bokeh.plotting
+
+from rubin_sim import maf
+
+from schedview.plot.SphereMap import ArmillarySphere, Planisphere, MollweideMap
+from schedview.collect.stars import load_bright_stars
+from schedview.collect import get_metric_path
+
+
+def make_metric_figure(metric_values_fname=None, nside=8, mag_limit_slider=True):
+ """Create a figure showing multiple projections of a set of a MAF metric.
+
+ Parameters
+ ----------
+ metric_values_fname : `str`, optional
+ Name of file from which to load metric values, as saved by MAF
+        in a saved metric bundle. If it is ``None``, look for the
+        file name in the ``METRIC_FNAME`` environment variable. By default None.
+ nside : `int`, optional
+ Healpix nside to use for display, by default 8
+ mag_limit_slider : `bool`, optional
+ Show the mag limit slider for stars?, by default True
+
+ Returns
+ -------
+ fig : `bokeh.models.layouts.LayoutDOM`
+ A bokeh figure that can be displayed in a notebook (e.g. with
+ ``bokeh.io.show``) or used to create a bokeh app.
+
+ Notes
+ -----
+ If ``mag_limit_slider`` is ``True``, it creates a magnitude limit
+ slider for the stars. This is implemented as a python callback, and
+    so is only operational in a full bokeh app, not in standalone output.
+ """
+
+ if metric_values_fname is None:
+ metric_values_fname = get_metric_path()
+
+ healpy_values = maf.MetricBundle.load(metric_values_fname).metricValues
+
+ star_data = load_bright_stars().loc[:, ["name", "ra", "decl", "Vmag"]]
+ star_data["glyph_size"] = 15 - (15.0 / 3.5) * star_data["Vmag"]
+ star_data.query("glyph_size>0", inplace=True)
+
+ arm = ArmillarySphere()
+ hp_ds, cmap, _ = arm.add_healpix(healpy_values, nside=nside)
+ hz = arm.add_horizon()
+ zd70 = arm.add_horizon(zd=70, line_kwargs={"color": "red", "line_width": 2})
+ star_ds = arm.add_stars(
+ star_data, mag_limit_slider=mag_limit_slider, star_kwargs={"color": "black"}
+ )
+ arm.decorate()
+
+ pla = Planisphere()
+ pla.add_healpix(hp_ds, cmap=cmap, nside=nside)
+ pla.add_horizon(data_source=hz)
+ pla.add_horizon(
+        zd=70, data_source=zd70, line_kwargs={"color": "red", "line_width": 2}
+ )
+ pla.add_stars(
+ star_data,
+ data_source=star_ds,
+ mag_limit_slider=False,
+ star_kwargs={"color": "black"},
+ )
+ pla.decorate()
+
+ mol_plot = bokeh.plotting.figure(plot_width=512, plot_height=256, match_aspect=True)
+ mol = MollweideMap(plot=mol_plot)
+ mol.add_healpix(hp_ds, cmap=cmap, nside=nside)
+ mol.add_horizon(data_source=hz)
+ mol.add_horizon(
+ zd=70, data_source=zd70, line_kwargs={"color": "red", "line_width": 2}
+ )
+ mol.add_stars(
+ star_data,
+ data_source=star_ds,
+ mag_limit_slider=False,
+ star_kwargs={"color": "black"},
+ )
+ mol.decorate()
+
+ figure = bokeh.layouts.row(
+ bokeh.layouts.column(mol.plot, *arm.sliders.values()), arm.plot, pla.plot
+ )
+
+ return figure
+
+
+def add_metric_app(doc):
+ """Add a metric figure to a bokeh document
+
+ Parameters
+ ----------
+ doc : `bokeh.document.document.Document`
+ The bokeh document to which to add the figure.
+ """
+ figure = make_metric_figure()
+ doc.add_root(figure)
+
+
+if __name__.startswith("bokeh_app_"):
+ doc = bokeh.plotting.curdoc()
+ add_metric_app(doc)
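+
+# A minimal usage sketch (not part of the bokeh app itself): the figure
+# returned by make_metric_figure can also be displayed in a Jupyter notebook,
+# e.g.:
+#
+#     import bokeh.io
+#     from schedview.app.metric_maps import make_metric_figure
+#
+#     bokeh.io.output_notebook()
+#     bokeh.io.show(make_metric_figure())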
diff --git a/schedview/app/sched_maps.py b/schedview/app/sched_maps.py
new file mode 100644
index 00000000..d841957c
--- /dev/null
+++ b/schedview/app/sched_maps.py
@@ -0,0 +1,478 @@
+import bokeh.plotting
+from astropy.time import Time
+
+import pandas as pd
+import bokeh.models
+import bokeh.core.properties
+import bokeh.layouts
+
+from schedview.plot.SphereMap import (
+ ArmillarySphere,
+ HorizonMap,
+ Planisphere,
+ MollweideMap,
+)
+
+from schedview.collect import sample_pickle
+
+from schedview.plot.scheduler import SchedulerDisplay
+from schedview.collect import read_scheduler
+from schedview.plot.scheduler import LOGGER, DEFAULT_NSIDE
+
+
+class SchedulerDisplayApp(SchedulerDisplay):
+ def make_pickle_entry_box(self):
+ """Make the entry box for a file name from which to load state."""
+ file_input_box = bokeh.models.TextInput(
+ value=sample_pickle("auxtel59628.pickle.gz") + " ",
+ title="Pickle path:",
+ )
+
+ def switch_pickle(attrname, old, new):
+ def do_switch_pickle():
+ LOGGER.info(f"Loading {new}.")
+ try:
+ # load updates the survey & conditions, which updates
+ # bokeh models...
+ self.load(new)
+ except FileNotFoundError:
+ LOGGER.info("File not found.")
+ pass
+
+ LOGGER.debug(f"Finished loading {new}")
+
+ # If we do not have access to the document, this won't
+ # do anything and is unnecessary, but that's okay.
+ self.enable_controls()
+
+ if file_input_box.document is None:
+ # If we don't have access to the document, we can't disable
+ # the controls, so just do it.
+ do_switch_pickle()
+ else:
+ # disable the controls, and ask the document to do the update
+                # on the following event loop tick.
+ self.disable_controls()
+ file_input_box.document.add_next_tick_callback(do_switch_pickle)
+
+ file_input_box.on_change("value", switch_pickle)
+ self.bokeh_models["file_input_box"] = file_input_box
+
+ def _set_scheduler(self, scheduler):
+ LOGGER.info("Setting scheduler")
+ super()._set_scheduler(scheduler)
+ self.update_tier_selector()
+ self.update_reward_summary_table_bokeh_model()
+
+ def _set_conditions(self, conditions):
+ super()._set_conditions(conditions)
+ self.update_healpix_bokeh_model()
+ self.update_reward_table_bokeh_model()
+ self.update_hovertool_bokeh_model()
+ self.update_telescope_marker_bokeh_model()
+ self.update_moon_marker_bokeh_model()
+ self.update_sun_marker_bokeh_model()
+ self.update_survey_marker_bokeh_model()
+ self.update_chosen_survey_bokeh_model()
+ self.update_mjd_slider_bokeh_model()
+ self.update_time_input_box_bokeh_model()
+
+ def make_time_selector(self):
+ """Create the time selector slider bokeh model."""
+ time_selector = bokeh.models.Slider(
+ title="MJD",
+ start=self.mjd - 1,
+ end=self.mjd + 1,
+ value=self.mjd,
+ step=1.0 / 1440,
+ )
+
+ def switch_time(attrname, old, new):
+ if time_selector.document is None:
+ # If we don't have access to the document, we can't disable
+ # the controls, so don't try.
+ self.mjd = new
+ else:
+ # To disable controls as the time is being updated, we need to
+ # separate the callback so it happens in two event loop ticks:
+ # the first tick disables the controls, the next one
+ # actually updates the MJD and then re-enables the controls.
+ def do_switch_time():
+ self.mjd = new
+ self.enable_controls()
+
+ self.disable_controls()
+ time_selector.document.add_next_tick_callback(do_switch_time)
+
+ time_selector.on_change("value_throttled", switch_time)
+ self.bokeh_models["time_selector"] = time_selector
+ self.update_time_selector_bokeh_model()
+
+ def update_time_selector_bokeh_model(self):
+ """Update the time selector limits and value to match the date."""
+ if "time_selector" in self.bokeh_models:
+ self.bokeh_models["time_selector"].start = self.conditions.sun_n12_setting
+ self.bokeh_models["time_selector"].end = self.conditions.sun_n12_rising
+ self.bokeh_models["time_selector"].value = self.conditions.mjd
+
+ def add_mjd_slider_callback(self):
+ """Create the mjd slider bokeh model."""
+ mjd_slider = self.bokeh_models["mjd_slider"]
+
+ def switch_time(attrname, old, new):
+ if mjd_slider.document is None:
+ # If we don't have access to the document, we can't disable
+ # the controls, so don't try.
+ self.mjd = new
+ else:
+ # To disable controls as the time is being updated, we need to
+ # separate the callback so it happens in two event loop ticks:
+ # the first tick disables the controls, the next one
+ # actually updates the MJD and then re-enables the controls.
+ def do_switch_time():
+ self.mjd = new
+ self.enable_controls()
+
+ self.disable_controls()
+ mjd_slider.document.add_next_tick_callback(do_switch_time)
+
+ mjd_slider.on_change("value_throttled", switch_time)
+ self.update_time_selector_bokeh_model()
+
+ def update_mjd_slider_bokeh_model(self):
+ """Update the time selector limits and value to match the date."""
+ if "mjd_slider" in self.bokeh_models:
+ self.bokeh_models["mjd_slider"].start = self.conditions.sun_n12_setting
+ self.bokeh_models["mjd_slider"].end = self.conditions.sun_n12_rising
+ self.bokeh_models["mjd_slider"].value = self.conditions.mjd
+
+ def make_time_input_box(self):
+ """Create the time entry box bokeh model."""
+ time_input_box = bokeh.models.TextInput(title="Date and time (UTC):")
+ self.bokeh_models["time_input_box"] = time_input_box
+ self.update_time_input_box_bokeh_model()
+
+ def switch_time(attrname, old, new):
+ new_mjd = pd.to_datetime(new, utc=True).to_julian_date() - 2400000.5
+
+ if time_input_box.document is None:
+ # If we don't have access to the document, we can't disable
+ # the controls, so don't try.
+ self.mjd = new_mjd
+ else:
+ # To disable controls as the time is being updated, we need to
+ # separate the callback so it happens in two event loop ticks:
+ # the first tick disables the controls, the next one
+ # actually updates the MJD and then re-enables the controls.
+ def do_switch_time():
+ self.mjd = new_mjd
+ self.enable_controls()
+
+ self.disable_controls()
+ time_input_box.document.add_next_tick_callback(do_switch_time)
+
+ time_input_box.on_change("value", switch_time)
+
+ def update_time_input_box_bokeh_model(self):
+ """Update the time selector limits and value to match the date."""
+ if "time_input_box" in self.bokeh_models:
+ iso_time = Time(self.mjd, format="mjd", scale="utc").iso
+ self.bokeh_models["time_input_box"].value = iso_time
+
+ def make_tier_selector(self):
+ """Create the tier selector bokeh model."""
+ tier_selector = bokeh.models.Select(value=None, options=[None])
+
+ def switch_tier(attrname, old, new):
+ self.select_tier(new)
+
+ tier_selector.on_change("value", switch_tier)
+ self.bokeh_models["tier_selector"] = tier_selector
+ self.update_tier_selector()
+
+ def update_tier_selector(self):
+ """Update tier selector to represent tiers for the current survey."""
+ if "tier_selector" in self.bokeh_models:
+ options = self.tier_names
+ self.bokeh_models["tier_selector"].options = options
+ self.bokeh_models["tier_selector"].value = options[self.survey_index[0]]
+
+ def select_tier(self, tier):
+ """Set the tier being displayed."""
+ super().select_tier(tier)
+ self.update_survey_selector()
+
+ def make_survey_selector(self):
+ """Create the survey selector bokeh model."""
+ survey_selector = bokeh.models.Select(value=None, options=[None])
+
+ def switch_survey(attrname, old, new):
+ self.select_survey(new)
+
+ survey_selector.on_change("value", switch_survey)
+ self.bokeh_models["survey_selector"] = survey_selector
+
+ def update_survey_selector(self):
+ """Uptade the survey selector to the current scheduler and tier."""
+ if "survey_selector" in self.bokeh_models:
+ options = [
+ s.survey_name for s in self.scheduler.survey_lists[self.survey_index[0]]
+ ]
+ self.bokeh_models["survey_selector"].options = options
+ self.bokeh_models["survey_selector"].value = options[self.survey_index[1]]
+
+ def select_survey(self, survey):
+ """Set the tier being displayed."""
+ super().select_survey(survey)
+ # Note that updating the value selector triggers the
+ # callback, which updates the maps themselves
+ self.update_value_selector()
+ self.update_reward_table_bokeh_model()
+ self.update_hovertool_bokeh_model()
+ self.update_survey_marker_bokeh_model()
+
+ def make_value_selector(self):
+ """Create the bokeh model to select which value to show in maps."""
+ value_selector = bokeh.models.Select(value=None, options=[None])
+
+ def switch_value(attrname, old, new):
+ self.select_value(new)
+
+ value_selector.on_change("value", switch_value)
+ self.bokeh_models["value_selector"] = value_selector
+
+ def update_value_selector(self):
+ """Update the value selector bokeh model to show available options."""
+ if "value_selector" in self.bokeh_models:
+ self.bokeh_models["value_selector"].options = self.map_keys
+ if self.map_key in self.map_keys:
+ self.bokeh_models["value_selector"].value = self.map_key
+ elif self.init_key in self.map_keys:
+ self.bokeh_models["value_selector"].value = self.init_key
+ else:
+ self.bokeh_models["value_selector"].value = self.map_keys[-1]
+
+ def select_value(self, map_key):
+ """Set the tier being displayed."""
+ super().select_value(map_key)
+ self.update_healpix_bokeh_model()
+
+ def update_time_display_bokeh_model(self):
+ """Update the value of the displayed time."""
+ if "mjd" in self.sliders:
+ self.update_mjd_slider_bokeh_model()
+
+ if "time_input_box" in self.bokeh_models:
+ self.update_time_input_box_bokeh_model()
+
+ def update_displayed_value_metadata_bokeh_model(self):
+ self.update_tier_selector()
+
+ def _select_survey_from_summary_table(self, attr, old, new):
+ LOGGER.debug("Called select_survey_from_summary_table")
+ selected_index = new[0]
+ tier_name = self.data_sources["reward_summary_table"].data["tier"][
+ selected_index
+ ]
+ survey_name = self.data_sources["reward_summary_table"].data["survey_name"][
+ selected_index
+ ]
+
+ # Update the selectors, and this will run
+ # the callbacks to do all the updates
+ self.bokeh_models["tier_selector"].value = tier_name
+ self.bokeh_models["survey_selector"].value = survey_name
+
+ def make_reward_summary_table(self):
+ super().make_reward_summary_table()
+
+ self.data_sources["reward_summary_table"].selected.on_change(
+ "indices",
+ self._select_survey_from_summary_table,
+ )
+
+ def disable_controls(self):
+ """Disable all controls.
+
+ Intended to be used while plot elements are updating, and the
+        controls therefore do not do what the user probably intends.
+ """
+ LOGGER.info("Disabling controls")
+ for model in self.bokeh_models.values():
+ try:
+ model.disabled = True
+ except AttributeError:
+ pass
+
+ def enable_controls(self):
+ """Enable all controls."""
+ LOGGER.info("Enabling controls")
+ for model in self.bokeh_models.values():
+ try:
+ model.disabled = False
+ except AttributeError:
+ pass
+
+ def make_figure(self):
+ """Create a bokeh figures showing sky maps for scheduler behavior.
+
+ Returns
+ -------
+ fig : `bokeh.models.layouts.LayoutDOM`
+ A bokeh figure that can be displayed in a notebook (e.g. with
+ ``bokeh.io.show``) or used to create a bokeh app.
+ """
+ self.make_sphere_map(
+ "armillary_sphere",
+ ArmillarySphere,
+ "Armillary Sphere",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+ self.bokeh_models["alt_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "alt"
+ ]
+ self.bokeh_models["az_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "az"
+ ]
+ self.bokeh_models["mjd_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "mjd"
+ ]
+ # self.bokeh_models["mjd_slider"].visible = False
+ self.make_sphere_map(
+ "planisphere",
+ Planisphere,
+ "Planisphere",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+ self.make_sphere_map(
+ "altaz",
+ HorizonMap,
+ "Alt Az",
+ plot_width=512,
+ plot_height=512,
+ decorate=False,
+ horizon_graticules=True,
+ )
+ self.make_sphere_map(
+ "mollweide",
+ MollweideMap,
+ "Mollweide",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+
+ self.bokeh_models["key"] = bokeh.models.Div(text=self.key_markup)
+
+ self.bokeh_models["reward_table_title"] = bokeh.models.Div(
+            text="Basis functions for displayed survey"
+ )
+ self.make_reward_table()
+
+ self.bokeh_models["reward_summary_table_title"] = bokeh.models.Div(
+ text="Rewards for all survey schedulers
"
+ )
+ self.make_reward_summary_table()
+ self.make_chosen_survey()
+ self.make_value_selector()
+ self.make_survey_selector()
+ self.make_tier_selector()
+ self.make_pickle_entry_box()
+
+ # slider was created by SphereMap
+ self.add_mjd_slider_callback()
+
+ arm_controls = [
+ self.bokeh_models["alt_slider"],
+ self.bokeh_models["az_slider"],
+ ]
+
+ controls = [self.bokeh_models["file_input_box"]]
+
+ if self.observatory is not None:
+ self.make_time_input_box()
+ controls.append(self.bokeh_models["time_input_box"])
+
+ controls += [
+ self.bokeh_models["mjd_slider"],
+ self.bokeh_models["tier_selector"],
+ self.bokeh_models["survey_selector"],
+ self.bokeh_models["value_selector"],
+ ]
+
+ figure = bokeh.layouts.row(
+ bokeh.layouts.column(
+ self.bokeh_models["altaz"],
+ *controls,
+ self.bokeh_models["chosen_survey"],
+ self.bokeh_models["reward_table_title"],
+ self.bokeh_models["reward_table"],
+ self.bokeh_models["reward_summary_table_title"],
+ self.bokeh_models["reward_summary_table"],
+ ),
+ bokeh.layouts.column(
+ self.bokeh_models["planisphere"],
+ self.bokeh_models["key"],
+ self.bokeh_models["mollweide"],
+ self.bokeh_models["armillary_sphere"],
+ *arm_controls,
+ ),
+ )
+
+ return figure
+
+
+def make_scheduler_map_figure(
+ scheduler_pickle_fname="baseline22_start.pickle.gz",
+ init_key="AvoidDirectWind",
+ nside=DEFAULT_NSIDE,
+):
+ """Create a set of bekeh figures showing sky maps for scheduler behavior.
+
+ Parameters
+ ----------
+    scheduler_pickle_fname : `str`, optional
+        File from which to load the scheduler state. If set to ``None``, look
+        for the file name in the ``SCHED_PICKLE`` environment variable.
+        By default ``"baseline22_start.pickle.gz"``.
+ init_key : `str`, optional
+ Name of the initial map to show, by default 'AvoidDirectWind'
+    nside : `int`, optional
+        Healpix nside to use for display, by default ``DEFAULT_NSIDE``.
+
+ Returns
+ -------
+ fig : `bokeh.models.layouts.LayoutDOM`
+ A bokeh figure that can be displayed in a notebook (e.g. with
+ ``bokeh.io.show``) or used to create a bokeh app.
+ """
+ if scheduler_pickle_fname is None:
+ scheduler_map = SchedulerDisplayApp(nside=nside)
+ else:
+ scheduler, conditions = read_scheduler(sample_pickle(scheduler_pickle_fname))
+ scheduler.update_conditions(conditions)
+ scheduler_map = SchedulerDisplayApp(nside=nside, scheduler=scheduler)
+
+ figure = scheduler_map.make_figure()
+
+ return figure
+
+
+def add_scheduler_map_app(doc):
+ """Add a scheduler map figure to a bokeh document
+
+ Parameters
+ ----------
+ doc : `bokeh.document.document.Document`
+ The bokeh document to which to add the figure.
+ """
+ figure = make_scheduler_map_figure()
+ doc.add_root(figure)
+
+
+if __name__.startswith("bokeh_app_"):
+ doc = bokeh.plotting.curdoc()
+ add_scheduler_map_app(doc)
diff --git a/schedview/collect/__init__.py b/schedview/collect/__init__.py
new file mode 100644
index 00000000..8f607e4e
--- /dev/null
+++ b/schedview/collect/__init__.py
@@ -0,0 +1 @@
+from .local import *
diff --git a/schedview/collect/local.py b/schedview/collect/local.py
new file mode 100644
index 00000000..1d529f4a
--- /dev/null
+++ b/schedview/collect/local.py
@@ -0,0 +1,4 @@
+from .scheduler_pickle import read_scheduler # noqa F401
+from .scheduler_pickle import sample_pickle # noqa F401
+from .metrics import get_metric_path # noqa F401
+from .stars import load_bright_stars # noqa F401
diff --git a/schedview/collect/metrics.py b/schedview/collect/metrics.py
new file mode 100644
index 00000000..c4eb27ef
--- /dev/null
+++ b/schedview/collect/metrics.py
@@ -0,0 +1,36 @@
+import importlib.resources
+from os import environ
+
+__all__ = ["get_metric_path"]
+
+
+def get_metric_path():
+ """Get the path to a file with numpy metrics
+
+ Returns
+ -------
+ metric_path : `str`
+ The path to the file containing the MAF metric
+ """
+ if "PICKLE_FNAME" in environ:
+ return environ["PICKLE_FNAME"]
+
+ root_package = __package__.split(".")[0]
+ base_fname = "baseline_v2_0_10yrs_CoaddM5_r_HEAL.npz"
+
+ try:
+ fname = str(
+ importlib.resources.files(root_package).joinpath("data", base_fname)
+ )
+ except AttributeError as e:
+ # If we are using an older version of importlib, we need to do
+ # this instead:
+ if e.args[0] != "module 'importlib.resources' has no attribute 'files'":
+ raise e
+
+ with importlib.resources.path(root_package, ".") as root_path:
+ fname = str(root_path.joinpath("data", base_fname))
+
+ return fname
diff --git a/schedview/collect/scheduler_pickle.py b/schedview/collect/scheduler_pickle.py
new file mode 100644
index 00000000..ce18c58c
--- /dev/null
+++ b/schedview/collect/scheduler_pickle.py
@@ -0,0 +1,121 @@
+import pickle
+import os
+import gzip
+import importlib.resources
+from pathlib import Path
+from tempfile import TemporaryDirectory
+import urllib
+import urllib.request
+
+__all__ = ["read_scheduler", "sample_pickle"]
+
+try:
+ PICKLE_FNAME = os.environ["SCHED_PICKLE"]
+except KeyError:
+ PICKLE_FNAME = None
+
+
+def read_local_scheduler_pickle(file_name):
+ """Read an instance of a scheduler object from a pickle.
+
+ Parameters
+ ----------
+ file_name : `str`
+ The name of the pickle file from which to load the scheduler.
+
+ Returns
+ -------
+ scheduler : `rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler`
+ An instance of a rubin_sim scheduler object.
+ conditions : `rubin_sim.scheduler.features.conditions.Conditions`
+ An instance of a rubin_sim conditions object.
+ """
+ if file_name is None:
+ file_name = PICKLE_FNAME
+
+ if file_name is None:
+ file_name = sample_pickle()
+
+ opener = gzip.open if file_name.endswith(".gz") else open
+
+ with opener(file_name, "rb") as pio:
+ scheduler, conditions = pickle.load(pio)
+
+ return [scheduler, conditions]
+
+
+def read_scheduler(file_name_or_url=None):
+ """Read an instance of a scheduler object from a pickle.
+
+ Parameters
+ ----------
+    file_name_or_url : `str`, optional
+ The name or URL of the pickle file from which to load the scheduler.
+
+ Returns
+ -------
+ scheduler : `rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler`
+ An instance of a rubin_sim scheduler object.
+ conditions : `rubin_sim.scheduler.features.conditions.Conditions`
+ An instance of a rubin_sim conditions object.
+ """
+ if file_name_or_url is None:
+ file_name_or_url = PICKLE_FNAME
+
+ if file_name_or_url is None:
+ file_name_or_url = sample_pickle()
+
+ if Path(file_name_or_url).is_file():
+ scheduler, conditions = read_local_scheduler_pickle(file_name_or_url)
+ else:
+        # If we didn't have to decompress it, we could use urlopen instead
+ # of downloading a local copy. But, it can be compressed, so we need
+ # to use gzip.open to open it.
+ with TemporaryDirectory() as directory:
+ with urllib.request.urlopen(file_name_or_url) as url_io:
+ content = url_io.read()
+
+ # Infer a file name
+ parsed_url = urllib.parse.urlparse(file_name_or_url)
+ origin_path = Path(parsed_url.path)
+ origin_name = origin_path.name
+ name = origin_name if len(origin_name) > 0 else "scheduler.pickle"
+ path = Path(directory).joinpath(name)
+
+ with open(path, "wb") as file_io:
+ file_io.write(content)
+
+ scheduler, conditions = read_local_scheduler_pickle(str(path))
+
+ return scheduler, conditions
+
+
+def sample_pickle(base_fname="auxtel59628.pickle.gz"):
+ """Return the path of the sample pickle
+
+ Parameters
+ ----------
+ base_fname : `str`
+ The base file name.
+
+ Returns
+ -------
+ fname : `str`
+ File name of the sample pickle.
+ """
+ root_package = __package__.split(".")[0]
+
+ try:
+ fname = str(
+ importlib.resources.files(root_package).joinpath("data", base_fname)
+ )
+ except AttributeError as e:
+ # If we are using an older version of importlib, we need to do
+ # this instead:
+ if e.args[0] != "module 'importlib.resources' has no attribute 'files'":
+ raise e
+
+ with importlib.resources.path(root_package, ".") as root_path:
+ fname = str(root_path.joinpath("data", base_fname))
+
+ return fname
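+
+# A minimal usage sketch: read_scheduler accepts either a local path or a URL
+# (the path and URL below are illustrative only).  With no argument it falls
+# back to the SCHED_PICKLE environment variable, and then to the packaged
+# sample pickle.
+#
+#     from schedview.collect.scheduler_pickle import read_scheduler
+#
+#     scheduler, conditions = read_scheduler()
+#     scheduler, conditions = read_scheduler("/data/my_scheduler.pickle.gz")
+#     scheduler, conditions = read_scheduler(
+#         "https://example.com/scheduler.pickle.gz"
+#     )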
diff --git a/schedview/collect/stars.py b/schedview/collect/stars.py
new file mode 100644
index 00000000..a1005e40
--- /dev/null
+++ b/schedview/collect/stars.py
@@ -0,0 +1,54 @@
+from collections import OrderedDict
+import os
+import pandas as pd
+
+BSC5_URL = "http://tdc-www.harvard.edu/catalogs/bsc5.dat.gz"
+
+
+def load_bright_stars(fname=None):
+ """Read the Yale Bright Star Catalog into a pandas.DataFrame.
+
+ Parameters
+ ----------
+    fname : `str`, optional
+        Name of the file from which to load the catalog. If ``None``, use the
+        ``BSC5_FNAME`` environment variable if set, otherwise download the
+        catalog from ``BSC5_URL``. By default None.
+
+ Returns
+ -------
+ bright_stars : `pandas.DataFrame`
+ The catalog of bright stars.
+ """
+ if fname is None:
+ try:
+ fname = os.environ["BSC5_FNAME"]
+ except KeyError:
+ fname = BSC5_URL
+
+ ybs_columns = OrderedDict(
+ (
+ ("HR", (0, 4)),
+ ("name", (4, 14)),
+ ("RA_hour", (75, 77)),
+ ("RA_min", (77, 79)),
+ ("RA_sec", (79, 83)),
+ ("decl_sign", (83, 84)),
+ ("decl_deg", (84, 86)),
+ ("decl_min", (86, 88)),
+ ("decl_sec", (88, 90)),
+ ("Vmag", (102, 107)),
+ )
+ )
+
+ compression = "gzip" if fname.endswith(".gz") else "infer"
+
+ bs = pd.read_fwf(
+ fname,
+ colspecs=[ybs_columns[k] for k in ybs_columns],
+ names=[k for k in ybs_columns],
+ compression=compression,
+ )
+ bs["ra"] = (360 / 24) * (bs.RA_hour + (bs.RA_min + bs.RA_sec / 60.0) / 60.0)
+ bs["decl"] = bs.decl_deg + (bs.decl_min + bs.decl_sec / 60.0) / 60.0
+ southern_stars = bs.decl_sign == "-"
+ bs.loc[southern_stars, "decl"] = -1 * bs.loc[southern_stars, "decl"]
+ return bs
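+
+# A minimal usage sketch: with no argument, load_bright_stars reads the catalog
+# named by the BSC5_FNAME environment variable, or downloads it from BSC5_URL
+# if that variable is not set.  A locally downloaded copy (as described in the
+# README) can also be passed explicitly:
+#
+#     from schedview.collect.stars import load_bright_stars
+#
+#     bright_stars = load_bright_stars("schedview/data/bsc5.dat.gz")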
diff --git a/schedview/data/__init__.py b/schedview/data/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/schedview/data/auxtel59628.pickle.gz b/schedview/data/auxtel59628.pickle.gz
new file mode 100644
index 00000000..3e0d869e
Binary files /dev/null and b/schedview/data/auxtel59628.pickle.gz differ
diff --git a/schedview/data/baseline22_start.pickle.gz b/schedview/data/baseline22_start.pickle.gz
new file mode 100644
index 00000000..a6d866be
Binary files /dev/null and b/schedview/data/baseline22_start.pickle.gz differ
diff --git a/schedview/data/baseline_v2_0_10yrs_CoaddM5_r_HEAL.npz b/schedview/data/baseline_v2_0_10yrs_CoaddM5_r_HEAL.npz
new file mode 100644
index 00000000..cc614267
Binary files /dev/null and b/schedview/data/baseline_v2_0_10yrs_CoaddM5_r_HEAL.npz differ
diff --git a/schedview/js/__init_.py b/schedview/js/__init_.py
new file mode 100644
index 00000000..e69de29b
diff --git a/schedview/js/update_map.js b/schedview/js/update_map.js
new file mode 100644
index 00000000..362e22d7
--- /dev/null
+++ b/schedview/js/update_map.js
@@ -0,0 +1,315 @@
+function rotateCart(ux, uy, uz, angle, x0, y0, z0) {
+ // ux, uy, uz is a vector that defines the axis of rotation
+ // angle is in radians
+ // following https://en.wikipedia.org/wiki/Rotation_matrix
+
+ const cosa = Math.cos(angle)
+ const ccosa = 1 - cosa
+ const sina = Math.sin(angle)
+ const rxx = cosa + ux * ux * ccosa
+ const rxy = ux * uy * ccosa - uz * sina
+ const rxz = ux * uz * ccosa + uy * sina
+ const ryx = uy * ux * ccosa + uz * sina
+ const ryy = cosa + uy * uy * ccosa
+ const ryz = uy * uz * ccosa - ux * sina
+ const rzx = uz * ux * ccosa - uy * sina
+ const rzy = uz * uy * ccosa + ux * sina
+ const rzz = cosa + uz * uz * ccosa
+ const x = rxx * x0 + rxy * y0 + rxz * z0
+ const y = ryx * x0 + ryy * y0 + ryz * z0
+ const z = rzx * x0 + rzy * y0 + rzz * z0
+ return [x, y, z]
+}
+
+function applyRotations(hpx, hpy, hpz, codecl, ra, orient, npoleCoords1) {
+ // We are looking out of the sphere from the inside, so the center is 180 degrees
+ // from the front of the sphere, hence the pi.
+ const decl_rot = Math.PI + codecl
+ const ra_rot = ra - Math.PI / 2
+
+ const coords1 = rotateCart(1, 0, 0, decl_rot, hpx, hpy, hpz)
+ const coords2 = rotateCart(npoleCoords1[0], npoleCoords1[1], npoleCoords1[2], ra_rot, coords1[0], coords1[1], coords1[2])
+ let coords = rotateCart(0, 0, 1, orient, coords2[0], coords2[1], coords2[2])
+
+ // In astronomy, we are looking out of the sphere from the center to the back
+ // (which naturally results in west to the right).
+ // Positive z is out of the screen behind us, and we are at the center,
+ // so to visible part is when z is negative (coords[2]<=0).
+ // So, stuff the points with positive z to NaN so they are
+ // not shown, because they are behind the observer.
+
+ // Use 5*Number.EPSILON instead of exactly 0, because the
+ // assorted trig operations result in values slightly above or below
+ // 0 when the horizon is in principle exactly 0, and this gives an
+ // irregularly dotted/dashed appearance to the horizon if
+ // a cutoff of exactly 0 is used.
+ if (coords[2] > 5 * Number.EPSILON) {
+ coords[0] = NaN
+ coords[1] = NaN
+ }
+ return coords
+}
+
+function horizonToEq(lat, alt, az, lst) {
+ // Stupid simple rough approximation, ignores aberration, precession, diffraction, etc.
+ // Doing this "correctly" would make this much more complicated and much slower, and
+    // for the dates of relevance it won't make a significant difference.
+ const decl = Math.asin(Math.sin(alt) * Math.sin(lat) + Math.cos(lat) * Math.cos(alt) * Math.cos(az))
+ const ha = Math.atan2(
+ -1 * Math.cos(alt) * Math.cos(lat) * Math.sin(az),
+ Math.sin(alt) - Math.sin(lat) * Math.sin(decl)
+ )
+ const ra = lst - ha
+ const coords = [ra, decl]
+ return coords
+}
+
+function eqToHorizon(ra, decl, lat, lst) {
+ // Stupid simple rough approximation, ignores aberration, precession, diffraction, etc.
+ const ha = lst - ra
+ const alt = Math.asin(
+ Math.sin(decl) * Math.sin(lat) + Math.cos(decl) * Math.cos(lat) * Math.cos(ha)
+ )
+ const az = Math.atan2(
+ -1 * Math.cos(decl) * Math.cos(lat) * Math.sin(ha),
+ Math.sin(decl) - Math.sin(lat) * Math.sin(alt)
+ )
+ const coords = [alt, az]
+ return coords
+}
+
+function eqToHorizonCart(ra, decl, lat, lst) {
+ const horizon = eqToHorizon(ra, decl, lat, lst)
+ const alt = horizon[0]
+ const az = horizon[1]
+ const zd = Math.PI / 2 - alt
+ const x = -1 * zd * Math.sin(az)
+ const y = zd * Math.cos(az)
+ let coords = [x, y]
+ if ((x ** 2 + y ** 2) > ((Math.PI / 2) ** 2)) {
+ coords[0] = NaN
+ coords[1] = NaN
+ }
+ return coords
+}
+
+function eqToCart(ra, decl) {
+ const theta = Math.PI / 2 - decl
+ const phi = ra
+ const x = Math.sin(theta) * Math.cos(phi)
+ const y = Math.sin(theta) * Math.sin(phi)
+ const z = Math.cos(theta)
+ return [x, y, z]
+}
+
+function cartToEq(x, y, z) {
+ const theta = Math.acos(z)
+ const ra = Math.atan2(y, x)
+ const decl = Math.PI / 2 - theta
+ return [ra, decl]
+}
+
+function eqToLambertAEA(ra, decl, hemisphere, west_right) {
+ // Follow notation of Snyder p. 87-88
+ let theta = (west_right === (hemisphere === 'south')) ? -1 * ra : ra
+ const phi = (hemisphere === 'south') ? -1 * decl : decl
+
+ // Choose an R to match that used by healpy
+ const R = 1
+
+ const rho = 2 * R * Math.sin((Math.PI / 2 - phi) / 2)
+ let x = rho * Math.sin(theta)
+ let y = rho * Math.cos(theta)
+ if (phi > 89.9 * Math.PI / 180) {
+        x = NaN
+        y = NaN
+ }
+ return [x, y]
+}
+
+function eqToMollweide(ra, decl, west_right) {
+ const tolerance = 0.001
+ const max_iter = 1000
+ const R = 1 / Math.sqrt(2)
+ const dir_sign = west_right ? -1 : 1
+ const wra = (ra + Math.PI) % (2 * Math.PI) - Math.PI
+
+ // Return NaNs if near the discontinuity
+ if (Math.abs(ra - Math.PI) < (Math.PI / 180) / Math.cos(decl)) {
+        let xy = [NaN, NaN]
+ return xy
+ }
+
+ function compute_xy(theta) {
+ const x = dir_sign * R * (2 / Math.PI) * Math.sqrt(2) * wra * Math.cos(theta)
+ const y = Math.sqrt(2) * R * Math.sin(theta)
+ return [x, y]
+ }
+
+ let theta = decl
+ let xy = compute_xy(theta)
+
+ for (let iter = 1; iter < max_iter; iter++) {
+ if (Math.cos(theta) ** 2 <= Math.cos(Math.PI / 2) ** 2) {
+ // We are too close to a pole to refine further
+ break
+ }
+
+ const theta0 = theta
+ const xy0 = compute_xy(theta0)
+
+ const delta_theta = -(2 * theta0 + Math.sin(2 * theta0) - Math.PI * Math.sin(decl)) / (
+ 4 * (Math.cos(theta0) ** 2)
+ )
+ theta = theta0 + delta_theta
+ xy = compute_xy(theta)
+
+ if ((Math.abs(xy[0] - xy0[0]) < tolerance) & (Math.abs(xy[1] - xy0[1]) < tolerance)) {
+ break
+ }
+ }
+
+ return xy
+}
+
+function computeLocalSiderealTime(mjd, longitude) {
+ // Computes the Mean Sidereal Time
+
+ // Follow Meeus's _Astronomical_Algorithms_ 2nd edition, equation 12.4
+ // Avoid obvious simplifications to make it easier to check the
+ // numbers in the equation exactly.
+    // Meeus follows the IAU recommendation of 1982.
+ const jd = mjd + 2400000.5
+ const t = (jd - 2451545.0) / 36525.0
+ const theta0 = 280.46061837 + (360.98564736629 * (jd - 2451545.0)) + (0.000387933 * t * t) - (t * t * t / 38710000)
+ const lon_deg = longitude * (180.0 / Math.PI)
+ const lst_deg = ((theta0 + lon_deg) % 360 + 360) % 360
+ const lst = lst_deg * Math.PI / 180.0
+ return lst
+}
+
+const data = data_source.data
+
+lat = lat * Math.PI / 180
+lon = lon * Math.PI / 180
+
+const alt = center_alt_slider.value * Math.PI / 180
+const az = center_az_slider.value * Math.PI / 180
+const mjd = mjd_slider.value
+const lst = computeLocalSiderealTime(mjd, lon)
+
+const eqCoords = horizonToEq(lat, alt, az, lst)
+const ra = eqCoords[0]
+const decl = eqCoords[1]
+const codecl = Math.PI / 2 - decl
+const npoleCoords1 = rotateCart(1, 0, 0, codecl, 0, 0, 1)
+
+const hemisphere = (lat > 0) ? 'north' : 'south'
+
+/* To get the orientation
+ - Get the coordinates of a point we want to be directly "up" from center (slightly higher alt, same az)
+ - Look where it would end up with orientation 0
+ - Get the angle of the desired top with the actual top
+ - Reverse to get the rotation
+ */
+let upAlt = alt + Math.PI / 180
+let upAz = az
+const upEq = horizonToEq(lat, upAlt, upAz, lst)
+const upCart0 = eqToCart(upEq[0], upEq[1])
+const upCart3 = applyRotations(upCart0[0], upCart0[1], upCart0[2], codecl, ra, 0, npoleCoords1)
+const orient = Math.PI / 2 - Math.atan2(upCart3[1], upCart3[0])
+
+for (let i = 0; i < data['x_hp'].length; i++) {
+ let x_hp = NaN
+ let y_hp = NaN
+ let z_hp = NaN
+ let pointEq = NaN
+
+    // Columns can contain lists of coords (e.g. for corners of healpixels), or individual points.
+    // If they are lists, iterate over each element. Otherwise, just apply the rotation to the point.
+ if (typeof (data['x_hp'][i]) === 'number') {
+ if ('alt' in data) {
+ pointEq = horizonToEq(lat, data['alt'][i] * Math.PI / 180, data['az'][i] * Math.PI / 180, lst)
+ data['ra'][i] = pointEq[0] * 180 / Math.PI
+ data['decl'][i] = pointEq[1] * 180 / Math.PI
+ let cartCoords = eqToCart(pointEq[0], pointEq[1])
+ x_hp = cartCoords[0]
+ y_hp = cartCoords[1]
+ z_hp = cartCoords[2]
+ } else {
+ x_hp = data['x_hp'][i]
+ y_hp = data['y_hp'][i]
+ z_hp = data['z_hp'][i]
+ }
+ const coords = applyRotations(x_hp, y_hp, z_hp, codecl, ra, orient, npoleCoords1)
+ data['x_orth'][i] = coords[0]
+ data['y_orth'][i] = coords[1]
+ data['z_orth'][i] = coords[2]
+ if ('x_laea' in data) {
+ const laea = eqToLambertAEA(data['ra'][i] * Math.PI / 180, data['decl'][i] * Math.PI / 180, hemisphere, true)
+ data['x_laea'][i] = laea[0]
+ data['y_laea'][i] = laea[1]
+ }
+ if ('x_moll' in data) {
+ const moll = eqToMollweide(data['ra'][i] * Math.PI / 180, data['decl'][i] * Math.PI / 180, true)
+ data['x_moll'][i] = moll[0]
+ data['y_moll'][i] = moll[1]
+ }
+ if ('x_hz' in data) {
+ const horizonCart = eqToHorizonCart(data['ra'][i] * Math.PI / 180, data['decl'][i] * Math.PI / 180, lat, lst)
+ data['x_hz'][i] = horizonCart[0]
+ data['y_hz'][i] = horizonCart[1]
+ }
+ } else {
+ for (let j = 0; j < data['x_hp'][i].length; j++) {
+ if ('alt' in data) {
+ pointEq = horizonToEq(lat, data['alt'][i][j] * Math.PI / 180, data['az'][i][j] * Math.PI / 180, lst)
+ data['ra'][i][j] = pointEq[0] * 180 / Math.PI
+ data['decl'][i][j] = pointEq[1] * 180 / Math.PI
+ let cartCoords = eqToCart(pointEq[0], pointEq[1])
+ x_hp = cartCoords[0]
+ y_hp = cartCoords[1]
+ z_hp = cartCoords[2]
+ } else {
+ x_hp = data['x_hp'][i][j]
+ y_hp = data['y_hp'][i][j]
+ z_hp = data['z_hp'][i][j]
+ }
+ const coords = applyRotations(x_hp, y_hp, z_hp, codecl, ra, orient, npoleCoords1)
+ data['x_orth'][i][j] = coords[0]
+ data['y_orth'][i][j] = coords[1]
+ data['z_orth'][i][j] = coords[2]
+ if ('x_laea' in data) {
+ const laea = eqToLambertAEA(data['ra'][i][j] * Math.PI / 180, data['decl'][i][j] * Math.PI / 180, hemisphere, true)
+ data['x_laea'][i][j] = laea[0]
+ data['y_laea'][i][j] = laea[1]
+ }
+ if ('x_moll' in data) {
+ const moll = eqToMollweide(data['ra'][i][j] * Math.PI / 180, data['decl'][i][j] * Math.PI / 180, true)
+ data['x_moll'][i][j] = moll[0]
+ data['y_moll'][i][j] = moll[1]
+ }
+ if ('x_hz' in data) {
+ const horizonCart = eqToHorizonCart(data['ra'][i][j] * Math.PI / 180, data['decl'][i][j] * Math.PI / 180, lat, lst)
+ data['x_hz'][i][j] = horizonCart[0]
+ data['y_hz'][i][j] = horizonCart[1]
+ }
+ }
+ }
+
+ if ('in_mjd_window' in data) {
+ data['in_mjd_window'][i] = 1.0
+ if ('min_mjd' in data) {
+ if (mjd < data['min_mjd'][i]) {
+ data['in_mjd_window'][i] = 0.0
+ }
+ }
+ if ('max_mjd' in data) {
+ if (mjd > data['max_mjd'][i]) {
+ data['in_mjd_window'][i] = 0.0
+ }
+ }
+ }
+}
+
+data_source.change.emit()
diff --git a/schedview/munge/__init__.py b/schedview/munge/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/schedview/munge/monkeypatch_rubin_sim.py b/schedview/munge/monkeypatch_rubin_sim.py
new file mode 100644
index 00000000..63acbcd1
--- /dev/null
+++ b/schedview/munge/monkeypatch_rubin_sim.py
@@ -0,0 +1,523 @@
+from io import StringIO
+from collections import OrderedDict
+from copy import deepcopy
+
+import pandas as pd
+import numpy as np
+import healpy as hp
+
+import rubin_sim
+import rubin_sim.scheduler
+import rubin_sim.scheduler.surveys
+import rubin_sim.scheduler.features.conditions
+import rubin_sim.scheduler.basis_functions
+
+
+class Core_scheduler(rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler):
+ def get_basis_functions(self, survey_index=None, conditions=None):
+ """Get basis functions for a specific survey in provided conditions.
+
+ Parameters
+ ----------
+ survey_index : `List` [`int`], optional
+ A list with two elements: the survey list and the element within
+ that survey list for which the basis function should be retrieved.
+ If ``None``, use the latest survey to make an addition to the
+ queue.
+ conditions : `rubin_sim.scheduler.features.conditions.Conditions`, optional # noqa W505
+ The conditions for which to return the basis functions.
+            If ``None``, use the conditions associated with this scheduler.
+ By default None.
+
+ Returns
+ -------
+ basis_funcs : `OrderedDict` ['str`, `rubin_sim.scheduler.basis_functions.basis_functions.Base_basis_function`] # noqa W505
+ A dictionary of the basis functions, where the keys are names for
+ the basis functions and the values are the functions themselves.
+ """
+ if survey_index is None:
+ survey_index = self.survey_index
+
+ if conditions is None:
+ conditions = self.conditions
+
+ survey = self.survey_lists[survey_index[0]][survey_index[1]]
+ basis_funcs = OrderedDict()
+ for basis_func in survey.basis_functions:
+ if hasattr(basis_func(conditions), "__len__"):
+ basis_funcs[basis_func.__class__.__name__] = basis_func
+ return basis_funcs
+
+ def get_healpix_maps(self, survey_index=None, conditions=None):
+ """Get the healpix maps for a specific survey, in provided conditions.
+
+ Parameters
+ ----------
+ survey_index : `List` [`int`], optional
+ A list with two elements: the survey list and the element within
+            that survey list for which the maps should be retrieved.
+ If ``None``, use the latest survey to make an addition to
+ the queue.
+ conditions : `rubin_sim.scheduler.features.conditions.Conditions`, optional # noqa W505
+ The conditions for the maps to be returned. If ``None``, use
+            the conditions associated with this scheduler. By default None.
+
+ Returns
+ -------
+ basis_funcs : `OrderedDict` ['str`, `numpy.ndarray`]
+ A dictionary of the maps, where the keys are names for the maps and
+ values are the numpy arrays as used by ``healpy``.
+ """
+ if survey_index is None:
+ survey_index = self.survey_index
+
+ if conditions is None:
+ conditions = self.conditions
+
+ maps = OrderedDict()
+ for band in conditions.skybrightness.keys():
+ maps[f"{band}_sky"] = deepcopy(conditions.skybrightness[band])
+ maps[f"{band}_sky"][maps[f"{band}_sky"] < -1e30] = np.nan
+
+ basis_functions = self.get_basis_functions(survey_index, conditions)
+
+ for basis_func_key in basis_functions.keys():
+ label = basis_functions[basis_func_key].label()
+ maps[label] = basis_functions[basis_func_key](conditions)
+
+ return maps
+
+ def __repr__(self):
+ if isinstance(
+ self.pointing2hpindx, rubin_sim.scheduler.utils.utils.hp_in_lsst_fov
+ ):
+ camera = "LSST"
+ elif isinstance(
+ self.pointing2hpindx, rubin_sim.scheduler.utils.utils.hp_in_comcam_fov
+ ):
+ camera = "comcam"
+ else:
+ camera = None
+
+ this_repr = f"""{self.__class__.__qualname__}(
+ surveys={repr(self.survey_lists)},
+ camera="{camera}",
+ nside={repr(self.nside)},
+ rotator_limits={repr(self.rotator_limits)},
+ survey_index={repr(self.survey_index)},
+ log={repr(self.log)}
+ )"""
+ return this_repr
+
+ def __str__(self):
+ if isinstance(
+ self.pointing2hpindx, rubin_sim.scheduler.utils.utils.hp_in_lsst_fov
+ ):
+ camera = "LSST"
+ elif isinstance(
+ self.pointing2hpindx, rubin_sim.scheduler.utils.utils.hp_in_comcam_fov
+ ):
+ camera = "comcam"
+ else:
+ camera = None
+
+ output = StringIO()
+ print(f"# {self.__class__.__name__} at {hex(id(self))}", file=output)
+
+ misc = pd.Series(
+ {
+ "camera": camera,
+ "nside": self.nside,
+ "rotator limits": self.rotator_limits,
+ "survey index": self.survey_index,
+ "Last chosen": str(
+ self.survey_lists[self.survey_index[0]][self.survey_index[1]]
+ ),
+ }
+ )
+ misc.name = "value"
+ print(misc.to_markdown(), file=output)
+
+ print("", file=output)
+ print("## Surveys", file=output)
+
+ if len(self.survey_lists) == 0:
+ print("Scheduler contains no surveys.", file=output)
+
+ for tier_index, tier_surveys in enumerate(self.survey_lists):
+ print(file=output)
+ print(f"### Survey list {tier_index}", file=output)
+ print(self.surveys_df(tier_index).to_markdown(), file=output)
+
+ print("", file=output)
+ print(str(self.conditions), file=output)
+
+ print("", file=output)
+ print("## Queue", file=output)
+ print(
+ pd.concat(pd.DataFrame(q) for q in self.queue)[
+ ["ID", "flush_by_mjd", "RA", "dec", "filter", "exptime", "note"]
+ ]
+ .set_index("ID")
+ .to_markdown(),
+ file=output,
+ )
+
+ result = output.getvalue()
+ return result
+
+ def _repr_markdown_(self):
+ return str(self)
+
+ def surveys_df(self, tier):
+ surveys = []
+ survey_list = self.survey_lists[tier]
+ for survey_list_elem, survey in enumerate(survey_list):
+ reward = np.max(survey.reward) if tier <= self.survey_index[0] else None
+ chosen = (tier == self.survey_index[0]) and (
+ survey_list_elem == self.survey_index[1]
+ )
+ surveys.append({"survey": str(survey), "reward": reward, "chosen": chosen})
+
+ df = pd.DataFrame(surveys).set_index("survey")
+ return df
+
+ def make_reward_df(self, conditions):
+ survey_dfs = []
+ for index0, survey_list in enumerate(self.survey_lists):
+ for index1, survey in enumerate(survey_list):
+ survey_df = survey.make_reward_df(conditions)
+ survey_df["list_index"] = index0
+ survey_df["survey_index"] = index1
+ survey_dfs.append(survey_df)
+
+ survey_df = pd.concat(survey_dfs).set_index(["list_index", "survey_index"])
+ return survey_df
+
+
+class Conditions(rubin_sim.scheduler.features.conditions.Conditions):
+ def __repr__(self):
+ return f"<{self.__class__.__name__} mjd_start='{self.mjd_start}' at {hex(id(self))}>"
+
+ def __str__(self):
+ output = StringIO()
+ print(f"{self.__class__.__qualname__} at {hex(id(self))}", file=output)
+ print("============================", file=output)
+ print("nside: ", self.nside, file=output)
+ print("site: ", self.site.name, file=output)
+ print("exptime: ", self.exptime, file=output)
+ print("lmst: ", self.lmst, file=output)
+ print("season_offset: ", self.season_offset, file=output)
+ print("sun_RA_start: ", self.sun_RA_start, file=output)
+ print("clouds: ", self.clouds, file=output)
+ print("current_filter: ", self.current_filter, file=output)
+ print("mounted_filters: ", self.mounted_filters, file=output)
+ print("night: ", self.night, file=output)
+ print("wind_speed: ", self.wind_speed, file=output)
+ print("wind_direction: ", self.wind_direction, file=output)
+ print(
+ "len(scheduled_observations): ",
+ len(self.scheduled_observations),
+ file=output,
+ )
+ print("len(queue): ", len(self.queue), file=output)
+ print("moonPhase: ", self.moonPhase, file=output)
+ print("bulk_cloud: ", self.bulk_cloud, file=output)
+ print("targets_of_opportunity: ", self.targets_of_opportunity, file=output)
+ print("season_modulo: ", self.season_modulo, file=output)
+ print("season_max_season: ", self.season_max_season, file=output)
+ print("season_length: ", self.season_length, file=output)
+ print("season_floor: ", self.season_floor, file=output)
+ print("cumulative_azimuth_rad: ", self.cumulative_azimuth_rad, file=output)
+
+ positions = [
+ {
+ "name": "sun",
+ "alt": self.sunAlt,
+ "az": self.sunAz,
+ "RA": self.sunRA,
+ "decl": self.sunDec,
+ }
+ ]
+ positions.append(
+ {
+ "name": "moon",
+ "alt": self.moonAlt,
+ "az": self.moonAz,
+ "RA": self.moonRA,
+ "decl": self.moonDec,
+ }
+ )
+ for planet_name in ("venus", "mars", "jupiter", "saturn"):
+ positions.append(
+ {
+ "name": planet_name,
+ "RA": np.asscalar(self.planet_positions[planet_name + "_RA"]),
+ "decl": np.asscalar(self.planet_positions[planet_name + "_dec"]),
+ }
+ )
+ positions.append(
+ {
+ "name": "telescope",
+ "alt": self.telAlt,
+ "az": self.telAz,
+ "RA": self.telRA,
+ "decl": self.telDec,
+ "rot": self.rotTelPos,
+ }
+ )
+ positions = pd.DataFrame(positions).set_index("name")
+ print(file=output)
+ print("Positions (radians)", file=output)
+ print("-------------------", file=output)
+ print(positions.to_markdown(), file=output)
+
+ positions_deg = np.degrees(positions)
+ print(file=output)
+ print("Positions (degrees)", file=output)
+ print("-------------------", file=output)
+ print(positions_deg.to_markdown(), file=output)
+
+ events = (
+ "mjd_start",
+ "mjd",
+ "sunset",
+ "sun_n12_setting",
+ "sun_n18_setting",
+ "sun_n18_rising",
+ "sun_n12_rising",
+ "sunrise",
+ "moonrise",
+ "moonset",
+ "sun_0_setting",
+ "sun_0_rising",
+ )
+ event_rows = []
+ for event in events:
+ mjd = getattr(self, event)
+ time = pd.to_datetime(mjd + 2400000.5, unit="D", origin="julian")
+ event_rows.append({"event": event, "MJD": mjd, "date": time})
+ event_df = pd.DataFrame(event_rows).set_index("event").sort_values(by="MJD")
+ print("", file=output)
+ print("Events", file=output)
+ print("------", file=output)
+ print(event_df.to_markdown(), file=output)
+
+ map_stats = []
+ for map_name in ("ra", "dec", "slewtime", "airmass"):
+ values = getattr(self, map_name)
+ map_stats.append(
+ {
+ "map": map_name,
+ "nside": hp.npix2nside(len(values)),
+ "min": np.nanmin(values),
+ "max": np.nanmax(values),
+ "median": np.nanmedian(values),
+ }
+ )
+
+ for base_map_name in ("skybrightness", "FWHMeff"):
+ for band in "ugrizy":
+ values = getattr(self, base_map_name)[band]
+ map_name = f"{base_map_name}_{band}"
+ map_stats.append(
+ {
+ "map": map_name,
+ "nside": hp.npix2nside(len(values)),
+ "min": np.nanmin(values),
+ "max": np.nanmax(values),
+ "median": np.nanmedian(values),
+ }
+ )
+ maps_df = pd.DataFrame(map_stats).set_index("map")
+ print("", file=output)
+ print("Maps", file=output)
+ print("----", file=output)
+ print(maps_df.to_markdown(), file=output)
+
+ result = output.getvalue()
+ return result
+
+ def _repr_markdown_(self):
+ return str(self)
+
+
+class BaseSurvey(rubin_sim.scheduler.surveys.BaseSurvey):
+ def __repr__(self):
+ return f"<{self.__class__.__name__} survey_name='{self.survey_name}' at {hex(id(self))}>"
+
+ def make_reward_df(self, conditions):
+ feasibility = []
+ accum_reward = []
+ bf_reward = []
+ bf_label = []
+ basis_functions = []
+ for basis_function in self.basis_functions:
+ basis_functions.append(basis_function)
+ test_survey = deepcopy(self)
+ test_survey.basis_functions = basis_functions
+ bf_label.append(basis_function.label())
+ bf_reward.append(np.nanmax(basis_function(conditions)))
+ feasibility.append(basis_function.check_feasibility(conditions))
+ try:
+ accum_reward.append(
+ np.nanmax(test_survey.calc_reward_function(conditions))
+ )
+ except IndexError:
+ accum_reward.append(None)
+
+ reward_df = pd.DataFrame(
+ {
+ "basis_function": bf_label,
+ "feasible": feasibility,
+ "basis_reward": bf_reward,
+ "accum_reward": accum_reward,
+ }
+ )
+ return reward_df
+
+ def reward_changes(self, conditions):
+ reward_values = []
+ basis_functions = []
+ for basis_function in self.basis_functions:
+ test_survey = deepcopy(self)
+ basis_functions.append(basis_function)
+ test_survey.basis_functions = basis_functions
+ try:
+ reward_values.append(
+ np.nanmax(test_survey.calc_reward_function(conditions))
+ )
+ except IndexError:
+ reward_values.append(None)
+
+ bf_names = [bf.__class__.__name__ for bf in self.basis_functions]
+ return list(zip(bf_names, reward_values))
+
+
+class BaseMarkovDF_survey(rubin_sim.scheduler.surveys.BaseMarkovDF_survey):
+ def make_reward_df(self, conditions):
+ feasibility = []
+ accum_reward = []
+ bf_reward = []
+ bf_label = []
+ basis_functions = []
+ basis_weights = []
+ for (weight, basis_function) in zip(self.basis_weights, self.basis_functions):
+ basis_functions.append(basis_function)
+ basis_weights.append(weight)
+ test_survey = deepcopy(self)
+ test_survey.basis_functions = basis_functions
+ test_survey.basis_weights = basis_weights
+ bf_label.append(basis_function.label())
+ bf_reward.append(np.nanmax(basis_function(conditions)))
+ feasibility.append(basis_function.check_feasibility(conditions))
+ try:
+ accum_reward.append(
+ np.nanmax(test_survey.calc_reward_function(conditions))
+ )
+ except IndexError:
+ accum_reward.append(None)
+
+ reward_df = pd.DataFrame(
+ {
+ "basis_function": bf_label,
+ "feasible": feasibility,
+ "basis_reward": bf_reward,
+ "accum_reward": accum_reward,
+ }
+ )
+ return reward_df
+
+ def reward_changes(self, conditions):
+ reward_values = []
+
+ basis_functions = []
+ basis_weights = []
+ for (weight, basis_function) in zip(self.basis_weights, self.basis_functions):
+ test_survey = deepcopy(self)
+ basis_functions.append(basis_function)
+ test_survey.basis_functions = basis_functions
+ basis_weights.append(weight)
+ test_survey.basis_weights = basis_weights
+ try:
+ reward_values.append(
+ np.nanmax(test_survey.calc_reward_function(conditions))
+ )
+ except IndexError:
+ reward_values.append(None)
+
+ bf_names = [bf.label() for bf in self.basis_functions]
+ return list(zip(bf_names, reward_values))
+
+
+class Deep_drilling_survey(rubin_sim.scheduler.surveys.Deep_drilling_survey):
+ def __repr__(self):
+ repr_start = f"<{self.__class__.__name__} survey_name='{self.survey_name}'"
+ repr_end = f", RA={self.ra}, dec={self.dec} at {hex(id(self))}>"
+ return repr_start + repr_end
+
+
+class Base_basis_function(rubin_sim.scheduler.basis_functions.Base_basis_function):
+ def label(self):
+ label = self.__class__.__name__.replace("_basis_function", "")
+ return label
+
+
+class Slewtime_basis_function(
+ rubin_sim.scheduler.basis_functions.Slewtime_basis_function
+):
+ def label(self):
+ label = f"{self.__class__.__name__.replace('_basis_function', '')} {self.maxtime} {self.filtername}"
+ return label
+
+
+rubin_sim.scheduler.basis_functions.Slewtime_basis_function.label = (
+ Slewtime_basis_function.label
+)
+rubin_sim.scheduler.basis_functions.Base_basis_function.label = (
+ Base_basis_function.label
+)
+
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.get_basis_functions = (
+ Core_scheduler.get_basis_functions
+)
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.get_healpix_maps = (
+ Core_scheduler.get_healpix_maps
+)
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.__repr__ = (
+ Core_scheduler.__repr__
+)
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.__str__ = (
+ Core_scheduler.__str__
+)
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.surveys_df = (
+ Core_scheduler.surveys_df
+)
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler.make_reward_df = (
+ Core_scheduler.make_reward_df
+)
+
+rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler._repr_markdown_ = (
+ Core_scheduler._repr_markdown_
+)
+
+rubin_sim.scheduler.surveys.BaseSurvey.__repr__ = BaseSurvey.__repr__
+
+rubin_sim.scheduler.surveys.Deep_drilling_survey.__repr__ = (
+ Deep_drilling_survey.__repr__
+)
+
+rubin_sim.scheduler.features.conditions.Conditions.__str__ = Conditions.__str__
+rubin_sim.scheduler.features.conditions.Conditions._repr_markdown_ = (
+ Conditions._repr_markdown_
+)
+
+rubin_sim.scheduler.surveys.BaseMarkovDF_survey.reward_changes = (
+ BaseMarkovDF_survey.reward_changes
+)
+rubin_sim.scheduler.surveys.BaseSurvey.reward_changes = BaseSurvey.reward_changes
+
+rubin_sim.scheduler.surveys.BaseSurvey.make_reward_df = BaseSurvey.make_reward_df
+rubin_sim.scheduler.surveys.BaseMarkovDF_survey.make_reward_df = (
+ BaseMarkovDF_survey.make_reward_df
+)
diff --git a/schedview/plot/SphereMap.py b/schedview/plot/SphereMap.py
new file mode 100644
index 00000000..c58d49e5
--- /dev/null
+++ b/schedview/plot/SphereMap.py
@@ -0,0 +1,1644 @@
+from collections import OrderedDict, namedtuple
+from collections.abc import Iterable
+from copy import deepcopy
+
+import numpy as np
+import pandas as pd
+import healpy as hp
+import bokeh
+import bokeh.plotting
+import astropy.units as u
+from astropy.coordinates import SkyCoord
+from astropy.time import Time
+import astropy.visualization
+
+from rubin_sim.utils.Site import Site
+from rubin_sim.utils import calcLmstLast
+from rubin_sim.utils import altAzPaFromRaDec, raDecFromAltAz
+from rubin_sim.utils import approx_altAz2RaDec, approx_RaDec2AltAz
+from rubin_sim.utils import ObservationMetaData
+
+from schedview.plot.readjs import read_javascript
+
+from schedview.sphere import offset_sep_bear, rotate_cart
+
+ProjSliders = namedtuple("ProjSliders", ["alt", "az", "mjd"])
+
+APPROX_COORD_TRANSFORMS = True
+
+# Angles of exactly 90 degrees result in edge conditions, e.g.
+# horizon circles with gaps depending on how trig is rounded.
+# Define an "almost 90" to get consistent behaviour.
+ALMOST_90 = np.degrees(np.arccos(0) - 2 * np.finfo(float).resolution)
+
+
+class SphereMap:
+ alt_limit = 0
+ update_js_fname = "update_map.js"
+ max_star_glyph_size = 15
+ proj_slider_keys = ["mjd"]
+ default_title = ""
+ default_graticule_line_kwargs = {"color": "darkgray"}
+ default_ecliptic_line_kwargs = {"color": "green"}
+ default_galactic_plane_line_kwargs = {"color": "blue"}
+ default_horizon_line_kwargs = {"color": "black", "line_width": 6}
+
+ def __init__(self, plot=None, laea_limit_mag=88, mjd=None, site=Site("LSST")):
+ """Base for maps of the sphere.
+
+ Parameters
+ ----------
+ plot : `bokeh.plotting.figure.Figure`, optional
+ Figure to which to add the map, by default None
+ laea_limit_mag : `float`, optional
+ Magnitude of the latitude limit for Lambert azimuthal equal area
+ plots, in degrees. By default 88.
+ mjd : `float`, optional
+ The Modified Julian Date
+ site : `rubin_sim.utils.Site.Site`, optional
+ The site of the observatory, defaults to LSST.
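+
+ Examples
+ --------
+ A minimal sketch using the `Planisphere` subclass defined below
+ (``hpvalues`` stands in for a healpix array of values in RING
+ order; it is not defined in this module)::
+
+ psphere = Planisphere(mjd=60200.2)
+ psphere.add_healpix(hpvalues, nside=32)
+ psphere.decorate()
+ psphere.show()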
+ """
+
+ self.laea_limit_mag = laea_limit_mag
+ self.mjd = Time.now().mjd if mjd is None else mjd
+ self.site = site
+
+ if plot is None:
+ self.plot = bokeh.plotting.figure(
+ plot_width=512,
+ plot_height=512,
+ match_aspect=True,
+ title=self.default_title,
+ )
+ else:
+ self.plot = plot
+
+ self.plot.axis.visible = False
+ self.plot.grid.visible = False
+
+ self.laea_proj = hp.projector.AzimuthalProj(rot=self.laea_rot, lamb=True)
+ self.laea_proj.set_flip("astro")
+ self.moll_proj = hp.projector.MollweideProj()
+ self.moll_proj.set_flip("astro")
+
+ self.figure = self.plot
+ self.add_sliders()
+
+ @property
+ def lst(self):
+ """Return the Local Sidereal Time."""
+ lst = calcLmstLast(self.mjd, self.site.longitude_rad)[1] * 360.0 / 24.0
+ return lst
+
+ @lst.setter
+ def lst(self, value):
+ """Modify the MJD to match the LST, keeping the same (UT) day."""
+ mjd_start = np.floor(self.mjd)
+ lst_start = calcLmstLast(mjd_start, self.site.longitude_rad)[1] * 360.0 / 24.0
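+ # The local sidereal time advances ~360.9856 degrees per solar day,
+ # so dividing the LST offset (in degrees) by that rate gives the
+ # elapsed fraction of a day.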
+ self.mjd = mjd_start + ((value - lst_start) % 360) / 360.9856405809225
+
+ @property
+ def update_js(self):
+ """Return javascript code to update the plots.
+
+ Returns
+ -------
+ js_code : `str`
+ Javascript code to update the bokeh model.
+ """
+ js_code = read_javascript(self.update_js_fname)
+ return js_code
+
+ @property
+ def laea_rot(self):
+ """Return the `rot` tuple to be used in the Lambert EA projection
+
+ Returns
+ -------
+ rot : `tuple` [`float`]
+ The `rot` tuple to be passed to `healpy.projector.AzimuthalProj`.
+ """
+ rot = (0, -90, 0) if self.site.latitude < 0 else (0, 90, 180)
+ return rot
+
+ @property
+ def laea_limit(self):
+ """Return the lat. furthest from the center for the LAEA projection.
+
+ Returns
+ -------
+ `limit` : `float`
+ The maximum (or minimum) value for the latitude shown in the
+ Lambert Azimuthal Equal Area plot.
+ """
+ limit = (
+ self.laea_limit_mag if self.site.latitude < 0 else -1 * self.laea_limit_mag
+ )
+ return limit
+
+ def to_orth_zenith(self, hpx, hpy, hpz):
+ """Convert healpy vector coordinates to orthographic coordinates
+
+ Parameters
+ ----------
+ hpx : `numpy.ndarray`
+ Healpy vector x coordinates
+ x=1, y=0, z=0 corresponds to R.A.=0 deg, Decl=0 deg.
+ x=-1, y=0, z=0 corresponds to R.A.=180 deg, Decl=0 deg.
+ hpy : `numpy.ndarray`
+ Healpy vector y coordinates
+ x=0, y=1, z=0 corresponds to R.A.=90 deg, Decl=0 deg.
+ x=0, y=-1, z=0 corresponds to R.A.=270 deg, Decl=0 deg.
+ hpz : `numpy.ndarray`
+ Healpy vector z coordinates
+ x=0, y=0, z=1 corresponds to Decl=90 deg.
+ x=0, y=0, z=-1 corresponds to Decl=-90 deg.
+
+ Returns
+ -------
+ x : `numpy.ndarray`
+ Orthographic x coordinate (positive to the right)
+ y : `numpy.ndarray`
+ Orthographic y coordinate (positive up)
+ z : `numpy.ndarray`
+ Orthographic z coordinate (positive toward the viewer)
+ """
+ x1, y1, z1 = rotate_cart(0, 0, 1, -90, hpx, hpy, hpz)
+ x2, y2, z2 = rotate_cart(1, 0, 0, self.site.latitude + 90, x1, y1, z1)
+
+ npole_x1, npole_y1, npole_z1 = rotate_cart(0, 0, 1, -90, 0, 0, 1)
+ npole_x2, npole_y2, npole_z2 = rotate_cart(
+ 1, 0, 0, self.site.latitude + 90, npole_x1, npole_y1, npole_z1
+ )
+ x3, y3, z3 = rotate_cart(npole_x2, npole_y2, npole_z2, -self.lst, x2, y2, z2)
+
+ # x3, y3, z3 have the center right; now rotate so that north is "up".
+ # The 2-3 transform is about the axis through the n pole, so
+ # the n pole is the same in 3 as in 2.
+
+ # Find the direction of the north pole, as the angle from the +x axis
+ # toward the +y axis
+ npole_x3, npole_y3 = npole_x2, npole_y2
+ orient = np.degrees(np.arctan2(npole_y3, npole_x3))
+
+ # To put the n pole on the y axis, we must rotate by the rest of the 90 deg
+ x4, y4, z4 = rotate_cart(0, 0, 1, 90 - orient, x3, y3, z3)
+
+ # In astronomy, we are looking out of the sphere from the center to the
+ # back (which naturally results in west to the right).
+ # Positive z is out of the screen behind us, and we are at the center,
+ # so the visible part is where z is negative (coords[2] <= 0).
+ # So, set the points with positive z to NaN so they are
+ # not shown, because they are behind the observer.
+
+ # Use np.finfo(z3[0]).resolution instead of exactly 0, because the
+ # assorted trig operations result in values slightly above or below
+ # 0 when the horizon is in principle exactly 0, and this gives an
+ # irregularly dotted/dashed appearance to the horizon if
+ # a cutoff of exactly 0 is used.
+
+ orth_invisible = z4 > np.finfo(z4.dtype).resolution
+ x4[orth_invisible] = np.nan
+ y4[orth_invisible] = np.nan
+ z4[orth_invisible] = np.nan
+
+ return x4, y4, z4
+
+ def eq_to_horizon(self, ra, decl, degrees=True, cart=True):
+ """Convert equatorial to horizon coordinates
+
+ Parameters
+ ----------
+ ra : `numpy.ndarray`
+ Values for Right Ascension
+ decl : `numpy.ndarray`
+ Values for declination
+ degrees : bool, optional
+ Values are in degrees (if False, values are in radians),
+ by default True
+ cart : bool, optional
+ Return cartesian coordinates rather than alt, az, by default True
+
+ Returns
+ -------
+ coords : `list` [`np.ndarray`]
+ Either alt, az (if cart=False) with az measured east of north,
+ or x, y with +x pointing west and +y pointing north
+
+ Azimuth is east of north
+ """
+
+ # Due to a bug in altAzPaFromRaDec, it won't
+ # work on pandas Series, so convert to a numpy
+ # array if necessary.
+ if isinstance(ra, pd.Series):
+ ra = ra.values
+
+ if isinstance(decl, pd.Series):
+ decl = decl.values
+
+ observation_metadata = ObservationMetaData(mjd=self.mjd, site=self.site)
+ if degrees:
+ ra_deg, decl_deg = ra, decl
+ else:
+ ra_deg, decl_deg = np.degrees(ra), np.degrees(decl)
+ if APPROX_COORD_TRANSFORMS:
+ alt, az = approx_RaDec2AltAz(
+ ra_deg, decl_deg, self.site.latitude, self.site.longitude, self.mjd
+ )
+ else:
+ alt, az, _ = altAzPaFromRaDec(ra_deg, decl_deg, observation_metadata)
+
+ if cart:
+ zd = np.pi / 2 - np.radians(alt)
+ x = -zd * np.sin(np.radians(az))
+ y = zd * np.cos(np.radians(az))
+
+ invisible = alt < self.alt_limit
+ x[invisible] = np.nan
+ y[invisible] = np.nan
+ return x, y
+
+ if not degrees:
+ return np.radians(alt), np.radians(az)
+
+ return alt, az
+
+ def make_healpix_data_source(self, hpvalues, nside=32, bound_step=1):
+ """Make a data source of healpix values, corners, and projected coords.
+
+ Parameters
+ ----------
+ hpvalues : `numpy.ndarray`
+ Healpixel values (RING pixel ordering)
+ nside : int, optional
+ healpixel nside for display, by default 32
+ bound_step : int, optional
+ number of boundary points for each side of each healpixel,
+ by default 1
+
+ Returns
+ -------
+ hpix_datasource : `bokeh.models.ColumnDataSource`
+ Data source for healpixel values and bounds.
+ """
+ values = np.copy(hpvalues)
+ values[np.isnan(values)] = hp.UNSEEN
+ values = hp.ud_grade(values, nside)
+ values[values == hp.UNSEEN] = np.nan
+ npix = hp.nside2npix(nside)
+ npts = npix * 4 * bound_step
+ hpids = np.arange(npix)
+ hpix_bounds_vec = hp.boundaries(nside, hpids, bound_step)
+ # Rearrange the axes to match what is used by hp.vec2ang
+ hpix_bounds_vec_long = np.moveaxis(hpix_bounds_vec, 1, 2).reshape((npts, 3))
+ ra, decl = hp.vec2ang(hpix_bounds_vec_long, lonlat=True)
+ center_ra, center_decl = hp.pix2ang(nside, hpids, lonlat=True)
+ x_hz, y_hz = self.eq_to_horizon(ra, decl)
+
+ xs, ys, zs = self.to_orth_zenith(
+ hpix_bounds_vec[:, 0, :], hpix_bounds_vec[:, 1, :], hpix_bounds_vec[:, 2, :]
+ )
+
+ x_laea, y_laea = self.laea_proj.vec2xy(hpix_bounds_vec_long.T)
+ x_moll, y_moll = self.moll_proj.vec2xy(hpix_bounds_vec_long.T)
+
+ # in hpix_bounds, each row corresponds to a healpixel, and columns
+ # contain lists where elements of the lists correspond to corners.
+ hpix_bounds = pd.DataFrame(
+ {
+ "hpid": hpids,
+ "x_hp": hpix_bounds_vec[:, 0, :].tolist(),
+ "y_hp": hpix_bounds_vec[:, 1, :].tolist(),
+ "z_hp": hpix_bounds_vec[:, 2, :].tolist(),
+ "ra": ra.reshape(npix, 4).tolist(),
+ "decl": decl.reshape(npix, 4).tolist(),
+ "x_orth": xs.tolist(),
+ "y_orth": ys.tolist(),
+ "z_orth": zs.tolist(),
+ "x_laea": x_laea.reshape(npix, 4).tolist(),
+ "y_laea": y_laea.reshape(npix, 4).tolist(),
+ "x_moll": x_moll.reshape(npix, 4).tolist(),
+ "y_moll": y_moll.reshape(npix, 4).tolist(),
+ "x_hz": x_hz.reshape(npix, 4).tolist(),
+ "y_hz": y_hz.reshape(npix, 4).tolist(),
+ }
+ )
+
+ # in hpix_corners, each row corresponds to one corner of one
+ # healpixel, identified by the hpid column.
+ explode_cols = list(set(hpix_bounds.columns) - set(["hpid"]))
+ hpix_corners = hpix_bounds.explode(column=explode_cols)
+
+ # Hide points near the discontinuity at the pole in laea
+ if self.site.latitude < 0:
+ hide_laea = hpix_corners["decl"] > self.laea_limit
+ else:
+ hide_laea = hpix_corners["decl"] < self.laea_limit
+
+ hpix_corners.loc[hide_laea, ["x_laea", "y_laea"]] = np.NaN
+
+ # Hide points near the discontinuities at ra=180 in Mollweide
+ resol = np.degrees(hp.nside2resol(nside))
+ hide_moll = np.abs(hpix_corners["ra"] - 180) < (
+ resol / np.cos(np.radians(decl))
+ )
+ hpix_corners.loc[hide_moll, ["x_moll", "y_moll"]] = np.NaN
+
+ hpix_corners.replace([np.inf, -np.inf], np.NaN, inplace=True)
+ hpix_data = hpix_corners.groupby("hpid").agg(lambda x: x.tolist())
+ hpix_data["center_ra"] = center_ra
+ hpix_data["center_decl"] = center_decl
+ hpix_data["value"] = values
+
+ values_are_finite = np.isfinite(values)
+ finite_hpix_data = hpix_data.loc[values_are_finite, :]
+ finite_hpids = hpids[values_are_finite]
+ finite_values = values[values_are_finite]
+
+ hpix = bokeh.models.ColumnDataSource(
+ {
+ "hpid": finite_hpids,
+ "value": finite_values,
+ "center_ra": finite_hpix_data["center_ra"].tolist(),
+ "center_decl": finite_hpix_data["center_decl"].tolist(),
+ "ra": finite_hpix_data["ra"].tolist(),
+ "decl": finite_hpix_data["decl"].tolist(),
+ "x_hp": finite_hpix_data["x_hp"].tolist(),
+ "y_hp": finite_hpix_data["y_hp"].tolist(),
+ "z_hp": finite_hpix_data["z_hp"].tolist(),
+ "x_orth": finite_hpix_data["x_orth"].tolist(),
+ "y_orth": finite_hpix_data["y_orth"].tolist(),
+ "z_orth": finite_hpix_data["z_orth"].tolist(),
+ "x_laea": finite_hpix_data["x_laea"].tolist(),
+ "y_laea": finite_hpix_data["y_laea"].tolist(),
+ "x_moll": finite_hpix_data["x_moll"].tolist(),
+ "y_moll": finite_hpix_data["y_moll"].tolist(),
+ "x_hz": finite_hpix_data["x_hz"].tolist(),
+ "y_hz": finite_hpix_data["y_hz"].tolist(),
+ }
+ )
+
+ return hpix
+
+ def make_graticule_points(
+ self,
+ min_decl=-80,
+ max_decl=80,
+ decl_space=20,
+ min_ra=0,
+ max_ra=360,
+ ra_space=30,
+ step=1,
+ ):
+ """Create points that define graticules
+
+ Parameters
+ ----------
+ min_decl : int, optional
+ Decl. of the lowest declination graticule, and the lower
+ ends of the R.A. graticules (deg), by default -80
+ max_decl : int, optional
+ Decl. of the highest declination graticule, and the upper
+ ends of the R.A. graticules (deg), by default 80
+ decl_space : int, optional
+ Spacing of decl. graticules (deg), by default 20
+ min_ra : int, optional
+ R.A. of first R.A. graticule (deg), by default 0
+ max_ra : int, optional
+ R.A. of last R.A. graticule (deg), by default 360
+ ra_space : int, optional
+ Spacing of R.A. graticules (deg), by default 30
+ step : int, optional
+ Spacing of points along graticules, by default 1
+
+ Returns
+ -------
+ graticule_points : `bokeh.models.ColumnDataSource`
+ Bokeh data sources defining points in graticules.
+ """
+
+ # Bokeh puts gaps in lines when there are NaNs in the
+ # data frame. We will be plotting many graticules, and we do not
+ # want them connected, so define a "stop" element to separate
+ # different graticules.
+ stop_df = pd.DataFrame(
+ {
+ "decl": [np.nan],
+ "ra": [np.nan],
+ "grat": None,
+ "x_orth": [np.nan],
+ "y_orth": [np.nan],
+ "z_orth": [np.nan],
+ "x_laea": [np.nan],
+ "y_laea": [np.nan],
+ "x_moll": [np.nan],
+ "y_moll": [np.nan],
+ "x_hz": [np.nan],
+ "y_hz": [np.nan],
+ }
+ )
+ graticule_list = []
+
+ # Define points in each declination graticule
+ for decl in np.arange(min_decl, max_decl + decl_space, decl_space):
+ ra_steps = np.arange(0, 360 + step)
+ this_graticule = pd.DataFrame(
+ {
+ "grat": f"decl{decl}",
+ "decl": decl,
+ "ra": ra_steps,
+ "x_hp": np.nan,
+ "y_hp": np.nan,
+ "z_hp": np.nan,
+ "x_orth": np.nan,
+ "y_orth": np.nan,
+ "z_orth": np.nan,
+ "x_laea": np.nan,
+ "y_laea": np.nan,
+ "x_moll": np.nan,
+ "y_moll": np.nan,
+ "x_hz": np.nan,
+ "y_hz": np.nan,
+ }
+ )
+ this_graticule.loc[:, ["x_hp", "y_hp", "z_hp"]] = hp.ang2vec(
+ this_graticule.ra, this_graticule.decl, lonlat=True
+ )
+ xs, ys, zs = self.to_orth_zenith(
+ this_graticule.loc[:, "x_hp"],
+ this_graticule.loc[:, "y_hp"],
+ this_graticule.loc[:, "z_hp"],
+ )
+ this_graticule.loc[:, "x_orth"] = xs
+ this_graticule.loc[:, "y_orth"] = ys
+ this_graticule.loc[:, "z_orth"] = zs
+
+ x_laea, y_laea = self.laea_proj.ang2xy(
+ this_graticule["ra"], this_graticule["decl"], lonlat=True
+ )
+ this_graticule.loc[:, "x_laea"] = x_laea
+ this_graticule.loc[:, "y_laea"] = y_laea
+
+ x_moll, y_moll = self.moll_proj.ang2xy(
+ this_graticule["ra"], this_graticule["decl"], lonlat=True
+ )
+ this_graticule.loc[:, "x_moll"] = x_moll
+ this_graticule.loc[:, "y_moll"] = y_moll
+
+ x_hz, y_hz = self.eq_to_horizon(
+ this_graticule["ra"].values, this_graticule["decl"].values
+ )
+ this_graticule.loc[:, "x_hz"] = x_hz
+ this_graticule.loc[:, "y_hz"] = y_hz
+
+ graticule_list.append(this_graticule)
+ graticule_list.append(stop_df)
+
+ # Define points in each R.A. graticule
+ for ra in np.arange(min_ra, max_ra + step, ra_space):
+ decl_steps = np.arange(min_decl, max_decl + step, step)
+ this_graticule = pd.DataFrame(
+ {
+ "grat": f"ra{ra}",
+ "decl": decl_steps,
+ "ra": ra,
+ "x_hp": np.nan,
+ "y_hp": np.nan,
+ "z_hp": np.nan,
+ "x_orth": np.nan,
+ "y_orth": np.nan,
+ "z_orth": np.nan,
+ "x_laea": np.nan,
+ "y_laea": np.nan,
+ "x_moll": np.nan,
+ "y_moll": np.nan,
+ "x_hz": np.nan,
+ "y_hz": np.nan,
+ }
+ )
+ this_graticule.loc[:, ["x_hp", "y_hp", "z_hp"]] = hp.ang2vec(
+ this_graticule.ra, this_graticule.decl, lonlat=True
+ )
+ xs, ys, zs = self.to_orth_zenith(
+ this_graticule.loc[:, "x_hp"],
+ this_graticule.loc[:, "y_hp"],
+ this_graticule.loc[:, "z_hp"],
+ )
+ this_graticule.loc[:, "x_orth"] = xs
+ this_graticule.loc[:, "y_orth"] = ys
+ this_graticule.loc[:, "z_orth"] = zs
+
+ x_laea, y_laea = self.laea_proj.ang2xy(
+ this_graticule["ra"], this_graticule["decl"], lonlat=True
+ )
+ this_graticule.loc[:, "x_laea"] = x_laea
+ this_graticule.loc[:, "y_laea"] = y_laea
+
+ x_moll, y_moll = self.moll_proj.ang2xy(
+ this_graticule["ra"], this_graticule["decl"], lonlat=True
+ )
+ this_graticule.loc[:, "x_moll"] = x_moll
+ this_graticule.loc[:, "y_moll"] = y_moll
+
+ x_hz, y_hz = self.eq_to_horizon(
+ this_graticule["ra"].values, this_graticule["decl"].values
+ )
+ this_graticule.loc[:, "x_hz"] = x_hz
+ this_graticule.loc[:, "y_hz"] = y_hz
+
+ graticule_list.append(this_graticule)
+ graticule_list.append(stop_df)
+
+ graticule_points = bokeh.models.ColumnDataSource(pd.concat(graticule_list))
+ return graticule_points
+
+ def make_horizon_graticule_points(
+ self,
+ min_alt=0,
+ max_alt=80,
+ alt_space=20,
+ min_az=0,
+ max_az=360,
+ az_space=30,
+ step=1,
+ ):
+ """Create points that define graticules
+
+ Parameters
+ ----------
+ min_alt : int, optional
+ Alt. of the lowest altitude graticule, and the lower
+ ends of the azimuth graticules (deg), by default 0
+ max_alt : int, optional
+ Alt. of the highest altitude graticule, and the upper
+ ends of the azimuth graticules (deg), by default 80
+ alt_space : int, optional
+ Spacing of alt. graticules (deg), by default 20
+ min_az : int, optional
+ Az of first azimuth graticule (deg), by default 0
+ max_az : int, optional
+ Az of last azimuth graticule (deg), by default 360
+ az_space : int, optional
+ Spacing of azimuth graticules (deg), by default 30
+ step : int, optional
+ Spacing of points along graticules, by default 1
+
+ Returns
+ -------
+ graticule_points : `bokeh.models.ColumnDataSource`
+ Bokeh data sources defining points in graticules.
+ """
+
+ # Bokeh puts gaps in lines when there are NaNs in the
+ # data frame. We will be plotting many graticules, and we do not
+ # want them connected, so define a "stop" element to separate
+ # different graticules.
+ stop_df = pd.DataFrame(
+ {
+ "decl": [np.nan],
+ "ra": [np.nan],
+ "alt": [np.nan],
+ "az": [np.nan],
+ "grat": None,
+ "x_orth": [np.nan],
+ "y_orth": [np.nan],
+ "z_orth": [np.nan],
+ "x_laea": [np.nan],
+ "y_laea": [np.nan],
+ "x_moll": [np.nan],
+ "y_moll": [np.nan],
+ "x_hz": [np.nan],
+ "y_hz": [np.nan],
+ }
+ )
+ graticule_list = []
+
+ # Define points in each alt graticule
+ for alt in np.arange(min_alt, max_alt + alt_space, alt_space):
+ radius = 90 - alt
+ start_bear = 0
+ end_bear = 360 + step
+ this_graticule = pd.DataFrame(
+ self.make_horizon_circle_points(
+ 90, 0, radius, start_bear, end_bear, step
+ ).data
+ )
+ this_graticule["grat"] = f"Alt{alt}"
+
+ graticule_list.append(this_graticule)
+ graticule_list.append(stop_df)
+
+ for az in np.arange(min_az, max_az + step, az_space):
+ radius = 90
+ this_graticule = pd.DataFrame(
+ self.make_horizon_circle_points(
+ 0, az + 90, radius, 0, 360 + step, step
+ ).data
+ )
+ this_graticule.query(
+ f"(alt > {min_alt}) and (alt <= {max_alt}) and (abs(az-{az}) < 1)",
+ inplace=True,
+ )
+ this_graticule.sort_values(by="alt", inplace=True)
+ this_graticule["grat"] = f"Az{az}"
+
+ graticule_list.append(this_graticule)
+ graticule_list.append(stop_df)
+
+ graticule_points = bokeh.models.ColumnDataSource(pd.concat(graticule_list))
+ return graticule_points
+
+ def make_circle_points(
+ self,
+ center_ra,
+ center_decl,
+ radius=90.0,
+ start_bear=0,
+ end_bear=360,
+ step=1,
+ ):
+ """Create points along a circle or arc on a sphere
+
+ Parameters
+ ----------
+ center_ra : `float`
+ R.A. of the center of the circle (deg.).
+ center_decl : `float`
+ Decl. of the center of the circle (deg.).
+ radius : float, optional
+ Radius of the circle (deg.), by default 90.0
+ start_bear : int, optional
+ Bearing (E. of N.) of the start of the circle (deg.), by default 0
+ end_bear : int, optional
+ Bearing (E. of N.) of the end of the circle (deg.), by default 360
+ step : int, optional
+ Spacing of the points along the circle (deg.), by default 1
+
+ Returns
+ -------
+ circle : `bokeh.models.ColumnDataSource`
+ Bokeh data source for points in the circle.
+ """
+ ras = []
+ decls = []
+ bearings = []
+ for bearing in range(start_bear, end_bear + step, step):
+ ra, decl = offset_sep_bear(
+ np.radians(center_ra),
+ np.radians(center_decl),
+ np.radians(radius),
+ np.radians(bearing),
+ )
+ ras.append(np.degrees(ra))
+ decls.append(np.degrees(decl))
+ bearings.append(bearing)
+
+ x0s, y0s, z0s = hp.ang2vec(np.array(ras), np.array(decls), lonlat=True).T
+ xs, ys, zs = self.to_orth_zenith(x0s, y0s, z0s)
+
+ x_laea, y_laea = self.laea_proj.ang2xy(
+ np.array(ras), np.array(decls), lonlat=True
+ )
+ x_moll, y_moll = self.moll_proj.ang2xy(
+ np.array(ras), np.array(decls), lonlat=True
+ )
+ x_hz, y_hz = self.eq_to_horizon(np.array(ras), np.array(decls))
+
+ # Hide discontinuities
+ if self.site.latitude < 0:
+ laea_discont = np.array(decls) > self.laea_limit
+ else:
+ laea_discont = np.array(decls) < self.laea_limit
+ x_laea[laea_discont] = np.nan
+ y_laea[laea_discont] = np.nan
+
+ moll_discont = np.abs(np.array(ras) - 180) < step
+ x_moll[moll_discont] = np.nan
+ y_moll[moll_discont] = np.nan
+
+ circle = bokeh.models.ColumnDataSource(
+ data={
+ "bearing": bearings,
+ "ra": ras,
+ "decl": decls,
+ "x_hp": x0s.tolist(),
+ "y_hp": y0s.tolist(),
+ "z_hp": z0s.tolist(),
+ "x_orth": xs.tolist(),
+ "y_orth": ys.tolist(),
+ "z_orth": zs.tolist(),
+ "x_laea": x_laea.tolist(),
+ "y_laea": y_laea.tolist(),
+ "x_moll": x_moll.tolist(),
+ "y_moll": y_moll.tolist(),
+ "x_hz": x_hz.tolist(),
+ "y_hz": y_hz.tolist(),
+ }
+ )
+
+ return circle
+
+ def make_horizon_circle_points(
+ self, alt=ALMOST_90, az=0, radius=90.0, start_bear=0, end_bear=360, step=1
+ ):
+ """Define points in a circle with the center defined in horizon coords.
+
+ Parameters
+ ----------
+ alt : `float`
+ Altitude of circle center, by default 90.
+ az : `float`
+ Azimuth of circle center, by default 0.
+ radius : `float`, optional
+ Radius of the circle (deg.), by default 90.0
+ start_bear : int, optional
+ Bearing of the start of the circle (deg.), by default 0
+ end_bear : int, optional
+ Bearing of the end of the circle (deg.), by default 360
+ step : int, optional
+ Spacing of points along the circle (deg.), by default 1
+
+ Returns
+ -------
+ circle : `bokeh.models.ColumnDataSource`
+ Bokeh data source with points along the circle.
+ """
+ observation_metadata = ObservationMetaData(mjd=self.mjd, site=self.site)
+ if APPROX_COORD_TRANSFORMS:
+ center_ra_arr, center_decl_arr = approx_altAz2RaDec(
+ np.array([alt]),
+ np.array([az]),
+ self.site.latitude,
+ self.site.longitude,
+ self.mjd,
+ )
+ center_ra = center_ra_arr[0]
+ center_decl = center_decl_arr[0]
+ else:
+ center_ra, center_decl = raDecFromAltAz(alt, az, observation_metadata)
+
+ eq_circle_points = self.make_circle_points(
+ center_ra, center_decl, radius, start_bear, end_bear, step
+ )
+ ra = np.array(eq_circle_points.data["ra"])
+ decl = np.array(eq_circle_points.data["decl"])
+ alt, az = self.eq_to_horizon(ra, decl, degrees=True, cart=False)
+
+ circle_data = dict(eq_circle_points.data)
+ circle_data["alt"] = alt.tolist()
+ circle_data["az"] = az.tolist()
+
+ circle = bokeh.models.ColumnDataSource(data=circle_data)
+
+ return circle
+
+ def make_points(self, points_data):
+ """Create a bokes data source with locations of points on a sphere.
+
+ Parameters
+ ----------
+ points_data : `Iterable` , `dict` , or `pandas.DataFrame`
+ A source of data (to be passed to `pandas.DataFrame`)
+ Must contain the following columns or keys:
+
+ ``"ra"``
+ The Right Ascension in degrees.
+ ``"decl"``
+ The declination in degrees.
+
+ Returns
+ -------
+ point : `bokeh.models.ColumnDataSource`
+ A data source with point locations, including projected coords.
+ """
+
+ points_df = pd.DataFrame(points_data)
+ x0s, y0s, z0s = hp.ang2vec(points_df.ra, points_df.decl, lonlat=True).T
+ xs, ys, zs = self.to_orth_zenith(x0s, y0s, z0s)
+
+ x_laea, y_laea = self.laea_proj.ang2xy(
+ points_df.ra, points_df.decl, lonlat=True
+ )
+ x_moll, y_moll = self.moll_proj.ang2xy(
+ points_df.ra, points_df.decl, lonlat=True
+ )
+ x_hz, y_hz = self.eq_to_horizon(
+ points_df.ra.values, points_df.decl.values, degrees=True, cart=True
+ )
+
+ # If points_df.ra and points_df.decl have only one value, ang2xy returns
+ # scalars (or 0d arrays) not 1d arrays, but
+ # bokeh.models.ColumnDataSource requires that column values
+ # be python Sequences. So force results of ang2xy to be 1d arrays,
+ # even when healpy returns 0d arrays.
+ x_laea = x_laea.reshape(x_laea.size)
+ y_laea = y_laea.reshape(y_laea.size)
+ x_moll = x_moll.reshape(x_moll.size)
+ y_moll = y_moll.reshape(y_moll.size)
+ x_hz = x_hz.reshape(x_hz.size)
+ y_hz = y_hz.reshape(y_hz.size)
+
+ alt, az = self.eq_to_horizon(
+ points_df.ra, points_df.decl, degrees=True, cart=False
+ )
+ invisible = alt < -1 * np.finfo(float).resolution
+ x_hz[invisible] = np.nan
+ y_hz[invisible] = np.nan
+
+ data = {
+ "name": points_df.name,
+ "ra": points_df.ra.tolist(),
+ "decl": points_df.decl.tolist(),
+ "x_hp": x0s.tolist(),
+ "y_hp": y0s.tolist(),
+ "z_hp": z0s.tolist(),
+ "x_orth": xs.tolist(),
+ "y_orth": ys.tolist(),
+ "z_orth": zs.tolist(),
+ "x_laea": x_laea.tolist(),
+ "y_laea": y_laea.tolist(),
+ "x_moll": x_moll.tolist(),
+ "y_moll": y_moll.tolist(),
+ "x_hz": x_hz.tolist(),
+ "y_hz": y_hz.tolist(),
+ "glyph_size": points_df.glyph_size.tolist(),
+ }
+
+ # Add any additional data provided
+ for column_name in points_df.columns:
+ if column_name not in data.keys():
+ data[column_name] = points_df[column_name].to_list()
+
+ points = bokeh.models.ColumnDataSource(data=data)
+
+ return points
+
+ def make_marker_data_source(
+ self,
+ ra=None,
+ decl=None,
+ name="anonymous",
+ glyph_size=5,
+ min_mjd=None,
+ max_mjd=None,
+ ):
+ """Add one or more circular marker(s) to the map.
+
+ Parameters
+ ----------
+ ra : `float` or `Iterable`, optional
+ R.A. of the marker (deg.), by default None
+ decl : `float` or `Iterable`, optional
+ Declination of the marker (deg.), by default None
+ name : `str` or `Iterable` , optional
+ Name for the thing marked, by default "anonymous"
+ glyph_size : `int` or `Iterable`, optional
+ Size of the marker, by default 5
+ min_mjd : `float`
+ Earliest time for which to show the marker
+ max_mjd : `float`
+ Latest time for which to show the marker
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ A data source with marker locations, including projected coords.
+ """
+ ras = ra if isinstance(ra, Iterable) else [ra]
+ decls = decl if isinstance(decl, Iterable) else [decl]
+ if len(ras) > 0:
+ glyph_sizes = (
+ glyph_size
+ if isinstance(glyph_size, Iterable)
+ else [glyph_size] * len(ras)
+ )
+ names = [name] * len(ras) if isinstance(name, str) else name
+ else:
+ glyph_sizes = np.array([])
+ names = np.array([])
+
+ data = {
+ "ra": ras,
+ "decl": decls,
+ "name": names,
+ "glyph_size": glyph_sizes,
+ }
+
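+ # Flag markers whose [min_mjd, max_mjd] window excludes the current
+ # MJD.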
+ if (min_mjd is not None) or (max_mjd is not None):
+ if len(ras) == 0:
+ data["in_mjd_window"] = np.array([])
+ else:
+ data["in_mjd_window"] = [1] * len(ras)
+
+ if min_mjd is not None:
+ if not isinstance(min_mjd, Iterable):
+ min_mjd = [min_mjd]
+ if len(ras) < 1:
+ min_mjd = np.array([])
+
+ data["min_mjd"] = min_mjd
+
+ for marker_index, this_min_mjd in enumerate(min_mjd):
+ if self.mjd < this_min_mjd:
+ data["in_mjd_window"][marker_index] = 0
+
+ if max_mjd is not None:
+ if not isinstance(max_mjd, Iterable):
+ max_mjd = [max_mjd]
+ if len(ras) < 1:
+ max_mjd = np.array([])
+
+ data["max_mjd"] = max_mjd
+
+ for marker_index, this_max_mjd in enumerate(max_mjd):
+ if self.mjd > this_max_mjd:
+ data["in_mjd_window"][marker_index] = 0
+
+ data_source = self.make_points(data)
+
+ return data_source
+
+ def add_sliders(self):
+ """Add (already defined) sliders to the map."""
+ self.sliders = OrderedDict()
+
+ def add_mjd_slider(self):
+ """Add a slider to control the MJD."""
+ if "mjd" not in self.sliders:
+ self.sliders["mjd"] = bokeh.models.Slider(
+ start=self.mjd - 1,
+ end=self.mjd + 1,
+ value=self.mjd,
+ step=1.0 / (24 * 12),
+ title="MJD",
+ )
+
+ self.figure = bokeh.layouts.column(self.plot, *self.sliders.values())
+
+ def set_js_update_func(self, data_source):
+ """Set the javascript update functions for each slider
+
+ Parameters
+ ----------
+ data_source : `bokeh.models.ColumnDataSource`
+ The bokeh data source to update.
+ """
+ update_func = bokeh.models.CustomJS(
+ args=dict(
+ data_source=data_source,
+ center_alt_slider={"value": 90},
+ center_az_slider={"value": 0},
+ mjd_slider=self.sliders["mjd"],
+ lat=self.site.latitude,
+ lon=self.site.longitude,
+ ),
+ code=self.update_js,
+ )
+
+ for proj_slider_key in self.proj_slider_keys:
+ try:
+ self.sliders[proj_slider_key].js_on_change("value", update_func)
+ except KeyError:
+ pass
+
+ def show(self):
+ """Show the map."""
+ bokeh.io.show(self.figure)
+
+ def add_healpix(self, data, cmap=None, nside=16, bound_step=1):
+ """Add healpix values to the map
+
+ Parameters
+ ----------
+ data : `numpy.ndarray`
+ Healpixel values (RING pixel ordering)
+ cmap : `bokeh.core.properties.ColorSpec`, optional
+ Color map for the healpix values. If None, a linear zscale
+ color map is generated from the data. By default None.
+ nside : `int`, optional
+ Healpix nside to use for display, by default 16
+ bound_step : `int`, optional
+ number of boundary points for each side of each healpixel,
+ by default 1
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ The data source with the healpix values and bounds.
+ cmap : `bokeh.core.properties.ColorSpec`
+ The color map used
+ hp_glyph : `bokeh.models.glyphs.Patches`
+ The bokeh glyphs for the plotted patches.
+ """
+ if isinstance(data, bokeh.models.DataSource):
+ data_source = data
+ else:
+ data_source = self.make_healpix_data_source(data, nside, bound_step)
+
+ self.healpix_data = data_source
+
+ if cmap is None:
+ cmap = make_zscale_linear_cmap(data_source.data["value"])
+
+ self.healpix_cmap = cmap
+
+ hpgr = self.plot.patches(
+ xs=self.x_col,
+ ys=self.y_col,
+ fill_color=cmap,
+ line_color=cmap,
+ source=data_source,
+ )
+
+ self.healpix_glyph = hpgr.glyph
+ self.healpix_renderer = hpgr
+
+ hp_glyph = hpgr.glyph
+
+ return data_source, cmap, hp_glyph
+
+ def add_graticules(self, graticule_kwargs={}, line_kwargs={}):
+ """Add graticules to the map
+
+ Parameters
+ ----------
+ graticule_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_graticule_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ graticules : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the graticules.
+ """
+ graticule_points = self.make_graticule_points(**graticule_kwargs)
+ kwargs = deepcopy(self.default_graticule_line_kwargs)
+ kwargs.update(line_kwargs)
+ self.plot.line(x=self.x_col, y=self.y_col, source=graticule_points, **kwargs)
+ return graticule_points
+
+ def add_horizon_graticules(self, graticule_kwargs={}, line_kwargs={}):
+ """Add graticules to the map
+
+ Parameters
+ ----------
+ graticule_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_graticule_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ graticules : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the graticules.
+ """
+ graticule_points = self.make_horizon_graticule_points(**graticule_kwargs)
+ kwargs = deepcopy(self.default_graticule_line_kwargs)
+ kwargs.update(line_kwargs)
+ self.plot.line(x=self.x_col, y=self.y_col, source=graticule_points, **kwargs)
+ return graticule_points
+
+ def add_circle(self, center_ra, center_decl, circle_kwargs={}, line_kwargs={}):
+ """Draw a circle on the map.
+
+ Parameters
+ ----------
+ center_ra : `float`
+ R.A. of the center of the circle (deg.)
+ center_decl : `float`
+ Decl. of the center of the circle (deg.)
+ circle_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_circle_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ circle_points : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the circle.
+ """
+ circle_points = self.make_circle_points(center_ra, center_decl, **circle_kwargs)
+ self.plot.line(x=self.x_col, y=self.y_col, source=circle_points, **line_kwargs)
+ return circle_points
+
+ def add_horizon(
+ self, zd=ALMOST_90, data_source=None, circle_kwargs={}, line_kwargs={}
+ ):
+ """Add a circle parallel to the horizon.
+
+ Parameters
+ ----------
+ zd : int, optional
+ Zenith distance of the circle (deg), by default (almost) 90
+ data_source : `bokeh.models.ColumnDataSource`, optional
+ Bokeh data source for points on the circle,
+ None if they should be generated.
+ By default, None
+ circle_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_circle_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ circle_points : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the circle.
+ """
+ if data_source is None:
+ circle_points = self.make_horizon_circle_points(
+ 90, 0, radius=zd, **circle_kwargs
+ )
+ if "mjd" in self.sliders:
+ self.set_js_update_func(circle_points)
+ else:
+ circle_points = data_source
+
+ kwargs = deepcopy(self.default_horizon_line_kwargs)
+ kwargs.update(line_kwargs)
+ self.plot.line(x=self.x_col, y=self.y_col, source=circle_points, **kwargs)
+ return circle_points
+
+ def add_marker(
+ self,
+ ra=None,
+ decl=None,
+ name="anonymous",
+ glyph_size=5,
+ min_mjd=None,
+ max_mjd=None,
+ data_source=None,
+ circle_kwargs={},
+ ):
+ """Add one or more circular marker(s) to the map.
+
+ Parameters
+ ----------
+ ra : `float` or `Iterable`, optional
+ R.A. of the marker (deg.), by default None
+ decl : `float` or `Iterable`, optional
+ Declination of the marker (deg.), by default None
+ name : `str` or `Iterable` , optional
+ Name for the thing marked, by default "anonymous"
+ glyph_size : `int` or `Iterable`, optional
+ Size of the marker, by default 5
+ min_mjd : `float` or `Iterable`, optional
+ Earliest time for which to show the marker.
+ max_mjd : `float` or `Iterable`, optional
+ Latest time for which to show the marker.
+ data_source : `bokeh.models.ColumnDataSource`, optional
+ Data source for the marker, None if a new one is to be generated.
+ By default, None
+ circle_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.circle``,
+ by default {}
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ A data source with marker locations, including projected coords.
+ """
+ if data_source is None:
+ data_source = self.make_marker_data_source(
+ ra, decl, name, glyph_size, min_mjd, max_mjd
+ )
+
+ self.plot.circle(
+ x=self.x_col,
+ y=self.y_col,
+ size="glyph_size",
+ source=data_source,
+ **circle_kwargs,
+ )
+
+ return data_source
+
+ def add_stars(
+ self, points_data, data_source=None, mag_limit_slider=False, star_kwargs={}
+ ):
+ """Add stars to the map
+
+ Parameters
+ ----------
+ points_data : `Iterable` , `dict` , or `pandas.DataFrame`
+ A source of data (anything that can be passed to
+ `pandas.DataFrame`)
+ Must contain the following columns or keys:
+
+ ``"ra"``
+ The Right Ascension in degrees.
+ ``"decl"``
+ The declination in degrees.
+ data_source : `bokeh.models.ColumnDataSource`, optional
+ The bokeh data source to use (None to generate a new one).
+ By default, None.
+ mag_limit_slider : `bool` , optional
+ Generate a slider limiting the magnitude of stars to plot,
+ by default False
+ star_kwargs : `dict` , optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.star``,
+ by default {}
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining star locations.
+ """
+ self.star_data = points_data
+ if data_source is None:
+ self.star_data_source = self.make_points(self.star_data)
+ else:
+ self.star_data_source = data_source
+
+ self.plot.star(
+ x=self.x_col,
+ y=self.y_col,
+ size="glyph_size",
+ source=self.star_data_source,
+ **star_kwargs,
+ )
+
+ if mag_limit_slider:
+ mag_slider = bokeh.models.Slider(
+ start=0,
+ end=6.5,
+ value=3,
+ step=0.5,
+ title="Magnitude limit for bright stars",
+ )
+ mag_slider.on_change("value", self.limit_stars)
+
+ self.sliders["mag_limit"] = mag_slider
+
+ self.figure = bokeh.layouts.column(self.plot, *self.sliders.values())
+
+ return self.star_data_source
+
+ def limit_stars(self, attr, old_limit, mag_limit):
+ """Apply a magnitude limit to mapped stars
+
+ Parameters
+ ----------
+ attr : `str`
+ Attribute of the slider to use (ignored)
+ old_limit : `float`
+ Old value for the magnitude limit (ignored)
+ mag_limit : `float`
+ New value for the magnitude limit
+
+ Note
+ ----
+ This method is intended to be called as a callback by bokeh.
+ """
+ star_data = self.star_data.query(f"Vmag < {mag_limit}").copy()
+ star_data.loc[:, "glyph_size"] = (
+ self.max_star_glyph_size
+ - (self.max_star_glyph_size / mag_limit) * star_data["Vmag"]
+ )
+ stars = self.make_points(star_data)
+ self.star_data_source.data = dict(stars.data)
+
+ def add_ecliptic(self, **kwargs):
+ """Map the ecliptic.
+
+ Returns
+ -------
+ points : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points on the ecliptic.
+ """
+ ecliptic_pole = SkyCoord(
+ lon=0 * u.degree, lat=90 * u.degree, frame="geocentricmeanecliptic"
+ ).icrs
+ line_kwargs = deepcopy(self.default_ecliptic_line_kwargs)
+ line_kwargs.update(kwargs)
+ points = self.add_circle(
+ ecliptic_pole.ra.deg, ecliptic_pole.dec.deg, line_kwargs=line_kwargs
+ )
+ return points
+
+ def add_galactic_plane(self, **kwargs):
+ """Map the galactic plane
+
+ Returns
+ -------
+ points : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points on the galactic plane.
+ """
+ galactic_pole = SkyCoord(l=0 * u.degree, b=90 * u.degree, frame="galactic").icrs
+ line_kwargs = deepcopy(self.default_galactic_plane_line_kwargs)
+ line_kwargs.update(kwargs)
+ points = self.add_circle(
+ galactic_pole.ra.deg, galactic_pole.dec.deg, line_kwargs=line_kwargs
+ )
+ return points
+
+ def decorate(self):
+ """Add graticules, the ecliptic, and galactic plane to the map."""
+ self.add_graticules()
+ self.add_ecliptic()
+ self.add_galactic_plane()
+
+
+class Planisphere(SphereMap):
+ x_col = "x_laea"
+ y_col = "y_laea"
+ default_title = "Planisphere"
+
+
+class MollweideMap(SphereMap):
+ x_col = "x_moll"
+ y_col = "y_moll"
+ default_title = "Mollweide"
+
+
+class MovingSphereMap(SphereMap):
+ def add_healpix(self, data, cmap=None, nside=16, bound_step=1):
+ """Add healpix values to the map
+
+ Parameters
+ ----------
+ data : `numpy.ndarray`
+ Healpixel values (RING pixel ordering)
+ cmap : `bokeh.core.properties.ColorSpec`, optional
+ Color map for the healpix values. If None, a linear zscale
+ color map is generated from the data. By default None.
+ nside : `int`, optional
+ Healpix nside to use for display, by default 16
+ bound_step : `int`, optional
+ number of boundary points for each side of each healpixel,
+ by default 1
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ The data source with the healpix values and bounds.
+ cmap : `bokeh.core.properties.ColorSpec`
+ The color map used
+ hp_glyph : `bokeh.models.glyphs.Patches`
+ The bokeh glyphs for the plotted patches.
+ """
+ data_source, cmap, hp_glyph = super().add_healpix(data, cmap, nside, bound_step)
+ self.set_js_update_func(data_source)
+ return data_source, cmap, hp_glyph
+
+ def add_graticules(self, graticule_kwargs={}, line_kwargs={}):
+ """Add graticules to the map
+
+ Parameters
+ ----------
+ graticule_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_graticule_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ graticules : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the graticules.
+ """
+ data_source = super().add_graticules(graticule_kwargs, line_kwargs)
+ self.set_js_update_func(data_source)
+ return data_source
+
+ def add_circle(self, center_ra, center_decl, circle_kwargs={}, line_kwargs={}):
+ """Draw a circle on the map.
+
+ Parameters
+ ----------
+ center_ra : `float`
+ R.A. of the center of the circle (deg.)
+ center_decl : `float`
+ Decl. of the center of the circle (deg.)
+ circle_kwargs : dict, optional
+ Keywords to be passed to ``SphereMap.make_circle_points``,
+ by default {}
+ line_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.line``,
+ by default {}
+
+ Returns
+ -------
+ circle_points : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining the circle.
+ """
+ data_source = super().add_circle(
+ center_ra, center_decl, circle_kwargs, line_kwargs
+ )
+ self.set_js_update_func(data_source)
+ return data_source
+
+ def add_stars(
+ self, points_data, data_source=None, mag_limit_slider=False, star_kwargs={}
+ ):
+ """Add stars to the map
+
+ Parameters
+ ----------
+ points_data : `Iterable` , `dict` , or `pandas.DataFrame`
+ A source of data (anything that can be passed to
+ `pandas.DataFrame`)
+ Must contain the following columns or keys:
+
+ ``"ra"``
+ The Right Ascension in degrees.
+ ``"decl"``
+ The declination in degrees.
+ data_source : `bokeh.models.ColumnDataSource`, optional
+ The bokeh data source to use (None to generate a new one).
+ By default, None.
+ mag_limit_slider : `bool` , optional
+ Generate a slider limiting the magnitude of stars to plot,
+ by default False
+ star_kwargs : `dict` , optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.star``,
+ by default {}
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ The bokeh data source with points defining star locations.
+ """
+ data_source = super().add_stars(
+ points_data, data_source, mag_limit_slider, star_kwargs
+ )
+ self.set_js_update_func(data_source)
+ return data_source
+
+ def add_marker(
+ self,
+ ra=None,
+ decl=None,
+ name="anonymous",
+ glyph_size=5,
+ min_mjd=None,
+ max_mjd=None,
+ data_source=None,
+ circle_kwargs={},
+ ):
+ """Add one or more circular marker(s) to the map.
+
+ Parameters
+ ----------
+ ra : `float` or `Iterable`, optional
+ R.A. of the marker (deg.), by default None
+ decl : `float` or `Iterable`, optional
+ Declination of the marker (deg.), by default None
+ name : `str` or `Iterable` , optional
+ Name for the thing marked, by default "anonymous"
+ glyph_size : `int` or `Iterable`, optional
+ Size of the marker, by default 5
+ min_mjd : `float` or `Iterable`, optional
+ Earliest time for which to show the marker.
+ max_mjd : `float` or `Iterable`, optional
+ Latest time for which to show the marker.
+ data_source : `bokeh.models.ColumnDataSource`, optional
+ Data source for the marker, None if a new one is to be generated.
+ By default, None
+ circle_kwargs : dict, optional
+ Keywords to be passed to ``bokeh.plotting.figure.Figure.circle``,
+ by default {}
+
+ Returns
+ -------
+ data_source : `bokeh.models.ColumnDataSource`
+ A data source with marker locations, including projected coords.
+ """
+ data_source = super().add_marker(
+ ra,
+ decl,
+ name,
+ glyph_size,
+ min_mjd,
+ max_mjd,
+ data_source=data_source,
+ circle_kwargs=circle_kwargs,
+ )
+ self.set_js_update_func(data_source)
+ return data_source
+
+
+class HorizonMap(MovingSphereMap):
+ x_col = "x_hz"
+ y_col = "y_hz"
+ proj_slider_keys = ["mjd"]
+ default_title = "Horizon"
+
+ def set_js_update_func(self, data_source):
+ """Set the javascript update functions for each slider
+
+ Parameters
+ ----------
+ data_source : `bokeh.models.ColumnDataSource`
+ The bokeh data source to update.
+ """
+ update_func = bokeh.models.CustomJS(
+ args=dict(
+ data_source=data_source,
+ mjd_slider=self.sliders["mjd"],
+ lat=self.site.latitude,
+ lon=self.site.longitude,
+ ),
+ code=self.update_js,
+ )
+
+ for proj_slider_key in self.proj_slider_keys:
+ try:
+ self.sliders[proj_slider_key].js_on_change("value", update_func)
+ except KeyError:
+ pass
+
+ def add_sliders(self, center_alt=90, center_az=0):
+ """Add (already defined) sliders to the map."""
+ super().add_sliders()
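+ # The MJD slider spans one day on either side of the current time,
+ # in one minute steps.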
+ self.sliders["mjd"] = bokeh.models.Slider(
+ start=self.mjd - 1,
+ end=self.mjd + 1,
+ value=self.mjd,
+ step=1.0 / (24 * 60),
+ title="MJD",
+ )
+
+ self.figure = bokeh.layouts.column(self.plot, self.sliders["mjd"])
+
+
+class ArmillarySphere(MovingSphereMap):
+ x_col = "x_orth"
+ y_col = "y_orth"
+ proj_slider_keys = ["alt", "az", "mjd"]
+ default_title = "Armillary Sphere"
+
+ def set_js_update_func(self, data_source):
+ """Set the javascript update functions for each slider
+
+ Parameters
+ ----------
+ data_source : `bokeh.models.ColumnDataSource`
+ The bokeh data source to update.
+ """
+ update_func = bokeh.models.CustomJS(
+ args=dict(
+ data_source=data_source,
+ center_alt_slider=self.sliders["alt"],
+ center_az_slider=self.sliders["az"],
+ mjd_slider=self.sliders["mjd"],
+ lat=self.site.latitude,
+ lon=self.site.longitude,
+ ),
+ code=self.update_js,
+ )
+
+ for proj_slider_key in self.proj_slider_keys:
+ try:
+ self.sliders[proj_slider_key].js_on_change("value", update_func)
+ except KeyError:
+ pass
+
+ def add_sliders(self, center_alt=90, center_az=180):
+ """Add (already defined) sliders to the map."""
+ super().add_sliders()
+ self.sliders["alt"] = bokeh.models.Slider(
+ start=-90,
+ end=90,
+ value=center_alt,
+ step=np.pi / 180,
+ title="center alt",
+ )
+ self.sliders["az"] = bokeh.models.Slider(
+ start=-90, end=360, value=center_az, step=np.pi / 180, title="center Az"
+ )
+ self.sliders["mjd"] = bokeh.models.Slider(
+ start=self.mjd - 1,
+ end=self.mjd + 1,
+ value=self.mjd,
+ step=1.0 / (24 * 60),
+ title="MJD",
+ )
+
+ self.figure = bokeh.layouts.column(
+ self.plot, self.sliders["alt"], self.sliders["az"], self.sliders["mjd"]
+ )
+
+
+def make_zscale_linear_cmap(
+ values, field_name="value", palette="Inferno256", *args, **kwargs
+):
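+ """Create a linear color map (via `bokeh.transform.linear_cmap`) whose
+ limits are derived from the finite values using
+ `astropy.visualization.ZScaleInterval`, falling back on [0, 1] when
+ no values are finite.
+ """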
+ zscale_interval = astropy.visualization.ZScaleInterval(*args, **kwargs)
+ if np.any(np.isfinite(values)):
+ scale_limits = zscale_interval.get_limits(values)
+ else:
+ scale_limits = [0, 1]
+ cmap = bokeh.transform.linear_cmap(
+ field_name, palette, scale_limits[0], scale_limits[1]
+ )
+ return cmap
diff --git a/schedview/plot/__init__.py b/schedview/plot/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/schedview/plot/readjs.py b/schedview/plot/readjs.py
new file mode 100644
index 00000000..c9c368d0
--- /dev/null
+++ b/schedview/plot/readjs.py
@@ -0,0 +1,35 @@
+import importlib.resources
+
+
+def read_javascript(fname):
+ """Read javascript source code from the current package.
+
+ Parameters
+ ----------
+ fname : `str`
+ The name of the file from which to load js source code.
+
+ Returns
+ -------
+ js_code : `str`
+ The loaded source code.
+ """
+ root_package = __package__.split(".")[0]
+
+ try:
+ js_path = importlib.resources.files(root_package).joinpath("js", fname)
+ with importlib.resources.as_file(js_path) as js_file_path:
+ with open(js_file_path, "r") as js_io:
+ js_code = js_io.read()
+ except AttributeError as e:
+ # If we are using an older version of importlib, we need to do
+ # this instead:
+ if e.args[0] != "module 'importlib.resources' has no attribute 'files'":
+ raise e
+
+ with importlib.resources.path(root_package, ".") as root_path:
+ full_name = root_path.joinpath("js", fname)
+ with open(full_name, "r") as js_io:
+ js_code = js_io.read()
+
+ return js_code
diff --git a/schedview/plot/scheduler.py b/schedview/plot/scheduler.py
new file mode 100644
index 00000000..477f5437
--- /dev/null
+++ b/schedview/plot/scheduler.py
@@ -0,0 +1,1141 @@
+import bokeh.plotting
+import numpy as np
+import healpy as hp
+from astropy.time import Time
+import logging
+import collections.abc
+from collections import OrderedDict
+
+import pandas as pd
+import bokeh.models
+import bokeh.core.properties
+
+from rubin_sim.scheduler.features.conditions import Conditions
+from rubin_sim.scheduler.modelObservatory import Model_observatory
+import rubin_sim.scheduler.schedulers
+import rubin_sim.scheduler.surveys
+import rubin_sim.scheduler.basis_functions
+
+
+from schedview.plot.SphereMap import (
+ ArmillarySphere,
+ HorizonMap,
+ Planisphere,
+ MollweideMap,
+ make_zscale_linear_cmap,
+)
+
+from schedview.collect import read_scheduler
+
+DEFAULT_MJD = 60200.2
+# DEFAULT_NSIDE = 16
+DEFAULT_NSIDE = 32
+
+
+def make_logger():
+ logger = logging.getLogger("sched_logger")
+ logger.setLevel(logging.DEBUG)
+
+ console_handler = logging.StreamHandler()
+ console_handler.setLevel(logging.DEBUG)
+ formatter = logging.Formatter("%(asctime)s: %(message)s")
+ console_handler.setFormatter(formatter)
+ logger.addHandler(console_handler)
+ return logger
+
+
+LOGGER = make_logger()
+
+
+class SchedulerDisplay:
+ tooltips = [
+ ("RA", "@center_ra"),
+ ("Decl", "@center_decl"),
+ ]
+
+ key_markup = """Key
+
+- Black line Horizon
+- Red line ZD=70 deg.
+- Green line Ecliptic
+- Blue line Galactic plane
+- Yellow dot Sun position
+- Gray dot Moon position
+- Red dot Survey field(s)
+- Green dot Telescope pointing
+
+ """
+
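+ # Example usage (a minimal sketch; "scheduler.pickle" is a hypothetical
+ # path to a pickled (scheduler, conditions) pair readable by
+ # schedview.collect.read_scheduler):
+ #
+ # display = SchedulerDisplay(nside=DEFAULT_NSIDE)
+ # display.load("scheduler.pickle")
+ # display.select_tier("tier 0")
+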
+ def __init__(self, init_key="AvoidDirectWind", nside=DEFAULT_NSIDE, scheduler=None):
+ self._scheduler = None
+ self.survey_index = [None, None]
+ self.healpix_maps = OrderedDict()
+ self.init_key = init_key
+ self.map_key = init_key
+ self.nside = nside
+ self.healpix_cmap = None
+ self.data_sources = {}
+ self.glyphs = {}
+ self.bokeh_models = {}
+ self.sphere_maps = {}
+ self._figure = None
+ mjd = Time.now().mjd if DEFAULT_MJD is None else DEFAULT_MJD
+ try:
+ self.observatory = Model_observatory(mjd_start=mjd - 1)
+ except ValueError:
+ self.observatory = None
+
+ if scheduler is None:
+ scheduler = make_default_scheduler(mjd, nside=nside)
+
+ self.scheduler = scheduler
+
+ @property
+ def map_keys(self):
+ """Return keys for the available healpix maps"""
+ keys = list(self.healpix_maps.keys())
+ return keys
+
+ @property
+ def mjd(self):
+ return self.conditions.mjd
+
+ @mjd.setter
+ def mjd(self, value):
+ """Update the interface for a new date
+
+ Parameters
+ ----------
+ value : `float`
+ The new MJD
+ """
+
+ # Sometimes a loaded pickle will have a time close to the requested
+ # one, but not identical, and we do not want to recreate the
+ # conditions object if we have loaded it and not changed the time.
+ # So, check only that the mjd is close, not that it is identical.
+ if np.abs(value - self.mjd) < (1.0 / (24 * 60 * 60)):
+ # Nothing needs to be done
+ return
+
+ LOGGER.info(f"Creating conditions for mjd {value}")
+
+ try:
+ # If we cannot go to the requested MJD, fall back on
+ # one we can go to:
+ if value < self.observatory.sky_model.mjd_left.min():
+ LOGGER.info("Cannot go to requested date, going to earliest.")
+ self.observatory.mjd = self.observatory.sky_model.mjd_left.min() + 1.0
+ elif value > self.observatory.sky_model.mjd_right.max():
+ LOGGER.info("Cannot go to requested date, going to latest.")
+ self.observatory.mjd = self.observatory.sky_model.mjd_right.max() - 1.0
+ else:
+ self.observatory.mjd = value
+
+ conditions = self.observatory.return_conditions()
+
+ # Make sure we have set a time at night; if not, clamp the time to
+ # between the evening and morning 18 degree twilights of that night.
+ if conditions.sun_n18_setting > self.observatory.mjd:
+ self.observatory.mjd = conditions.sun_n18_setting
+ conditions = self.observatory.return_conditions()
+ if conditions.sun_n18_rising < self.observatory.mjd:
+ self.observatory.mjd = conditions.sun_n18_rising
+ conditions = self.observatory.return_conditions()
+
+ LOGGER.info("Conditions created")
+ except (ValueError, AttributeError):
+ # If we do not have the right cache of sky brightness
+ # values on disk, we may not be able to instantiate
+ # Model_observatory, but we should be able to run
+ # it anyway. Fake up a conditions object as well as
+ # we can.
+ conditions = Conditions(mjd_start=value - 1)
+ conditions.mjd = value
+ LOGGER.warning("Created dummy conditions.")
+
+ self.conditions = conditions
+
+ @property
+ def time(self):
+ """Return the time as an astropy Time objec.
+
+ Return
+ ------
+ time : `astropy.time.Time`
+ The time
+ """
+ time = Time(self.mjd, format="mjd", scale="utc")
+ time.format = "iso"
+ return time
+
+ @time.setter
+ def time(self, time):
+ """Set the time according to a time string.
+
+ Parameters
+ ----------
+ time : `astropy.time.Time`, `pandas.Timestamp`, or `str`
+ The new time. Strings are parsed with `astropy.time.Time`,
+ falling back on `pandas.to_datetime`.
+ """
+ if isinstance(time, Time):
+ new_mjd = time.mjd
+ elif isinstance(time, pd.Timestamp):
+ new_mjd = time.to_julian_date() - 2400000.5
+ else:
+ try:
+ new_mjd = Time(time).mjd
+ except ValueError:
+ new_mjd = pd.to_datetime(time, utc=True).to_julian_date() - 2400000.5
+
+ self.mjd = new_mjd
+
+ def _update_healpix_maps(self):
+ """Update healpix values from the scheduler."""
+ # Be sure we keep using the same dictionary, and just update it,
+ # rather than use a new one because any new one we make won't propagate
+ # into other callbacks.
+ self.healpix_maps.clear()
+ full_healpix_maps = self.scheduler.get_healpix_maps(
+ survey_index=self.survey_index, conditions=self.conditions
+ )
+ for key in full_healpix_maps:
+ new_key = key.replace(" ", "_").replace(".", "_").replace("@", "_")
+ values = full_healpix_maps[key]
+ if values.shape[0] != hp.nside2npix(self.nside):
+ values[np.isnan(values)] = hp.UNSEEN
+ values = hp.ud_grade(
+ values,
+ self.nside,
+ )
+ values[values == hp.UNSEEN] = np.nan
+ self.healpix_maps[new_key] = values
+
+ survey = self.scheduler.survey_lists[self.survey_index[0]][self.survey_index[1]]
+ reward = survey.calc_reward_function(self.conditions)
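+ # If the reward is not a healpix map (e.g. it is a scalar), rebuild a
+ # map-valued reward by summing the weighted basis functions directly,
+ # when the survey exposes them.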
+ if not (isinstance(reward, np.ndarray) and len(reward) > 1):
+ try:
+ basis_weights = survey.basis_weights
+ basis_functions = survey.basis_functions
+ supported_survey = True
+ except AttributeError:
+ supported_survey = False
+
+ if supported_survey:
+ npix = hp.nside2npix(self.nside)
+ reward = np.zeros(npix)
+ indx = np.arange(npix)
+ for bf, weight in zip(basis_functions, basis_weights):
+ basis_value = bf(self.conditions, indx=indx)
+ if isinstance(basis_value, np.ndarray):
+ basis_value[np.isnan(basis_value)] = hp.UNSEEN
+ basis_value = hp.ud_grade(basis_value, self.nside)
+ basis_value[basis_value == hp.UNSEEN] = np.nan
+ reward += basis_value * weight
+
+ if isinstance(reward, np.ndarray) and len(reward) > 1:
+ if reward.shape[0] != hp.nside2npix(self.nside):
+ reward[np.isnan(reward)] = hp.UNSEEN
+ reward = hp.ud_grade(
+ reward,
+ self.nside,
+ )
+ reward[reward == hp.UNSEEN] = np.nan
+ if np.any(np.isfinite(reward)):
+ self.healpix_maps["reward"] = reward
+
+ @property
+ def healpix_values(self):
+ """Healpix numpy array for the current map."""
+ if len(self.healpix_maps) == 0:
+ npix = hp.nside2npix(self.nside)
+ values = np.ones(npix)
+ return values
+
+ return self.healpix_maps[self.map_key]
+
+ def load(self, file_name):
+ """Load scheduler data
+
+ Parameters
+ ----------
+ file_name : `str`
+ The file name from which to load scheduler state.
+ """
+ scheduler, conditions = read_scheduler(file_name)
+ scheduler.update_conditions(conditions)
+ self.scheduler = scheduler
+
+ @property
+ def scheduler(self):
+ return self._scheduler
+
+ @scheduler.setter
+ def scheduler(self, scheduler):
+ """Set the scheduler visualized.
+
+ Parameters
+ ----------
+ scheduler : `rubin_sim.scheduler.schedulers.core_scheduler.Core_scheduler` # noqa W505
+ The new scheduler to visualize
+ """
+ # Work separated into _set_scheduler so that it can be overridden
+ # by subclasses.
+ self._set_scheduler(scheduler)
+
+ def _set_scheduler(self, scheduler):
+ LOGGER.debug("Setting the scheduler")
+ self._scheduler = scheduler
+
+ # FIXME The pickle used for testing does not include several
+ # required methods of the Scheduler class, so add them.
+ if "get_basis_functions" not in dir(self.scheduler):
+ import schedview.munge.monkeypatch_rubin_sim # noqa F401
+
+ self.survey_index[0] = self.scheduler.survey_index[0]
+ self.survey_index[1] = self.scheduler.survey_index[1]
+
+ if self.survey_index[0] is None:
+ self.survey_index = [0, 0]
+ if self.survey_index[1] is None:
+ self.survey_index[1] = 0
+
+ self.conditions = scheduler.conditions
+
+ @property
+ def conditions(self):
+ return self.scheduler.conditions
+
+ @conditions.setter
+ def conditions(self, conditions):
+ """Update the figure to represent changed conditions.
+
+ Parameters
+ ----------
+ conditions : `rubin_sim.scheduler.features.conditions.Conditions`
+ The new conditions.
+ """
+ self._set_conditions(conditions)
+
+ def _set_conditions(self, conditions):
+ # Separated from the decorated setter so that it can be overridden
+ # in a subclass.
+ LOGGER.info("Updating interface for new conditions")
+ self.scheduler.update_conditions(conditions)
+ self.scheduler.request_observation()
+ self._update_healpix_maps()
+
+ # If the current map is no longer valid, pick a valid one.
+ # Otherwise, keep displaying the same map.
+ if self.map_key not in self.map_keys:
+ self.map_key = self.map_keys[-1]
+
+ for sphere_map in self.sphere_maps.values():
+ sphere_map.mjd = self.mjd
+
+ if "armillary_sphere" in self.sphere_maps:
+ self.sphere_maps["armillary_sphere"].sliders[
+ "mjd"
+ ].value = self.sphere_maps["armillary_sphere"].mjd
+
+ LOGGER.info("Finished updating conditions")
+
+ @property
+ def tier_names(self):
+ """List of names of tiers in current survey."""
+ tiers = [f"tier {t}" for t in np.arange(len(self.scheduler.survey_lists))]
+ return tiers
+
+ def select_tier(self, tier):
+ """Set the tier being displayed."""
+ LOGGER.info(f"swiching tier to {tier}")
+ self.survey_index[0] = self.tier_names.index(tier)
+ self.survey_index[1] = 0
+
+ @property
+ def surveys_in_tier(self):
+ """List of surveys in the current tier."""
+ tier = self.survey_index[0]
+ surveys_in_tier = [s.survey_name for s in self.scheduler.survey_lists[tier]]
+ return surveys_in_tier
+
+ def select_survey(self, survey):
+ """Update the display to show a given survey.
+
+ Parameters
+ ----------
+ survey : `str`
+ The name of the survey to select.
+ """
+ # Keep using the same survey_index list and just update it, rather
+ # than create a new one, because any new one we make won't propagate
+ # into other callbacks.
+ self.survey_index[1] = self.surveys_in_tier.index(survey)
+ self._update_healpix_maps()
+
+ def select_value(self, map_key):
+ """Select the value to be displayed on the maps.
+
+ Parameters
+ ----------
+ map_key : `str`
+ The name of the value to be mapped
+ """
+ LOGGER.info(f"Switching value to {map_key}")
+ self.map_key = map_key
+
+ def make_sphere_map(
+ self,
+ key,
+ cls,
+ title,
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ horizon_graticules=False,
+ ):
+
+ if "hover_tool" not in self.bokeh_models:
+ self.bokeh_models["hover_tool"] = bokeh.models.HoverTool(
+ renderers=[], tooltips=self.tooltips
+ )
+
+ plot = bokeh.plotting.figure(
+ plot_width=plot_width,
+ plot_height=plot_height,
+ tools=[self.bokeh_models["hover_tool"]],
+ match_aspect=True,
+ title=title,
+ )
+ sphere_map = cls(plot=plot, mjd=self.mjd)
+
+ if "healpix" in self.data_sources:
+ sphere_map.add_healpix(
+ self.data_sources["healpix"], cmap=self.healpix_cmap, nside=self.nside
+ )
+ else:
+ sphere_map.add_healpix(self.healpix_values, nside=self.nside)
+ self.data_sources["healpix"] = sphere_map.healpix_data
+ self.healpix_cmap = sphere_map.healpix_cmap
+
+ self.bokeh_models["hover_tool"].renderers.append(sphere_map.healpix_renderer)
+
+ if "horizon" in self.data_sources:
+ sphere_map.add_horizon(data_source=self.data_sources["horizon"])
+ else:
+ self.data_sources["horizon"] = sphere_map.add_horizon()
+
+ if "zd70" in self.data_sources:
+ sphere_map.add_horizon(
+ zd=70,
+ data_source=self.data_sources["zd70"],
+ line_kwargs={"color": "red", "line_width": 2},
+ )
+ else:
+ self.data_sources["zd70"] = sphere_map.add_horizon(
+ zd=70, line_kwargs={"color": "red", "line_width": 2}
+ )
+
+ if horizon_graticules:
+ sphere_map.add_horizon_graticules()
+
+ if decorate:
+ sphere_map.decorate()
+
+ if "survey_marker" not in self.data_sources:
+ self.data_sources["survey_marker"] = self.make_survey_marker_data_source(
+ sphere_map
+ )
+
+ sphere_map.add_marker(
+ data_source=self.data_sources["survey_marker"],
+ name="Field",
+ circle_kwargs={"color": "red", "fill_alpha": 0.5},
+ )
+
+ if "telescope_marker" not in self.data_sources:
+ self.data_sources[
+ "telescope_marker"
+ ] = self.make_telescope_marker_data_source(sphere_map)
+
+ sphere_map.add_marker(
+ data_source=self.data_sources["telescope_marker"],
+ name="Field",
+ circle_kwargs={"color": "green", "fill_alpha": 0.5},
+ )
+
+ if "moon_marker" not in self.data_sources:
+ self.data_sources["moon_marker"] = self.make_moon_marker_data_source(
+ sphere_map
+ )
+
+ sphere_map.add_marker(
+ data_source=self.data_sources["moon_marker"],
+ name="Moon",
+ circle_kwargs={"color": "lightgray", "fill_alpha": 0.8},
+ )
+
+ if "sun_marker" not in self.data_sources:
+ self.data_sources["sun_marker"] = self.make_sun_marker_data_source(
+ sphere_map
+ )
+
+ sphere_map.add_marker(
+ data_source=self.data_sources["sun_marker"],
+ name="Sun",
+ circle_kwargs={"color": "yellow", "fill_alpha": 1},
+ )
+
+ self.bokeh_models[key] = plot
+ self.sphere_maps[key] = sphere_map
+
+ def _make_marker_data_source(
+ self,
+ sphere_map=None,
+ name="telescope",
+ source_name="conditions",
+ ra_name="telRA",
+ decl_name="telDec",
+ source_units="radians",
+ ):
+ """Create a bokeh datasource for an object at a set of coordinates.
+
+ Parameters
+ ----------
+ sphere_map: `schedview.plot.SphereMap`
+ The instance of SphereMap to use to create the data source
+ name : `str`
+ The name of the object to mark.
+ source_name : `str`
+ The name of the member object to provide the coordinates.
+ ra_name : `str`
+ The name of the member with the RA.
+ decl_name : `str`
+ The name of the member with the declination.
+ source_units : `str`
+ 'radians' or 'degrees', according to what is provided by the source
+
+ Returns
+ -------
+ data_source: `bokeh.models.ColumnDataSource`
+ The DataSource with the column data.
+ """
+ if sphere_map is None:
+ sphere_map = tuple(self.sphere_maps.values())[0]
+
+ sources = {
+ "conditions": self.conditions,
+ "survey": self.scheduler.survey_lists[self.survey_index[0]][
+ self.survey_index[1]
+ ],
+ }
+ source = sources[source_name]
+
+ # If the position is not set in the source object,
+ # use an empty array
+ ra = getattr(source, ra_name, np.array([]))
+ decl = getattr(source, decl_name, np.array([]))
+ if ra is None:
+ ra = np.array([])
+ if decl is None:
+ decl = np.array([])
+ LOGGER.debug(
+ f"{name.capitalize()} coordinates: ra={np.degrees(ra)}, decl={np.degrees(decl)}"
+ )
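+ # Convert to degrees, because that is what gets passed to
+ # sphere_map.make_marker_data_source below.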
+ if source_units == "radians":
+ ra_deg = np.degrees(ra)
+ decl_deg = np.degrees(decl)
+ elif source_units in ("degrees", "deg"):
+ ra_deg = ra
+ decl_deg = decl
+ data_source = sphere_map.make_marker_data_source(
+ ra=ra_deg, decl=decl_deg, name=name, glyph_size=20
+ )
+ return data_source
+
+ def make_moon_marker_data_source(self, sphere_map=None):
+ """Create a bokeh datasource for the moon.
+
+ Parameters
+ ----------
+ sphere_map: `schedview.plot.SphereMap`
+ The instance of SphereMap to use to create the data source
+
+ Returns
+ -------
+ data_source: `bokeh.models.ColumnDataSource`
+ The DataSource with the column data.
+ """
+ data_source = self._make_marker_data_source(
+ sphere_map=sphere_map,
+ name="moon",
+ source_name="conditions",
+ ra_name="moonRA",
+ decl_name="moonDec",
+ source_units="radians",
+ )
+ return data_source
+
+ def update_moon_marker_bokeh_model(self):
+ """Update the moon data source."""
+ if "telescope_marker" not in self.data_sources:
+ return
+
+ sphere_map = tuple(self.sphere_maps.values())[0]
+ data_source = self.make_moon_marker_data_source(sphere_map)
+ data = dict(data_source.data)
+ if "moon_marker" in self.data_sources:
+ self.data_sources["moon_marker"].data = data
+
+ def make_sun_marker_data_source(self, sphere_map=None):
+ """Create a bokeh datasource for the sun.
+
+ Parameters
+ ----------
+ sphere_map: `schedview.plot.SphereMap`
+ The instance of SphereMap to use to create the data source
+
+ Returns
+ -------
+ data_source: `bokeh.models.ColumnDataSource`
+ The DataSource with the column data.
+ """
+ data_source = self._make_marker_data_source(
+ sphere_map=sphere_map,
+ name="sun",
+ source_name="conditions",
+ ra_name="sunRA",
+ decl_name="sunDec",
+ source_units="radians",
+ )
+ return data_source
+
+ def update_sun_marker_bokeh_model(self):
+ """Update the sun data source."""
+ if "telescope_marker" not in self.data_sources:
+ return
+
+ sphere_map = tuple(self.sphere_maps.values())[0]
+ data_source = self.make_sun_marker_data_source(sphere_map)
+ data = dict(data_source.data)
+ if "sun_marker" in self.data_sources:
+ self.data_sources["sun_marker"].data = data
+
+ def make_telescope_marker_data_source(self, sphere_map=None):
+ """Create a bokeh datasource for the current telescope pointing.
+
+ Parameters
+ ----------
+ sphere_map: `schedview.plot.SphereMap`
+ The instance of SphereMap to use to create the data source
+
+ Returns
+ -------
+ data_source: `bokeh.models.ColumnDataSource`
+ The DataSource with the column data.
+ """
+ data_source = self._make_marker_data_source(
+ sphere_map=sphere_map,
+ name="telescope",
+ source_name="conditions",
+ ra_name="telRA",
+ decl_name="telDec",
+ source_units="radians",
+ )
+ return data_source
+
+ def update_telescope_marker_bokeh_model(self):
+ """Update the telescope pointing data source."""
+ if "telescope_marker" not in self.data_sources:
+ return
+
+ sphere_map = tuple(self.sphere_maps.values())[0]
+ data_source = self.make_telescope_marker_data_source(sphere_map)
+ data = dict(data_source.data)
+ if "telescope_marker" in self.data_sources:
+ self.data_sources["telescope_marker"].data = data
+
+ def make_survey_marker_data_source(self, sphere_map=None, max_fields=50):
+ """Create a bokeh datasource for the pointings for the current survey.
+
+ Parameters
+ ----------
+ sphere_map: `schedview.plot.SphereMap`
+ The instance of SphereMap to use to create the data source
+ max_fields: `int`
+ Maximum number of fields to display (no fields are shown if the
+ survey has more than this many).
+
+ Returns
+ -------
+ data_source: `bokeh.models.ColumnDataSource`
+ The DataSource with the column data.
+ """
+ survey = self.scheduler.survey_lists[self.survey_index[0]][self.survey_index[1]]
+ try:
+ ra_name = "ra" if len(survey.ra) <= max_fields else ""
+ decl_name = "dec" if len(survey.dec) <= max_fields else ""
+ except AttributeError:
+ ra_name = ""
+ decl_name = ""
+
+ data_source = self._make_marker_data_source(
+ sphere_map=sphere_map,
+ name="Field",
+ source_name="survey",
+ ra_name=ra_name,
+ decl_name=decl_name,
+ source_units="radians",
+ )
+ return data_source
+
+ def update_survey_marker_bokeh_model(self):
+ """Update the survey pointing data source."""
+ if "survey_marker" not in self.data_sources:
+ return
+
+ sphere_map = tuple(self.sphere_maps.values())[0]
+ data_source = self.make_survey_marker_data_source(sphere_map)
+ data = dict(data_source.data)
+ if "survey_marker" in self.data_sources:
+ self.data_sources["survey_marker"].data = data
+
+ def update_healpix_bokeh_model(self):
+ """Update the healpix value data source."""
+ if "healpix" not in self.data_sources:
+ return
+
+ LOGGER.debug("Updating helpix bokeh models")
+
+ sphere_map = tuple(self.sphere_maps.values())[0]
+ # sphere_map = ArmillarySphere(mjd=self.conditions.mjd)
+
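+ # Choose the pixels used to scale the color map: prefer pixels the
+ # masks suggest are actually usable (zenith shadow mask == 1, or
+ # finite y-band sky brightness), falling back to all pixels.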
+ if "Zenith_shadow_mask" in self.map_keys:
+ zenith_mask = self.healpix_maps["Zenith_shadow_mask"]
+ cmap_sample_data = self.healpix_values[zenith_mask == 1]
+ elif "y_sky" in self.map_keys:
+ sb_mask = np.isfinite(self.healpix_maps["y_sky"])
+ cmap_sample_data = self.healpix_values[sb_mask]
+ if len(cmap_sample_data) == 0:
+ # It's probably day, so the color map will be bad regardless.
+ cmap_sample_data = self.healpix_values
+ else:
+ cmap_sample_data = self.healpix_values
+
+ self.healpix_cmap = make_zscale_linear_cmap(cmap_sample_data)
+
+ new_ds = sphere_map.make_healpix_data_source(
+ self.healpix_values,
+ nside=self.nside,
+ bound_step=1,
+ )
+ new_data = dict(new_ds.data)
+
+ for key in self.map_keys:
+ # The datasource might not have all healpixels
+ # or have them in the same order
+ # so force the order by indexing on new_data["hpid"]
+ new_data[key] = self.healpix_maps[key][new_data["hpid"]]
+
+ # Replace the data to be shown
+ self.data_sources["healpix"].data = new_data
+
+ for sphere_map in self.sphere_maps.values():
+ sphere_map.healpix_glyph.fill_color = self.healpix_cmap
+ sphere_map.healpix_glyph.line_color = self.healpix_cmap
+
+ def update_hovertool_bokeh_model(self):
+ """Update the hovertool with available value."""
+ if "hover_tool" not in self.bokeh_models:
+ return
+
+ tooltips = []
+ data = self.data_sources["healpix"].data
+ for data_key in data.keys():
+ if not isinstance(data[data_key][0], collections.abc.Sequence):
+
+ if data_key == "center_ra":
+ label = "RA"
+ elif data_key == "center_decl":
+ label = "Decl"
+ else:
+ label = data_key.replace("_", " ")
+
+ reference = f"@{data_key}"
+ tooltips.append((label, reference))
+
+ self.bokeh_models["hover_tool"].tooltips = tooltips
+
+ def make_reward_table(self):
+ """Create the bokeh model for a table of rewards."""
+ # Bokeh's DataTable doesn't like to expand to accommodate extra rows,
+ # so create a dummy with lots of rows initially.
+ df = pd.DataFrame(
+ np.nan,
+ index=range(30),
+ columns=["basis_function", "feasible", "basis_reward", "accum_reward"],
+ )
+ self.bokeh_models["reward_table"] = bokeh.models.DataTable(
+ source=bokeh.models.ColumnDataSource(df),
+ columns=[bokeh.models.TableColumn(field=c, title=c) for c in df],
+ )
+
+ def update_reward_table_bokeh_model(self):
+ """Update the bokeh model for the table of rewards."""
+ if "reward_table" in self.bokeh_models:
+ reward_df = self.scheduler.survey_lists[self.survey_index[0]][
+ self.survey_index[1]
+ ].make_reward_df(self.conditions)
+ self.bokeh_models["reward_table"].source = bokeh.models.ColumnDataSource(
+ reward_df
+ )
+ self.bokeh_models["reward_table"].columns = [
+ bokeh.models.TableColumn(field=c, title=c) for c in reward_df
+ ]
+
+ def make_chosen_survey(self):
+ """Create the bokeh model for text showing the chosen survey."""
+ self.bokeh_models["chosen_survey"] = bokeh.models.Div(
+ text="No chosen survey
"
+ )
+
+ def update_chosen_survey_bokeh_model(self):
+ """Update the bokeh model for text showing the chosen survey."""
+ if "chosen_survey" in self.bokeh_models:
+ tier = f"tier {self.scheduler.survey_index[0]}"
+ survey = self.scheduler.survey_lists[self.scheduler.survey_index[0]][
+ self.scheduler.survey_index[1]
+ ].survey_name
+ self.bokeh_models[
+ "chosen_survey"
+ ].text = f"Chosen survey: {tier}, {survey}
"
+
+ def make_displayed_value_metadata(self):
+ """Create the bokeh model specifying what values are displayed."""
+ self.bokeh_models["displayed_value_metadata"] = bokeh.models.Div(
+ text="No displayed values
"
+ )
+
+ def update_displayed_value_metadata_bokeh_model(self):
+ """Update the bokeh model specifying what values are displayed."""
+ if "displayed_value_metadata" in self.bokeh_models:
+ tier = f"tier {self.survey_index[0]}"
+ survey = self.scheduler.survey_lists[self.survey_index[0]][
+ self.survey_index[1]
+ ].survey_name
+ self.bokeh_models[
+ "displayed_value_metadata"
+ ].text = f"Displayed value: {self.map_key} from {tier}, {survey}
"
+
+ def make_time_display(self):
+ """Create the bokeh model showing what time is being represented."""
+ self.bokeh_models["time_display"] = bokeh.models.Div(text="No time.
")
+
+ def update_time_display_bokeh_model(self):
+ """Update the bokeh model showing what time is being represented."""
+ iso_time = Time(self.mjd, format="mjd", scale="utc").iso
+ if "time_display" in self.bokeh_models:
+ self.bokeh_models["time_display"].text = f"{iso_time}
"
+
+ def make_scheduler_summary_df(self):
+ """Summarize the reward from each scheduler
+
+ Returns
+ -------
+ survey_df : `pandas.DataFrame`
+ A table showing the reward for each feasible survey, and the
+ basis functions that make each infeasible survey infeasible.
+ """
+ reward_df = self.scheduler.make_reward_df(self.conditions)
+ summary_df = reward_df.reset_index()
+
+ def make_tier_name(row):
+ tier_name = f"tier {row.list_index}"
+ return tier_name
+
+ summary_df["tier"] = summary_df.apply(make_tier_name, axis=1)
+
+ def get_survey_name(row):
+ survey_name = self.scheduler.survey_lists[row.list_index][
+ row.survey_index
+ ].survey_name
+ return survey_name
+
+ summary_df["survey_name"] = summary_df.apply(get_survey_name, axis=1)
+
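+ # For each survey, report the accumulated reward if it is feasible;
+ # otherwise report the names of the basis functions that make it
+ # infeasible.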
+ def make_survey_row(survey_bfs):
+ infeasible_bf = ", ".join(
+ survey_bfs.loc[
+ ~survey_bfs.feasible.astype(bool)
+ ].basis_function.to_list()
+ )
+ infeasible = ~np.all(survey_bfs.feasible.astype(bool))
+ reward = infeasible_bf if infeasible else survey_bfs.accum_reward.iloc[-1]
+ survey_row = pd.Series({"reward": reward, "infeasible": infeasible})
+ return survey_row
+
+ survey_df = summary_df.groupby(["tier", "survey_name"]).apply(make_survey_row)
+
+ return survey_df["reward"].reset_index()
+
+ def make_reward_summary_table(self):
+ """Create the bokeh model of the table of rewards."""
+ # Bokeh's DataTable doesn't like to expand to accommodate extra rows,
+ # so create a dummy with lots of rows initially.
+ df = pd.DataFrame(
+ np.nan,
+ index=range(300),
+ columns=["tier", "survey_name", "reward"],
+ )
+ self.data_sources["reward_summary_table"] = bokeh.models.ColumnDataSource(df)
+ self.bokeh_models["reward_summary_table"] = bokeh.models.DataTable(
+ source=self.data_sources["reward_summary_table"],
+ columns=[bokeh.models.TableColumn(field=c, title=c) for c in df],
+ )
+ self.update_reward_summary_table_bokeh_model()
+
+ def update_reward_summary_table_bokeh_model(self):
+ """Update the bokeh model of the table of rewards."""
+ LOGGER.info("Updating reward summary table bokeh model")
+ if "reward_summary_table" in self.bokeh_models:
+ scheduler_summary_df = self.make_scheduler_summary_df()
+ self.bokeh_models["reward_summary_table"].source.data = dict(
+ bokeh.models.ColumnDataSource(scheduler_summary_df).data
+ )
+ self.bokeh_models["reward_summary_table"].columns = [
+ bokeh.models.TableColumn(field=c, title=c) for c in scheduler_summary_df
+ ]
+
+ def make_figure(self):
+ """Create a bokeh figures showing sky maps for scheduler behavior.
+
+ Returns
+ -------
+ fig : `bokeh.models.layouts.LayoutDOM`
+ A bokeh figure that can be displayed in a notebook (e.g. with
+ ``bokeh.io.show``) or used to create a bokeh app.
+ """
+ self.make_sphere_map(
+ "armillary_sphere",
+ ArmillarySphere,
+ "Armillary Sphere",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+ self.bokeh_models["alt_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "alt"
+ ]
+ self.bokeh_models["az_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "az"
+ ]
+ self.bokeh_models["mjd_slider"] = self.sphere_maps["armillary_sphere"].sliders[
+ "mjd"
+ ]
+ self.bokeh_models["mjd_slider"].visible = False
+ self.make_sphere_map(
+ "planisphere",
+ Planisphere,
+ "Planisphere",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+ self.make_sphere_map(
+ "altaz",
+ HorizonMap,
+ "Alt Az",
+ plot_width=512,
+ plot_height=512,
+ decorate=False,
+ horizon_graticules=True,
+ )
+ self.make_sphere_map(
+ "mollweide",
+ MollweideMap,
+ "Mollweide",
+ plot_width=512,
+ plot_height=512,
+ decorate=True,
+ )
+
+ self.bokeh_models["key"] = bokeh.models.Div(text=self.key_markup)
+ self.make_time_display()
+ self.make_displayed_value_metadata()
+
+ self.bokeh_models["reward_table_title"] = bokeh.models.Div(
+ text="Basis functions for displayed survey
"
+ )
+ self.make_reward_table()
+
+ self.bokeh_models["reward_summary_table_title"] = bokeh.models.Div(
+ text="Rewards for all survey schedulers
"
+ )
+ self.make_reward_summary_table()
+ self.make_chosen_survey()
+
+ arm_controls = [
+ self.bokeh_models["alt_slider"],
+ self.bokeh_models["az_slider"],
+ ]
+
+ figure = bokeh.layouts.row(
+ bokeh.layouts.column(
+ self.bokeh_models["altaz"],
+ self.bokeh_models["time_display"],
+ self.bokeh_models["displayed_value_metadata"],
+ self.bokeh_models["chosen_survey"],
+ self.bokeh_models["reward_table_title"],
+ self.bokeh_models["reward_table"],
+ self.bokeh_models["reward_summary_table_title"],
+ self.bokeh_models["reward_summary_table"],
+ ),
+ bokeh.layouts.column(
+ self.bokeh_models["planisphere"],
+ self.bokeh_models["key"],
+ self.bokeh_models["mollweide"],
+ self.bokeh_models["armillary_sphere"],
+ *arm_controls,
+ ),
+ )
+
+ return figure
+
+ def update_bokeh_models(self):
+ """Update all bokeh models with current data."""
+ LOGGER.debug("Updating bokeh data models.")
+ self.update_reward_table_bokeh_model()
+ self.update_reward_summary_table_bokeh_model()
+ self.update_healpix_bokeh_model()
+ self.update_hovertool_bokeh_model()
+ self.update_telescope_marker_bokeh_model()
+ self.update_moon_marker_bokeh_model()
+ self.update_sun_marker_bokeh_model()
+ self.update_survey_marker_bokeh_model()
+ self.update_time_display_bokeh_model()
+ self.update_displayed_value_metadata_bokeh_model()
+ self.update_chosen_survey_bokeh_model()
+
+ @property
+ def figure(self):
+ """Return a figure for this display.
+
+ Returns
+ -------
+ figure : `bokeh.models.layouts.LayoutDOM`
+ A bokeh figure that can be displayed in a notebook (e.g. with
+ ``bokeh.io.show``) or used to create a bokeh app.
+ """
+ if self._figure is None:
+ self._figure = self.make_figure()
+
+ return self._figure
+
+
+class SchedulerNotebookDisplay(SchedulerDisplay):
+ def __init__(self, *args, **kwargs):
+ # docstring in parent class
+ # notebook_handle must be initialized first so that overridden
+ # methods called by the parent __init__ (which check it) do not fail.
+ self.notebook_handle = None
+ super().__init__(*args, **kwargs)
+
+ def _set_conditions(self, conditions):
+ super()._set_conditions(conditions)
+ self.update_display()
+
+ def select_tier(self, tier):
+ # docstring in parent class
+ super().select_tier(tier)
+ self.update_display()
+
+ def select_survey(self, survey):
+ # docstring in parent class
+ super().select_survey(survey)
+ self.update_display()
+
+ def select_value(self, value):
+ # docstring in parent class
+ super().select_value(value)
+ self.update_display()
+
+ def update_display(self):
+ """Update the display."""
+ if self.notebook_handle is not None:
+ self.update_bokeh_models()
+ bokeh.io.push_notebook(handle=self.notebook_handle)
+
+ def show(self):
+ """Show the display."""
+ self.notebook_handle = bokeh.io.show(self.figure, notebook_handle=True)
+
+
+def make_default_scheduler(mjd, nside=32):
+ """Return default scheduler.
+
+ Parameters
+ ----------
+ mjd : `float`
+ The MJD.
+ nside : `int`
+ The healpix nside
+
+ Returns
+ -------
+ scheduler : `rubin_sim.scheduler.schedulers.Core_scheduler`
+ """
+ LOGGER.debug("Making default scheduler")
+
+ def make_band_survey(band):
+ # Split the creation of basis functions so that if one fails,
+ # the other(s) might still be included.
+ basis_functions = []
+ try:
+ this_basis_function = (
+ rubin_sim.scheduler.basis_functions.Ecliptic_basis_function(nside=nside)
+ )
+ basis_functions.append(this_basis_function)
+ except Exception:
+ pass
+
+ try:
+ this_basis_function = (
+ rubin_sim.scheduler.basis_functions.M5_diff_basis_function(
+ filtername=band, nside=nside
+ )
+ )
+ basis_functions.append(this_basis_function)
+ except Exception:
+ pass
+
+ survey = rubin_sim.scheduler.surveys.BaseSurvey(
+ basis_functions,
+ survey_name=band,
+ )
+ return survey
+
+ band_surveys = {b: make_band_survey(b) for b in "ugrizy"}
+ visible_surveys = [band_surveys["u"], band_surveys["g"], band_surveys["r"]]
+ ir_surveys = [band_surveys["i"], band_surveys["z"], band_surveys["y"]]
+
+ scheduler = rubin_sim.scheduler.schedulers.Core_scheduler(
+ [visible_surveys, ir_surveys], nside=nside
+ )
+ try:
+ observatory = Model_observatory(mjd_start=mjd - 1)
+ observatory.mjd = mjd
+ conditions = observatory.return_conditions()
+ except ValueError:
+ # If we do not have the right cache of sky brightness
+ # values on disk, we may not be able to instantiate
+ # Model_observatory, but we should be able to run
+ # it anyway. Fake up a conditions object as well as
+ # we can.
+ conditions = Conditions(mjd_start=mjd - 1)
+ conditions.mjd = mjd
+
+ scheduler.update_conditions(conditions)
+ scheduler.request_observation()
+ return scheduler
diff --git a/schedview/sphere.py b/schedview/sphere.py
new file mode 100644
index 00000000..7a786ca3
--- /dev/null
+++ b/schedview/sphere.py
@@ -0,0 +1,109 @@
+import numpy as np
+
+
+def offset_sep_bear(ra, decl, sep, bearing, degrees=False):
+ """Calculate coordinates after an offset by a separation.
+
+ Parameters
+ ----------
+ ra : `float`
+ R.A. of the initial position
+ decl : `float`
+ declination of the initial position
+ sep : `float`
+ separation of the offset
+ bearing : `float`
+ bearing of the offset (east of north)
+ degrees : `bool`
+ If True, arguments and returned values are in degrees; otherwise
+ they are in radians (the default).
+
+ Returns
+ -------
+ ra : `float`
+ Right Ascension (R.A.) of the offset position
+ decl : `float`
+ declination of the offset position
+
+ """
+ # Use cos formula:
+ # cos(a)=cos(b)*cos(c)+sin(b)*sin(c)*cos(A)
+
+ if degrees:
+ ra = np.radians(ra)
+ decl = np.radians(decl)
+ sep = np.radians(sep)
+ bearing = np.radians(bearing)
+
+ np_sep = np.pi / 2 - decl
+
+ new_np_sep = np.arccos(
+ np.cos(np_sep) * np.cos(sep) + np.sin(np_sep) * np.sin(sep) * np.cos(bearing)
+ )
+ new_decl = np.pi / 2 - new_np_sep
+
+ # use tan = sin/cos, sin rule to get sin, cos rule to get cos,
+ # cancel sin(np_sep) to avoid problems when new_np_sep=90 deg.
+ dra = np.arctan2(
+ np.sin(sep) * np.sin(bearing) * np.sin(np_sep),
+ np.cos(sep) - np.cos(new_np_sep) * np.cos(np_sep),
+ )
+
+ # Hack to match astropy behaviour at poles
+ near_pole = np.abs(np.cos(decl)) < 1e-12
+ if near_pole:
+ dra = np.pi / 2 + np.cos(np_sep) * (np.pi / 2 - bearing)
+
+ new_ra = ra + dra
+
+ if degrees:
+ new_ra = np.degrees(new_ra)
+ new_decl = np.degrees(new_decl)
+
+ return new_ra, new_decl
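+
+
+# A quick illustration of offset_sep_bear (not part of the library API,
+# just a sanity check worked out by hand): offsetting 90 degrees due
+# north (bearing 0) from (ra=0, decl=0) lands on the celestial pole,
+# and offsetting 90 degrees due east (bearing 90) stays on the equator:
+#
+#     offset_sep_bear(0.0, 0.0, 90.0, 0.0, degrees=True)   # -> (0.0, 90.0)
+#     offset_sep_bear(0.0, 0.0, 90.0, 90.0, degrees=True)  # -> (90.0, 0.0)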
+
+
+def rotate_cart(ux, uy, uz, angle, x0, y0, z0):
+ """Rotate coordinates on a unit sphere around an axis
+
+ Parameters
+ ----------
+ ux : `float`
+ x coordinate of a point on the axis of rotation
+ uy : `float`
+ y coordinate of a point on the axis of rotation
+ uz : `float`
+ z coordinate of a point on the axis of rotation
+ angle : `float`
+ Magnitude of the rotation.
+ x0 : `float`
+ Input x coordinate
+ y0 : `float`
+ Input y coordinate
+ z0 : `float`
+ Input z coordinate
+
+ Returns
+ -------
+ ux : `float`
+ Output x coordinate
+ uy : `float`
+ Output y coordinate
+ uz : `float`
+ Output z coordinate
+ """
+ cosa = np.cos(np.radians(angle))
+ ccosa = 1 - cosa
+ sina = np.sin(np.radians(angle))
+ rxx = cosa + ux * ux * ccosa
+ rxy = ux * uy * ccosa - uz * sina
+ rxz = ux * uz * ccosa + uy * sina
+ ryx = uy * ux * ccosa + uz * sina
+ ryy = cosa + uy * uy * ccosa
+ ryz = uy * uz * ccosa - ux * sina
+ rzx = uz * ux * ccosa - uy * sina
+ rzy = uz * uy * ccosa + ux * sina
+ rzz = cosa + uz * uz * ccosa
+ x = rxx * x0 + rxy * y0 + rxz * z0
+ y = ryx * x0 + ryy * y0 + ryz * z0
+ z = rzx * x0 + rzy * y0 + rzz * z0
+ return x, y, z
diff --git a/schedview/version.py b/schedview/version.py
new file mode 100644
index 00000000..07082224
--- /dev/null
+++ b/schedview/version.py
@@ -0,0 +1,3 @@
+# Generated by setuptools_scm
+__all__ = ["__version__"]
+__version__ = "0.1.dev1+g56e64ff.d20220817"
diff --git a/tests/test_metric_maps.py b/tests/test_metric_maps.py
new file mode 100644
index 00000000..ac2b029e
--- /dev/null
+++ b/tests/test_metric_maps.py
@@ -0,0 +1,27 @@
+import unittest
+from tempfile import TemporaryDirectory
+from pathlib import Path
+import bokeh.plotting
+import bokeh.io
+import bokeh.document
+from schedview.app.metric_maps import make_metric_figure, add_metric_app
+
+
+class test_metric_maps(unittest.TestCase):
+ def test_metric_maps(self):
+ fig = make_metric_figure()
+ with TemporaryDirectory() as dir:
+ out_path = Path(dir)
+ saved_html_fname = out_path.joinpath("test_page.html")
+ bokeh.plotting.output_file(filename=saved_html_fname, title="Test Page")
+ bokeh.plotting.save(fig)
+ saved_png_fname = out_path.joinpath("test_fig.png")
+ bokeh.io.export_png(fig, filename=saved_png_fname)
+
+ def test_add_metric_app(self):
+ doc = bokeh.document.document.Document()
+ add_metric_app(doc)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_readjs.py b/tests/test_readjs.py
new file mode 100644
index 00000000..9f82e962
--- /dev/null
+++ b/tests/test_readjs.py
@@ -0,0 +1,15 @@
+import unittest
+import schedview.plot.readjs
+
+JS_FNAME = "update_map.js"
+
+
+class Test_update_js(unittest.TestCase):
+ def test_update_js(self):
+ js_code = schedview.plot.readjs.read_javascript(JS_FNAME)
+ self.assertGreater(len(js_code), 10)
+ self.assertIsInstance(js_code, str)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_scheduler.py b/tests/test_scheduler.py
new file mode 100644
index 00000000..4ed71b49
--- /dev/null
+++ b/tests/test_scheduler.py
@@ -0,0 +1,70 @@
+import unittest
+import healpy as hp
+from astropy.time import TimeDelta
+from schedview.plot.scheduler import (
+ SchedulerDisplay,
+ DEFAULT_MJD,
+ make_default_scheduler,
+)
+from schedview.collect import sample_pickle
+
+NSIDE = 8
+
+
+class test_SchedulerDisplay(unittest.TestCase):
+ def test_scheduler_display(self):
+ default_scheduler = make_default_scheduler(DEFAULT_MJD, nside=NSIDE)
+ sched_display = SchedulerDisplay(nside=NSIDE, scheduler=default_scheduler)
+ self.assertTrue(sched_display.scheduler is default_scheduler)
+
+ # Drives the creation of many bokeh models
+ sched_display.make_figure()
+
+ self.assertGreater(len(sched_display.map_keys), 0)
+
+ self.assertEqual(sched_display.mjd, DEFAULT_MJD)
+ self.assertLessEqual(sched_display.conditions.sun_n18_setting, DEFAULT_MJD)
+ self.assertGreaterEqual(sched_display.conditions.sun_n18_rising, DEFAULT_MJD)
+
+ new_mjd = DEFAULT_MJD + 1.1
+ sched_display.mjd = new_mjd
+ self.assertEqual(sched_display.mjd, new_mjd)
+ self.assertLessEqual(sched_display.conditions.sun_n18_setting, new_mjd)
+ self.assertGreaterEqual(sched_display.conditions.sun_n18_rising, new_mjd)
+
+ time = sched_display.time
+ prev_mjd = sched_display.mjd
+ time_change = TimeDelta(1.8, format="jd")
+ next_time = time + time_change
+ sched_display.time = next_time
+ self.assertAlmostEqual(sched_display.mjd, prev_mjd + time_change.value)
+
+ hp_values = sched_display.healpix_values
+ self.assertEqual(hp.nside2npix(NSIDE), hp_values.shape[0])
+ self.assertEqual(len(hp_values.shape), 1)
+
+ pre_load_conditions = sched_display.conditions
+ pre_load_scheduler = sched_display.scheduler
+ self.assertTrue(sched_display.scheduler is pre_load_scheduler)
+ self.assertTrue(sched_display.conditions is pre_load_conditions)
+ file_name = sample_pickle()
+ sched_display.load(file_name)
+ self.assertTrue(sched_display.scheduler is not pre_load_scheduler)
+ self.assertTrue(sched_display.conditions is not pre_load_conditions)
+
+ new_survey_index = 1
+ sched_display.select_survey(sched_display.surveys_in_tier[new_survey_index])
+ self.assertEqual(sched_display.survey_index[1], new_survey_index)
+
+ new_tier = sched_display.tier_names[1]
+ sched_display.select_tier(new_tier)
+ self.assertSequenceEqual(sched_display.survey_index, [1, 0])
+
+ sched_display.select_value(sched_display.map_keys[0])
+ self.assertEqual(sched_display.map_key, sched_display.map_keys[0])
+ sched_display.select_value(sched_display.map_keys[1])
+ self.assertEqual(sched_display.map_key, sched_display.map_keys[1])
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_scheduler_app.py b/tests/test_scheduler_app.py
new file mode 100644
index 00000000..1aaecfab
--- /dev/null
+++ b/tests/test_scheduler_app.py
@@ -0,0 +1,36 @@
+import unittest
+from tempfile import TemporaryDirectory
+from pathlib import Path
+import bokeh.plotting
+import bokeh.io
+from schedview.app.sched_maps import SchedulerDisplayApp
+from schedview.collect import sample_pickle
+
+NSIDE = 8
+
+
+class test_sched_maps(unittest.TestCase):
+ def test_sched_maps(self):
+ scheduler_app = SchedulerDisplayApp(nside=NSIDE)
+
+ render_figure(scheduler_app)
+
+ scheduler_app.bokeh_models["file_input_box"].value = sample_pickle(
+ "baseline22_start.pickle.gz"
+ )
+ scheduler_app.disable_controls()
+
+
+def render_figure(scheduler_app):
+ fig = scheduler_app.make_figure()
+ with TemporaryDirectory() as dir:
+ out_path = Path(dir)
+ saved_html_fname = out_path.joinpath("test_page.html")
+ bokeh.plotting.output_file(filename=saved_html_fname, title="Test Page")
+ bokeh.plotting.save(fig)
+ saved_png_fname = out_path.joinpath("test_fig.png")
+ bokeh.io.export_png(fig, filename=saved_png_fname)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_scheduler_pickle.py b/tests/test_scheduler_pickle.py
new file mode 100644
index 00000000..ab4fc25c
--- /dev/null
+++ b/tests/test_scheduler_pickle.py
@@ -0,0 +1,15 @@
+import unittest
+from rubin_sim.scheduler.schedulers.core_scheduler import Core_scheduler
+from rubin_sim.scheduler.features.conditions import Conditions
+from schedview.collect.scheduler_pickle import read_scheduler
+
+
+class test_scheduler_pickle(unittest.TestCase):
+ def test_read_scheduler(self):
+ scheduler, conditions = read_scheduler()
+ self.assertIsInstance(scheduler, Core_scheduler)
+ self.assertIsInstance(conditions, Conditions)
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_sphere.py b/tests/test_sphere.py
new file mode 100644
index 00000000..455f25f2
--- /dev/null
+++ b/tests/test_sphere.py
@@ -0,0 +1,95 @@
+import unittest
+
+import numpy as np
+import healpy as hp
+from astropy.coordinates import SkyCoord
+
+from numpy.random import default_rng
+
+from schedview import sphere
+
+
+def _random_point_on_sphere(rng):
+ # Not slick or efficient, but it works
+ mag = np.inf
+ while mag > 1:
+ vec = rng.uniform(-1, 1, 3)
+ mag = np.linalg.norm(vec)
+
+ vec = vec / mag
+ return vec
+
+
+class test_sphere(unittest.TestCase):
+ def test_offset_sep_bear(self):
+ rng = default_rng(6563)
+ num_test_values = 100
+
+ for i in range(num_test_values):
+ vec1 = _random_point_on_sphere(rng)
+ vec2 = _random_point_on_sphere(rng)
+ ra, decl = hp.vec2ang(np.array([vec1, vec2]), lonlat=True)
+
+ # Make sure at least one point is near each pole
+ if i == 0:
+ decl[0] = np.degrees(np.arccos(1.23e-13))
+ elif i == 1:
+ decl[0] = np.degrees(-1 * np.arccos(2.45e-14))
+
+ coords = SkyCoord(ra, decl, unit="deg")
+ separation = coords[0].separation(coords[1]).deg
+ bearing = coords[0].position_angle(coords[1]).deg
+
+ test_ra, test_decl = sphere.offset_sep_bear(
+ ra[0], decl[0], separation, bearing, degrees=True
+ )
+
+ self.assertAlmostEqual(test_ra % 360, ra[1] % 360)
+ self.assertAlmostEqual(test_decl % 360, decl[1] % 360)
+
+ test_ra, test_decl = sphere.offset_sep_bear(
+ np.radians(ra[0]),
+ np.radians(decl[0]),
+ np.radians(separation),
+ np.radians(bearing),
+ degrees=False,
+ )
+
+ self.assertAlmostEqual(np.degrees(test_ra) % 360, ra[1] % 360)
+ self.assertAlmostEqual(np.degrees(test_decl) % 360, decl[1] % 360)
+
+ def test_rotate_cart(self):
+ rng = default_rng(4861)
+ num_test_values = 100
+
+ for i in range(num_test_values):
+ vec0 = _random_point_on_sphere(rng)
+ axis = _random_point_on_sphere(rng)
+ angle = rng.uniform(-360, 360)
+ vec1 = np.array(
+ sphere.rotate_cart(
+ axis[0], axis[1], axis[2], angle, vec0[0], vec0[1], vec0[2]
+ )
+ )
+ self.assertAlmostEqual(np.linalg.norm(vec1), 1.0)
+ self.assertAlmostEqual(axis.dot(vec0), axis.dot(vec1))
+
+ np.testing.assert_almost_equal(
+ np.array(sphere.rotate_cart(0, 0, 1, 90, 1, 0, 0)), np.array((0, 1, 0))
+ )
+
+ np.testing.assert_almost_equal(
+ np.array(sphere.rotate_cart(0, 0, 1, 180, 1, 0, 0)), np.array((-1, 0, 0))
+ )
+
+ np.testing.assert_almost_equal(
+ np.array(sphere.rotate_cart(0, 0, 1, 90, 1, 0, 0)), np.array((0, 1, 0))
+ )
+
+ np.testing.assert_almost_equal(
+ np.array(sphere.rotate_cart(1, 0, 0, 90, 0, 1, 0)), np.array((0, 0, 1))
+ )
+
+
+if __name__ == "__main__":
+ unittest.main()
diff --git a/tests/test_stars.py b/tests/test_stars.py
new file mode 100644
index 00000000..d6353dc7
--- /dev/null
+++ b/tests/test_stars.py
@@ -0,0 +1,24 @@
+import unittest
+from schedview.collect.stars import load_bright_stars
+
+
+class test_stars(unittest.TestCase):
+ def test_load_stars(self):
+ stars = load_bright_stars()
+ self.assertGreater(len(stars), 9000)
+ for column_name in ("name", "ra", "decl", "Vmag"):
+ self.assertIn(column_name, stars.columns)
+ self.assertGreaterEqual(stars.ra.min(), 0)
+ self.assertLessEqual(stars.ra.min(), 1)
+ self.assertGreaterEqual(stars.ra.max(), 359)
+ self.assertLessEqual(stars.ra.max(), 360)
+ self.assertGreaterEqual(stars.decl.min(), -90)
+ self.assertLessEqual(stars.decl.min(), -88)
+ self.assertGreaterEqual(stars.decl.max(), 89)
+ self.assertLessEqual(stars.decl.max(), 90)
+ self.assertGreaterEqual(stars.Vmag.min(), -2)
+ self.assertLessEqual(stars.Vmag.max(), 10)
+
+
+if __name__ == "__main__":
+ unittest.main()