Merge pull request #151 from SNflows/devel
Merge devel to master for new release
emirkmo authored Feb 10, 2023
2 parents 12f29f3 + 31d6962 commit 0c36119
Showing 27 changed files with 820 additions and 577 deletions.
9 changes: 9 additions & 0 deletions .github/codecov.yml
@@ -0,0 +1,9 @@
coverage:
  status:
    project:
      default:
        # basic
        threshold: 5%
    patch:
      default:
        threshold: 5%
15 changes: 10 additions & 5 deletions .github/workflows/tests.yml
@@ -47,15 +47,13 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
python-version: [3.9, '3.10']
os: [ubuntu-latest, macos-latest]
python-version: ['3.10']
include:
- os: ubuntu-latest
pippath: ~/.cache/pip
- os: macos-latest
pippath: ~/Library/Caches/pip
- os: windows-latest
pippath: ~\AppData\Local\pip\Cache

name: Python ${{ matrix.python-version }} on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
@@ -102,6 +100,8 @@ jobs:
env:
FLOWS_CONFIG: ${{ secrets.FLOWS_CONFIG }}
FLOWS_API_TOKEN: ${{ secrets.FLOWS_API_TOKEN }}
CASSJOBS_WSID: ${{ secrets.CASSJOBS_WSID }}
CASSJOBS_PASSWORD: ${{ secrets.CASSJOBS_PASSWORD }}
run: |
python -m pip install --upgrade pip wheel
pip install -r requirements.txt
@@ -113,12 +113,15 @@ jobs:
env:
FLOWS_CONFIG: ${{ secrets.FLOWS_CONFIG }}
FLOWS_API_TOKEN: ${{ secrets.FLOWS_API_TOKEN }}
CASSJOBS_WSID: ${{ secrets.CASSJOBS_WSID }}
CASSJOBS_PASSWORD: ${{ secrets.CASSJOBS_PASSWORD }}
run: pytest --cov

- name: Upload coverage
continue-on-error: true
uses: codecov/codecov-action@v2
uses: codecov/codecov-action@v3
with:
fail_ci_if_error: true
env_vars: OS,PYTHON
verbose: true

@@ -156,6 +159,8 @@ jobs:
env:
FLOWS_API_TOKEN: ${{ secrets.FLOWS_API_TOKEN }}
FLOWS_CONFIG: ${{ secrets.FLOWS_CONFIG }}
CASSJOBS_WSID: ${{ secrets.CASSJOBS_WSID }}
CASSJOBS_PASSWORD: ${{ secrets.CASSJOBS_PASSWORD }}
run: |
python -m pip install --upgrade pip wheel
pip install -r requirements.txt
5 changes: 5 additions & 0 deletions .gitignore
@@ -142,3 +142,8 @@ dmypy.json

# Ignore test output
tests/output/

# Ignore VSCode typing clutter
typings/

*.config
4 changes: 2 additions & 2 deletions dev_requirements.txt
@@ -1,5 +1,5 @@
pytest
flake8
flake8-tabs >= 2.3.2
flake8-builtins
flake8-logging-format
flake8-logging-format
autopep8
7 changes: 4 additions & 3 deletions flows/aadc_db.py
@@ -9,10 +9,11 @@
.. codeauthor:: Rasmus Handberg <[email protected]>
"""
import getpass
import os

import psycopg2 as psql
from psycopg2.extras import DictCursor
import getpass
from tendrils.utils import load_config


@@ -41,15 +42,15 @@ def __init__(self, username=None, password=None):
config = load_config()

if username is None:
username = config.get('database', 'username', fallback=None)
username = config.get('database', 'username', fallback=os.environ.get("AUDBUsername", None))
if username is None:
default_username = getpass.getuser()
username = input('Username [%s]: ' % default_username)
if username == '':
username = default_username

if password is None:
password = config.get('database', 'password', fallback=None)
password = config.get('database', 'password', fallback=os.environ.get("AUDBPassword", None))
if password is None:
password = getpass.getpass('Password: ')

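Note on the aadc_db.py change above: the database credentials now fall back from config.ini to the AUDBUsername / AUDBPassword environment variables before resorting to an interactive prompt. A minimal standalone sketch of that lookup order (the helper below is illustrative, not part of the flows API):

import getpass
import os
from configparser import ConfigParser

def resolve_db_username(config: ConfigParser) -> str:
    # 1) config.ini [database] username, 2) AUDBUsername environment variable
    username = config.get('database', 'username',
                          fallback=os.environ.get("AUDBUsername", None))
    if username is None:
        # 3) interactive prompt, defaulting to the OS account name
        default_username = getpass.getuser()
        username = input('Username [%s]: ' % default_username) or default_username
    return username
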
144 changes: 73 additions & 71 deletions flows/catalogs.py
@@ -4,25 +4,29 @@
.. codeauthor:: Rasmus Handberg <[email protected]>
"""

import logging
import os
import os.path
import subprocess
from pickle import NONE
import shlex
import requests
import subprocess
import warnings
from io import BytesIO

import numpy as np
from bottleneck import anynan
from astropy.time import Time
from astropy.coordinates import SkyCoord, Angle
import requests
from astropy import units as u
from astropy.table import Table, MaskedColumn
from astroquery.sdss import SDSS
from astropy.coordinates import Angle, SkyCoord
from astropy.table import MaskedColumn, Table
from astropy.time import Time
from astroquery import sdss
from astroquery.simbad import Simbad
from bottleneck import anynan
from tendrils.utils import load_config, query_ztf_id

from .aadc_db import AADC_DB

logger = logging.getLogger(__name__)

# --------------------------------------------------------------------------------------------------
class CasjobsError(RuntimeError):
@@ -55,15 +59,15 @@ def configure_casjobs(overwrite=False):
.. codeauthor:: Rasmus Handberg <[email protected]>
"""

__dir__ = os.path.dirname(os.path.realpath(__file__))
casjobs_config = os.path.join(__dir__, 'casjobs', 'CasJobs.config')
logger.debug(",".join([casjobs_config,__dir__,os.path.realpath(__file__)]))
if os.path.isfile(casjobs_config) and not overwrite:
return

config = load_config()
wsid = config.get('casjobs', 'wsid', fallback=None)
passwd = config.get('casjobs', 'password', fallback=None)
wsid = config.get('casjobs', 'wsid', fallback=os.environ.get("CASJOBS_WSID", None))
passwd = config.get('casjobs', 'password', fallback=os.environ.get("CASJOBS_PASSWORD", None))
if wsid is None or passwd is None:
raise CasjobsError("CasJobs WSID and PASSWORD not in config.ini")

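The same config-then-environment pattern is used here for the CasJobs credentials, matching the CASJOBS_WSID / CASJOBS_PASSWORD fallbacks in the hunk above; a missing value is still an error. Rough sketch (the helper name and the generic exception are placeholders, not the flows API):

import os
from configparser import ConfigParser

def resolve_casjobs_credentials(config: ConfigParser) -> tuple:
    wsid = config.get('casjobs', 'wsid',
                      fallback=os.environ.get("CASJOBS_WSID", None))
    passwd = config.get('casjobs', 'password',
                        fallback=os.environ.get("CASJOBS_PASSWORD", None))
    if wsid is None or passwd is None:
        # configure_casjobs() raises CasjobsError at this point
        raise RuntimeError("CasJobs WSID and PASSWORD not in config.ini")
    return wsid, passwd
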
@@ -100,8 +104,6 @@ def query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin):
.. codeauthor:: Rasmus Handberg <[email protected]>
"""

logger = logging.getLogger(__name__)
if isinstance(radius, (float, int)):
radius *= u.deg

@@ -128,7 +130,6 @@ def query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin):

# --------------------------------------------------------------------------------------------------
def _query_casjobs_refcat2_divide_and_conquer(coo_centre, radius):
logger = logging.getLogger(__name__)

# Just put in a stop criterion to avoid infinite recursion:
if radius < 0.04 * u.deg:
@@ -172,7 +173,6 @@ def _query_casjobs_refcat2(coo_centre, radius=24 * u.arcmin):
.. codeauthor:: Rasmus Handberg <[email protected]>
"""

logger = logging.getLogger(__name__)
if isinstance(radius, (float, int)):
radius *= u.deg

@@ -298,7 +298,9 @@ def query_sdss(coo_centre, radius=24 * u.arcmin, dr=16, clean=True):
if isinstance(radius, (float, int)):
radius *= u.deg

AT_sdss = SDSS.query_region(coo_centre, photoobj_fields=['type', 'clean', 'ra', 'dec', 'psfMag_u'], data_release=dr,
#SDSS.MAX_CROSSID_RADIUS = radius + 1 * u.arcmin
sdss.conf.skyserver_baseurl = sdss.conf.skyserver_baseurl.replace("http://","https://")
AT_sdss = sdss.SDSS.query_region(coo_centre, photoobj_fields=['type', 'clean', 'ra', 'dec', 'psfMag_u'], data_release=dr,
timeout=600, radius=radius)

if AT_sdss is None:
@@ -316,9 +318,9 @@ def query_sdss(coo_centre, radius=24 * u.arcmin, dr=16, clean=True):
return None, None

# Create SkyCoord object with the coordinates:
sdss = SkyCoord(ra=AT_sdss['ra'], dec=AT_sdss['dec'], unit=u.deg, frame='icrs')
sdss_coord = SkyCoord(ra=AT_sdss['ra'], dec=AT_sdss['dec'], unit=u.deg, frame='icrs')

return AT_sdss, sdss
return AT_sdss, sdss_coord


# --------------------------------------------------------------------------------------------------
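
The query_sdss hunks do two things: they switch the astroquery SkyServer base URL to https before querying, and they rename the local SkyCoord variable from sdss to sdss_coord so it no longer shadows the astroquery sdss module imported at the top of the file. A condensed sketch of the new query path (assumes a SkyCoord coo_centre and network access; not a drop-in replacement for the real function):

import astropy.units as u
from astropy.coordinates import SkyCoord
from astroquery import sdss

def query_sdss_sketch(coo_centre, radius=24 * u.arcmin):
    # Force https for the SkyServer endpoint, as in the new catalogs.py:
    sdss.conf.skyserver_baseurl = sdss.conf.skyserver_baseurl.replace("http://", "https://")
    tbl = sdss.SDSS.query_region(coo_centre, radius=radius, timeout=600,
                                 photoobj_fields=['ra', 'dec'])
    if tbl is None:
        return None, None
    # "sdss_coord", not "sdss": a local named "sdss" would shadow the module
    # imported above and break any later sdss.SDSS.* call in this function.
    sdss_coord = SkyCoord(ra=tbl['ra'], dec=tbl['dec'], unit=u.deg, frame='icrs')
    return tbl, sdss_coord
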
@@ -561,8 +563,6 @@ def download_catalog(target=None, radius=24 * u.arcmin, radius_ztf=3 * u.arcsec,
.. codeauthor:: Rasmus Handberg <[email protected]>
"""

logger = logging.getLogger(__name__)

with AADC_DB() as db:

# Get the information about the target from the database:
@@ -623,54 +623,56 @@ def download_catalog(target=None, radius=24 * u.arcmin, radius_ztf=3 * u.arcsec,
else:
on_conflict = 'DO NOTHING'

try:
db.cursor.executemany("""INSERT INTO flows.refcat2 (
starid,
ra,
decl,
pm_ra,
pm_dec,
gaia_mag,
gaia_bp_mag,
gaia_rp_mag,
gaia_variability,
u_mag,
g_mag,
r_mag,
i_mag,
z_mag,
"J_mag",
"H_mag",
"K_mag",
"V_mag",
"B_mag")
VALUES (
%(starid)s,
%(ra)s,
%(decl)s,
%(pm_ra)s,
%(pm_dec)s,
%(gaia_mag)s,
%(gaia_bp_mag)s,
%(gaia_rp_mag)s,
%(gaia_variability)s,
%(u_mag)s,
%(g_mag)s,
%(r_mag)s,
%(i_mag)s,
%(z_mag)s,
%(J_mag)s,
%(H_mag)s,
%(K_mag)s,
%(V_mag)s,
%(B_mag)s)
ON CONFLICT """ + on_conflict + ";", results)
logger.info("%d catalog entries inserted for %s.", db.cursor.rowcount, target_name)

# Mark the target that the catalog has been downloaded:
db.cursor.execute("UPDATE flows.targets SET catalog_downloaded=TRUE,ztf_id=%s WHERE targetid=%s;",
(ztf_id, targetid))
db.conn.commit()
except: # noqa: E722, pragma: no cover
db.conn.rollback()
raise
# Avoid testing "ON CONFLICT" of postgres. Only test update/insert.
if update_existing:
try:
db.cursor.executemany("""INSERT INTO flows.refcat2 (
starid,
ra,
decl,
pm_ra,
pm_dec,
gaia_mag,
gaia_bp_mag,
gaia_rp_mag,
gaia_variability,
u_mag,
g_mag,
r_mag,
i_mag,
z_mag,
"J_mag",
"H_mag",
"K_mag",
"V_mag",
"B_mag")
VALUES (
%(starid)s,
%(ra)s,
%(decl)s,
%(pm_ra)s,
%(pm_dec)s,
%(gaia_mag)s,
%(gaia_bp_mag)s,
%(gaia_rp_mag)s,
%(gaia_variability)s,
%(u_mag)s,
%(g_mag)s,
%(r_mag)s,
%(i_mag)s,
%(z_mag)s,
%(J_mag)s,
%(H_mag)s,
%(K_mag)s,
%(V_mag)s,
%(B_mag)s)
ON CONFLICT """ + on_conflict + ";", results)
logger.info("%d catalog entries inserted for %s.", db.cursor.rowcount, target_name)

# Mark the target that the catalog has been downloaded:
db.cursor.execute("UPDATE flows.targets SET catalog_downloaded=TRUE,ztf_id=%s WHERE targetid=%s;",
(ztf_id, targetid))
db.conn.commit()
except: # noqa: E722, pragma: no cover
db.conn.rollback()
raise
3 changes: 2 additions & 1 deletion flows/fileio.py
@@ -1,6 +1,6 @@
import os
from pathlib import Path
from typing import Optional, Protocol, Dict, Union
from typing import Optional, Protocol, Dict, TypeVar, Union
from configparser import ConfigParser
from bottleneck import allnan
from tendrils import api, utils
@@ -12,6 +12,7 @@
from .filters import get_reference_filter
logger = create_logger()

DataFileType = TypeVar("DataFileType", bound=dict)

class DirectoryProtocol(Protocol):
archive_local: str
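
fileio.py also gains a dict-bound TypeVar (DataFileType). A short sketch of how a bound TypeVar is typically used to tie a function's return type to the concrete dict subtype the caller passes in (the function below is illustrative, not part of fileio.py):

from typing import Dict, TypeVar

DataFileType = TypeVar("DataFileType", bound=dict)

def first_datafile(datafiles: Dict[int, DataFileType]) -> DataFileType:
    # Whatever dict subclass the caller stores, the type checker knows
    # the same type comes back out.
    return next(iter(datafiles.values()))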
