diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 2159759..74d0130 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -40,7 +40,7 @@ jobs:
     - name: Install dependencies
       run: |
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
         grep "numpy" requirements.txt | xargs -I {} pip install "{}"
         pip install -r requirements.txt
         pip install codecov pytest-cov
@@ -69,7 +69,7 @@ jobs:
     - name: Install dependencies
       run: |
-        python -m pip install --upgrade pip
+        python -m pip install --upgrade pip wheel
         grep "numpy" requirements.txt | xargs -I {} pip install "{}"
         pip install -r requirements.txt
diff --git a/flows/__init__.py b/flows/__init__.py
index f65aed0..488a5bb 100644
--- a/flows/__init__.py
+++ b/flows/__init__.py
@@ -3,7 +3,7 @@
 # flake8: noqa

 from .photometry import photometry
-from .download_catalog import download_catalog
+from .catalogs import download_catalog
 from .visibility import visibility
 from .config import load_config
diff --git a/flows/api/datafiles.py b/flows/api/datafiles.py
index bf8cbce..bf1357f 100644
--- a/flows/api/datafiles.py
+++ b/flows/api/datafiles.py
@@ -33,7 +33,7 @@ def get_datafile(fileid):
     return jsn

 #--------------------------------------------------------------------------------------------------
-def get_datafiles(targetid=None, filt=None):
+def get_datafiles(targetid=None, filt=None, minversion=None):
     """
     Get list of data file IDs to be processed.

@@ -42,6 +42,8 @@ def get_datafiles(targetid=None, filt=None):
         filt (str, optional): Filter the returned list:
             - ``missing``: Return only data files that have not yet been processed.
            - ``'all'``: Return all data files.
+        minversion (str, optional): Special filter matching files not yet processed with
+            at least the specified version (defined internally in the API for now).

     Returns:
         list: List of data files that can be processed.
@@ -64,6 +66,8 @@ def get_datafiles(targetid=None, filt=None):
     params = {}
     if targetid is not None:
         params['targetid'] = targetid
+    if minversion is not None:
+        params['version'] = minversion
     params['filter'] = filt

     r = requests.get('https://flows.phys.au.dk/api/datafiles.php',
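For illustration, a minimal sketch of how the extended API call might be used from the client side. The version string is made up, and the `'version'` filter value comes from the `run_photometry.py` change further down; the actual filtering semantics live server-side:

```python
from flows import api

# Fetch IDs of data files not yet processed with at least this
# pipeline version (the filtering itself happens in the API):
fileids = api.get_datafiles(filt='version', minversion='1.2.0')
print(len(fileids), 'files to re-process')
```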
diff --git a/flows/download_catalog.py b/flows/catalogs.py
similarity index 65%
rename from flows/download_catalog.py
rename to flows/catalogs.py
index a6d498c..a3c91d9 100644
--- a/flows/download_catalog.py
+++ b/flows/catalogs.py
@@ -11,18 +11,21 @@
 import shlex
 import requests
 import numpy as np
+from astropy.time import Time
 from astropy.coordinates import SkyCoord, Angle
 from astropy import units as u
+from astropy.table import Table
+from astroquery.sdss import SDSS
 from .config import load_config
 from .aadc_db import AADC_DB
 from .ztf import query_ztf_id

 #--------------------------------------------------------------------------------------------------
-class CasjobsException(Exception):
+class CasjobsError(RuntimeError):
     pass

 #--------------------------------------------------------------------------------------------------
-class CasjobsMemoryError(Exception):
+class CasjobsMemoryError(RuntimeError):
     pass

 #--------------------------------------------------------------------------------------------------
@@ -54,7 +57,7 @@ def configure_casjobs(overwrite=False):
     wsid = config.get('casjobs', 'wsid', fallback=None)
     passwd = config.get('casjobs', 'password', fallback=None)
     if wsid is None or passwd is None:
-        raise CasjobsException("CasJobs WSID and PASSWORD not in config.ini")
+        raise CasjobsError("CasJobs WSID and PASSWORD not in config.ini")

     try:
         with open(casjobs_config, 'w') as fid:
@@ -189,7 +192,8 @@ def _query_casjobs_refcat2(coo_centre, radius=24*u.arcmin):
     results = []
     for line in output:
         line = line.strip()
-        if line == '': continue
+        if line == '':
+            continue
         if 'ERROR' in line:
             error_thrown = True
             break
@@ -225,10 +229,10 @@ def _query_casjobs_refcat2(coo_centre, radius=24*u.arcmin):
         if 'query results exceed memory limit' in error_msg.lower():
             raise CasjobsMemoryError("Query results exceed memory limit")
         else:
-            raise CasjobsException("ERROR detected in CasJobs: " + error_msg)
+            raise CasjobsError("ERROR detected in CasJobs: " + error_msg)

     if not results:
-        raise CasjobsException("Could not find anything on CasJobs")
+        raise CasjobsError("Could not find anything on CasJobs")

     logger.debug("Found %d results", len(results))
     return results
@@ -253,12 +257,19 @@ def query_apass(coo_centre, radius=24*u.arcmin):
     if isinstance(radius, (float, int)):
         radius *= u.deg

-    r = requests.post('https://www.aavso.org/cgi-bin/apass_dr10_download.pl',
-        data={'ra': coo_centre.ra.deg, 'dec': coo_centre.dec.deg, 'radius': Angle(radius).deg, 'outtype': '1'})
+    data = {
+        'ra': coo_centre.icrs.ra.deg,
+        'dec': coo_centre.icrs.dec.deg,
+        'radius': Angle(radius).deg,
+        'outtype': '1'
+    }
+
+    res = requests.post('https://www.aavso.org/cgi-bin/apass_dr10_download.pl', data=data)
+    res.raise_for_status()

     results = []

-    lines = r.text.split("\n")
+    lines = res.text.split("\n")
     #header = lines[0]

     for line in lines[1:]:
@@ -280,10 +291,47 @@ def query_apass(coo_centre, radius=24*u.arcmin):

     return results

+#--------------------------------------------------------------------------------------------------
+def query_sdss(coo_centre, radius=24*u.arcmin, dr=16, clean=True):
+    """
+    Query the SDSS catalog using a cone-search around the position, using astroquery.
+
+    Parameters:
+        coo_centre (:class:`astropy.coordinates.SkyCoord`): Coordinates of centre of search cone.
+        radius (Angle, optional): Search radius. Default 24 arcmin.
+        dr (int, optional): SDSS Data Release to query. Default=16.
+        clean (bool, optional): Keep only stars with clean photometry and no other problems.
+
+    Returns:
+        :class:`astropy.table.Table`: Table with SDSS information.
+
+    .. codeauthor:: Emir Karamehmetoglu
+    .. codeauthor:: Rasmus Handberg
+    """
+
+    if isinstance(radius, (float, int)):
+        radius *= u.deg
+
+    AT_sdss = SDSS.query_region(coo_centre,
+        photoobj_fields=['type', 'clean', 'ra', 'dec', 'psfMag_u'],
+        data_release=dr,
+        timeout=600,
+        radius=radius)
+
+    if clean:
+        # Clean SDSS following https://www.sdss.org/dr12/algorithms/photo_flags_recommend/
+        # 6 == star; "clean" removes interp, edge, suspicious defects, deblending problems and duplicates.
+        AT_sdss = AT_sdss[(AT_sdss['type'] == 6) & (AT_sdss['clean'] == 1)]
+
+    return AT_sdss
+
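A hedged usage sketch of the new `query_sdss` helper; the pointing is the 2019yvr test position used in the tests below, but any position inside the SDSS footprint works:

```python
import astropy.units as u
from astropy.coordinates import SkyCoord
from flows.catalogs import query_sdss

coo_centre = SkyCoord(ra=256.727512, dec=30.271482, unit='deg', frame='icrs')

# Astropy Table of clean SDSS point sources inside the cone:
sdss_stars = query_sdss(coo_centre, radius=10*u.arcmin)
if sdss_stars is not None:  # astroquery returns None when nothing is found
    print(sdss_stars['ra', 'dec', 'psfMag_u'])
```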
 #--------------------------------------------------------------------------------------------------
 def query_all(coo_centre, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
     """
-    Query all catalogs, and return merged catalog.
+    Query all catalogs (REFCAT2, APASS and SDSS) and return the merged catalog.
+
+    Merging of the catalogs is done using sky coordinates:
+    https://docs.astropy.org/en/stable/coordinates/matchsep.html#matching-catalogs

     Parameters:
         coo_centre (:class:`astropy.coordinates.SkyCoord`): Coordinates of centre of search cone.
@@ -294,65 +342,99 @@ def query_all(coo_centre, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
         list: List of dicts with catalog stars.

     .. codeauthor:: Rasmus Handberg
+    .. codeauthor:: Emir Karamehmetoglu
     """

     # Query the REFCAT2 catalog using CasJobs around the target position:
     results = query_casjobs_refcat2(coo_centre, radius=radius)
+    AT_results = Table(results)

     # Query APASS around the target position:
     results_apass = query_apass(coo_centre, radius=radius)
+    AT_apass = Table(results_apass)

     # Match the two catalogs using coordinates:
     # https://docs.astropy.org/en/stable/coordinates/matchsep.html#matching-catalogs
-    ra = np.array([r['ra'] for r in results])
-    decl = np.array([r['decl'] for r in results])
-    refcat = SkyCoord(ra=ra, dec=decl, unit=u.deg, frame='icrs')
+    #ra = np.array([r['ra'] for r in results])
+    #decl = np.array([r['decl'] for r in results])
+    refcat = SkyCoord(ra=AT_results['ra'], dec=AT_results['decl'], unit=u.deg, frame='icrs')

-    ra_apass = np.array([r['ra'] for r in results_apass])
-    decl_apass = np.array([r['decl'] for r in results_apass])
-    apass = SkyCoord(ra=ra_apass, dec=decl_apass, unit=u.deg, frame='icrs')
+    #ra_apass = np.array([r['ra'] for r in results_apass])
+    #decl_apass = np.array([r['decl'] for r in results_apass])
+    apass = SkyCoord(ra=AT_apass['ra'], dec=AT_apass['decl'], unit=u.deg, frame='icrs')

     # Match the two catalogs:
     idx, d2d, _ = apass.match_to_catalog_sky(refcat)
+    # Reject any match further away than the cutoff:
+    sep_constraint = d2d <= dist_cutoff
+    idx_apass = np.arange(len(idx))  # since idx maps apass to refcat
+
+    # Update the results table with the APASS bands of interest:
+    AT_results.add_columns([None, None, None], names=['B_mag', 'V_mag', 'u_mag'])  # Results table does not have uBV
+    AT_results['B_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['B_mag']
+    AT_results['V_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['V_mag']
+    AT_results['u_mag'][idx[sep_constraint]] = AT_apass[idx_apass[sep_constraint]]['u_mag']
+
+    # Create the SDSS catalog:
+    AT_sdss = query_sdss(coo_centre, radius=radius)
+    sdss = SkyCoord(ra=AT_sdss['ra'], dec=AT_sdss['dec'], unit=u.deg, frame='icrs')
+
+    # Match to within dist_cutoff angular sky distance:
+    idx, d2d, _ = sdss.match_to_catalog_sky(refcat)
+    sep_constraint = d2d <= dist_cutoff
+    idx_sdss = np.arange(len(idx))  # since idx maps sdss to refcat
+    # TODO: Maybe don't (potentially) overwrite the APASS u-band with the SDSS u-band. Decide which is better.
+    AT_results['u_mag'][idx[sep_constraint]] = AT_sdss[idx_sdss[sep_constraint]]['psfMag_u']
+
+    # # Go through the matches and make sure they are valid:
+    # for k, i in enumerate(idx):
+    #     # If APASS doesn't contain any new information anyway, skip it:
+    #     if results_apass[k]['B_mag'] is None and results_apass[k]['V_mag'] is None \
+    #         and results_apass[k]['u_mag'] is None:
+    #         continue
+    #
+    #     # Reject any match further away than the cutoff:
+    #     if d2d[k] > dist_cutoff:
+    #         continue
+    #
+    #     # TODO: Use the overlapping magnitudes to make better match:
+    #     #photdist = 0
+    #     #for photfilt in ('g_mag', 'r_mag', 'i_mag', 'z_mag'):
+    #     #    if results_apass[k][photfilt] and results[i][photfilt]:
+    #     #        photdist += (results[i][photfilt] - results_apass[k][photfilt])**2
+    #     #print( np.sqrt(photdist) )
+    #
+    #     # Update the results "table" with the APASS filters:
+    #     results[i].update({
+    #         'V_mag': results_apass[k]['V_mag'],
+    #         'B_mag': results_apass[k]['B_mag'],
+    #         'u_mag': results_apass[k]['u_mag']
+    #     })
+    #
+    # # Fill in empty fields where nothing was matched:
+    # for k in range(len(results)):
+    #     if 'V_mag' not in results[k]:
+    #         results[k].update({
+    #             'B_mag': None,
+    #             'V_mag': None,
+    #             'u_mag': None
+    #         })
+
+    # TODO: Adjust receiving functions so we can just pass the astropy table instead.
+    return [dict(zip(AT_results.colnames, row)) for row in AT_results]

-    # Go through the matches and make sure they are valid:
-    for k, i in enumerate(idx):
-        # If APASS doesn't contain any new information anyway, skip it:
-        if results_apass[k]['B_mag'] is None and results_apass[k]['V_mag'] is None \
-            and results_apass[k]['u_mag'] is None:
-            continue
-
-        # Reject any match further away than the cutoff:
-        if d2d[k] > dist_cutoff:
-            continue
-
-        # TODO: Use the overlapping magnitudes to make better match:
-        #photdist = 0
-        #for photfilt in ('g_mag', 'r_mag', 'i_mag', 'z_mag'):
-        #    if results_apass[k][photfilt] and results[i][photfilt]:
-        #        photdist += (results[i][photfilt] - results_apass[k][photfilt])**2
-        #print( np.sqrt(photdist) )
-
-        # Update the results "table" with the APASS filters:
-        results[i].update({
-            'V_mag': results_apass[k]['V_mag'],
-            'B_mag': results_apass[k]['B_mag'],
-            'u_mag': results_apass[k]['u_mag']
-        })
-
-    # Fill in empty fields where nothing was matched:
-    for k in range(len(results)):
-        if 'V_mag' not in results[k]:
-            results[k].update({
-                'B_mag': None,
-                'V_mag': None,
-                'u_mag': None
-            })
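The matching pattern used above, shown in isolation. This is a sketch with made-up coordinates, following the astropy catalog-matching documentation linked in the docstring:

```python
import astropy.units as u
from astropy.coordinates import SkyCoord

# Two tiny made-up catalogs; refcat plays the role of REFCAT2:
refcat = SkyCoord(ra=[10.001, 10.500], dec=[41.001, 41.500], unit='deg')
apass = SkyCoord(ra=[10.0012, 12.000], dec=[41.0011, 43.000], unit='deg')

# For every APASS star, find the nearest REFCAT2 star on the sky:
idx, d2d, _ = apass.match_to_catalog_sky(refcat)

# Keep only matches closer than the cutoff (here 2 arcsec):
sep_constraint = d2d <= 2*u.arcsec
print(idx[sep_constraint])  # refcat rows that receive APASS photometry
```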
+#--------------------------------------------------------------------------------------------------
+def download_catalog(target=None, radius=24*u.arcmin, radius_ztf=3*u.arcsec, dist_cutoff=2*u.arcsec):
+    """
+    Download reference star catalogs and save them to the Flows database.

-    return results
+    Parameters:
+        target (str or int): Target identifier to download catalog for.
+        radius (Angle, optional): Radius around target to download catalogs.
+        radius_ztf (Angle, optional): Radius around target to search for ZTF identifier.
+        dist_cutoff (Angle, optional): Distance cutoff used for matching catalog positions.

-#--------------------------------------------------------------------------------------------------
-def download_catalog(target=None, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
+    .. codeauthor:: Rasmus Handberg
+    """

     logger = logging.getLogger(__name__)

@@ -360,16 +442,19 @@ def download_catalog(target=None, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):

         # Get the information about the target from the database:
         if target is not None and isinstance(target, int):
-            db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE targetid=%s;", [target])
+            db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE targetid=%s;", [target])
         elif target is not None:
-            db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE target_name=%s;", [target])
+            db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE target_name=%s;", [target])
         else:
-            db.cursor.execute("SELECT targetid,target_name,ra,decl FROM flows.targets WHERE catalog_downloaded=FALSE;")
+            db.cursor.execute("SELECT targetid,target_name,ra,decl,discovery_date FROM flows.targets WHERE catalog_downloaded=FALSE;")

         for row in db.cursor.fetchall():
             # The unique identifier of the target:
             targetid = int(row['targetid'])
             target_name = row['target_name']
+            dd = row['discovery_date']
+            if dd is not None:
+                dd = Time(dd, format='iso', scale='utc')

             # Coordinate of the target, which is the centre of the search cone:
             coo_centre = SkyCoord(ra=row['ra'], dec=row['decl'], unit=u.deg, frame='icrs')

@@ -377,11 +462,10 @@ def download_catalog(target=None, radius=24*u.arcmin, dist_cutoff=2*u.arcsec):
             results = query_all(coo_centre, radius=radius, dist_cutoff=dist_cutoff)

             # Query for a ZTF identifier for this target:
-            ztf_id = query_ztf_id(coo_centre, radius=radius)
+            ztf_id = query_ztf_id(coo_centre, radius=radius_ztf, discovery_date=dd)

             # Insert the catalog into the local database:
             try:
-                #db.cursor.execute("TRUNCATE flows.refcat2;")
                 db.cursor.executemany("""INSERT INTO flows.refcat2 (
                     starid,
                     ra,
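Assuming database credentials are configured, the new signature might be invoked roughly like this; the target name is illustrative:

```python
import astropy.units as u
from flows.catalogs import download_catalog

# Download and ingest reference stars for a single target, using a
# tight 3 arcsec cone when searching for the matching ZTF identifier:
download_catalog(target='2019yvr', radius=24*u.arcmin, radius_ztf=3*u.arcsec)
```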
diff --git a/flows/tns.py b/flows/tns.py
index 4bbe2e8..1c7c4a8 100644
--- a/flows/tns.py
+++ b/flows/tns.py
@@ -178,6 +178,9 @@ def tns_getnames(months=None, date_begin=None, date_end=None, zmin=None, zmax=None):
     if months is not None and date_end is not None and date_end < date_now - datetime.timedelta(days=months*30):
         logger.warning('Months limit restricts days_begin, consider increasing limit_months.')

+    # API key for Bot
+    tnsconf = _load_tns_config()
+
     # Parameters for query:
     params = {
         'discovered_period_value': months,  # Reported Within The Last
@@ -256,7 +259,8 @@ def tns_getnames(months=None, date_begin=None, date_end=None, zmin=None, zmax=None):
     }

     # Query TNS for names:
-    con = requests.get(url_tns_search, params=params)
+    headers = {'user-agent': tnsconf['user-agent']}
+    con = requests.get(url_tns_search, params=params, headers=headers)
     con.raise_for_status()

     # Parse the CSV table:
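A usage sketch of the modified function; parameter values are invented, and the call presumes TNS bot credentials (including the user-agent now sent with the request) are present in config.ini:

```python
from flows.tns import tns_getnames

# Targets reported to TNS within the last two months, with 0 <= z <= 0.1.
# The bot's user-agent from the TNS config is attached to the request:
names = tns_getnames(months=2, zmin=0, zmax=0.1)
```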
diff --git a/flows/ztf.py b/flows/ztf.py
index 008e61f..dc74d8e 100644
--- a/flows/ztf.py
+++ b/flows/ztf.py
@@ -12,12 +12,13 @@
 import astropy.units as u
 from astropy.coordinates import Angle, SkyCoord
 from astropy.table import Table
+from astropy.time import Time
 import datetime
 import requests
 from . import api

 #--------------------------------------------------------------------------------------------------
-def query_ztf_id(coo_centre, radius=1*u.arcmin):
+def query_ztf_id(coo_centre, radius=3*u.arcsec, discovery_date=None):
     """
     Query ALeRCE ZTF api to lookup ZTF identifier.

@@ -26,7 +27,11 @@ def query_ztf_id(coo_centre, radius=1*u.arcmin):
     Parameters:
         coo_centre (:class:`astropy.coordinates.SkyCoord`): Coordinates of centre of search cone.
-        radius (Angle, optional): Search radius. Default 1 arcmin.
+        radius (Angle, optional): Search radius. Default 3 arcsec.
+        discovery_date (:class:`astropy.time.Time`, optional): Discovery date of target to
+            match against ZTF. The date is compared to the first ZTF timestamp; ZTF targets
+            are rejected if that timestamp is not between 15 days before and 90 days after
+            the discovery date.

     Returns:
         str: ZTF identifier.
@@ -62,6 +67,18 @@ def query_ztf_id(coo_centre, radius=1*u.arcmin):
     if not results:
         return None

+    # Constrain on the discovery date if it is provided:
+    if discovery_date is not None:
+        # Extract the time of the first ZTF timestamp and compare it with
+        # the discovery time:
+        firstmjd = Time([itm['firstmjd'] for itm in results], format='mjd', scale='utc')
+        tdelta = firstmjd.utc.mjd - discovery_date.utc.mjd
+
+        # Only keep results that are within the margins:
+        results = [itm for k, itm in enumerate(results) if -15 <= tdelta[k] <= 90]
+        if not results:
+            return None
+
     # Find target closest to the centre:
     coords = SkyCoord(
         ra=[itm['meanra'] for itm in results],
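A sketch of the new discovery-date constraint in use; the coordinates and date below are placeholders, not a known ZTF object:

```python
from astropy.coordinates import SkyCoord
from astropy.time import Time
from flows.ztf import query_ztf_id

coo_centre = SkyCoord(ra=256.727512, dec=30.271482, unit='deg', frame='icrs')
dd = Time('2019-12-27 12:30:14', format='iso', scale='utc')

# Only accept ZTF objects whose first detection falls between 15 days
# before and 90 days after the discovery date; returns str or None:
ztf_id = query_ztf_id(coo_centre, discovery_date=dd)
```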
diff --git a/notes/fix_ztf_ids.py b/notes/fix_ztf_ids.py
index d2c356e..e8df20c 100644
--- a/notes/fix_ztf_ids.py
+++ b/notes/fix_ztf_ids.py
@@ -1,8 +1,12 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-

+import sys
+import os.path
 from tqdm import tqdm
 from astropy.coordinates import SkyCoord
+if os.path.abspath('..') not in sys.path:
+    sys.path.insert(0, os.path.abspath('..'))
 import flows

 #--------------------------------------------------------------------------------------------------
@@ -14,8 +18,12 @@
         targetid = target['targetid']

         coord = SkyCoord(ra=target['ra'], dec=target['decl'], unit='deg', frame='icrs')
-        ztf_id = flows.ztf.query_ztf_id(coord)
+        dd = target['discovery_date']

+        # Query for the ZTF id:
+        ztf_id = flows.ztf.query_ztf_id(coord, discovery_date=dd)
+
+        # If the ZTF id is not the same as we have currently, update it in the database:
         if ztf_id != target['ztf_id']:
             print(target)
             print(ztf_id)
diff --git a/requirements.txt b/requirements.txt
index 1c17ebb..136d3ac 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,10 +1,10 @@
 pytest
 flake8
-flake8-tabs !=2.3.0,!=2.3.1
+flake8-tabs >= 2.3.2
 flake8-builtins
 flake8-logging-format
 numpy >= 1.16
-scipy
+scipy == 1.5.4
 astropy == 4.1
 photutils == 1.1.0; python_version >= '3.7'
 photutils == 1.0.2; python_version < '3.7'
@@ -18,10 +18,11 @@ PyYAML
 psycopg2-binary
 jplephem
 vtk
-scikit-image
+scikit-image == 0.17.2
 tqdm
 pytz
 git+https://github.com/obscode/imagematch.git@photutils#egg=imagematch
 sep
 astroalign > 2.3
 networkx
+astroquery >= 0.4.2
diff --git a/run_ingest.py b/run_ingest.py
index 98fd1ea..44da4ef 100644
--- a/run_ingest.py
+++ b/run_ingest.py
@@ -518,6 +518,10 @@ def ingest_photometry_from_inbox():
             indx_sub = (tab['starid'] == -1)
             indx_ref = (tab['starid'] > 0)

+            # Faintest reference star detected (None if no finite magnitudes):
+            frd = float(np.nanmax(tab[indx_ref]['mag']))
+            if not np.isfinite(frd):
+                frd = None
+
             phot_summary = {
                 'fileid_img': fileid_img,
                 'fileid_phot': fileid,
@@ -537,7 +541,7 @@ def ingest_photometry_from_inbox():
                 'seeing': float(tab.meta['seeing'].value),
                 'references_detected': int(np.sum(indx_ref)),
                 'used_for_epsf': int(np.sum(tab['used_for_epsf'])),
-                'faintest_reference_detected': float(np.max(tab[indx_ref]['mag'])),
+                'faintest_reference_detected': frd,
                 'pipeline_version': tab.meta['version'],
                 'latest_version': new_version
             }
diff --git a/run_photometry.py b/run_photometry.py
index 6d3a3e8..3a69291 100644
--- a/run_photometry.py
+++ b/run_photometry.py
@@ -88,19 +88,16 @@ def main():
     group = parser.add_argument_group('Selecting which files to process')
     group.add_argument('--fileid', type=int, default=None, action='append', help="Process this file ID. Overrides all other filters.")
     group.add_argument('--targetid', type=int, default=None, action='append', help="Only process files from this target.")
-    group.add_argument('--filter', type=str, default=None, choices=['missing', 'all', 'error'])
+    group.add_argument('--filter', type=str, default=None, choices=['missing', 'all', 'error', 'version'])
+    group.add_argument('--minversion', type=str, default=None, help="Include files not previously processed with at least this version.")

     group = parser.add_argument_group('Processing settings')
     group.add_argument('--threads', type=int, default=1, help="Number of parallel threads to use.")
-    group.add_argument('--no-imagematch', help="Disable ImageMatch.", action='store_true')
-    group.add_argument('--autoupload',
-        help="Automatically upload completed photometry to Flows website. \
-        Only do this, if you know what you are doing!",
-        action='store_true')
-    group.add_argument('--fixposdiff',
-        help="Fix SN position during PSF photometry of difference image. \
-        Useful when difference image is noisy.",
-        action='store_true')
+    group.add_argument('--no-imagematch', action='store_true', help="Disable ImageMatch.")
+    group.add_argument('--autoupload', action='store_true',
+        help="Automatically upload completed photometry to the Flows website. Only do this if you know what you are doing!")
+    group.add_argument('--fixposdiff', action='store_true',
+        help="Fix SN position during PSF photometry of a difference image. Useful when the difference image is noisy.")
     group.add_argument('--wcstimeout', type=int, default=None, help="Timeout in seconds for WCS.")
     args = parser.parse_args()

@@ -135,9 +132,12 @@ def main():
         fileids = args.fileid
     else:
         # Ask the API for a list of fileids which are yet to be processed:
-        fileids = []
-        for targid in args.targetid:
-            fileids += api.get_datafiles(targetid=targid, filt=args.filter)
+        if args.targetid is not None:
+            fileids = []
+            for targid in args.targetid:
+                fileids += api.get_datafiles(targetid=targid, filt=args.filter, minversion=args.minversion)
+        else:
+            fileids = api.get_datafiles(filt=args.filter, minversion=args.minversion)

     # Remove duplicates from fileids to be processed:
     fileids = list(set(fileids))
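Taken together, the new filter could be driven from the command line roughly like so; the version string is invented:

```bash
# Re-run photometry on every file last processed by a pipeline older
# than 1.2.0, using four worker threads:
python run_photometry.py --filter=version --minversion=1.2.0 --threads=4
```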
diff --git a/tests/test_catalogs.py b/tests/test_catalogs.py
new file mode 100644
index 0000000..c967aa1
--- /dev/null
+++ b/tests/test_catalogs.py
@@ -0,0 +1,75 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+"""
+Test catalog queries.
+
+.. codeauthor:: Rasmus Handberg
+"""
+
+import pytest
+import numpy as np
+from astropy.coordinates import SkyCoord
+#from astropy.table import Table
+import conftest  # noqa: F401
+from flows import catalogs
+
+#--------------------------------------------------------------------------------------------------
+def test_download_catalog(SETUP_CONFIG):
+
+    # Check if CasJobs has been configured, and skip the entire test if it isn't.
+    # This has to be done like this to avoid problems when config.ini doesn't exist.
+    try:
+        catalogs.configure_casjobs()
+    except catalogs.CasjobsError:
+        pytest.skip("CasJobs not configured")
+
+    # Coordinates around test-object (2019yvr):
+    coo_centre = SkyCoord(
+        ra=256.727512,
+        dec=30.271482,
+        unit='deg',
+        frame='icrs'
+    )
+
+    results = catalogs.query_all(coo_centre)
+    #print(tab)
+
+    #assert isinstance(tab, Table), "Should return a Table"
+    #results = [dict(zip(tab.colnames, row)) for row in tab.filled()]
+
+    assert isinstance(results, list), "Should return a list"
+    for obj in results:
+        assert isinstance(obj, dict), "Each element should be a dict"
+
+        # Check columns:
+        assert 'starid' in obj and obj['starid'] > 0
+        assert 'ra' in obj and np.isfinite(obj['ra'])
+        assert 'decl' in obj and np.isfinite(obj['decl'])
+        assert 'pm_ra' in obj
+        assert 'pm_dec' in obj
+        assert 'gaia_mag' in obj
+        assert 'gaia_bp_mag' in obj
+        assert 'gaia_rp_mag' in obj
+        assert 'gaia_variability' in obj
+        assert 'B_mag' in obj
+        assert 'V_mag' in obj
+        assert 'u_mag' in obj
+        assert 'g_mag' in obj
+        assert 'r_mag' in obj
+        assert 'i_mag' in obj
+        assert 'z_mag' in obj
+        assert 'H_mag' in obj
+        assert 'J_mag' in obj
+        assert 'K_mag' in obj
+
+        # All values should be finite numbers or None:
+        for key, val in obj.items():
+            if key not in ('starid', 'gaia_variability'):
+                assert val is None or np.isfinite(val), f"{key} is not a valid value: {val}"
+
+    # TODO: Manually check a target from this position if the merge is correct.
+    #assert False
+
+#--------------------------------------------------------------------------------------------------
+if __name__ == '__main__':
+    pytest.main([__file__])
diff --git a/tests/test_ztf.py b/tests/test_ztf.py
index c6f2516..fa0c550 100644
--- a/tests/test_ztf.py
+++ b/tests/test_ztf.py
@@ -10,6 +10,7 @@
 import numpy as np
 from astropy.table import Table
 from astropy.coordinates import SkyCoord
+from astropy.time import Time
 import tempfile
 import os
 from conftest import capture_cli
@@ -27,6 +28,16 @@ def test_ztf_id():
     ztfid = ztf.query_ztf_id(coo_centre)
     assert ztfid == 'ZTF20aabqkxs'

+    # With the correct discovery date we should get the same result:
+    dd = Time('2019-12-27 12:30:14', format='iso', scale='utc')
+    ztfid = ztf.query_ztf_id(coo_centre, discovery_date=dd)
+    assert ztfid == 'ZTF20aabqkxs'
+
+    # With a wrong discovery date, we should not get a ZTF id:
+    dd = Time('2021-12-24 18:00:00', format='iso', scale='utc')
+    ztfid = ztf.query_ztf_id(coo_centre, discovery_date=dd)
+    assert ztfid is None
+
     coo_centre = SkyCoord(
         ra=181.6874198,
         dec=67.1649528,
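The new and changed tests can be run directly with pytest; note that test_download_catalog skips itself when CasJobs is not configured:

```bash
pytest tests/test_catalogs.py tests/test_ztf.py
```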