Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 29 additions & 30 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,35 +15,34 @@ jobs:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: ["3.8", "3.9", "3.10"]
python-version: ["3.10", "3.11", "3.12"]

steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Prepare conda
run: |
export PATH="$CONDA/bin:$PATH"
conda config --set always_yes yes --set changeps1 no
conda update -q conda
conda info -a
conda init bash
- name: Install dependencies
run: |
export PATH="$CONDA/bin:$PATH"
conda create -q -n testenv python=${{ matrix.python-version }} numpy hpgeom astropy healsparse fitsio esutil LSSTDESC.Coord pyyaml setuptools_scm setuptools_scm_git_archive flake8 pytest pytest-flake8 -c conda-forge
source activate testenv
pip install --no-deps .
- name: Lint with flake8
run: |
export PATH="$CONDA/bin:$PATH"
source activate testenv
# stop the build if it fails flake8 with default setup.cfg
flake8 . --count --show-source --statistics
- name: Test with pytest
run: |
export PATH="$CONDA/bin:$PATH"
source activate testenv
pytest
- uses: actions/checkout@v4
with:
# Need to clone everything to determine version from git.
fetch-depth: 0

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: "3.12"
cache: "pip"
cache-dependency-path: "requirements.txt"

- name: Build and install
run: |
python -m pip install --upgrade pip setuptools
python -m pip install pytest flake8
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
python -m pip install .

- name: Lint with flake8
run: |
# stop the build if it fails flake8 with default pyproject.toml
flake8 . --count --show-source --statistics

- name: Run tests
run: |
cd tests
pytest
9 changes: 4 additions & 5 deletions LICENSE
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
Copyright (c) 2020, Eli Rykoff (LSST Dark Energy Science
Collaboration)
Copyright (c) 2020, Eli Rykoff (LSST Dark Energy Science Collaboration)
All rights reserved.

Redistribution and use in source and binary forms, with or without
Expand All @@ -12,9 +11,9 @@ modification, are permitted provided that the following conditions are met:
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.

* Neither the name of supreme nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
* Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software without
specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
Expand Down
10 changes: 7 additions & 3 deletions decasu/configuration.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,9 +35,9 @@ def _default_dec_corner_fields():


@dataclass
class Configuration(object):
class Configuration:
"""
Decasu configuration object.
Decasu configuration class.
"""
# Mandatory fields
outbase: str
Expand All @@ -51,6 +51,7 @@ class Configuration(object):
extra_fields: Dict[str, str] = field(default_factory=_default_extra_fields)
band_replacement: Dict[str, str] = field(default_factory=_default_band_replacement)
use_lsst_db: bool = False
use_lsst_consdb: bool = False
lsst_db_additional_selection: str = ""
time_bin: int = -1
border: int = 15
Expand All @@ -71,6 +72,7 @@ class Configuration(object):
band_field: str = 'band'
mjd_field: str = 'mjd_obs'
skyvar_field: str = 'skyvar'
fwhm_field: str = 'fwhm'
bad_amps: Dict[int, list] = field(default_factory=_default_bad_amps)
bad_ccds: List[int] = field(default_factory=_default_bad_ccds)
latitude: float = -30.1690
Expand All @@ -85,11 +87,13 @@ def __post_init__(self):
self._validate()

def _validate(self):
if self.use_lsst_db:
if self.use_lsst_db or self.use_lsst_consdb:
try:
import lsst.obs.lsst # noqa: F401
except ImportError:
raise RuntimeError("Cannot use lsst db without Rubin Science Pipelines setup.")
if self.use_lsst_db and self.use_lsst_consdb:
raise RuntimeError("Cannot set both use_lsst_db and lsst_use_consdb.")

if self.use_two_amps and self.mask_lsstcam_bad_amps:
raise RuntimeError("Cannot set both use_two_amps and mask_lsstcam_bad_amps.")
Expand Down
17 changes: 15 additions & 2 deletions decasu/decasu_hpix_mapper.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,24 +11,36 @@ def main():
parser.add_argument('-c', '--configfile', action='store', type=str, required=True,
help='YAML config file')
parser.add_argument('-i', '--infile', action='store', type=str, required=True,
help='Input fits or database file')
help='Input fits or database file or database connection string')
parser.add_argument('-b', '--bands', action='store', type=str, required=False,
help='Bands to generate map for, comma delimited')
parser.add_argument('-n', '--ncores', action='store', type=int, required=False,
default=1, help='Number of cores to run on.')
parser.add_argument('-o', '--outputpath', action='store', type=str, required=True,
help='Output path')
parser.add_argument('-B', '--outputbase', action='store', type=str, required=False,
help='Output filename base; will replace outbase in config.')
parser.add_argument('-p', '--pixels', action='store', type=str, required=False,
help='Pixels to run on, comma delimited')
parser.add_argument('-s', '--simple', action='store_true', required=False,
help='Run in simple mode (nexp only)')
parser.add_argument('-k', '--keep_intermediate_files', action='store_true',
required=False, help='Keep intermediate files')
parser.add_argument('-q', '--query', required=False,
help='Additional query string; will replace lsst_db_additional_selection config.')
parser.add_argument('-m', '--make_map_images', action='store_true', required=False,
help='Automatically make skyproj map images?')

args = parser.parse_args()

config = Configuration.load_yaml(args.configfile)

if args.outputbase is not None:
config.outbase = args.outputbase

if args.query is not None:
config.lsst_db_additional_selection = args.query

if args.bands is None:
bands = []
else:
Expand All @@ -45,4 +57,5 @@ def main():
else:
mapper = MultiHealpixMapper(config, args.outputpath, ncores=args.ncores)
mapper(args.infile, bands=bands, pixels=pixels,
clear_intermediate_files=not args.keep_intermediate_files)
clear_intermediate_files=not args.keep_intermediate_files,
make_map_images=args.make_map_images)
30 changes: 25 additions & 5 deletions decasu/healpix_consolidator.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import healsparse


class HealpixConsolidator(object):
class HealpixConsolidator:
"""
Consolidate several maps into one.

Expand All @@ -13,25 +13,45 @@ class HealpixConsolidator(object):
clear_intermediate_files : `bool`
Clear input files when done?
"""
def __init__(self, config, clear_intermediate_files):
def __init__(self, config, clear_intermediate_files, make_map_images=False):
self.config = config
self.clear_intermediate_files = clear_intermediate_files
self.make_map_images = make_map_images

def __call__(self, fname, mapfiles):
def __call__(self, fname, mapfiles, descr):
"""
Consolidate a list of mapfiles, and delete input mapfiles
if clear_intermediate_files is True.

Parameters
----------
fname : `str`
Output filename
Output filename
mapfiles : `list`
Input list of files
Input list of files
descr : `str`
Description string.
"""
print("Consolidating %d maps into %s" % (len(mapfiles), fname))
healsparse.cat_healsparse_files(mapfiles, fname)

if self.make_map_images:
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.figure import Figure
import skyproj

m = healsparse.HealSparseMap.read(fname)

fig = Figure(figsize=(10, 6))
FigureCanvasAgg(fig)
ax = fig.add_subplot(111)

sp = skyproj.McBrydeSkyproj(ax=ax)
sp.draw_hspmap(m, zoom=True)
sp.draw_colorbar(label=descr)
skyprojfile = fname.replace(".hsp", "_skyproj.png")
fig.savefig(skyprojfile)

if self.clear_intermediate_files:
for f in mapfiles:
os.unlink(f)
165 changes: 165 additions & 0 deletions decasu/lsst_wcs_consdb.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,165 @@
import numpy as np
import hpgeom as hpg

from astropy.table import Table
import astropy.units as units
from astropy.time import Time
from astropy.coordinates import EarthLocation

from . import decasu_globals
from .utils import compute_visit_iqr_and_optics_scale

try:
import lsst.obs.lsst
import lsst.sphgeom
import psycopg
lsst_imported = True
except ImportError:
lsst_imported = False


class LsstWcsConsDbBuilder:
    """
    Build a WCS table from the LSST Consolidated Database (ConsDB) and get
    intersecting pixels.

    On construction this queries the ConsDB, assembles an astropy table of
    per-detector-visit metadata (with derived LST, sky variance, and FWHM
    columns), and publishes the table and LSSTCam camera object through
    ``decasu_globals`` so worker processes can share them.

    Parameters
    ----------
    config : `Configuration`
        decasu configuration object.
    dbstring : `str`
        Database connection string (passed to ``psycopg.Connection.connect``).
    bands : `list`
        Bands to run. Empty list means use all.
    compute_pixels : `bool`, optional
        Compute pixels when rendering WCS?
    """
    def __init__(self, config, dbstring, bands, compute_pixels=True):
        # Fail fast if the Rubin Science Pipelines / psycopg imports at module
        # scope did not succeed; everything below depends on them.
        if not lsst_imported:
            raise RuntimeError("Cannot use LsstWcsConsDbBuilder without Rubin Science Pipelines.")

        self.config = config
        self.compute_pixels = compute_pixels

        # Join the per-detector quicklook metrics (ccdvisit1_quicklook) with
        # the detector-visit (ccdvisit1) and visit (visit1) tables.
        query_string = (
            "SELECT cvq.eff_time, cvq.psf_sigma, "
            "cvq.sky_bg, cvq.sky_noise, cvq.zero_point, "
            "cv.detector, cv.visit_id, cv.s_region, "
            "v.band, v.exp_time, v.exp_midpt_mjd, v.sky_rotation "
            "FROM cdb_LSSTCam.ccdvisit1_quicklook as cvq, cdb_LSSTCam.ccdvisit1 as cv, "
            "cdb_LSSTCam.visit1 as v "
        )
        # NOTE(review): detector<189 presumably restricts to the LSSTCam
        # science detectors (excluding corner-raft sensors) -- confirm.
        # Rows without a zero point are unusable downstream, so skip them.
        where_string = (
            "WHERE cvq.ccdvisit_id=cv.ccdvisit_id and "
            "cv.visit_id=v.visit_id and "
            "detector<189 and cvq.zero_point is not null "
        )

        # Optional additional selection is appended verbatim as a raw SQL
        # fragment from the configuration.
        if len(self.config.lsst_db_additional_selection) > 0:
            where_string = where_string + " and " + self.config.lsst_db_additional_selection

        if len(bands) > 0:
            where_string = where_string + " and v.band in (" + ",".join([f"'{band}'" for band in bands]) + ")"

        where_string = where_string + f" and v.exp_midpt_mjd >= {self.config.mjd_min}"
        where_string = where_string + f" and v.exp_midpt_mjd <= {self.config.mjd_max}"

        query_string = query_string + where_string + ";"

        with psycopg.Connection.connect(dbstring) as conn:
            cur = conn.execute(query_string)
            rows = cur.fetchall()

        # Field order here must match the SELECT column order above; note the
        # renames exp_time -> exptime and exp_midpt_mjd -> mjd.
        db_table = Table(
            np.asarray(
                rows,
                dtype=[
                    ("eff_time", "f4"),
                    ("psf_sigma", "f4"),
                    ("sky_bg", "f4"),
                    ("sky_noise", "f4"),
                    ("zero_point", "f4"),
                    ("detector", "i4"),
                    ("visit_id", "i8"),
                    ("s_region", "U200"),
                    ("band", "U2"),
                    ("exptime", "f4"),
                    ("mjd", "f8"),
                    ("sky_rotation", "f4"),
                ],
            ),
        )

        # An empty bands list means "use every band present in the query".
        if len(bands) == 0:
            self.bands = np.unique(db_table["band"])
        else:
            self.bands = bands

        print(f"Found {len(db_table)} detector visits for {len(self.bands)} bands.")

        # Add extra columns.
        # Units of degrees (filled in below from the computed LST).
        db_table["decasu_lst"] = np.zeros(len(db_table))
        # Units of electrons.
        db_table["skyvar"] = db_table["sky_noise"]**2.
        # Units of arcsec; 2.355 ~= 2*sqrt(2*ln 2) converts a Gaussian sigma
        # to FWHM, and arcsec_per_pix converts pixels to arcsec.
        db_table[config.fwhm_field] = 2.355*config.arcsec_per_pix*db_table["psf_sigma"]

        print("Computing local sidereal time...")
        loc = EarthLocation(lat=config.latitude*units.degree,
                            lon=config.longitude*units.degree,
                            height=config.elevation*units.m)

        t = Time(db_table[config.mjd_field], format="mjd", location=loc)
        lst = t.sidereal_time("apparent")
        db_table["decasu_lst"][:] = lst.to_value(units.degree)

        # Compute a couple of additional psf quantities; these columns are
        # filled in place by compute_visit_iqr_and_optics_scale below.
        db_table[f"{config.fwhm_field}_iqr"] = np.zeros(len(db_table))
        db_table[f"{config.fwhm_field}_optics_scale"] = np.zeros(len(db_table))

        print('Computing fwhm scaled properties...')
        compute_visit_iqr_and_optics_scale(self.config, db_table)

        instrument = lsst.obs.lsst.LsstCam()
        camera = instrument.getCamera()

        # Publish via module globals so multiprocessing workers can access
        # the (large) table and camera without pickling them per task.
        decasu_globals.table = db_table
        decasu_globals.lsst_camera = camera

    def __call__(self, row):
        """
        Compute intersecting pixels for one row.

        Parameters
        ----------
        row : `int`
            Row to compute intersecting pixels.

        Returns
        -------
        wcs : `int`
            Placeholder (always 0), kept for interface compatibility.
        pixels : `list`
            List of nside = `config.nside_run` intersecting pixels.
            Returned only if compute_pixels is True in initialization.
        center : `list` [`float`]
            [longitude, latitude] of the region centroid, in degrees.
        """
        if (row % 10000) == 0:
            print("Working on WCS index %d" % (row))

        # Link to global table (set in __init__, shared across workers).
        self.table = decasu_globals.table

        region_str = self.table["s_region"][row]

        # s_region is an IVOA POS string (e.g. "POLYGON ICRS lon lat ...");
        # drop the "ICRS" frame token and uppercase before parsing.
        region = lsst.sphgeom.Region.from_ivoa_pos("".join(region_str.split("ICRS")).upper())
        centroid = lsst.sphgeom.LonLat(region.getCentroid())
        center = [centroid.getLon().asDegrees(), centroid.getLat().asDegrees()]

        if self.compute_pixels:
            # Inclusive polygon query: return every nside_run pixel that
            # overlaps the region, not just those with centers inside it.
            vertices = np.asarray([[v.x(), v.y(), v.z()] for v in region.getVertices()])
            pixels = hpg.query_polygon_vec(self.config.nside_run, vertices, inclusive=True, fact=16)
            return 0, pixels, center
        else:
            return 0, center
Loading