
Commit

Reformat with black
mkelley committed Nov 8, 2024
1 parent 776cbea commit 28030c2
Showing 10 changed files with 41 additions and 67 deletions.
3 changes: 1 addition & 2 deletions _precommit_hook
@@ -14,6 +14,5 @@ if [ $DONT_FORMAT_ON_CODE_COMMIT ]; then
 """
 else
     # Auto-format all python scripts
-    .venv/bin/autopep8 -ir sbn_survey_image_service/**
-    .venv/bin/autopep8 -ir tests/**
+    .venv/bin/black sbn_survey_image_service/**
 fi
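As an aside, the reformatted hook hands all auto-formatting to black. A rough sketch of the equivalent manual commands, reusing the hook's .venv path (black accepts a directory and recurses into it; the --check flag is a dry run that exits with a non-zero status when files would be reformatted):

    .venv/bin/black sbn_survey_image_service/
    .venv/bin/black --check sbn_survey_image_service/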
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -23,7 +23,7 @@ dependencies = [

 [project.optional-dependencies]
 recommended = ["psycopg2-binary>=2.8"]
-dev = ["autopep8", "mypy", "pycodestyle"]
+dev = ["black", "mypy", "pycodestyle"]
 test = ["pytest>=7.0", "pytest-cov>=3.0"]
 docs = ["sphinx", "sphinx-automodapi", "numpydoc"]

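The dev extra now pulls in black in place of autopep8. A minimal setup sketch, assuming an editable install into the same .venv the pre-commit hook uses:

    .venv/bin/pip install -e ".[dev]"
    .venv/bin/black --version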
3 changes: 1 addition & 2 deletions sbn_survey_image_service/app.py
@@ -69,5 +69,4 @@ def handle_other_error(error: Exception):
     # for development
     logger.info("Running " + ENV.APP_NAME)
     logger.info(application.url_map)
-    app.run("sbn_survey_image_service.app:app",
-            host=ENV.API_HOST, port=ENV.API_PORT)
+    app.run("sbn_survey_image_service.app:app", host=ENV.API_HOST, port=ENV.API_PORT)
11 changes: 4 additions & 7 deletions sbn_survey_image_service/data/add.py
@@ -33,7 +33,7 @@
 def _remove_prefix(s: str, prefix: str):
     """If ``s`` starts with ``prefix`` remove it."""
     if s.startswith(prefix):
-        return s[len(prefix):]
+        return s[len(prefix) :]
     else:
         return s

@@ -162,8 +162,7 @@ def pds4_image(label_path: str) -> Image:
                 "/Internal_Reference/[reference_type='is_instrument']/../name"
             ).text.split()
         ),
-        target=label.find(
-            "Observation_Area/Target_Identification/name").text,
+        target=label.find("Observation_Area/Target_Identification/name").text,
         calibration_level=PDS4CalibrationLevel[
             label.find(
                 "Observation_Area/Primary_Result_Summary/processing_level"
@@ -300,8 +299,7 @@ def add_directory(
         for filename in filenames:
             if os.path.splitext(filename)[1].lower() in extensions:
                 n_files += 1
-                n_added += add_label(os.path.join(dirpath,
-                                                  filename), session, **kwargs)
+                n_added += add_label(os.path.join(dirpath, filename), session, **kwargs)

         if not recursive:
             break
@@ -358,8 +356,7 @@ def __main__() -> None:
     logger.setLevel(logging.DEBUG if args.v else logging.INFO)

     # options to pass on to add_* functions:
-    kwargs = dict(base_url=args.base_url,
-                  strip_leading=args.strip_leading.rstrip("/"))
+    kwargs = dict(base_url=args.base_url, strip_leading=args.strip_leading.rstrip("/"))
     session: Session
     with data_provider_session() as session:
         if args.create:
24 changes: 12 additions & 12 deletions sbn_survey_image_service/data/test/generate.py
@@ -93,8 +93,7 @@ def create_data(session, path):
     logger.info("Creating ~400 images and labels.")
     centers: np.ndarray = np.degrees(spherical_distribution(400))
     image_size: int = 300
-    pixel_size: float = np.degrees(
-        np.sqrt(4 * np.pi / len(centers))) / image_size / 10
+    pixel_size: float = np.degrees(np.sqrt(4 * np.pi / len(centers))) / image_size / 10
     xy: np.ndarray = np.mgrid[:image_size, :image_size][::-1]

     w: WCS = WCS()
@@ -132,8 +131,7 @@ def create_data(session, path):
             continue

         hdu: fits.HDUList = fits.HDUList()
-        hdu.append(fits.PrimaryHDU(
-            data.astype(np.int32), header=w.to_header()))
+        hdu.append(fits.PrimaryHDU(data.astype(np.int32), header=w.to_header()))
         hdu.writeto(image_path, overwrite=True)
         outf: io.IOBase
         with open(label_path, "w") as outf:
@@ -183,8 +181,11 @@ def create_tables() -> None:
 def delete_data(session) -> None:
     """Delete test data from database."""

-    (session.query(Image).filter(Image.collection ==
-                                 "urn:nasa:pds:survey:test-collection").delete())
+    (
+        session.query(Image)
+        .filter(Image.collection == "urn:nasa:pds:survey:test-collection")
+        .delete()
+    )


 def exists(session) -> bool:
@@ -196,8 +197,9 @@ def exists(session) -> bool:

     try:
         results: Any = (
-            session.query(Image).filter(
-                Image.collection == "urn:nasa:pds:survey:test-collection").all()
+            session.query(Image)
+            .filter(Image.collection == "urn:nasa:pds:survey:test-collection")
+            .all()
         )
     except OperationalError:
         return False
@@ -228,8 +230,7 @@ def _parse_args() -> argparse.Namespace:
         default=ENV.TEST_DATA_PATH,
         help="directory to which to save test data files",
     )
-    parser.add_argument("--add", action="store_true",
-                        help="add/create test data set")
+    parser.add_argument("--add", action="store_true", help="add/create test data set")
     parser.add_argument(
         "--exists",
         action="store_true",
@@ -266,8 +267,7 @@ def _main() -> None:
         create_data(session, args.path)
     elif args.delete:
         delete_data(session)
-        logger.info(
-            "Database cleaned, but test files must be removed manually.")
+        logger.info("Database cleaned, but test files must be removed manually.")
     elif args.exists:
         if exists(session):
             print("Test data set appears to be valid.")
19 changes: 4 additions & 15 deletions sbn_survey_image_service/models/test/test_models.py
@@ -7,20 +7,9 @@ class TestImage:
     """Test Image object odds and ends."""

     def test_repr(self):
-        im: Image = Image(
-            obs_id='asdf',
-            image_url='fdsa',
-            label_url='jkl;'
-        )
-        assert (
-            repr(im)
-            == "Image(obs_id='asdf', image_url='fdsa', label_url='jkl;')"
-        )
+        im: Image = Image(obs_id="asdf", image_url="fdsa", label_url="jkl;")
+        assert repr(im) == "Image(obs_id='asdf', image_url='fdsa', label_url='jkl;')"

     def test_str(self):
-        im: Image = Image(
-            obs_id='asdf',
-            image_url='fdsa',
-            label_url='jkl;'
-        )
-        assert str(im) == '<Class Image: asdf>'
+        im: Image = Image(obs_id="asdf", image_url="fdsa", label_url="jkl;")
+        assert str(im) == "<Class Image: asdf>"
6 changes: 2 additions & 4 deletions sbn_survey_image_service/scripts/sbnsis.py
@@ -163,8 +163,7 @@ def restart(self) -> None:
         os.kill(ppid, signal.SIGWINCH)
         ellipsis(10)

-        print_color(" - Stopping old service parent process",
-                    end="", flush=True)
+        print_color(" - Stopping old service parent process", end="", flush=True)
         os.kill(ppid, signal.SIGQUIT)
         ellipsis(1)

@@ -243,8 +242,7 @@ def env_file(self) -> None:
         print_color("Wrote new .env file.")

     def argument_parser(self) -> ArgumentParser:
-        parser: ArgumentParser = ArgumentParser(
-            description="SBN Survey Image Service")
+        parser: ArgumentParser = ArgumentParser(description="SBN Survey Image Service")
         subparsers = parser.add_subparsers(help="sub-command help")

         # start #########
20 changes: 10 additions & 10 deletions sbn_survey_image_service/services/image.py
@@ -69,8 +69,7 @@ def __init__(self, ra: float | None, dec: float | None, size: str | Angle | None
         self.normalize()

         self.size: Angle = (
-            self.MINUMUM_SIZE if size is None else max(
-                self.MINUMUM_SIZE, Angle(size))
+            self.MINUMUM_SIZE if size is None else max(self.MINUMUM_SIZE, Angle(size))
         )

     def __str__(self) -> str:
@@ -101,7 +100,7 @@ def normalize(self) -> None:
         # Dec -90 to 90
         self.dec = min(max(self.dec, -90), 90)

-    def cutout(self, url: str, wcs_ext: int, data_ext: int, meta: dict={}) -> str:
+    def cutout(self, url: str, wcs_ext: int, data_ext: int, meta: dict = {}) -> str:
         """Generate a cutout from URL.
@@ -207,7 +206,7 @@ def filename_suffix(cutout_spec: CutoutSpec, format: ImageFormat) -> str:
     suffix: str = ""
     if not cutout_spec.full_size:
         # attachment file name is based on coordinates and size
-        suffix = f'_{cutout_spec.ra:.5f}{cutout_spec.dec:+.5f}_{cutout_spec.size}'
+        suffix = f"_{cutout_spec.ra:.5f}{cutout_spec.dec:+.5f}_{cutout_spec.size}"

     return f"{suffix}.{format.extension}"

@@ -286,8 +285,7 @@ def image_query(
     try:
         format = ImageFormat(format)
     except ValueError:
-        raise ParameterValueError(
-            "image_query format must be fits, png, or jpeg.")
+        raise ParameterValueError("image_query format must be fits, png, or jpeg.")

     im: Image
     session: Session
@@ -301,8 +299,7 @@ def image_query(
         session.expunge(im)

     # create attachment file name
-    download_filename: str = os.path.splitext(
-        os.path.basename(im.image_url))[0]
+    download_filename: str = os.path.splitext(os.path.basename(im.image_url))[0]
     download_filename += filename_suffix(cutout_spec, format)

     # ATLAS data and WCS are found in the first extension
@@ -314,15 +311,18 @@ def image_query(

     # generate the cutout, as needed
     meta = {"sis-lid": obs_id}
-    fits_image_path: str = cutout_spec.cutout(im.image_url, wcs_ext, data_ext, meta=meta)
+    fits_image_path: str = cutout_spec.cutout(
+        im.image_url, wcs_ext, data_ext, meta=meta
+    )

     # FITS format? done!
     if format == ImageFormat.FITS:
         return fits_image_path, download_filename

     # formulate the final image file name
     image_path = generate_cache_filename(
-        im.image_url, str(cutout_spec), format.extension)
+        im.image_url, str(cutout_spec), format.extension
+    )

     # was this file already generated? serve it!
     if os.path.exists(image_path):
3 changes: 1 addition & 2 deletions sbn_survey_image_service/services/label.py
@@ -21,8 +21,7 @@ def label_query(obs_id: str) -> Tuple[str, str]:
     exc: Exception
     try:
         label_url: str = (
-            session.query(Image.label_url).filter(
-                Image.obs_id == obs_id).one()[0]
+            session.query(Image.label_url).filter(Image.obs_id == obs_id).one()[0]
         )
     except NoResultFound as exc:
         raise InvalidImageID("Image ID not found in database.") from exc
17 changes: 5 additions & 12 deletions sbn_survey_image_service/test/test_services.py
@@ -32,8 +32,7 @@ def test_label_query():
     image_path, download_filename = label_query(
         "urn:nasa:pds:survey:test-collection:test-000039"
     )
-    assert image_path == os.path.join(
-        "file://", ENV.TEST_DATA_PATH, "test-000039.xml")
+    assert image_path == os.path.join("file://", ENV.TEST_DATA_PATH, "test-000039.xml")


 def test_label_query_fail():
@@ -67,8 +66,7 @@ def test_image_query_full_frame_jpg():
     )

     # should return a file in the cache directory
-    assert os.path.dirname(image_path) == os.path.abspath(
-        ENV.SBNSIS_CUTOUT_CACHE)
+    assert os.path.dirname(image_path) == os.path.abspath(ENV.SBNSIS_CUTOUT_CACHE)
     assert image_path == expected_path
     assert download_filename == "test-000023.jpeg"

@@ -87,8 +85,7 @@ def test_image_query_full_frame_png():
     )

     # should return a file in the cache directory
-    assert os.path.dirname(image_path) == os.path.abspath(
-        ENV.SBNSIS_CUTOUT_CACHE)
+    assert os.path.dirname(image_path) == os.path.abspath(ENV.SBNSIS_CUTOUT_CACHE)
     assert image_path == expected_path
     assert download_filename == "test-000023.png"

@@ -117,13 +114,9 @@ def test_image_query_cutout():
     )

     # should return fits file in cache directory
-    assert os.path.dirname(image_path) == os.path.abspath(
-        ENV.SBNSIS_CUTOUT_CACHE)
+    assert os.path.dirname(image_path) == os.path.abspath(ENV.SBNSIS_CUTOUT_CACHE)
     assert image_path == expected_path
-    assert (
-        download_filename
-        == f'test-000102_{+ra:.5f}{+dec:.5f}_{size}.fits'
-    )
+    assert download_filename == f"test-000102_{+ra:.5f}{+dec:.5f}_{size}.fits"

     # inspect file, value should be -25 at the center
     im: np.ndarray = fits.getdata(image_path)
