From fcb7bb82aadd6d0912e7039243330d07fd68a7b8 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 09:56:52 +1000
Subject: [PATCH 01/27] Fixed docstring typo

---
 vast_pipeline/image/main.py     |  2 +-
 vast_pipeline/pipeline/utils.py | 28 ++++++++++++++++++++++++++++
 2 files changed, 29 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index 4ecbb6823..8b25d5f4f 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -107,7 +107,7 @@ def __init__(self, path: str, hdu_index: int=0) -> None:
 
     def __get_header(self, hdu_index: int) -> fits.Header:
         """
-        Retrieves the header from teh FITS image.
+        Retrieves the header from the FITS image.
 
         Args:
             hdu_index:
diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index 9169f25a1..75938bef9 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -1707,3 +1707,31 @@ def write_parquets(
     )
 
     return skyregs_df
+
+def open_fits(fits_path: Union[str, Path]):
+    """
+    This function opens both compressed and uncompressed fits files.
+
+    Args:
+        fits_path: Path to the fits file
+
+    Returns:
+        HDUList loaded from the fits file
+
+    Raises:
+        ValueError: File extension must be .fits or .fits.fz
+    """
+
+    if type(fits_path) = Path:
+        fits_path = str(fits_path)
+
+    hdul = fits.open(fits_path)
+
+    if fits_path.endswith('.fits'):
+        return hdul
+    elif fits_path.endswith('.fits.fz'):
+        return fits.HDUList(hdul[1:])
+    else:
+        raise ValueError("Unrecognised extension for {fits_path}."
+                         "File extension must be .fits or .fits.fz"
+                         )

From 170274304d50edc93b1c60993cece4ee919fa412 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 09:58:07 +1000
Subject: [PATCH 02/27] Added open_fits to image.main.py

---
 vast_pipeline/image/main.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index 8b25d5f4f..5d4b26c4d 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -19,6 +19,7 @@
 
 from vast_pipeline import models
 from vast_pipeline.survey.translators import tr_selavy
+from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -117,7 +118,7 @@ def __get_header(self, hdu_index: int) -> fits.Header:
             The FITS header as an astropy.io.fits.Header object.
         """
         try:
-            with fits.open(self.path) as hdulist:
+            with open_fits(self.path) as hdulist:
                 hdu = hdulist[hdu_index]
         except Exception:
             raise IOError((

From 0d92f069f484017d79a3fcf51d871dfcf5afe686 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 09:59:30 +1000
Subject: [PATCH 03/27] Added open_fits to views.py

---
 vast_pipeline/views.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/views.py b/vast_pipeline/views.py
index 3eec7edbf..7c2720499 100644
--- a/vast_pipeline/views.py
+++ b/vast_pipeline/views.py
@@ -66,6 +66,7 @@
 from vast_pipeline.management.commands.initpiperun import initialise_run
 from vast_pipeline.forms import PipelineRunForm, CommentForm, TagWithCommentsForm
 from vast_pipeline.pipeline.config import PipelineConfig
+from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -1867,7 +1868,7 @@ def get(self, request, measurement_id: int, size: str = "normal"):
 
         measurement = Measurement.objects.get(id=measurement_id)
 
-        image_hdu: fits.PrimaryHDU = fits.open(measurement.image.path)[0]
+        image_hdu: fits.PrimaryHDU = open_fits(measurement.image.path)[0]
         coord = SkyCoord(ra=measurement.ra, dec=measurement.dec, unit="deg")
         sizes = {
             "xlarge": "40arcmin",

From 61a2aeca8be7759ddb558b9cdf478721cdbc969c Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 10:00:02 +1000
Subject: [PATCH 04/27] Added open_fits to pipeline.utils.get_rms_noise_image_values

---
 vast_pipeline/pipeline/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index 75938bef9..421066428 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -793,7 +793,7 @@ def get_rms_noise_image_values(rms_path: str) -> Tuple[float, float, float]:
     logger.debug('Extracting Image RMS values from Noise file...')
     med_val = min_val = max_val = 0.
     try:
-        with fits.open(rms_path) as f:
+        with open_fits(rms_path) as f:
            data = f[0].data
            data = data[np.logical_not(np.isnan(data))]
            data = data[data != 0]

From 13a00c6f240f4cf6da4512c79b296b06e6212936 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 10:00:44 +1000
Subject: [PATCH 05/27] Added open_fits to pipeline.new_sources.py

---
 vast_pipeline/pipeline/new_sources.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vast_pipeline/pipeline/new_sources.py b/vast_pipeline/pipeline/new_sources.py
index c5b751147..3eeedec7d 100644
--- a/vast_pipeline/pipeline/new_sources.py
+++ b/vast_pipeline/pipeline/new_sources.py
@@ -84,7 +84,7 @@ def get_image_rms_measurements(
         return group
     image = group.iloc[0]['img_diff_rms_path']
 
-    with fits.open(image) as hdul:
+    with open_fits(image) as hdul:
         header = hdul[0].header
         wcs = WCS(header, naxis=2)
         data = hdul[0].data.squeeze()

From 61803ebeeee2e0fd2949359781226a0b6fb4ef5d Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 10:02:33 +1000
Subject: [PATCH 06/27] Added open_fits to forced_extraction

---
 vast_pipeline/pipeline/forced_extraction.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/pipeline/forced_extraction.py b/vast_pipeline/pipeline/forced_extraction.py
index 33bb9e04d..a6280077a 100644
--- a/vast_pipeline/pipeline/forced_extraction.py
+++ b/vast_pipeline/pipeline/forced_extraction.py
@@ -20,6 +20,7 @@
 
 from forced_phot import ForcedPhot
 from ..utils.utils import StopWatch
+from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -148,7 +149,10 @@ def extract_from_image(
         unit=(u.deg, u.deg)
     )
 
-    FP = ForcedPhot(image, background, noise)
+    FP = ForcedPhot(open_fits(image),
+                    open_fits(background),
+                    open_fits(noise)
+                    )
     flux, flux_err, chisq, DOF, cluster_id = FP.measure(
         P_islands,
         cluster_threshold=cluster_threshold,

From 8e6d5add12d0ebe4fdb8bb08fe78210957f65fb5 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 10:07:23 +1000
Subject: [PATCH 07/27] Fixed circular imports

---
 vast_pipeline/image/main.py | 3 ++-
 vast_pipeline/views.py      | 3 ++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index 5d4b26c4d..f9abaff51 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -19,7 +19,6 @@
 
 from vast_pipeline import models
 from vast_pipeline.survey.translators import tr_selavy
-from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -117,6 +116,8 @@ def __get_header(self, hdu_index: int) -> fits.Header:
         Returns:
             The FITS header as an astropy.io.fits.Header object.
         """
+        from vast_pipeline.pipeline.utils import open_fits
+
         try:
             with open_fits(self.path) as hdulist:
                 hdu = hdulist[hdu_index]
diff --git a/vast_pipeline/views.py b/vast_pipeline/views.py
index 7c2720499..bc5d873df 100644
--- a/vast_pipeline/views.py
+++ b/vast_pipeline/views.py
@@ -66,7 +66,6 @@
 from vast_pipeline.management.commands.initpiperun import initialise_run
 from vast_pipeline.forms import PipelineRunForm, CommentForm, TagWithCommentsForm
 from vast_pipeline.pipeline.config import PipelineConfig
-from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -1856,6 +1855,8 @@ def SourceDetail(request, pk):
 
 
 class ImageCutout(APIView):
+    from vast_pipeline.pipeline.utils import open_fits
+
     authentication_classes = [SessionAuthentication, BasicAuthentication]
     permission_classes = [IsAuthenticated]

From 1df734b4f8ed31fa5a906b8026befde42399f280 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 31 Jul 2023 10:14:51 +1000
Subject: [PATCH 08/27] Added pathlib import

---
 vast_pipeline/pipeline/utils.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index 421066428..fd11ac30f 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -21,6 +21,7 @@
 from django.contrib.auth.models import User
 from psutil import cpu_count
 from itertools import chain
+from pathlib import Path
 
 from vast_pipeline.image.main import FitsImage, SelavyImage
 from vast_pipeline.utils.utils import (
@@ -1722,7 +1723,7 @@ def open_fits(fits_path: Union[str, Path]):
         ValueError: File extension must be .fits or .fits.fz
     """
 
-    if type(fits_path) = Path:
+    if type(fits_path) == Path:
         fits_path = str(fits_path)
 
     hdul = fits.open(fits_path)

From 7920fb6e54274da6e865d380c47cdc10655e6884 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Thu, 3 Aug 2023 10:01:40 +1000
Subject: [PATCH 09/27] Added memmap argument to open_fits

---
 vast_pipeline/pipeline/utils.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index fd11ac30f..dc658115f 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -1709,12 +1709,13 @@ def write_parquets(
 
     return skyregs_df
 
-def open_fits(fits_path: Union[str, Path]):
+def open_fits(fits_path: Union[str, Path], memmap: Optional[bool]=True):
     """
     This function opens both compressed and uncompressed fits files.
 
     Args:
         fits_path: Path to the fits file
+        memmap: Open the fits file with mmap.
 
     Returns:
         HDUList loaded from the fits file
@@ -1726,7 +1727,7 @@ def open_fits(fits_path: Union[str, Path]):
     if type(fits_path) == Path:
         fits_path = str(fits_path)
 
-    hdul = fits.open(fits_path)
+    hdul = fits.open(fits_path, memmap=memmap)
 
     if fits_path.endswith('.fits'):
         return hdul

From a631ed795c8b042e56eb38997771803bccda1294 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Thu, 3 Aug 2023 10:06:45 +1000
Subject: [PATCH 10/27] Added separate preload function

---
 vast_pipeline/pipeline/forced_extraction.py | 35 ++++++++++++++++++---
 1 file changed, 31 insertions(+), 4 deletions(-)

diff --git a/vast_pipeline/pipeline/forced_extraction.py b/vast_pipeline/pipeline/forced_extraction.py
index a6280077a..d83fd48ce 100644
--- a/vast_pipeline/pipeline/forced_extraction.py
+++ b/vast_pipeline/pipeline/forced_extraction.py
@@ -105,6 +105,29 @@ def get_data_from_parquet(
 
     return {'prefix': prefix, 'max_id': max_id, 'id': image_id}
 
+
+def _forcedphot_preload(image: str,
+                        background: str,
+                        noise: str,
+                        memmap: Optional[bool]=False
+                        ):
+    """
+    Load the relevant image, background and noisemap files.
+
+    Args:
+        image: a string with the path of the image file
+        background: a string with the path of the background map
+        noise: a string with the path of the noise map
+
+    Returns:
+        A tuple containing the HDU lists
+    """
+
+    image_hdul = open_fits(image, memmap=memmap)
+    background_hdul = open_fits(background, memmap=memmap)
+    noise_hdul = open_fits(noise, memmap=memmap)
+
+    return image_hdul, background_hdul, noise_hdul
+
 def extract_from_image(
     df: pd.DataFrame,
     image: str,
@@ -148,11 +171,15 @@ def extract_from_image(
         df['wavg_dec'].values,
         unit=(u.deg, u.deg)
     )
+    # load the image, background and noisemaps into memory
+    # a dedicated function may seem unnecessary, but will be useful if we split the load to a separate thread.
+    forcedphot_input = _forcedphot_preload(image,
+                                           background,
+                                           noise,
+                                           memmap=False
+                                           )
+    FP = ForcedPhot(forcedphot_input)
 
-    FP = ForcedPhot(open_fits(image),
-                    open_fits(background),
-                    open_fits(noise)
-                    )
     flux, flux_err, chisq, DOF, cluster_id = FP.measure(
         P_islands,
         cluster_threshold=cluster_threshold,

From 4a78fd2a7588b03ded895e0ceed4112df91810ac Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Thu, 3 Aug 2023 10:18:38 +1000
Subject: [PATCH 11/27] Fix typing imports

---
 vast_pipeline/pipeline/forced_extraction.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vast_pipeline/pipeline/forced_extraction.py b/vast_pipeline/pipeline/forced_extraction.py
index d83fd48ce..ea2ac38ca 100644
--- a/vast_pipeline/pipeline/forced_extraction.py
+++ b/vast_pipeline/pipeline/forced_extraction.py
@@ -13,7 +13,7 @@
 from django.conf import settings
 from django.db import transaction
 from pyarrow.parquet import read_schema
-from typing import Any, List, Tuple, Dict
+from typing import Any, List, Tuple, Dict, Optional
 
 from vast_pipeline.models import Image, Measurement, Run
 from vast_pipeline.pipeline.loading import make_upload_measurements
@@ -178,7 +178,7 @@ def extract_from_image(
                                            noise,
                                            memmap=False
                                            )
-    FP = ForcedPhot(forcedphot_input)
+    FP = ForcedPhot(*forcedphot_input)
 
     flux, flux_err, chisq, DOF, cluster_id = FP.measure(
         P_islands,

From 0e76b7284e3408b166d410c28de99099ad23a9db Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:02:13 +1000
Subject: [PATCH 12/27] Add missing import

---
 vast_pipeline/pipeline/new_sources.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/vast_pipeline/pipeline/new_sources.py b/vast_pipeline/pipeline/new_sources.py
index 3eeedec7d..2e8b9d631 100644
--- a/vast_pipeline/pipeline/new_sources.py
+++ b/vast_pipeline/pipeline/new_sources.py
@@ -14,6 +14,7 @@
 
 from vast_pipeline.models import Image, Run
 from vast_pipeline.utils.utils import StopWatch
+from vast_pipeline.pipeline.utils import open_fits
 
 logger = logging.getLogger(__name__)
 

From 2eeed77baa993e4788a013f95255934bf6003c15 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:04:35 +1000
Subject: [PATCH 13/27] Move open_fits to image/utils

---
 vast_pipeline/image/utils.py    | 30 ++++++++++++++++++++++++++++++
 vast_pipeline/pipeline/utils.py | 29 -----------------------------
 2 files changed, 30 insertions(+), 29 deletions(-)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index 4b85d5389..a8fe21e38 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -8,6 +8,7 @@
 import pandas as pd
 
 from typing import Tuple
+from pathlib import Path
 
 logger = logging.getLogger(__name__)
 
@@ -246,3 +247,32 @@ def calc_condon_flux_errors(
     except Exception as e:
         logger.debug("Error in the calculation of Condon errors for a source", exc_info=True)
         return 0., 0., 0., 0., 0., 0., 0.
+
+def open_fits(fits_path: Union[str, Path], memmap: Optional[bool]=True):
+    """
+    This function opens both compressed and uncompressed fits files.
+
+    Args:
+        fits_path: Path to the fits file
+        memmap: Open the fits file with mmap.
+
+    Returns:
+        HDUList loaded from the fits file
+
+    Raises:
+        ValueError: File extension must be .fits or .fits.fz
+    """
+
+    if type(fits_path) == Path:
+        fits_path = str(fits_path)
+
+    hdul = fits.open(fits_path, memmap=memmap)
+
+    if fits_path.endswith('.fits'):
+        return hdul
+    elif fits_path.endswith('.fits.fz'):
+        return fits.HDUList(hdul[1:])
+    else:
+        raise ValueError("Unrecognised extension for {fits_path}."
+                         "File extension must be .fits or .fits.fz"
+                         )
diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index dc658115f..cdae033a4 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -1708,32 +1708,3 @@ def write_parquets(
     )
 
     return skyregs_df
-
-def open_fits(fits_path: Union[str, Path], memmap: Optional[bool]=True):
-    """
-    This function opens both compressed and uncompressed fits files.
-
-    Args:
-        fits_path: Path to the fits file
-        memmap: Open the fits file with mmap.
-
-    Returns:
-        HDUList loaded from the fits file
-
-    Raises:
-        ValueError: File extension must be .fits or .fits.fz
-    """
-
-    if type(fits_path) == Path:
-        fits_path = str(fits_path)
-
-    hdul = fits.open(fits_path, memmap=memmap)
-
-    if fits_path.endswith('.fits'):
-        return hdul
-    elif fits_path.endswith('.fits.fz'):
-        return fits.HDUList(hdul[1:])
-    else:
-        raise ValueError("Unrecognised extension for {fits_path}."
-                         "File extension must be .fits or .fits.fz"
-                         )

From fd9f7f366005789e8833484fce24aaa3c8f657b3 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:07:49 +1000
Subject: [PATCH 14/27] Updated open_fits imports

---
 vast_pipeline/image/main.py                 | 2 +-
 vast_pipeline/pipeline/forced_extraction.py | 2 +-
 vast_pipeline/pipeline/new_sources.py       | 2 +-
 vast_pipeline/pipeline/utils.py             | 2 +-
 vast_pipeline/views.py                      | 3 +--
 5 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index f9abaff51..3de59d84e 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -19,6 +19,7 @@
 
 from vast_pipeline import models
 from vast_pipeline.survey.translators import tr_selavy
+from vast_pipeline.image.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -116,7 +117,6 @@ def __get_header(self, hdu_index: int) -> fits.Header:
         Returns:
             The FITS header as an astropy.io.fits.Header object.
         """
-        from vast_pipeline.pipeline.utils import open_fits
 
         try:
             with open_fits(self.path) as hdulist:
diff --git a/vast_pipeline/pipeline/forced_extraction.py b/vast_pipeline/pipeline/forced_extraction.py
index ea2ac38ca..af5be561c 100644
--- a/vast_pipeline/pipeline/forced_extraction.py
+++ b/vast_pipeline/pipeline/forced_extraction.py
@@ -20,7 +20,7 @@
 
 from forced_phot import ForcedPhot
 from ..utils.utils import StopWatch
-from vast_pipeline.pipeline.utils import open_fits
+from vast_pipeline.image.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
diff --git a/vast_pipeline/pipeline/new_sources.py b/vast_pipeline/pipeline/new_sources.py
index 2e8b9d631..e7d1292cc 100644
--- a/vast_pipeline/pipeline/new_sources.py
+++ b/vast_pipeline/pipeline/new_sources.py
@@ -14,7 +14,7 @@
 
 from vast_pipeline.models import Image, Run
 from vast_pipeline.utils.utils import StopWatch
-from vast_pipeline.pipeline.utils import open_fits
+from vast_pipeline.image.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index cdae033a4..7fdd529d1 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -21,9 +21,9 @@
 from django.contrib.auth.models import User
 from psutil import cpu_count
 from itertools import chain
-from pathlib import Path
 
 from vast_pipeline.image.main import FitsImage, SelavyImage
+from vast_pipeline.image.utils import open_fits
 from vast_pipeline.utils.utils import (
     eq_to_cart, StopWatch, optimize_ints, optimize_floats
 )
diff --git a/vast_pipeline/views.py b/vast_pipeline/views.py
index bc5d873df..6245ed7dd 100644
--- a/vast_pipeline/views.py
+++ b/vast_pipeline/views.py
@@ -66,6 +66,7 @@
 from vast_pipeline.management.commands.initpiperun import initialise_run
 from vast_pipeline.forms import PipelineRunForm, CommentForm, TagWithCommentsForm
 from vast_pipeline.pipeline.config import PipelineConfig
+from vast_pipeline.image.utils import open_fits
 
 logger = logging.getLogger(__name__)
 
@@ -1855,8 +1856,6 @@ def SourceDetail(request, pk):
 
 
 class ImageCutout(APIView):
-    from vast_pipeline.pipeline.utils import open_fits
-
     authentication_classes = [SessionAuthentication, BasicAuthentication]
     permission_classes = [IsAuthenticated]
 

From 89d138be3a09975215880ba376ac6870c830773d Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:08:54 +1000
Subject: [PATCH 15/27] Added correct typing imports

---
 vast_pipeline/image/utils.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index a8fe21e38..153ddad7f 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -7,7 +7,7 @@
 import numpy as np
 import pandas as pd
 
-from typing import Tuple
+from typing import Tuple, Union, Optional
 from pathlib import Path
 
 logger = logging.getLogger(__name__)

From 70c5a5841e181bcac50c10b4d5736186a856307f Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:14:47 +1000
Subject: [PATCH 16/27] Added fits import

---
 vast_pipeline/image/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index 153ddad7f..ebee9152a 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -9,6 +9,7 @@
 
 from typing import Tuple, Union, Optional
 from pathlib import Path
+from astropy.io import fits
 
 logger = logging.getLogger(__name__)
 

From ed777e623913d38d1b3ea950e92ca1566d68de57 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Tue, 8 Aug 2023 10:15:04 +1000
Subject: [PATCH 17/27] Updated logging

---
 vast_pipeline/image/main.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index 3de59d84e..7be25fd5a 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -123,8 +123,8 @@ def __get_header(self, hdu_index: int) -> fits.Header:
                 hdu = hdulist[hdu_index]
         except Exception:
             raise IOError((
-                'Could not read this FITS file: '
-                f'{os.path.basename(self.path)}'
+                'Could not read FITS file: '
+                f'{self.path}'
             ))
 
         return hdu.header.copy()

From c0c4817bc02238cc8406f6fe59b946b4c0a7d475 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Mon, 20 Nov 2023 16:08:03 +1100
Subject: [PATCH 18/27] PEP8

---
 vast_pipeline/image/main.py                 | 21 ++++++----
 vast_pipeline/image/utils.py                | 30 ++++++++------
 vast_pipeline/pipeline/forced_extraction.py | 45 ++++++++++++---------
 vast_pipeline/pipeline/new_sources.py       | 21 +++++-----
 4 files changed, 66 insertions(+), 51 deletions(-)

diff --git a/vast_pipeline/image/main.py b/vast_pipeline/image/main.py
index 7be25fd5a..2a8edaf5b 100644
--- a/vast_pipeline/image/main.py
+++ b/vast_pipeline/image/main.py
@@ -34,6 +34,7 @@ class Image(object):
         path (str): The system path to the image.
     """
 
+
     def __init__(self, path: str) -> None:
         """
         Initialise an image object.
@@ -81,7 +82,7 @@ class FitsImage(Image):
 
     entire_image = True
 
-    def __init__(self, path: str, hdu_index: int=0) -> None:
+    def __init__(self, path: str, hdu_index: int = 0) -> None:
         """
         Initialise a FitsImage object.
@@ -117,7 +118,7 @@ def __get_header(self, hdu_index: int) -> fits.Header:
         Returns:
             The FITS header as an astropy.io.fits.Header object.
         """
-        
+
         try:
             with open_fits(self.path) as hdulist:
                 hdu = hdulist[hdu_index]
@@ -225,7 +226,8 @@ def __get_radius_pixels(
             The radius of the image in pixels.
         """
         if self.entire_image:
-            # a large circle that *should* include the whole image (and then some)
+            # a large circle that *should* include the whole image
+            # (and then some)
             diameter = np.hypot(header[fits_naxis1], header[fits_naxis2])
         else:
             # We simply place the largest circle we can in the centre.
@@ -246,10 +248,11 @@ def __get_frequency(self, header: fits.Header) -> None:
         self.freq_eff = None
         self.freq_bw = None
         try:
-            if ('ctype3' in header) and (header['ctype3'] in ('FREQ', 'VOPT')):
+            freq_keys = ('FREQ', 'VOPT')
+            if ('ctype3' in header) and (header['ctype3'] in freq_keys):
                 self.freq_eff = header['crval3']
                 self.freq_bw = header['cdelt3'] if 'cdelt3' in header else 0.0
-            elif ('ctype4' in header) and (header['ctype4'] in ('FREQ', 'VOPT')):
+            elif ('ctype4' in header) and (header['ctype4'] in freq_keys):
                 self.freq_eff = header['crval4']
                 self.freq_bw = header['cdelt4'] if 'cdelt4' in header else 0.0
             else:
@@ -273,6 +276,7 @@ class SelavyImage(FitsImage):
             associated with the image.
         config (Dict): The image configuration settings.
     """
+
     def __init__(
         self,
         path: str,
@@ -315,7 +319,8 @@ def read_selavy(self, dj_image: models.Image) -> pd.DataFrame:
             Dataframe containing the cleaned and processed Selavy components.
         """
         # TODO: improve with loading only the cols we need and set datatype
-        if self.selavy_path.endswith(".xml") or self.selavy_path.endswith(".vot"):
+        if self.selavy_path.endswith(
+                ".xml") or self.selavy_path.endswith(".vot"):
             df = Table.read(
                 self.selavy_path, format="votable", use_names_over_ids=True
             ).to_pandas()
@@ -462,12 +467,12 @@ def read_selavy(self, dj_image: models.Image) -> pd.DataFrame:
             .agg('sum')
         )
 
-        df['flux_int_isl_ratio'] = ( 
+        df['flux_int_isl_ratio'] = (
             df['flux_int'].values /
             island_flux_totals.loc[df['island_id']]['flux_int'].values
         )
 
-        df['flux_peak_isl_ratio'] = ( 
+        df['flux_peak_isl_ratio'] = (
             df['flux_peak'].values /
             island_flux_totals.loc[df['island_id']]['flux_peak'].values
        )
diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index ebee9152a..c77f1cab9 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -83,7 +83,7 @@ def calc_error_radius(ra, ra_err, dec, dec_err) -> float:
             np.deg2rad(i),
             dec_1,
             np.deg2rad(j)
-        )) for i,j in zip(ra_offsets, dec_offsets)
+        )) for i, j in zip(ra_offsets, dec_offsets)
     ]
 
     seps = np.column_stack(seps)
@@ -192,7 +192,7 @@ def calc_condon_flux_errors(
             (1. + (theta_B / major)**2)**alpha_maj2 *
             (1. + (theta_b / minor)**2)**alpha_min2 *
             snr**2)
-        rho_sq3 = ((major * minor / (4.* theta_B * theta_b)) *
+        rho_sq3 = ((major * minor / (4. * theta_B * theta_b)) *
                    (1. + (theta_B / major)**2)**alpha_maj3 *
                    (1. + (theta_b / minor)**2)**alpha_min3 *
                    snr**2)
@@ -212,9 +212,9 @@ def calc_condon_flux_errors(
 
         # ra and dec errors
         errorra = np.sqrt((error_par_major * np.sin(theta))**2 +
-            (error_par_minor * np.cos(theta))**2)
+                          (error_par_minor * np.cos(theta))**2)
         errordec = np.sqrt((error_par_major * np.cos(theta))**2 +
-            (error_par_minor * np.sin(theta))**2)
+                           (error_par_minor * np.sin(theta))**2)
 
         errormajor = np.sqrt(2) * major / rho1
         errorminor = np.sqrt(2) * minor / rho2
@@ -240,35 +240,39 @@ def calc_condon_flux_errors(
         help1 = (errormajor / major)**2
         help2 = (errorminor / minor)**2
         help3 = theta_B * theta_b / (major * minor)
-        errorflux = np.abs(flux_int) * np.sqrt(errorpeaksq / flux_peak**2 + help3 * (help1 + help2))
+        help4 = np.sqrt(errorpeaksq / flux_peak**2 + help3 * (help1 + help2))
+        errorflux = np.abs(flux_int) * help4
 
         # need to return flux_peak if used.
         return errorpeak, errorflux, errormajor, errorminor, errortheta, errorra, errordec
 
     except Exception as e:
-        logger.debug("Error in the calculation of Condon errors for a source", exc_info=True)
+        logger.debug(
+            "Error in the calculation of Condon errors for a source",
+            exc_info=True)
         return 0., 0., 0., 0., 0., 0., 0.
 
-def open_fits(fits_path: Union[str, Path], memmap: Optional[bool]=True):
+
+def open_fits(fits_path: Union[str, Path], memmap: Optional[bool] = True):
     """
     This function opens both compressed and uncompressed fits files.
-    
+
     Args:
         fits_path: Path to the fits file
         memmap: Open the fits file with mmap.
-    
+
     Returns:
         HDUList loaded from the fits file
-    
+
     Raises:
         ValueError: File extension must be .fits or .fits.fz
     """
 
-    if type(fits_path) == Path:
+    if isinstance(fits_path, Path):
         fits_path = str(fits_path)
-    
+
     hdul = fits.open(fits_path, memmap=memmap)
-    
+
     if fits_path.endswith('.fits'):
         return hdul
     elif fits_path.endswith('.fits.fz'):
         return fits.HDUList(hdul[1:])
     else:
         raise ValueError("Unrecognised extension for {fits_path}."
                          "File extension must be .fits or .fits.fz"
                          )
diff --git a/vast_pipeline/pipeline/forced_extraction.py b/vast_pipeline/pipeline/forced_extraction.py
index af5be561c..eef7a3ed8 100644
--- a/vast_pipeline/pipeline/forced_extraction.py
+++ b/vast_pipeline/pipeline/forced_extraction.py
@@ -69,7 +69,8 @@ def get_data_from_parquet(
     Args:
         file_and_image_id:
-            a tuple containing the path of the measurements parquet file and the image ID.
+            a tuple containing the path of the measurements parquet file and
+            the image ID.
         p_run_path:
             Pipeline run path to get forced parquet in case of add mode.
         add_mode:
@@ -108,26 +109,27 @@ def get_data_from_parquet(
 def _forcedphot_preload(image: str,
                         background: str,
                         noise: str,
-                        memmap: Optional[bool]=False
+                        memmap: Optional[bool] = False
                         ):
     """
     Load the relevant image, background and noisemap files.
-    
+
     Args:
         image: a string with the path of the image file
         background: a string with the path of the background map
         noise: a string with the path of the noise map
-    
+
     Returns:
         A tuple containing the HDU lists
     """
-    
+
     image_hdul = open_fits(image, memmap=memmap)
     background_hdul = open_fits(background, memmap=memmap)
     noise_hdul = open_fits(noise, memmap=memmap)
-    
+
     return image_hdul, background_hdul, noise_hdul
-    
+
+
 def extract_from_image(
     df: pd.DataFrame,
     image: str,
@@ -172,12 +174,13 @@ def extract_from_image(
         unit=(u.deg, u.deg)
     )
     # load the image, background and noisemaps into memory
-    # a dedicated function may seem unnecessary, but will be useful if we split the load to a separate thread.
+    # a dedicated function may seem unnecessary, but will be useful if we
+    # split the load to a separate thread.
     forcedphot_input = _forcedphot_preload(image,
                                            background,
                                            noise,
-                                           memmap=False
-                                           )
+                                           memmap=False
+                                           )
     FP = ForcedPhot(*forcedphot_input)
 
     flux, flux_err, chisq, DOF, cluster_id = FP.measure(
@@ -197,7 +200,7 @@ def finalise_forced_dfs(
     df: pd.DataFrame, prefix: str, max_id: int, beam_bmaj: float,
     beam_bmin: float, beam_bpa: float, id: int, datetime: datetime.datetime,
     image: str
-    ) -> pd.DataFrame:
+) -> pd.DataFrame:
     """
     Compute and populate leftover columns for the dataframe with forced
     photometry data given the input parameters
@@ -254,7 +257,7 @@ def parallel_extraction(
     df: pd.DataFrame, df_images: pd.DataFrame, df_sources: pd.DataFrame,
     min_sigma: float, edge_buffer: float, cluster_threshold: float,
     allow_nan: bool, add_mode: bool, p_run_path: str
-    ) -> pd.DataFrame:
+) -> pd.DataFrame:
     """
     Parallelize forced extraction with Dask
 
@@ -291,7 +294,7 @@ def parallel_extraction(
     """
     # explode the lists in 'img_diff' column (this will make a copy of the df)
     out = (
-        df.rename(columns={'img_diff':'image', 'source':'source_tmp_id'})
+        df.rename(columns={'img_diff': 'image', 'source': 'source_tmp_id'})
         # merge the rms_min column from df_images
         .merge(
             df_images[['rms_min']],
@@ -316,8 +319,8 @@ def parallel_extraction(
     out['max_snr'] = out['flux_peak'].values / out['image_rms_min'].values
     out = out[out['max_snr'] > min_sigma].reset_index(drop=True)
     logger.debug("Min forced sigma dropped %i sources",
-        predrop_shape - out.shape[0]
-    )
+                 predrop_shape - out.shape[0]
+                 )
 
     # drop some columns that are no longer needed and the df should look like
     # out
@@ -340,7 +343,8 @@ def parallel_extraction(
     # create a list of dictionaries with image file paths and dataframes
     # with data related to each images
     def image_data_func(image_name: str) -> Dict[str, Any]:
-        nonlocal out  # `out` refers to the `out` declared in nearest enclosing scope
+        # `out` refers to the `out` declared in nearest enclosing scope
+        nonlocal out
         return {
             'image_id': df_images.at[image_name, 'id'],
             'image': df_images.at[image_name, 'path'],
@@ -415,7 +419,7 @@ def image_data_func(image_name: str) -> Dict[str, Any]:
         pd.concat(intermediate_df, axis=0, sort=False)
         .rename(
             columns={
-                'wavg_ra':'ra', 'wavg_dec':'dec', 'image_name': 'image'
+                'wavg_ra': 'ra', 'wavg_dec': 'dec', 'image_name': 'image'
             }
         )
     )
@@ -424,7 +428,7 @@ def image_data_func(image_name: str) -> Dict[str, Any]:
 
 
 def write_group_to_parquet(
-    df: pd.DataFrame, fname: str, add_mode: bool) -> None:
+        df: pd.DataFrame, fname: str, add_mode: bool) -> None:
     '''
     Write a dataframe corresponding to a single group/image to a parquet file.
 
@@ -451,7 +455,7 @@ def write_group_to_parquet(
 
 
 def parallel_write_parquet(
-    df: pd.DataFrame, run_path: str, add_mode: bool = False) -> None:
+        df: pd.DataFrame, run_path: str, add_mode: bool = False) -> None:
     '''
     Parallelize writing parquet files for forced measurements.
 
@@ -467,9 +471,10 @@ def parallel_write_parquet(
         None
     '''
     images = df['image'].unique().tolist()
-    get_fname = lambda n: os.path.join(
+
+    def get_fname(n): return os.path.join(
         run_path,
-        'forced_measurements_' + n.replace('.','_') + '.parquet'
+        'forced_measurements_' + n.replace('.', '_') + '.parquet'
     )
     dfs = list(map(lambda x: (df[df['image'] == x], get_fname(x)), images))
     n_cpu = cpu_count() - 1
diff --git a/vast_pipeline/pipeline/new_sources.py b/vast_pipeline/pipeline/new_sources.py
index e7d1292cc..9b9053fb2 100644
--- a/vast_pipeline/pipeline/new_sources.py
+++ b/vast_pipeline/pipeline/new_sources.py
@@ -102,7 +102,7 @@ def get_image_rms_measurements(
     npix = round(
         (nbeam / 2. *
          bmaj.to('arcsec') /
-          pixelscale).value
+         pixelscale).value
     )
 
     npix = int(round(npix * edge_buffer))
@@ -156,7 +156,7 @@ def get_image_rms_measurements(
     nan_valid = []
 
     # Get slices of each source and check NaN is not included.
-    for i,j in zip(array_coords[0], array_coords[1]):
+    for i, j in zip(array_coords[0], array_coords[1]):
         sl = tuple((
             slice(i - acceptable_no_nan_dist, i + acceptable_no_nan_dist),
             slice(j - acceptable_no_nan_dist, j + acceptable_no_nan_dist)
         ))
@@ -245,10 +245,10 @@ def new_sources(
     min_sigma: float, edge_buffer: float, p_run: Run
 ) -> pd.DataFrame:
     """
-    Processes the new sources detected to check that they are valid new sources.
-    This involves checking to see that the source *should* be seen at all in
-    the images where it is not detected. For valid new sources the snr
-    value the source would have in non-detected images is also calculated.
+    Processes the new sources detected to check that they are valid new
+    sources. This involves checking to see that the source *should* be seen at
+    all in the images where it is not detected. For valid new sources the
+    snr value the source would have in non-detected images is also calculated.
 
     Args:
         sources_df:
@@ -353,7 +353,7 @@ def new_sources(
         left_on='detection',
         right_on='name',
         how='left'
-    ).rename(columns={'datetime':'detection_time'})
+    ).rename(columns={'datetime': 'detection_time'})
 
     new_sources_df = new_sources_df.merge(
         images_df[[
@@ -364,7 +364,7 @@ def new_sources(
         right_on='name',
         how='left'
     ).rename(columns={
-        'datetime':'img_diff_time',
+        'datetime': 'img_diff_time',
         'rms_min': 'img_diff_rms_min',
         'rms_median': 'img_diff_rms_median',
         'noise_path': 'img_diff_rms_path'
@@ -439,10 +439,11 @@ def new_sources(
         new_sources_df
         .drop_duplicates('source')
         .set_index('source')
-        .rename(columns={'true_sigma':'new_high_sigma'})
+        .rename(columns={'true_sigma': 'new_high_sigma'})
     )
 
-    # moving forward only the new_high_sigma columns is needed, drop all others.
+    # moving forward only the new_high_sigma columns is needed, drop all
+    # others.
     new_sources_df = new_sources_df[['new_high_sigma']]
 
     logger.info(

From f3e9c9ee2b6f4ca9131e263ff20a0d36b9e630f4 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 09:09:56 +1100
Subject: [PATCH 19/27] Updated open_fits to correctly handle new compression methods

---
 vast_pipeline/image/utils.py | 12 ++----------
 1 file changed, 2 insertions(+), 10 deletions(-)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index c77f1cab9..15694dd00 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -263,21 +263,13 @@ def open_fits(fits_path: Union[str, Path], memmap: Optional[bool] = True):
 
     Returns:
         HDUList loaded from the fits file
-
-    Raises:
-        ValueError: File extension must be .fits or .fits.fz
     """
 
     if isinstance(fits_path, Path):
         fits_path = str(fits_path)
 
     hdul = fits.open(fits_path, memmap=memmap)
-
-    if fits_path.endswith('.fits'):
-        return hdul
-    elif fits_path.endswith('.fits.fz'):
+    if hdul[0].data is None:
         return fits.HDUList(hdul[1:])
     else:
-        raise ValueError("Unrecognised extension for {fits_path}."
-                         "File extension must be .fits or .fits.fz"
-                         )
+        return hdul

From 5fcba9be437bf6444b580405aa578190edabe42b Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 09:12:04 +1100
Subject: [PATCH 20/27] Better compimagehdu check

---
 vast_pipeline/image/utils.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index 15694dd00..cd93715f3 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -269,7 +269,9 @@ def open_fits(fits_path: Union[str, Path], memmap: Optional[bool] = True):
         fits_path = str(fits_path)
 
     hdul = fits.open(fits_path, memmap=memmap)
-    if hdul[0].data is None:
+
+    # This is a messy way to check, but I can't think of a better one
+    if type(hdul[1]) == fits.hdu.compressed.CompImageHDU:
         return fits.HDUList(hdul[1:])
     else:
         return hdul

From d37f4dc450467d16cf1553af4497e6fe97c0480c Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 09:14:06 +1100
Subject: [PATCH 21/27] Remove unused imports

---
 vast_pipeline/pipeline/new_sources.py | 1 -
 vast_pipeline/pipeline/utils.py       | 1 -
 2 files changed, 2 deletions(-)

diff --git a/vast_pipeline/pipeline/new_sources.py b/vast_pipeline/pipeline/new_sources.py
index 9b9053fb2..d30f490dd 100644
--- a/vast_pipeline/pipeline/new_sources.py
+++ b/vast_pipeline/pipeline/new_sources.py
@@ -6,7 +6,6 @@
 from psutil import cpu_count
 from astropy import units as u
 from astropy.coordinates import SkyCoord
-from astropy.io import fits
 from astropy.wcs import WCS
 from astropy.wcs.utils import (
     proj_plane_pixel_scales
diff --git a/vast_pipeline/pipeline/utils.py b/vast_pipeline/pipeline/utils.py
index 7fdd529d1..82437141c 100644
--- a/vast_pipeline/pipeline/utils.py
+++ b/vast_pipeline/pipeline/utils.py
@@ -15,7 +15,6 @@
 import dask.dataframe as dd
 
 from typing import Any, List, Optional, Dict, Tuple, Union
-from astropy.io import fits
 from astropy.coordinates import SkyCoord, Angle
 from django.conf import settings
 from django.contrib.auth.models import User

From bffb97b06bd0f8b3afd212fcb7a0196ad2260cc2 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 09:40:51 +1100
Subject: [PATCH 22/27] Correctly handle regular single-hdu fits files

---
 vast_pipeline/image/utils.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/vast_pipeline/image/utils.py b/vast_pipeline/image/utils.py
index cd93715f3..152017532 100644
--- a/vast_pipeline/image/utils.py
+++ b/vast_pipeline/image/utils.py
@@ -271,7 +271,9 @@ def open_fits(fits_path: Union[str, Path], memmap: Optional[bool] = True):
     hdul = fits.open(fits_path, memmap=memmap)
 
     # This is a messy way to check, but I can't think of a better one
-    if type(hdul[1]) == fits.hdu.compressed.CompImageHDU:
+    if len(hdul) == 1:
+        return hdul
+    elif type(hdul[1]) == fits.hdu.compressed.CompImageHDU:
         return fits.HDUList(hdul[1:])
     else:
         return hdul

From f40b8bcb69120a18b8b34537cb12eb92934837ba Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 12:07:31 +1100
Subject: [PATCH 23/27] Temporarily remove cache poetry install

---
 .github/workflows/test-suite.yml | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 5a5652fed..2354e8e13 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -34,11 +34,11 @@ jobs:
         uses: KyleMayes/install-llvm-action@v1
         with:
           version: "10.0"
-      - name: cache poetry install
-        uses: actions/cache@v2
-        with:
-          path: ~/.local
-          key: poetry-1.5.1-0
+      #- name: cache poetry install
+      #  uses: actions/cache@v2
+      #  with:
+      #    path: ~/.local
+      #    key: poetry-1.5.1-0
 
       - uses: snok/install-poetry@v1
         with:

From 601cc73462bf483e72b0f2bf92daa3b815d53728 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 12:12:02 +1100
Subject: [PATCH 24/27] Permanently remove poetry install caching - it only takes 20s

---
 .github/workflows/test-suite.yml | 5 -----
 1 file changed, 5 deletions(-)

diff --git a/.github/workflows/test-suite.yml b/.github/workflows/test-suite.yml
index 2354e8e13..955a1ad33 100644
--- a/.github/workflows/test-suite.yml
+++ b/.github/workflows/test-suite.yml
@@ -34,11 +34,6 @@ jobs:
         uses: KyleMayes/install-llvm-action@v1
         with:
           version: "10.0"
-      #- name: cache poetry install
-      #  uses: actions/cache@v2
-      #  with:
-      #    path: ~/.local
-      #    key: poetry-1.5.1-0
 
       - uses: snok/install-poetry@v1
         with:

From 3b2e3ee7268517ee543b6375d91aad229eced393 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 12:25:50 +1100
Subject: [PATCH 25/27] Updated docs to note that compressed fits files are supported

---
 docs/using/runconfig.md | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/docs/using/runconfig.md b/docs/using/runconfig.md
index 867379f23..864f3bdc3 100644
--- a/docs/using/runconfig.md
+++ b/docs/using/runconfig.md
@@ -151,8 +151,7 @@ Boolean. Astropy warnings are suppressed in the logging output if set to `True`.
 
 **`inputs.image`**
 Line entries or epoch headed entries.
-The full paths to the image FITS files to be processed.
-Epoch mode is activated by including an extra key value with the epoch name, see the example below for a demonstration.
+The full paths to the image FITS files to be processed - these can be regular FITS files, or FITS files that use a [`CompImageHDU`](https://docs.astropy.org/en/stable/io/fits/api/images.html#astropy.io.fits.CompImageHDU). In principle the pipeline also supports [`.fits.fz`](https://heasarc.gsfc.nasa.gov/fitsio/fpack/) files, although this is not officially supported. Epoch mode is activated by including an extra key value with the epoch name, see the example below for a demonstration.
 
 Refer to [this section](../design/association.md#epoch-based-association) of the documentation for more information on epoch based association.

From fd7cbcbef967980a9c839a05b6a2f59e0be9ca10 Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Fri, 8 Dec 2023 12:30:52 +1100
Subject: [PATCH 26/27] Updated changelog

---
 CHANGELOG.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index a867ad052..395a0841c 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 
 #### Added
 
+- Added support for compressed FITS files [#694](https://github.com/askap-vast/vast-pipeline/pull/694)
 - Added links to Data Central DAS and the Fink Broker to the source page [#697](https://github.com/askap-vast/vast-pipeline/pull/697/)
 - Added `n_new_sources` column to run model to store the number of new sources in a pipeline run [#676](https://github.com/askap-vast/vast-pipeline/pull/676).
 - Added `MAX_CUTOUT_IMAGES` to the pipeline settings to limit the number of postage stamps displayed on the source detail page [#658](https://github.com/askap-vast/vast-pipeline/pull/658).
@@ -41,6 +42,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 
 #### Changed
 
+- Updated all FITS loading to use a wrapper that can handle compressed FITS files [#694](https://github.com/askap-vast/vast-pipeline/pull/694)
 - Updated path to test data in github actions and docs [#699](https://github.com/askap-vast/vast-pipeline/pull/699)
 - Changed GitHub actions test suite to install pipeline via poetry [#699](https://github.com/askap-vast/vast-pipeline/pull/699).
 - Updated GitHub actions ubuntu version to 20.04 [#699](https://github.com/askap-vast/vast-pipeline/pull/699).

From 06be02185f67d8d78ae7914c54f27d31d9b5149a Mon Sep 17 00:00:00 2001
From: Dougal Dobie
Date: Sat, 9 Dec 2023 13:31:25 +1100
Subject: [PATCH 27/27] Updated changelog

---
 CHANGELOG.md | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 22696cec9..8d86dd369 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -114,7 +114,8 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 
 #### List of PRs
 
-- [#702](https://github.com/askap-vast/vast-pipeline/pull/702): fix: Downgrade ci-docs to python 3.8
+- [#694](https://github.com/askap-vast/vast-pipeline/pull/694): feat: Handle compressed fits files.
+- [#702](https://github.com/askap-vast/vast-pipeline/pull/702): fix: Downgrade ci-docs to python 3.8.
 - [#701](https://github.com/askap-vast/vast-pipeline/pull/701): fix: Update Gr1N poetry to v8, force python 3.8.10.
 - [#699](https://github.com/askap-vast/vast-pipeline/pull/699): docs, feat: Add new regression data download URL and updates to Github Actions.
 - [#697](https://github.com/askap-vast/vast-pipeline/pull/697/): feat: Added links to Data Central DAS and the Fink Broker to the source page.
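
Reference: the net effect of this series is the `open_fits` wrapper that lands in `vast_pipeline/image/utils.py` after PATCH 22. The sketch below is a minimal, standalone paraphrase of that final behaviour, not the verbatim shipped code: it substitutes an `isinstance` check for the patch's `type(...) ==` comparison and shortens the docstring.

    from pathlib import Path
    from typing import Optional, Union

    from astropy.io import fits


    def open_fits(fits_path: Union[str, Path],
                  memmap: Optional[bool] = True) -> fits.HDUList:
        """Open compressed and uncompressed FITS files transparently."""
        hdul = fits.open(str(fits_path), memmap=memmap)

        # A plain single-HDU file needs no special handling.
        if len(hdul) == 1:
            return hdul

        # fpack-style compressed files store the image in a CompImageHDU
        # behind an empty primary HDU; drop the primary so callers can keep
        # reading the image data from hdul[0].
        if isinstance(hdul[1], fits.CompImageHDU):
            return fits.HDUList(hdul[1:])

        return hdul

With this behaviour, `open_fits('image.fits')` and `open_fits('image.fits.fz')` both return an HDUList whose first element carries the image, which is why the call sites changed in PATCHES 02 to 06 can use it as a drop-in replacement for `fits.open`.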