From 79de55a0aab07499535d9614d9581ed50cdda3ca Mon Sep 17 00:00:00 2001 From: Akio Taniguchi Date: Thu, 19 Oct 2023 12:12:33 +0000 Subject: [PATCH 1/5] #73 Fix missing configs in devcontainer.json --- .devcontainer/devcontainer.json | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 0fdb079..24ca63e 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -18,8 +18,10 @@ "settings": { "files.insertFinalNewline": true, "files.trimTrailingWhitespace": true, + "python.formatting.provider": "black", "python.languageServer": "Pylance", "[python]": { + "editor.formatOnSave": true, "editor.insertSpaces": true, "editor.tabSize": 4 } From 03eb0b92fbcbaf11ea85533f64cd263c48fd0716 Mon Sep 17 00:00:00 2001 From: Akio Taniguchi Date: Sat, 21 Oct 2023 06:38:46 +0000 Subject: [PATCH 2/5] #73 Remove unused features --- decode/__init__.py | 36 +--- decode/array.py | 309 --------------------------- decode/cube.py | 398 ----------------------------------- decode/data/fitsinfo.toml | 77 ------- decode/io.py | 429 -------------------------------------- decode/logging.py | 58 ------ decode/models.py | 412 ------------------------------------ decode/plot.py | 302 --------------------------- decode/utils.py | 318 ---------------------------- 9 files changed, 1 insertion(+), 2338 deletions(-) delete mode 100644 decode/array.py delete mode 100644 decode/cube.py delete mode 100644 decode/data/fitsinfo.toml delete mode 100644 decode/io.py delete mode 100644 decode/logging.py delete mode 100644 decode/models.py delete mode 100644 decode/plot.py delete mode 100644 decode/utils.py diff --git a/decode/__init__.py b/decode/__init__.py index 74f53b7..4c71ca5 100644 --- a/decode/__init__.py +++ b/decode/__init__.py @@ -1,37 +1,3 @@ -__all__ = [ - "array", - "cube", - "io", - "models", - "logging", - "plot", - "utils", - "ones", - "zeros", - "full", - "empty", - "ones_like", - "zeros_like", - "full_like", - "empty_like", - "concat", - "fromcube", - "tocube", - "makecontinuum", - "setlogger", -] +__all__ = [] __version__ = "1.0.0" __author__ = "Akio Taniguchi" - - -# submodules -from . import array -from . import cube -from . import io -from . import models -from . import logging -from . import plot -from . 
import utils
-from .array import *
-from .cube import *
-from .logging import *
diff --git a/decode/array.py b/decode/array.py
deleted file mode 100644
index 66669d3..0000000
--- a/decode/array.py
+++ /dev/null
@@ -1,309 +0,0 @@
-__all__ = [
-    "array",
-    "ones",
-    "zeros",
-    "full",
-    "empty",
-    "ones_like",
-    "zeros_like",
-    "full_like",
-    "empty_like",
-    "concat",
-]
-
-
-# standard library
-from dataclasses import dataclass
-from logging import getLogger
-from typing import Any, Literal, Tuple
-
-
-# dependencies
-import numpy as np
-import xarray as xr
-from xarray_dataclasses import AsDataArray, Coord, Data
-
-
-# type hints
-_ = Tuple[()]
-Ti = Literal["t"]
-Ch = Literal["ch"]
-
-
-# module logger
-logger = getLogger(__name__)
-
-
-# runtime classes
-@dataclass(frozen=True)
-class Array(AsDataArray):
-    """Specification for de:code arrays."""
-
-    data: Data[Tuple[Ti, Ch], Any]
-    vrad: Coord[Ti, float] = 0.0
-    x: Coord[Ti, float] = 0.0
-    y: Coord[Ti, float] = 0.0
-    time: Coord[Ti, float] = 0.0
-    temp: Coord[Ti, float] = 0.0
-    pressure: Coord[Ti, float] = 0.0
-    vapor_pressure: Coord[Ti, float] = 0.0
-    windspd: Coord[Ti, float] = 0.0
-    winddir: Coord[Ti, float] = 0.0
-    scantype: Coord[Ti, str] = "GRAD"
-    scanid: Coord[Ti, int] = 0
-    masterid: Coord[Ch, int] = 0
-    kidid: Coord[Ch, int] = 0
-    kidfq: Coord[Ch, float] = 0.0
-    kidtp: Coord[Ch, int] = 0
-    weight: Coord[Tuple[Ti, Ch], float] = 1.0
-    coordsys: Coord[_, str] = "RADEC"
-    datatype: Coord[_, str] = "Temperature"
-    xref: Coord[_, float] = 0.0
-    yref: Coord[_, float] = 0.0
-    type: Coord[_, str] = "dca"
-
-
-@xr.register_dataarray_accessor("dca")
-@dataclass(frozen=True)
-class ArrayAccessor:
-    """Accessor for de:code arrays."""
-
-    array: xr.DataArray
-
-    @property
-    def tcoords(self):
-        """Dictionary of arrays that label time axis."""
-        return {k: v.values for k, v in self.array.coords.items() if v.dims == ("t",)}
-
-    @property
-    def chcoords(self):
-        """Dictionary of arrays that label channel axis."""
-        return {k: v.values for k, v in self.array.coords.items() if v.dims == ("ch",)}
-
-    @property
-    def datacoords(self):
-        """Dictionary of arrays that label time and channel axes."""
-        return {
-            k: v.values for k, v in self.array.coords.items() if v.dims == ("t", "ch")
-        }
-
-    @property
-    def scalarcoords(self):
-        """Dictionary of values that don't label any axes (point-like)."""
-        return {k: v.values for k, v in self.array.coords.items() if v.dims == ()}
-
-    def __setstate__(self, state):
-        """A method used for unpickling."""
-        self.__dict__ = state
-
-    def __getstate__(self):
-        """A method used for pickling."""
-        return self.__dict__
-
-
-# runtime functions
-def array(
-    data,
-    tcoords=None,
-    chcoords=None,
-    scalarcoords=None,
-    datacoords=None,
-    attrs=None,
-    name=None,
-):
-    """Create an array as an instance of xarray.DataArray with Decode accessor.
-
-    Args:
-        data (numpy.ndarray): 2D (time x channel) array.
-        tcoords (dict, optional): Dictionary of arrays that label time axis.
-        chcoords (dict, optional): Dictionary of arrays that label channel axis.
-        scalarcoords (dict, optional): Dictionary of values
-            that don't label any axes (point-like).
-        datacoords (dict, optional): Dictionary of arrays
-            that label time and channel axes.
-        attrs (dict, optional): Dictionary of attributes to add to the instance.
-        name (str, optional): String that names the instance.
-
-    Returns:
-        array (decode.array): Decode array.
- """ - # initialize coords with default values - array = Array.new(data) - - # update coords with input values (if any) - if tcoords is not None: - array.coords.update({k: ("t", v) for k, v in tcoords.items()}) - - if chcoords is not None: - array.coords.update({k: ("ch", v) for k, v in chcoords.items()}) - - if datacoords is not None: - array.coords.update({k: (("t", "ch"), v) for k, v in datacoords.items()}) - - if scalarcoords is not None: - array.coords.update({k: ((), v) for k, v in scalarcoords.items()}) - - if attrs is not None: - array.attrs.update(attrs) - - if name is not None: - array.name = name - - return array - - -def zeros(shape, dtype=None, **kwargs): - """Create an array of given shape and type, filled with zeros. - - Args: - shape (sequence of ints): 2D shape of the array. - dtype (data-type, optional): Desired data-type for the array. - kwargs (optional): Other arguments of the array (*coords, attrs, and name). - - Returns: - array (decode.array): Decode array filled with zeros. - """ - data = np.zeros(shape, dtype) - return array(data, **kwargs) - - -def ones(shape, dtype=None, **kwargs): - """Create an array of given shape and type, filled with ones. - - Args: - shape (sequence of ints): 2D shape of the array. - dtype (data-type, optional): Desired data-type for the array. - kwargs (optional): Other arguments of the array (*coords, attrs, and name). - - Returns: - array (decode.array): Decode array filled with ones. - """ - data = np.ones(shape, dtype) - return array(data, **kwargs) - - -def full(shape, fill_value, dtype=None, **kwargs): - """Create an array of given shape and type, filled with `fill_value`. - - Args: - shape (sequence of ints): 2D shape of the array. - fill_value (scalar or numpy.ndarray): Fill value or array. - dtype (data-type, optional): Desired data-type for the array. - kwargs (optional): Other arguments of the array (*coords, attrs, and name). - - Returns: - array (decode.array): Decode array filled with `fill_value`. - """ - return (zeros(shape, **kwargs) + fill_value).astype(dtype) - - -def empty(shape, dtype=None, **kwargs): - """Create an array of given shape and type, without initializing entries. - - Args: - shape (sequence of ints): 2D shape of the array. - dtype (data-type, optional): Desired data-type for the array. - kwargs (optional): Other arguments of the array (*coords, attrs, and name). - - Returns: - array (decode.array): Decode array without initializing entries. - """ - data = np.empty(shape, dtype) - return array(data, **kwargs) - - -def zeros_like(array, dtype=None, keepmeta=True): - """Create an array of zeros with the same shape and type as the input array. - - Args: - array (xarray.DataArray): The shape and data-type of it define - these same attributes of the output array. - dtype (data-type, optional): If specified, this function overrides - the data-type of the output array. - keepmeta (bool, optional): Whether *coords, attrs, and name of the input - array are kept in the output one. Default is True. - - Returns: - array (decode.array): Decode array filled with zeros. - """ - if keepmeta: - return xr.zeros_like(array, dtype) - else: - return zeros(array.shape, dtype) - - -def ones_like(array, dtype=None, keepmeta=True): - """Create an array of ones with the same shape and type as the input array. - - Args: - array (xarray.DataArray): The shape and data-type of it define - these same attributes of the output array. - dtype (data-type, optional): If spacified, this function overrides - the data-type of the output array. 
- keepmeta (bool, optional): Whether *coords, attrs, and name of the input - array are kept in the output one. Default is True. - - Returns: - array (decode.array): Decode array filled with ones. - """ - if keepmeta: - return xr.ones_like(array, dtype) - else: - return ones(array.shape, dtype) - - -def full_like(array, fill_value, reverse=False, dtype=None, keepmeta=True): - """Create an array of `fill_value` with the same shape and type as the input array. - - Args: - array (xarray.DataArray): The shape and data-type of it define - these same attributes of the output array. - fill_value (scalar or numpy.ndarray): Fill value or array. - dtype (data-type, optional): If spacified, this function overrides - the data-type of the output array. - keepmeta (bool, optional): Whether *coords, attrs, and name of the input - array are kept in the output one. Default is True. - - Returns: - array (decode.array): Decode array filled with `fill_value`. - """ - if keepmeta: - return (zeros_like(array) + fill_value).astype(dtype) - else: - return full(array.shape, fill_value, dtype) - - -def empty_like(array, dtype=None, keepmeta=True): - """Create an array of empty with the same shape and type as the input array. - - Args: - array (xarray.DataArray): The shape and data-type of it define - these same attributes of the output array. - dtype (data-type, optional): If spacified, this function overrides - the data-type of the output array. - keepmeta (bool, optional): Whether *coords, attrs, and name of the input - array are kept in the output one. Default is True. - - Returns: - array (decode.array): Decode array without initializing entries. - """ - if keepmeta: - return empty( - array.shape, - dtype, - tcoords=array.dca.tcoords, - chcoords=array.dca.chcoords, - scalarcoords=array.dca.scalarcoords, - attrs=array.attrs, - name=array.name, - ) - else: - return empty(array.shape, dtype) - - -def concat(objs, dim=None, **kwargs): - xref = objs[0].xref.values - yref = objs[0].yref.values - for obj in objs: - obj.coords.update({"xref": xref, "yref": yref}) - return xr.concat(objs, dim=dim, **kwargs) diff --git a/decode/cube.py b/decode/cube.py deleted file mode 100644 index 3a92248..0000000 --- a/decode/cube.py +++ /dev/null @@ -1,398 +0,0 @@ -__all__ = [ - "cube", - "fromcube", - "tocube", - "makecontinuum", -] - - -# standard library -from dataclasses import dataclass -from logging import getLogger -from typing import Any, Literal, Tuple - - -# dependencies -import numpy as np -import xarray as xr -from astropy import units as u -from scipy.interpolate import interp1d -from scipy.ndimage import map_coordinates -from xarray_dataclasses import AsDataArray, Coord, Data - - -# type hints -_ = Tuple[()] -X = Literal["x"] -Y = Literal["y"] -Ch = Literal["ch"] - - -# module logger -logger = getLogger(__name__) - - -# runtime classes -@dataclass(frozen=True) -class Cube(AsDataArray): - """Specification for de:code cubes.""" - - data: Data[Tuple[X, Y, Ch], Any] - x: Coord[X, float] = 0.0 - y: Coord[Y, float] = 0.0 - masterid: Coord[Ch, int] = 0 - kidid: Coord[Ch, int] = 0 - kidfq: Coord[Ch, float] = 0.0 - kidtp: Coord[Ch, int] = 0 - noise: Coord[Tuple[X, Y, Ch], float] = 1.0 - coordsys: Coord[_, str] = "RADEC" - datatype: Coord[_, str] = "temperature" - xref: Coord[_, float] = 0.0 - yref: Coord[_, float] = 0.0 - type: Coord[_, str] = "dcc" - - -@xr.register_dataarray_accessor("dcc") -@dataclass(frozen=True) -class CubeAccessor: - """Accessor for de:code cubes.""" - - cube: xr.DataArray - - @property - def xcoords(self): 
- """Dictionary of arrays that label x axis.""" - return {k: v.values for k, v in self.cube.coords.items() if v.dims == ("x",)} - - @property - def ycoords(self): - """Dictionary of arrays that label y axis.""" - return {k: v.values for k, v in self.cube.coords.items() if v.dims == ("y",)} - - @property - def chcoords(self): - """Dictionary of arrays that label channel axis.""" - return {k: v.values for k, v in self.cube.coords.items() if v.dims == ("ch",)} - - @property - def datacoords(self): - """Dictionary of arrays that label x, y, and channel axis.""" - return { - k: v.values - for k, v in self.cube.coords.items() - if v.dims == ("x", "y", "ch") - } - - @property - def scalarcoords(self): - """Dictionary of values that don't label any axes (point-like).""" - return {k: v.values for k, v in self.cube.coords.items() if v.dims == ()} - - def __setstate__(self, state): - """A method used for pickling.""" - self.__dict__ = state - - def __getstate__(self): - """A method used for unpickling.""" - return self.__dict__ - - -# runtime functions -def cube( - data, - xcoords=None, - ycoords=None, - chcoords=None, - scalarcoords=None, - datacoords=None, - attrs=None, - name=None, -): - """Create a cube as an instance of xarray.DataArray with Decode accessor. - - Args: - data (numpy.ndarray): 3D (x x y x channel) array. - xcoords (dict, optional): Dictionary of arrays that label x axis. - ycoords (dict, optional): Dictionary of arrays that label y axis. - chcoords (dict, optional): Dictionary of arrays that label channel axis. - scalarcoords (dict, optional): Dictionary of values - that don't label any axes (point-like). - datacoords (dict, optional): Dictionary of arrays - that label x, y, and channel axes. - attrs (dict, optional): Dictionary of attributes to add to the instance. - name (str, optional): String that names the instance. - - Returns: - decode cube (decode.cube): Decode cube. - """ - # initialize coords with default values - cube = Cube.new(data) - - # update coords with input values (if any) - if xcoords is not None: - cube.coords.update({k: ("x", v) for k, v in xcoords.items()}) - - if ycoords is not None: - cube.coords.update({k: ("y", v) for k, v in ycoords.items()}) - - if chcoords is not None: - cube.coords.update({k: ("ch", v) for k, v in chcoords.items()}) - - if datacoords is not None: - cube.coords.update({k: (("x", "y", "ch"), v) for k, v in datacoords.items()}) - - if scalarcoords is not None: - cube.coords.update({k: ((), v) for k, v in scalarcoords.items()}) - - if attrs is not None: - cube.attrs.update(attrs) - - if name is not None: - cube.name = name - - return cube - - -def fromcube(cube, template): - """Covert a decode cube to a decode array. - - Args: - cube (decode.cube): Decode cube to be cast. - template (decode.array): Decode array whose shape the cube is cast on. - - Returns: - decode array (decode.array): Decode array. - - Notes: - This functions is under development. - """ - array = xr.zeros_like(template) - - y, x = array.y.values, array.x.values - gy, gx = cube.y.values, cube.x.values - iy = interp1d(gy, np.arange(len(gy)))(y) - ix = interp1d(gx, np.arange(len(gx)))(x) - - for ch in range(len(cube.ch)): - array[:, ch] = map_coordinates(cube.values[:, :, ch], (ix, iy)) - - return array - - -def tocube(array, **kwargs): - """Convert a decode array to decode cube. - - Args: - array (decode.array): Decode array which will be converted. - kwargs (optional): Other arguments. - xarr (list or numpy.ndarray): Grid array of x direction. 
- yarr (list or numpy.ndarray): Grid array of y direction. - gx (float): The size of grid of x. - gy (float): The size of grid of y. - nx (int): The number of grid of x direction. - ny (int): The number of grid of y direction. - xmin (float): Minimum value of x. - xmax (float): Maximum value of x. - ymin (float): Minimum value of y. - ymax (float): Maximum value of y. - xc (float): Center of x. - yc (float): Center of y. - unit (str): Unit of x/y. - 'deg' or 'degree': Degree (Default). - 'arcmin': Arcminute. - 'arcsec': Arcsecond. - - Returns: - decode cube (decode.cube): Decode cube. - - Notes: - Available combinations of kwargs are - (1) xarr/yarr and xc/yc - (2) gx/gy and xmin/xmax/ymin/ymax and xc/yc - (3) nx/ny and xmin/xmax/ymin/ymax - """ - # pick up kwargs - unit = kwargs.pop("unit", "deg") - unit2deg = getattr(u, unit).to("deg") - - xc = kwargs.pop("xc", float(array.xref)) * unit2deg - yc = kwargs.pop("yc", float(array.yref)) * unit2deg - xarr = kwargs.pop("xarr", None) - yarr = kwargs.pop("yarr", None) - xmin = kwargs.pop("xmin", None) - xmax = kwargs.pop("xmax", None) - ymin = kwargs.pop("ymin", None) - ymax = kwargs.pop("ymax", None) - gx = kwargs.pop("gx", None) - gy = kwargs.pop("gy", None) - nx = kwargs.pop("nx", None) - ny = kwargs.pop("ny", None) - if None not in [xarr, yarr]: - x_grid = xr.DataArray(xarr * unit2deg, dims="grid") - y_grid = xr.DataArray(yarr * unit2deg, dims="grid") - else: - if None not in [xmin, xmax, ymin, ymax]: - xmin = xmin * unit2deg - xmax = xmax * unit2deg - ymin = ymin * unit2deg - ymax = ymax * unit2deg - else: - xmin = array.x.min() - xmax = array.x.max() - ymin = array.y.min() - ymax = array.y.max() - logger.info("xmin xmax ymin ymax") - logger.info("{} {} {} {}".format(xmin, xmax, ymin, ymax)) - - if None not in [gx, gy]: - gx = gx * unit2deg - gy = gy * unit2deg - logger.info("xc yc gx gy") - logger.info("{} {} {} {}".format(xc, yc, gx, gy)) - - gxmin = np.floor((xmin - xc) / gx) - gxmax = np.ceil((xmax - xc) / gx) - gymin = np.floor((ymin - yc) / gy) - gymax = np.ceil((ymax - yc) / gy) - xmin = gxmin * gx + xc - xmax = gxmax * gx + xc - ymin = gymin * gy + yc - ymax = gymax * gy + yc - - x_grid = xr.DataArray(np.arange(xmin, xmax + gx, gx), dims="grid") - y_grid = xr.DataArray(np.arange(ymin, ymax + gy, gy), dims="grid") - elif None not in [nx, ny]: - logger.info("nx ny") - logger.info("{} {}".format(nx, ny)) - # nx/ny does not support xc/yc - xc = 0 - yc = 0 - - x_grid = xr.DataArray(np.linspace(xmin, xmax, nx), dims="grid") - y_grid = xr.DataArray(np.linspace(ymin, ymax, ny), dims="grid") - else: - raise KeyError("Arguments are wrong.") - - # reverse the direction of x when coordsys == 'RADEC' - if array.coordsys == "RADEC": - x_grid = x_grid[::-1] - - # compute gridding - nx, ny, nch = len(x_grid), len(y_grid), len(array.ch) - i = np.abs(array.x - x_grid).argmin("grid").compute() - j = np.abs(array.y - y_grid).argmin("grid").compute() - index = i + nx * j - - array.coords.update({"index": index}) - groupedarray = array.groupby("index") - groupedones = xr.ones_like(array).groupby("index") - - gridarray = groupedarray.mean("t") - stdarray = groupedarray.std("t") - numarray = groupedones.sum("t") - - logger.info("Gridding started.") - gridarray = gridarray.compute() - noisearray = (stdarray / numarray**0.5).compute() - logger.info("Gridding finished.") - - # create cube - mask = gridarray.index.values - - temp = np.full([ny * nx, nch], np.nan) - temp[mask] = gridarray.values - data = temp.reshape((ny, nx, nch)).swapaxes(0, 1) - - temp = 
np.full([ny * nx, nch], np.nan) - temp[mask] = noisearray.values - noise = temp.reshape((ny, nx, nch)).swapaxes(0, 1) - - xcoords = {"x": x_grid.values} - ycoords = {"y": y_grid.values} - chcoords = { - "masterid": array.masterid.values, - "kidid": array.kidid.values, - "kidfq": array.kidfq.values, - "kidtp": array.kidtp.values, - } - scalarcoords = { - "coordsys": array.coordsys.values, - "datatype": array.datatype.values, - "xref": array.xref.values, - "yref": array.yref.values, - } - datacoords = {"noise": noise} - - return cube( - data, - xcoords=xcoords, - ycoords=ycoords, - chcoords=chcoords, - scalarcoords=scalarcoords, - datacoords=datacoords, - ) - - -def makecontinuum(cube, **kwargs): - """Make a continuum array. - - Args: - cube (decode.cube): Decode cube which will be averaged over channels. - kwargs (optional): Other arguments. - inchs (list): Included channel kidids. - exchs (list): Excluded channel kidids. - - Returns: - decode cube (decode.cube): Decode cube (2d). - """ - # pick up kwargs - inchs = kwargs.pop("inchs", None) - exchs = kwargs.pop("exchs", None) - weight = kwargs.pop("weight", None) - - if (inchs is not None) or (exchs is not None): - raise KeyError("Inchs and exchs are no longer supported. Use weight instead.") - - # if inchs is not None: - # logger.info('inchs') - # logger.info('{}'.format(inchs)) - # subcube = cube[:, :, inchs] - # else: - # mask = np.full(len(cube.ch), True) - # if exchs is not None: - # logger.info('exchs') - # logger.info('{}'.format(exchs)) - # mask[exchs] = False - # subcube = cube[:, :, mask] - - if weight is None: - weight = 1.0 - # else: - # cont = (subcube * (1 / subcube.noise**2)).sum(dim='ch') \ - # / (1 / subcube.noise**2).sum(dim='ch') - # cont = cont.expand_dims(dim='ch', axis=2) - cont = (cube * (1 / weight**2)).sum(dim="ch") / (1 / weight**2).sum(dim="ch") - - # define coordinates - xcoords = {"x": cube.x.values} - ycoords = {"y": cube.y.values} - chcoords = { - "masterid": np.array([0]), # np.array([int(subcube.masterid.mean(dim='ch'))]), - "kidid": np.array([0]), # np.array([int(subcube.kidid.mean(dim='ch'))]), - "kidfq": np.array([0]), # np.array([float(subcube.kidfq.mean(dim='ch'))]), - "kidtp": np.array([1]), - } # np.array([1])} - scalarcoords = { - "coordsys": cube.coordsys.values, - "datatype": cube.datatype.values, - "xref": cube.xref.values, - "yref": cube.yref.values, - } - - return cube( - cont.values, - xcoords=xcoords, - ycoords=ycoords, - chcoords=chcoords, - scalarcoords=scalarcoords, - ) diff --git a/decode/data/fitsinfo.toml b/decode/data/fitsinfo.toml deleted file mode 100644 index 55529ba..0000000 --- a/decode/data/fitsinfo.toml +++ /dev/null @@ -1,77 +0,0 @@ -[dcube_2d] -BSCALE = 1.0 -BZERO = 0.0 -BMAJ = 0.005 -BMIN = 0.005 -BPA = 0.0 -BTYPE = "Temperature" -OBJECT = "Another Universe" -BUNIT = "K" -RADESYS = "ICRS" -PC01_01 = 1.0 -PC01_02 = 0.0 -PC02_01 = 0.0 -PC02_02 = 1.0 -CTYPE1 = "RA--SFL" -CRVAL1 = 0.0 -CDELT1 = 0.002 -CRPIX1 = 1.0 -CUNIT1 = "deg" -CTYPE2 = "DEC--SFL" -CRVAL2 = 0.0 -CDELT2 = 0.002 -CRPIX2 = 1.0 -CUNIT2 = "deg" -TELESCOP = "ASTE" -INSTRUM = "DESHIMA" -OBSERVER = "Edwin Hubble" -DATE-OBS = "2017-01-01T00:00:00.0" -TIMESYS = "UTC" -OBSRA = 180.0 -OBSDEC = 0.0 -DATE = "2017-01-01T00:00:00.0" - -[dcube_3d] -BSCALE = 1.0 -BZERO = 0.0 -BMAJ = 0.005 -BMIN = 0.005 -BPA = 0.0 -BTYPE = "Temperature" -OBJECT = "Another Universe" -BUNIT = "K" -RADESYS = "ICRS" -PC01_01 = 1.0 -PC01_02 = 0.0 -PC01_03 = 0.0 -PC02_01 = 0.0 -PC02_02 = 1.0 -PC02_03 = 0.0 -PC03_01 = 0.0 -PC03_02 = 0.0 -PC03_03 = 1.0 
-CTYPE1 = "RA--SFL" -CRVAL1 = 0.0 -CDELT1 = 0.002 -CRPIX1 = 1.0 -CUNIT1 = "deg" -CTYPE2 = "DEC--SFL" -CRVAL2 = 0.0 -CDELT2 = 0.002 -CRPIX2 = 1.0 -CUNIT2 = "deg" -CTYPE3 = "FREQ" -CRVAL3 = 0.0 -CDELT3 = 1.0 -CRPIX3 = 1.0 -CUNIT3 = "GHz" -RESTFREQ = 3.3e11 -SPECSYS = "BARYCENT" -TELESCOP = "ASTE" -INSTRUM = "DESHIMA" -OBSERVER = "Edwin Hubble" -DATE-OBS = "2017-01-01T00:00:00.0" -TIMESYS = "UTC" -OBSRA = 180.0 -OBSDEC = 0.0 -DATE = "2017-01-01T00:00:00.0" diff --git a/decode/io.py b/decode/io.py deleted file mode 100644 index c95eeb5..0000000 --- a/decode/io.py +++ /dev/null @@ -1,429 +0,0 @@ -__all__ = [ - "loaddfits", - "savefits", - "loadnetcdf", - "savenetcdf", -] - - -# standard library -from datetime import datetime -from pytz import timezone -from logging import getLogger -from uuid import uuid4 -from pathlib import Path -from pkgutil import get_data - - -# dependencies -import tomli -import decode as dc -import numpy as np -import xarray as xr -from astropy.io import fits -from scipy.interpolate import interp1d - - -# module logger -logger = getLogger(__name__) - - -def loaddfits( - fitsname, - coordtype="azel", - loadtype="temperature", - starttime=None, - endtime=None, - pixelids=None, - scantypes=None, - mode=0, - **kwargs -): - """Load a decode array from a DFITS file. - - Args: - fitsname (str): Name of DFITS file. - coordtype (str): Coordinate type included into a decode array. - 'azel': Azimuth / elevation. - 'radec': Right ascension / declination. - loadtype (str): Data unit of xarray. - 'Tsignal': Temperature [K]. - 'Psignal': Power [W]. - 'amplitude': Amplitude. - 'phase': Phase. - 'linphase': Linear phase. - starttime (int, str or numpy.datetime64): Start time of loaded data. - It can be specified by the start index (int), - the time compatible with numpy.datetime64 (str), - or numpy.datetime64 (numpy.datetime64). - Default is None and it means the data will be loaded from the first record. - endtime (int, str or numpy.datetime64): End time of loaded data. - It can be specified by the end index (int), - the time compatible with numpy.datetime64 (str), - or numpy.datetime64 (numpy.datetime64). - Default is None and it means the data will be loaded until the last record. - pixelids (int or list): Under development. - scantypes (list(str)): Scan types, such as 'GRAD', 'SCAN', 'OFF', 'R'. - mode (int): Loading mode. - 0: Relative coordinates with cosine projection (RECOMMENDED). - 1: Relative coordinates without cosine projection. - 2: Absolute coordinates. - kwargs (optional): - findR (bool): Automatically find R positions. - ch (int): Representative channel id used for finding R. - Rth (float): Threshold of R. - skyth (flaot): Threshold of sky. - cutnum (int): The number of points of unused data at the edge. - still (bool): When it is true, scantypes of on/off are manually assigned. - period (float): On/off period in second for still data. - shuttle (bool): For shuttle observations. - xmin_off (float): Minimum x of off-point data. - xmax_off (float): Maximum x of off-point data. - xmin_on (float): Minimum x of on-point data. - xmax_on (float): Maximum x of on-point data. - - Returns: - decode array (decode.array): Loaded decode array. 
- """ - if mode not in [0, 1, 2]: - raise KeyError(mode) - - logger.info("coordtype starttime endtime mode loadtype") - logger.info("{} {} {} {} {}".format(coordtype, starttime, endtime, mode, loadtype)) - - # pick up kwargs - # for findR - findR = kwargs.pop("findR", False) - ch = kwargs.pop("ch", 0) - Rth = kwargs.pop("Rth", 280) - skyth = kwargs.pop("skyth", 150) - cutnum = kwargs.pop("cutnum", 1) - # for still - still = kwargs.pop("still", False) - period = kwargs.pop("period", 2) - # for shuttle - shuttle = kwargs.pop("shuttle", False) - xmin_off = kwargs.pop("xmin_off", 0) - xmax_off = kwargs.pop("xmax_off", 0) - xmin_on = kwargs.pop("xmin_on", 0) - xmax_on = kwargs.pop("xmax_on", 0) - - # load data - fitsname = str(Path(fitsname).expanduser()) - - with fits.open(fitsname) as hdulist: - obsinfo = hdulist["OBSINFO"].data - obshdr = hdulist["OBSINFO"].header - antlog = hdulist["ANTENNA"].data - readout = hdulist["READOUT"].data - wealog = hdulist["WEATHER"].data - - # obsinfo - masterids = obsinfo["masterids"][0].astype(np.int64) - kidids = obsinfo["kidids"][0].astype(np.int64) - kidfreqs = obsinfo["kidfreqs"][0].astype(np.float64) - kidtypes = obsinfo["kidtypes"][0].astype(np.int64) - - # parse start/end time - t_ant = np.array(antlog["time"]).astype(np.datetime64) - t_out = np.array(readout["starttime"]).astype(np.datetime64) - t_wea = np.array(wealog["time"]).astype(np.datetime64) - - if starttime is None: - startindex = 0 - elif isinstance(starttime, int): - startindex = starttime - elif isinstance(starttime, str): - startindex = np.searchsorted(t_out, np.datetime64(starttime)) - elif isinstance(starttime, np.datetime64): - startindex = np.searchsorted(t_out, starttime) - else: - raise ValueError(starttime) - - if endtime is None: - endindex = t_out.shape[0] - elif isinstance(endtime, int): - endindex = endtime - elif isinstance(endtime, str): - endindex = np.searchsorted(t_out, np.datetime64(endtime), "right") - elif isinstance(endtime, np.datetime64): - endindex = np.searchsorted(t_out, endtime, "right") - else: - raise ValueError(starttime) - - if t_out[endindex - 1] > t_ant[-1]: - logger.warning("Endtime of readout is adjusted to that of ANTENNA HDU.") - endindex = np.searchsorted(t_out, t_ant[-1], "right") - - t_out = t_out[startindex:endindex] - - # readout - if loadtype == "temperature": - response = readout["Tsignal"][startindex:endindex].astype(np.float64) - elif loadtype == "power": - response = readout["Psignal"][startindex:endindex].astype(np.float64) - elif loadtype == "amplitude": - response = readout["amplitude"][startindex:endindex].astype(np.float64) - elif loadtype == "phase": - response = readout["phase"][startindex:endindex].astype(np.float64) - elif loadtype == "linphase": - response = readout["line_phase"][startindex:endindex].astype(np.float64) - else: - raise KeyError(loadtype) - - # antenna - if coordtype == "azel": - x = antlog["az"].copy() - y = antlog["el"].copy() - xref = np.median(antlog["az_center"]) - yref = np.median(antlog["el_center"]) - if mode in [0, 1]: - x -= antlog["az_center"] - y -= antlog["el_center"] - if mode == 0: - x *= np.cos(np.deg2rad(antlog["el"])) - elif coordtype == "radec": - x = antlog["ra"].copy() - y = antlog["dec"].copy() - xref = obshdr["RA"] - yref = obshdr["DEC"] - if mode in [0, 1]: - x -= xref - y -= yref - if mode == 0: - x *= np.cos(np.deg2rad(antlog["dec"])) - else: - raise KeyError(coordtype) - scantype = antlog["scantype"] - - # weatherlog - temp = wealog["temperature"] - pressure = wealog["pressure"] - vpressure = 
wealog["vapor-pressure"] - windspd = wealog["windspd"] - winddir = wealog["winddir"] - - # interpolation - dt_out = (t_out - t_out[0]) / np.timedelta64(1, "s") - dt_ant = (t_ant - t_out[0]) / np.timedelta64(1, "s") - dt_wea = (t_wea - t_out[0]) / np.timedelta64(1, "s") - x_i = np.interp(dt_out, dt_ant, x) - y_i = np.interp(dt_out, dt_ant, y) - - temp_i = np.interp(dt_out, dt_wea, temp) - pressure_i = np.interp(dt_out, dt_wea, pressure) - vpressure_i = np.interp(dt_out, dt_wea, vpressure) - windspd_i = np.interp(dt_out, dt_wea, windspd) - winddir_i = np.interp(dt_out, dt_wea, winddir) - - scandict = {t: n for n, t in enumerate(np.unique(scantype))} - scantype_v = np.zeros(scantype.shape[0], dtype=int) - for k, v in scandict.items(): - scantype_v[scantype == k] = v - scantype_vi = interp1d( - dt_ant, - scantype_v, - kind="nearest", - bounds_error=False, - fill_value=(scantype_v[0], scantype_v[-1]), - )(dt_out) - scantype_i = np.full_like(scantype_vi, "GRAD", dtype="= Rth) - scantype_i[Rindex] = "R" - movemask = np.hstack( - [[False] * cutnum, scantype_i[cutnum:] != scantype_i[:-cutnum]] - ) | np.hstack( - [scantype_i[:-cutnum] != scantype_i[cutnum:], [False] * cutnum] - ) & ( - scantype_i == "R" - ) - scantype_i[movemask] = "JUNK" - scantype_i[(response[:, ch] > skyth) & (scantype_i != "R")] = "JUNK" - scantype_i[(response[:, ch] <= skyth) & (scantype_i == "R")] = "JUNK" - skyindex = np.where(response[:, ch] <= skyth) - scantype_i_temp = scantype_i.copy() - scantype_i_temp[skyindex] = "SKY" - movemask = np.hstack( - [[False] * cutnum, scantype_i_temp[cutnum:] != scantype_i_temp[:-cutnum]] - ) | np.hstack( - [scantype_i_temp[:-cutnum] != scantype_i_temp[cutnum:], [False] * cutnum] - ) & ( - scantype_i_temp == "SKY" - ) - scantype_i[movemask] = "JUNK" - - # scanid - scanid_i = np.cumsum(np.hstack([False, scantype_i[1:] != scantype_i[:-1]])) - - # coordinates - tcoords = { - "x": x_i, - "y": y_i, - "time": t_out, - "temp": temp_i, - "pressure": pressure_i, - "vapor-pressure": vpressure_i, - "windspd": windspd_i, - "winddir": winddir_i, - "scantype": scantype_i, - "scanid": scanid_i, - } - chcoords = { - "masterid": masterids, - "kidid": kidids, - "kidfq": kidfreqs, - "kidtp": kidtypes, - } - scalarcoords = { - "coordsys": coordtype.upper(), - "datatype": loadtype, - "xref": xref, - "yref": yref, - } - - # make array - array = dc.array( - response, tcoords=tcoords, chcoords=chcoords, scalarcoords=scalarcoords - ) - if scantypes is not None: - mask = np.full(array.shape[0], False) - for scantype in scantypes: - mask |= array.scantype == scantype - array = array[mask] - - return array - - -def savefits(cube, fitsname, **kwargs): - """Save a cube to a 3D-cube FITS file. - - Args: - cube (xarray.DataArray): Cube to be saved. - fitsname (str): Name of output FITS file. - kwargs (optional): Other arguments common with astropy.io.fits.writeto(). 
- """ - # pick up kwargs - dropdeg = kwargs.pop("dropdeg", False) - ndim = len(cube.dims) - - # load yaml - fitsinfo = get_data("decode", "data/fitsinfo.toml") - hdrdata = tomli.loads(fitsinfo.decode("utf-8")) - - # default header - if ndim == 2: - header = fits.Header(hdrdata["dcube_2d"]) - data = cube.values.T - elif ndim == 3: - if dropdeg: - header = fits.Header(hdrdata["dcube_2d"]) - data = cube.values[:, :, 0].T - else: - header = fits.Header(hdrdata["dcube_3d"]) - - kidfq = cube.kidfq.values - freqrange = ~np.isnan(kidfq) - orderedfq = np.argsort(kidfq[freqrange]) - newcube = cube[:, :, orderedfq] - data = newcube.values.T - else: - raise TypeError(ndim) - - # update Header - if cube.coordsys == "AZEL": - header.update({"CTYPE1": "dAZ", "CTYPE2": "dEL"}) - elif cube.coordsys == "RADEC": - header.update({"OBSRA": float(cube.xref), "OBSDEC": float(cube.yref)}) - else: - pass - header.update( - { - "CRVAL1": float(cube.x[0]), - "CDELT1": float(cube.x[1] - cube.x[0]), - "CRVAL2": float(cube.y[0]), - "CDELT2": float(cube.y[1] - cube.y[0]), - "DATE": datetime.now(timezone("UTC")).isoformat(), - } - ) - if (ndim == 3) and (not dropdeg): - header.update( - { - "CRVAL3": float(newcube.kidfq[0]), - "CDELT3": float(newcube.kidfq[1] - newcube.kidfq[0]), - } - ) - - fitsname = str(Path(fitsname).expanduser()) - fits.writeto(fitsname, data, header, **kwargs) - logger.info("{} has been created.".format(fitsname)) - - -def loadnetcdf(filename, copy=True): - """Load a dataarray from a NetCDF file. - - Args: - filename (str): Filename (*.nc). - copy (bool): If True, dataarray is copied in memory. Default is True. - - Returns: - dataarray (xarray.DataArray): Loaded dataarray. - """ - filename = str(Path(filename).expanduser()) - - if copy: - dataarray = xr.open_dataarray(filename).copy() - else: - dataarray = xr.open_dataarray(filename, chunks={}) - - if dataarray.name is None: - dataarray.name = filename.rstrip(".nc") - - for key, val in dataarray.coords.items(): - if val.dtype.kind == "S": - dataarray[key] = val.astype("U") - elif val.dtype == np.int32: - dataarray[key] = val.astype("i8") - - return dataarray - - -def savenetcdf(dataarray, filename=None): - """Save a dataarray to a NetCDF file. - - Args: - dataarray (xarray.DataArray): Dataarray to be saved. - filename (str): Filename (used as .nc). - If not spacified, random 8-character name will be used. 
- """ - if filename is None: - if dataarray.name is not None: - filename = dataarray.name - else: - filename = uuid4().hex[:8] - else: - filename = str(Path(filename).expanduser()) - - if not filename.endswith(".nc"): - filename += ".nc" - - dataarray.to_netcdf(filename) - logger.info("{} has been created.".format(filename)) diff --git a/decode/logging.py b/decode/logging.py deleted file mode 100644 index 60e5343..0000000 --- a/decode/logging.py +++ /dev/null @@ -1,58 +0,0 @@ -__all__ = ["setlogger"] - - -# standard library -import logging -from copy import copy -from pathlib import Path - - -# constants -DATEFORMAT = "%Y-%m-%d %H:%M:%S" -LOGFORMAT = "{asctime} | {levelname:8} | {funcName}: {message}" -DEFAULTLEVEL = "INFO" - - -# module logger -logger = logging.getLogger(__name__) - - -# classes -class setlogger(object): - def __init__(self, level=None, filename=None, overwrite=False, encoding="utf-8"): - self.logger = logging.getLogger("decode") - self.logger.addHandler(logging.NullHandler()) - # save current state - self.oldhandlers = copy(self.logger.handlers) - self.oldlevel = copy(self.logger.level) - # set new state - self.sethandlers(filename, overwrite, encoding) - self.setlevel(level) - - def sethandlers(self, filename, overwrite, encoding): - for handler in self.logger.handlers: - self.logger.removeHandler(handler) - - if filename is None: - handler = logging.StreamHandler() - else: - filename = str(Path(filename).expanduser()) - mode = "w" if overwrite else "a" - handler = logging.FileHandler(filename, mode, encoding) - - formatter = logging.Formatter(LOGFORMAT, DATEFORMAT, style="{") - handler.setFormatter(formatter) - self.logger.addHandler(handler) - - def setlevel(self, level): - level = DEFAULTLEVEL if level is None else level.upper() - self.logger.setLevel(level) - for handler in self.logger.handlers: - handler.setLevel(level) - - def __enter__(self): - pass - - def __exit__(self, exc_type, exc_value, traceback): - self.logger.handlers = self.oldhandlers - self.logger.level = self.oldlevel diff --git a/decode/models.py b/decode/models.py deleted file mode 100644 index bcf0e58..0000000 --- a/decode/models.py +++ /dev/null @@ -1,412 +0,0 @@ -__all__ = [ - "pca", - "chopper_calibration", - "r_division", - "gauss_fit", -] - - -# standard library -from logging import getLogger - - -# dependencies -import decode as dc -import numpy as np -from astropy.modeling import fitting, models -from sklearn.decomposition import TruncatedSVD - - -def pca(onarray, offarray, n=10, exchs=None, pc=False, mode="mean"): - """Apply Principal Component Analysis (PCA) method to estimate baselines at each time. - - Args: - onarray (decode.array): Decode array of on-point observations. - offarray (decode.array): Decode array of off-point observations. - n (int): The number of pricipal components. - pc (bool): When True, this function also returns - eigen vectors and their coefficients. - mode (None or str): The way of correcting offsets. - 'mean': Mean. - 'median': Median. - None: No correction. - - Returns: - filtered (decode.array): Baseline-subtracted array. - When pc is True: - Ps (list(np.ndarray)): Eigen vectors. - Cs (list(np.ndarray)): Coefficients. 
- """ - logger = getLogger("decode.models.pca") - logger.info("n_components exchs mode") - if exchs is None: - exchs = [16, 44, 46] - logger.info("{} {} {}".format(n, exchs, mode)) - - offid = np.unique(offarray.scanid) - onid = np.unique(onarray.scanid) - - onarray = onarray.copy() # Xarray - onarray[:, exchs] = 0 - onvalues = onarray.values - onscanid = onarray.scanid.values - offarray = offarray.copy() # Xarray - offarray[:, exchs] = 0 - offvalues = offarray.values - offscanid = offarray.scanid.values - - Ps, Cs = [], [] - Xatm = dc.full_like(onarray, onarray) - Xatmvalues = Xatm.values - model = TruncatedSVD(n_components=n) - for i in onid: - leftid = np.searchsorted(offid, i) - 1 - rightid = np.searchsorted(offid, i) - - Xon = onvalues[onscanid == i] - if leftid == -1: - Xoff = offvalues[offscanid == offid[rightid]] - Xoff_m = getattr(np, "nan" + mode)(Xoff, axis=0) if mode is not None else 0 - Xon_m = Xoff_m - model.fit(Xoff - Xoff_m) - elif rightid == len(offid): - Xoff = offvalues[offscanid == offid[leftid]] - Xoff_m = getattr(np, "nan" + mode)(Xoff, axis=0) if mode is not None else 0 - Xon_m = Xoff_m - model.fit(Xoff - Xoff_m) - else: - Xoff_l = offvalues[offscanid == offid[leftid]] - Xoff_lm = ( - getattr(np, "nan" + mode)(Xoff_l, axis=0) if mode is not None else 0 - ) - Xoff_r = offvalues[offscanid == offid[rightid]] - Xoff_rm = ( - getattr(np, "nan" + mode)(Xoff_r, axis=0) if mode is not None else 0 - ) - Xon_m = ( - getattr(np, "nan" + mode)(np.vstack([Xoff_l, Xoff_r]), axis=0) - if mode is not None - else 0 - ) - model.fit(np.vstack([Xoff_l - Xoff_lm, Xoff_r - Xoff_rm])) - P = model.components_ - C = model.transform(Xon - Xon_m) - - Xatmvalues[onscanid == i] = C @ P + Xon_m - # Xatms.append(dc.full_like(Xon, C @ P + Xon_m.values)) - Ps.append(P) - Cs.append(C) - - if pc: - return Xatm, Ps, Cs - else: - return Xatm - - -def chopper_calibration(onarray, offarray, rarray, Tamb, mode="mean"): - logger = getLogger("decode.models.chopper_calibration") - logger.info("mode") - logger.info("{}".format(mode)) - - onarray, offarray = r_division(onarray, offarray, rarray, mode=mode) - - offid = np.unique(offarray.scanid) - onid = np.unique(onarray.scanid) - - onarray = onarray.copy() # Xarray - onvalues = onarray.values - onscanid = onarray.scanid.values - offarray = offarray.copy() # Xarray - offvalues = offarray.values - offscanid = offarray.scanid.values - for i in onid: - oleftid = np.searchsorted(offid, i) - 1 - orightid = np.searchsorted(offid, i) - - Xon = onvalues[onscanid == i] - if oleftid == -1: - Xoff = offvalues[offscanid == offid[orightid]] - Xoff_m = getattr(np, "nan" + mode)(Xoff, axis=0) - elif orightid == len(offid): - Xoff = offvalues[offscanid == offid[oleftid]] - Xoff_m = getattr(np, "nan" + mode)(Xoff, axis=0) - else: - Xoff_l = offvalues[offscanid == offid[oleftid]] - Xoff_r = offvalues[offscanid == offid[orightid]] - Xoff_m = getattr(np, "nan" + mode)(np.vstack([Xoff_l, Xoff_r]), axis=0) - onvalues[onscanid == i] = Tamb * (Xon - Xoff_m) / (1 - Xoff_m) - - for j in offid: - Xoff = offvalues[offscanid == j] - Xoff_m = getattr(np, "nan" + mode)(Xoff, axis=0) - offvalues[offscanid == j] = Tamb * (Xoff - Xoff_m) / (1 - Xoff_m) - - return onarray, offarray - - -def r_division(onarray, offarray, rarray, mode="mean"): - """Apply R division. - - Args: - onarray (decode.array): Decode array of on-point observations. - offarray (decode.array): Decode array of off-point observations. - rarray (decode.array): Decode array of R observations. 
- mode (str): Method for the selection of nominal R value. - 'mean': Mean. - 'median': Median. - - Returns: - onarray_cal (decode.array): Calibrated array of on-point observations. - offarray_cal (decode.array): Calibrated array of off-point observations. - """ - logger = getLogger("decode.models.r_division") - logger.info("mode") - logger.info("{}".format(mode)) - - offid = np.unique(offarray.scanid) - onid = np.unique(onarray.scanid) - rid = np.unique(rarray.scanid) - - onarray = onarray.copy() # Xarray - onvalues = onarray.values - onscanid = onarray.scanid.values - offarray = offarray.copy() # Xarray - offvalues = offarray.values - offscanid = offarray.scanid.values - rarray = rarray.copy() # Xarray - rvalues = rarray.values - rscanid = rarray.scanid.values - for i in onid: - rleftid = np.searchsorted(rid, i) - 1 - rrightid = np.searchsorted(rid, i) - - if rleftid == -1: - Xr = rvalues[rscanid == rid[rrightid]] - Xr_m = getattr(np, "nan" + mode)(Xr, axis=0) - elif rrightid == len(rid): - Xr = rvalues[rscanid == rid[rleftid]] - Xr_m = getattr(np, "nan" + mode)(Xr, axis=0) - else: - Xr_l = rvalues[rscanid == rid[rleftid]] - Xr_r = rvalues[rscanid == rid[rrightid]] - Xr_m = getattr(np, "nan" + mode)(np.vstack([Xr_l, Xr_r]), axis=0) - onvalues[onscanid == i] /= Xr_m - - for j in offid: - rleftid = np.searchsorted(rid, j) - 1 - rrightid = np.searchsorted(rid, j) - - if rleftid == -1: - Xr = rvalues[rscanid == rid[rrightid]] - Xr_m = getattr(np, "nan" + mode)(Xr, axis=0) - elif rrightid == len(rid): - Xr = rvalues[rscanid == rid[rleftid]] - Xr_m = getattr(np, "nan" + mode)(Xr, axis=0) - else: - Xr_l = rvalues[rscanid == rid[rleftid]] - Xr_r = rvalues[rscanid == rid[rrightid]] - Xr_m = getattr(np, "nan" + mode)(np.vstack([Xr_l, Xr_r]), axis=0) - offvalues[offscanid == j] /= Xr_m - - Xon_rdiv = dc.full_like(onarray, onarray) - Xoff_rdiv = dc.full_like(offarray, offarray) - Xonoff_rdiv = dc.concat([Xon_rdiv, Xoff_rdiv], dim="t") - Xonoff_rdiv_sorted = Xonoff_rdiv[np.argsort(Xonoff_rdiv.time.values)] - - scantype = Xonoff_rdiv_sorted.scantype.values - newscanid = np.cumsum(np.hstack([False, scantype[1:] != scantype[:-1]])) - onmask = np.in1d(Xonoff_rdiv_sorted.scanid, onid) - offmask = np.in1d(Xonoff_rdiv_sorted.scanid, offid) - Xon_rdiv = Xonoff_rdiv_sorted[onmask] - Xoff_rdiv = Xonoff_rdiv_sorted[offmask] - Xon_rdiv.coords.update({"scanid": ("t", newscanid[onmask])}) - Xoff_rdiv.coords.update({"scanid": ("t", newscanid[offmask])}) - - return Xon_rdiv, Xoff_rdiv - - -def gauss_fit( - map_data, - chs=None, - mode="deg", - amplitude=1, - x_mean=0, - y_mean=0, - x_stddev=None, - y_stddev=None, - theta=None, - cov_matrix=None, - noise=0, - **kwargs -): - """make a 2D Gaussian model and fit the observed data with the model. - - Args: - map_data (xarray.Dataarray): Dataarray of cube or single chs. - chs (list of int): in prep. - mode (str): Coordinates for the fitting - 'pix' - 'deg' - amplitude (float or None): Initial amplitude value of Gaussian fitting. - x_mean (float): Initial value of mean of the fitting Gaussian in x. - y_mean (float): Initial value of mean of the fitting Gaussian in y. - x_stddev (float or None): Standard deviation of the Gaussian - in x before rotating by theta. - y_stddev (float or None): Standard deviation of the Gaussian - in y before rotating by theta. - theta (float, optional or None): Rotation angle in radians. - cov_matrix (ndarray, optional): A 2x2 covariance matrix. If specified, - overrides the ``x_stddev``, ``y_stddev``, and ``theta`` defaults. 
- - Returns: - decode cube (xarray cube) with fitting results in array and attrs. - """ - - if chs is None: - chs = np.ogrid[0:63] # the number of channels would be changed - - if len(chs) > 1: - for n, ch in enumerate(chs): - subdata = np.transpose( - np.full_like(map_data[:, :, ch], map_data.values[:, :, ch]) - ) - subdata[np.isnan(subdata)] = 0 - - if mode == "deg": - mX, mY = np.meshgrid(map_data.x, map_data.y) - - elif mode == "pix": - mX, mY = np.mgrid[0 : len(map_data.y), 0 : len(map_data.x)] - - g_init = models.Gaussian2D( - amplitude=np.nanmax(subdata), - x_mean=x_mean, - y_mean=y_mean, - x_stddev=x_stddev, - y_stddev=y_stddev, - theta=theta, - cov_matrix=cov_matrix, - **kwargs - ) + models.Const2D(noise) - fit_g = fitting.LevMarLSQFitter() - g = fit_g(g_init, mX, mY, subdata) - - g_init2 = models.Gaussian2D( - amplitude=np.nanmax(subdata - g.amplitude_1), - x_mean=x_mean, - y_mean=y_mean, - x_stddev=x_stddev, - y_stddev=y_stddev, - theta=theta, - cov_matrix=cov_matrix, - **kwargs - ) - fit_g2 = fitting.LevMarLSQFitter() - g2 = fit_g2(g_init2, mX, mY, subdata) - - if n == 0: - results = np.array([g2(mX, mY)]) - peaks = np.array([g2.amplitude.value]) - x_means = np.array([g2.x_mean.value]) - y_means = np.array([g2.y_mean.value]) - x_stddevs = np.array([g2.x_stddev.value]) - y_stddevs = np.array([g2.y_stddev.value]) - thetas = np.array([g2.theta.value]) - if fit_g2.fit_info["param_cov"] is None: - uncerts = np.array([0]) - else: - error = np.diag(fit_g2.fit_info["param_cov"]) ** 0.5 - uncerts = np.array([error[0]]) - - else: - results = np.append(results, [g2(mX, mY)], axis=0) - peaks = np.append(peaks, [g2.amplitude.value], axis=0) - x_means = np.append(x_means, [g2.x_mean.value], axis=0) - y_means = np.append(y_means, [g2.y_mean.value], axis=0) - x_stddevs = np.append(x_stddevs, [g2.x_stddev.value], axis=0) - y_stddevs = np.append(y_stddevs, [g2.y_stddev.value], axis=0) - thetas = np.append(thetas, [g2.theta.value], axis=0) - - if fit_g2.fit_info["param_cov"] is None: - uncerts = np.append(uncerts, [0], axis=0) - else: - error = np.diag(fit_g2.fit_info["param_cov"]) ** 0.5 - uncerts = np.append(uncerts, [error[0]], axis=0) - - result = map_data.copy() - result.values = np.transpose(results) - result.attrs.update( - { - "peak": peaks, - "x_mean": x_means, - "y_mean": y_means, - "x_stddev": x_stddevs, - "y_stddev": y_stddevs, - "theta": thetas, - "uncert": uncerts, - } - ) - - else: - subdata = np.transpose( - np.full_like(map_data[:, :, 0], map_data.values[:, :, 0]) - ) - subdata[np.isnan(subdata)] = 0 - - if mode == "deg": - mX, mY = np.meshgrid(map_data.x, map_data.y) - - elif mode == "pix": - mX, mY = np.mgrid[0 : len(map_data.y), 0 : len(map_data.x)] - - g_init = models.Gaussian2D( - amplitude=np.nanmax(subdata), - x_mean=x_mean, - y_mean=y_mean, - x_stddev=x_stddev, - y_stddev=y_stddev, - theta=theta, - cov_matrix=cov_matrix, - **kwargs - ) + models.Const2D(noise) - fit_g = fitting.LevMarLSQFitter() - g = fit_g(g_init, mX, mY, subdata) - - g_init2 = models.Gaussian2D( - amplitude=np.nanmax(subdata - g.amplitude_1), - x_mean=x_mean, - y_mean=y_mean, - x_stddev=x_stddev, - y_stddev=y_stddev, - theta=theta, - cov_matrix=cov_matrix, - **kwargs - ) - fit_g2 = fitting.LevMarLSQFitter() - g2 = fit_g2(g_init2, mX, mY, subdata) - - results = np.array([g2(mX, mY)]) - peaks = np.array([g2.amplitude.value]) - x_means = np.array([g2.x_mean.value]) - y_means = np.array([g2.y_mean.value]) - x_stddevs = np.array([g2.x_stddev.value]) - y_stddevs = np.array([g2.y_stddev.value]) - thetas = 
np.array([g2.theta.value]) - error = np.diag(fit_g2.fit_info["param_cov"]) ** 0.5 - uncerts = np.array(error[0]) - - result = map_data.copy() - result.values = np.transpose(results) - result.attrs.update( - { - "peak": peaks, - "x_mean": x_means, - "y_mean": y_means, - "x_stddev": x_stddevs, - "y_stddev": y_stddevs, - "theta": thetas, - "uncert": uncerts, - } - ) - - return result diff --git a/decode/plot.py b/decode/plot.py deleted file mode 100644 index 22064af..0000000 --- a/decode/plot.py +++ /dev/null @@ -1,302 +0,0 @@ -__all__ = [ - "plotcoords", - "plot_tcoords", - "plottimestream", - "plot_timestream", - "plotspectrum", - "plot_spectrum", - "plot_chmap", - "plotpsd", - "plotallanvar", -] - - -# standard library -from logging import getLogger - - -# dependencies -import numpy as np -import matplotlib.pyplot as plt -from scipy.signal.windows import hann -from .utils import allan_variance, deprecation_warning, psd - - -# module logger -logger = getLogger(__name__) - - -# functions -def plot_tcoords(array, coords, scantypes=None, ax=None, **kwargs): - """Plot coordinates related to the time axis. - - Args: - array (xarray.DataArray): Array which the coodinate information is included. - coords (list): Name of x axis and y axis. - scantypes (list): Scantypes. If None, all scantypes are used. - ax (matplotlib.axes): Axis you want to plot on. - kwargs (optional): Plot options passed to ax.plot(). - """ - if ax is None: - ax = plt.gca() - - if scantypes is None: - ax.plot(array[coords[0]], array[coords[1]], label="ALL", **kwargs) - else: - for scantype in scantypes: - ax.plot( - array[coords[0]][array.scantype == scantype], - array[coords[1]][array.scantype == scantype], - label=scantype, - **kwargs - ) - ax.set_xlabel(coords[0]) - ax.set_ylabel(coords[1]) - ax.set_title("{} vs {}".format(coords[1], coords[0])) - ax.legend() - - logger.info("{} vs {} has been plotted.".format(coords[1], coords[0])) - - -def plot_timestream(array, kidid, xtick="time", scantypes=None, ax=None, **kwargs): - """Plot timestream data. - - Args: - array (xarray.DataArray): Array which the timestream data are included. - kidid (int): Kidid. - xtick (str): Type of x axis. - 'time': Time. - 'index': Time index. - scantypes (list): Scantypes. If None, all scantypes are used. - ax (matplotlib.axes): Axis you want to plot on. - kwargs (optional): Plot options passed to ax.plot(). 
- """ - if ax is None: - ax = plt.gca() - - index = np.where(array.kidid == kidid)[0] - if len(index) == 0: - raise KeyError("Such a kidid does not exist.") - index = int(index) - - if scantypes is None: - if xtick == "time": - ax.plot(array.time, array[:, index], label="ALL", **kwargs) - elif xtick == "index": - ax.plot(np.ogrid[: len(array.time)], array[:, index], label="ALL", **kwargs) - else: - for scantype in scantypes: - if xtick == "time": - ax.plot( - array.time[array.scantype == scantype], - array[:, index][array.scantype == scantype], - label=scantype, - **kwargs - ) - elif xtick == "index": - ax.plot( - np.ogrid[: len(array.time[array.scantype == scantype])], - array[:, index][array.scantype == scantype], - label=scantype, - **kwargs - ) - ax.set_xlabel("{}".format(xtick)) - ax.set_ylabel(str(array.datatype.values)) - ax.legend() - - kidtpdict = {0: "wideband", 1: "filter", 2: "blind"} - try: - kidtp = kidtpdict[int(array.kidtp[index])] - except KeyError: - kidtp = "filter" - ax.set_title("ch #{} ({})".format(kidid, kidtp)) - - logger.info("timestream data (ch={}) has been plotted.".format(kidid)) - - -def plot_spectrum(cube, xtick, ytick, aperture, ax=None, **kwargs): - """Plot a spectrum. - - Args: - cube (xarray.DataArray): Cube which the spectrum information is included. - xtick (str): Type of x axis. - 'freq': Frequency [GHz]. - 'id': Kid id. - ytick (str): Type of y axis. - 'max': Maximum. - 'sum': Summation. - 'mean': Mean. - aperture (str): The shape of aperture. - 'box': Box. - 'circle': Circle. - ax (matplotlib.axes): Axis you want to plot on. - kwargs (optional): - When 'box' is specified as shape, - xc: Center of x. - yc: Center of y. - width: Width. - height: Height. - xmin: Minimum of x. - xmax: Maximum of x. - ymin: Minimum of y. - ymax: Maximum of y. - When 'circle' is specified as shape, - xc: Center of x. - yc: Center of y. - radius: Radius. - Remaining kwargs are passed to ax.step(). - - Notes: - All kwargs should be specified as pixel coordinates. 
- """ - if ax is None: - ax = plt.gca() - - # pick up kwargs - xc = kwargs.pop("xc", None) - yc = kwargs.pop("yc", None) - width = kwargs.pop("width", None) - height = kwargs.pop("height", None) - xmin = kwargs.pop("xmin", None) - xmax = kwargs.pop("xmax", None) - ymin = kwargs.pop("ymin", None) - ymax = kwargs.pop("ymax", None) - radius = kwargs.pop("radius", None) - exchs = kwargs.pop("exchs", None) - - # labels - xlabeldict = {"freq": "frequency [GHz]", "id": "kidid"} - - cube = cube.copy() - datatype = cube.datatype - if aperture == "box": - if None not in [xc, yc, width, height]: - xmin, xmax = int(xc - width / 2), int(xc + width / 2) - ymin, ymax = int(yc - width / 2), int(yc + width / 2) - elif None not in [xmin, xmax, ymin, ymax]: - pass - else: - raise KeyError("Invalid arguments.") - value = getattr(cube[xmin:xmax, ymin:ymax, :], ytick)(dim=("x", "y")) - elif aperture == "circle": - if None not in [xc, yc, radius]: - pass - else: - raise KeyError("Invalid arguments.") - x, y = np.ogrid[0 : len(cube.x), 0 : len(cube.y)] - mask = (x - xc) ** 2 + (y - yc) ** 2 < radius**2 - mask = np.broadcast_to(mask[:, :, np.newaxis], cube.shape) - masked = np.ma.array(cube.values, mask=~mask) - value = getattr(np, "nan" + ytick)(masked, axis=(0, 1)) - else: - raise KeyError(aperture) - - if xtick == "freq": - kidfq = cube.kidfq.values - freqrange = ~np.isnan(kidfq) - if exchs is not None: - freqrange[exchs] = False - x = kidfq[freqrange] - y = value[freqrange] - ax.step(x[np.argsort(x)], y[np.argsort(x)], where="mid", **kwargs) - elif xtick == "id": - ax.step(cube.kidid.values, value, where="mid", **kwargs) - else: - raise KeyError(xtick) - ax.set_xlabel("{}".format(xlabeldict[xtick])) - ax.set_ylabel("{} ({})".format(datatype.values, ytick)) - ax.set_title("spectrum") - - -def plot_chmap(cube, kidid, ax=None, **kwargs): - """Plot an intensity map. - - Args: - cube (xarray.DataArray): Cube which the spectrum information is included. - kidid (int): Kidid. - ax (matplotlib.axes): Axis the figure is plotted on. - kwargs (optional): Plot options passed to ax.imshow(). - """ - if ax is None: - ax = plt.gca() - - index = np.where(cube.kidid == kidid)[0] - if len(index) == 0: - raise KeyError("Such a kidid does not exist.") - index = int(index) - - im = ax.pcolormesh(cube.x, cube.y, cube[:, :, index].T, **kwargs) - ax.set_xlabel("x") - ax.set_ylabel("y") - ax.set_title("intensity map ch #{}".format(kidid)) - return im - - -def plotpsd(data, dt, ndivide=1, window=hann, overlap_half=False, ax=None, **kwargs): - """Plot PSD (Power Spectral Density). - - Args: - data (np.ndarray): Input data. - dt (float): Time between each data. - ndivide (int): Do averaging (split data into ndivide, - get psd of each, and average them). - overlap_half (bool): Split data to half-overlapped regions. - ax (matplotlib.axes): Axis the figure is plotted on. - kwargs (optional): Plot options passed to ax.plot(). - """ - if ax is None: - ax = plt.gca() - vk, psddata = psd(data, dt, ndivide, window, overlap_half) - ax.loglog(vk, psddata, **kwargs) - ax.set_xlabel("Frequency [Hz]") - ax.set_ylabel("PSD") - ax.legend() - - -def plotallanvar(data, dt, tmax=10, ax=None, **kwargs): - """Plot Allan variance. - - Args: - data (np.ndarray): Input data. - dt (float): Time between each data. - tmax (float): Maximum time. - ax (matplotlib.axes): Axis the figure is plotted on. - kwargs (optional): Plot options passed to ax.plot(). 
- """ - if ax is None: - ax = plt.gca() - tk, allanvar = allan_variance(data, dt, tmax) - ax.loglog(tk, allanvar, **kwargs) - ax.set_xlabel("Time [s]") - ax.set_ylabel("Allan Variance") - ax.legend() - - -# alias -@deprecation_warning( - "Use plot_tcoords() instead. plotcoords() will be removed in the future." - " The order of the arguments has been changed in plot_tcoords()." - " For a while, De:code properly passes the arguments" - " in plotcoords() to plot_tcoords()." -) -def plotcoords(array, ax, coords, scantypes=None, **kwargs): - plot_tcoords(array, coords, scantypes=scantypes, ax=ax, **kwargs) - - -@deprecation_warning( - "Use plot_timestream() instead. plottimestream() has been removed." - "The arguments has been changed in plot_timestream().", - DeprecationWarning, -) -def plottimestream(array, ax=None, xtick="time", **kwargs): - pass - - -@deprecation_warning( - "Use plot_spectrum() instead. plotspectrum() will be removed in the future." - " The order of the arguments has been changed in plot_spectrum()." - " For a while, De:code properly passes the arguments" - " in plotspectrum() to plot_spectrum()." -) -def plotspectrum(cube, ax, xtick, ytick, aperture, **kwargs): - plot_spectrum(cube, xtick, ytick, aperture, ax=ax, **kwargs) diff --git a/decode/utils.py b/decode/utils.py deleted file mode 100644 index fcde756..0000000 --- a/decode/utils.py +++ /dev/null @@ -1,318 +0,0 @@ -__all__ = [ - "allan_variance", - "chunk", - "deprecation_warning", - "one_thread_per_process", - "psd", - "slicewhere", - "xarrayfunc", -] - - -# standard library -from concurrent.futures import ProcessPoolExecutor as Pool -from contextlib import contextmanager -from functools import wraps -from inspect import Parameter, signature, stack -from logging import getLogger -from multiprocessing import cpu_count -from sys import _getframe as getframe - - -# dependencies -import numpy as np -import xarray as xr -from morecopy import copy -from scipy import ndimage -from scipy.fftpack import fftfreq, fft -from scipy.signal.windows import hann - - -# constants -DEFAULT_N_CHUNKS = 1 -try: - MAX_WORKERS = cpu_count() - 1 -except NotImplementedError: - MAX_WORKERS = 1 - - -# runtime functions -def allan_variance(data, dt, tmax=10): - """Calculate Allan variance. - - Args: - data (np.ndarray): Input data. - dt (float): Time between each data. - tmax (float): Maximum time. - - Returns: - vk (np.ndarray): Frequency. - allanvar (np.ndarray): Allan variance. - """ - allanvar = [] - nmax = len(data) if len(data) < tmax / dt else int(tmax / dt) - for i in range(1, nmax + 1): - databis = data[len(data) % i :] - y = databis.reshape(len(data) // i, i).mean(axis=1) - allanvar.append(((y[1:] - y[:-1]) ** 2).mean() / 2) - return dt * np.arange(1, nmax + 1), np.array(allanvar) - - -def chunk(*argnames, concatfunc=None): - """Make a function compatible with multicore chunk processing. - - This function is intended to be used as a decorator like:: - - >>> @dc.chunk('array') - >>> def func(array): - ... # do something - ... 
-def chunk(*argnames, concatfunc=None):
-    """Make a function compatible with multicore chunk processing.
-
-    This function is intended to be used as a decorator like::
-
-        >>> @dc.chunk('array')
-        ... def func(array):
-        ...     # do something
-        ...     return newarray
-        >>>
-        >>> result = func(array, timechunk=10)
-
-    or you can set a global chunk parameter outside the function::
-
-        >>> timechunk = 10
-        >>> result = func(array)
-    """
-
-    def _chunk(func):
-        depth = [s.function for s in stack()].index("<module>")
-        f_globals = getframe(depth).f_globals
-
-        # original (unwrapped) function
-        orgfunc = copy(func)
-        orgfunc.__name__ += "_org"
-        f_globals[orgfunc.__name__] = orgfunc
-
-        @wraps(func)
-        def wrapper(*args, **kwargs):
-            depth = [s.function for s in stack()].index("<module>")
-            f_globals = getframe(depth).f_globals
-
-            # parse args and kwargs
-            params = signature(func).parameters
-            for i, (key, val) in enumerate(params.items()):
-                if not val.kind == Parameter.POSITIONAL_OR_KEYWORD:
-                    break
-
-                try:
-                    kwargs.update({key: args[i]})
-                except IndexError:
-                    kwargs.setdefault(key, val.default)
-
-            # n_chunks and n_processes
-            n_chunks = DEFAULT_N_CHUNKS
-            n_processes = MAX_WORKERS
-
-            if argnames:
-                length = len(kwargs[argnames[0]])
-
-                if "numchunk" in kwargs:
-                    n_chunks = kwargs.pop("numchunk")
-                elif "timechunk" in kwargs:
-                    n_chunks = round(length / kwargs.pop("timechunk"))
-                elif "numchunk" in f_globals:
-                    n_chunks = f_globals["numchunk"]
-                elif "timechunk" in f_globals:
-                    n_chunks = round(length / f_globals["timechunk"])
-
-            if "n_processes" in kwargs:
-                n_processes = kwargs.pop("n_processes")
-            elif "n_processes" in f_globals:
-                n_processes = f_globals["n_processes"]
-
-            # make chunked args
-            chunks = {}
-            for name in argnames:
-                arg = kwargs.pop(name)
-                try:
-                    chunks.update({name: np.array_split(arg, n_chunks)})
-                except TypeError:
-                    chunks.update({name: np.tile(arg, n_chunks)})
-
-            # run the function
-            futures = []
-            results = []
-            with one_thread_per_process(), Pool(n_processes) as p:
-                for i in range(n_chunks):
-                    chunk = {key: val[i] for key, val in chunks.items()}
-                    futures.append(p.submit(orgfunc, **{**chunk, **kwargs}))
-
-                for future in futures:
-                    results.append(future.result())
-
-            # make an output
-            if concatfunc is not None:
-                return concatfunc(results)
-
-            try:
-                return xr.concat(results, "t")
-            except TypeError:
-                return np.concatenate(results, 0)
-
-        return wrapper
-
-    return _chunk
-
-
-def deprecation_warning(message, cls=PendingDeprecationWarning):
-    import warnings
-
-    warnings.filterwarnings("always", category=cls)
-
-    def decorator(func):
-        @wraps(func)
-        def wrapper(*args, **kwargs):
-            warnings.warn(message, cls, stacklevel=2)
-            return func(*args, **kwargs)
-
-        return wrapper
-
-    return decorator
-
-
-@contextmanager
-def one_thread_per_process():
-    """Return a context manager where only one thread is allocated to a process.
-
-    This function is intended to be used as a with statement like::
-
-        >>> with one_thread_per_process():
-        ...     do_something()  # one thread per process
-
-    Notes:
-        This function only works when MKL (Intel Math Kernel Library)
-        is installed and used in, for example, NumPy and SciPy.
-        Otherwise this function does nothing.
-
-    """
-    try:
-        import mkl
-
-        is_mkl = True
-    except ImportError:
-        is_mkl = False
-
-    if is_mkl:
-        n_threads = mkl.get_max_threads()
-        mkl.set_num_threads(1)
-        try:
-            # block nested in the with statement
-            yield
-        finally:
-            # revert to the original value
-            mkl.set_num_threads(n_threads)
-    else:
-        yield
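
[Editor's note: a sketch of the chunk() decorator in use, mirroring its
docstring. It assumes the code runs at module scope, since the decorator
walks the call stack for the '<module>' frame; names and numbers are
illustrative.]

    import numpy as np

    @chunk("array")
    def subtract_mean(array):
        # applied independently to each chunk
        return array - array.mean()

    # split into 4 chunks, processed in parallel, then concatenated
    result = subtract_mean(np.random.randn(10_000), numchunk=4)
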
-def psd(data, dt, ndivide=1, window=hann, overlap_half=False):
-    """Calculate power spectral density of data.
-
-    Args:
-        data (np.ndarray): Input data.
-        dt (float): Time between each data.
-        ndivide (int): Do averaging (split data into ndivide,
-            get psd of each, and average them).
-        window (callable): Window function applied to each division (hann by default).
-        overlap_half (bool): Split data to half-overlapped regions.
-
-    Returns:
-        vk (np.ndarray): Frequency.
-        psd (np.ndarray): PSD.
-    """
-    logger = getLogger("decode.utils.ndarray.psd")
-
-    if overlap_half:
-        step = int(len(data) / (ndivide + 1))
-        size = step * 2
-    else:
-        step = int(len(data) / ndivide)
-        size = step
-
-    if bin(len(data)).count("1") != 1:
-        logger.warning(
-            "length of data is not a power of 2: {}".format(len(data))
-        )
-    if bin(size).count("1") != 1:
-        if overlap_half:
-            logger.warning(
-                "((length of data) / (ndivide+1)) * 2"
-                " is not a power of 2: {}".format(size)
-            )
-        else:
-            logger.warning(
-                "(length of data) / ndivide is not a power of 2: {}".format(size)
-            )
-    psd = np.zeros(size)
-    vk_ = fftfreq(size, dt)
-    vk = vk_[np.where(vk_ >= 0)]
-
-    for i in range(ndivide):
-        d = data[i * step : i * step + size]
-        if window is None:
-            w = np.ones(size)
-            corr = 1.0
-        else:
-            w = window(size)
-            corr = np.mean(w**2)
-        psd = psd + 2 * (np.abs(fft(d * w))) ** 2 / size * dt / corr
-
-    return vk, psd[: len(vk)] / ndivide
-
-
-def slicewhere(condition):
-    """Return slices of regions that fulfill condition.
-
-    Example:
-        >>> cond = [False, True, True, False, False, True, False]
-        >>> dc.utils.slicewhere(cond)
-        [slice(1, 3, None), slice(5, 6, None)]
-
-    Args:
-        condition (numpy.ndarray): Array of booleans.
-
-    Returns:
-        slices (list of slice): List of slice objects.
-    """
-    return [region[0] for region in ndimage.find_objects(ndimage.label(condition)[0])]
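
[Editor's note: slicewhere() above in action on a boolean mask; this mirrors
the doctest, rewritten for Python 3.]

    import numpy as np

    cond = np.array([False, True, True, False, False, True, False])
    for region in slicewhere(cond):
        print(region)  # slice(1, 3, None), then slice(5, 6, None)
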
- """ - - @wraps(func) - def wrapper(*args, **kwargs): - if any(isinstance(arg, xr.DataArray) for arg in args): - newargs = [] - for arg in args: - if isinstance(arg, xr.DataArray): - newargs.append(arg.values) - else: - newargs.append(arg) - - return xr.zeros_like(args[0]) + func(*newargs, **kwargs) - else: - return func(*args, **kwargs) - - return wrapper From beda945dc98bc44044f714233bd08a4ed9d1e2a3 Mon Sep 17 00:00:00 2001 From: Akio Taniguchi Date: Sat, 21 Oct 2023 06:48:41 +0000 Subject: [PATCH 3/5] #73 Remove SVG logo for docs --- docs/_static/logo.svg | 11 ----------- docs/conf.py | 4 ++-- 2 files changed, 2 insertions(+), 13 deletions(-) delete mode 100644 docs/_static/logo.svg diff --git a/docs/_static/logo.svg b/docs/_static/logo.svg deleted file mode 100644 index 6ad7f5f..0000000 --- a/docs/_static/logo.svg +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - de:code - \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index 83038e0..e87cc2d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,13 +19,13 @@ "sphinx.ext.napoleon", "sphinx.ext.viewcode", ] +myst_heading_anchors = 3 templates_path = ["_templates"] # options for HTML output -html_logo = "_static/logo.svg" -html_static_path = ["_static"] html_theme = "pydata_sphinx_theme" html_theme_options = { "github_url": "https://github.com/deshima-dev/decode/", + "logo": {"text": "de:code"}, } From 06ab5ef80027a9d2d1f7079ea66e458d7455ca17 Mon Sep 17 00:00:00 2001 From: Akio Taniguchi Date: Sat, 21 Oct 2023 06:48:51 +0000 Subject: [PATCH 4/5] #73 Update workflows --- .github/workflows/gh-pages.yml | 9 +++------ .github/workflows/pypi.yml | 7 +++---- .github/workflows/tests.yml | 12 ++++-------- 3 files changed, 10 insertions(+), 18 deletions(-) diff --git a/.github/workflows/gh-pages.yml b/.github/workflows/gh-pages.yml index 9725ecf..58f5c77 100644 --- a/.github/workflows/gh-pages.yml +++ b/.github/workflows/gh-pages.yml @@ -16,12 +16,9 @@ jobs: - uses: actions/setup-python@v4 with: python-version: "3.11" - - name: Install project dependencies - run: pip install poetry && poetry install - - name: Build docs - run: docs/build - - name: Deploy docs - uses: peaceiris/actions-gh-pages@v3 + - run: pip install poetry && poetry install + - run: docs/build + - uses: peaceiris/actions-gh-pages@v3 with: force_orphan: true github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 260bd85..9d90eae 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -12,9 +12,8 @@ jobs: env: POETRY_PYPI_TOKEN_PYPI: ${{ secrets.PYPI_TOKEN }} steps: - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 with: python-version: "3.11" - - name: Publish package to PyPI - run: pip install poetry && poetry publish --build + - run: pip install poetry && poetry publish --build diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 4db0d25..e6cba43 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -25,11 +25,7 @@ jobs: - uses: actions/setup-python@v4 with: python-version: ${{ matrix.python }} - - name: Install project dependencies - run: pip install poetry && poetry install - - name: Test code's formatting (Black) - run: black --check docs tests decode - - name: Test code's execution (pytest) - run: pytest -v tests - - name: Test docs' building (Sphinx) - run: docs/build + - run: pip install poetry && poetry install + - run: black --check docs tests decode + - 
+      - run: pytest -v tests
+      - run: docs/build

From 194278240a65b233e7891b522a0b2c357af2d549 Mon Sep 17 00:00:00 2001
From: Akio Taniguchi
Date: Sat, 21 Oct 2023 06:53:02 +0000
Subject: [PATCH 5/5] #73 Remove test for metadata

---
 .github/workflows/tests.yml |  1 -
 tests/.gitkeep              |  0
 tests/test_metadata.py      | 11 -----------
 3 files changed, 12 deletions(-)
 create mode 100644 tests/.gitkeep
 delete mode 100644 tests/test_metadata.py

diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index e6cba43..95b23d8 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -27,5 +27,4 @@ jobs:
           python-version: ${{ matrix.python }}
       - run: pip install poetry && poetry install
       - run: black --check docs tests decode
-      - run: pytest -v tests
       - run: docs/build

diff --git a/tests/.gitkeep b/tests/.gitkeep
new file mode 100644
index 0000000..e69de29

diff --git a/tests/test_metadata.py b/tests/test_metadata.py
deleted file mode 100644
index cf3bc80..0000000
--- a/tests/test_metadata.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import decode as dc
-
-
-def test_version():
-    """Make sure the version is valid."""
-    assert dc.__version__ == "1.0.0"
-
-
-def test_author():
-    """Make sure the author is valid."""
-    assert dc.__author__ == "Akio Taniguchi"
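
[Editor's note: the deleted test pinned the version string to "1.0.0". If a
metadata test is reintroduced later, a version-agnostic check could look like
the sketch below; that the installed distribution is named "decode" is an
assumption.]

    from importlib.metadata import version

    import decode as dc


    def test_version():
        """Make sure the package version matches the installed distribution."""
        assert dc.__version__ == version("decode")  # distribution name assumed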