pre-release updates #42

Merged · 28 commits · Nov 6, 2023
Binary file removed .DS_Store
2 changes: 2 additions & 0 deletions .github/workflows/CI.yml
@@ -34,6 +34,8 @@ jobs:
pytest --cov=solpolpy tests/ --cov-report xml:/home/runner/coverage.xml
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
with:
fail_ci_if_error: true
verbose: true
13 changes: 13 additions & 0 deletions .github/workflows/weeklypr.yaml
@@ -0,0 +1,13 @@
on:
schedule:
- cron: '0 0 * * THU'
name: weekly PR
jobs:
createPullRequest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: create pull request
run: gh pr create -B main -H develop --title 'Weekly merge to develop' --body 'Created by Github action'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
2 changes: 1 addition & 1 deletion .gitignore
@@ -127,4 +127,4 @@ dmypy.json

# Pyre type checker
.pyre/
tests/.DS_Store
*.DS_Store
Binary file removed docs/.DS_Store
26 changes: 21 additions & 5 deletions docs/_config.yml
@@ -1,9 +1,9 @@
# Book settings
# Learn more at https://jupyterbook.org/customize/config.html

title: My sample book
author: The Jupyter Book Community
logo: logo.png
title: solpolpy
author: PUNCH SOC
# logo: ""

# Force re-execution of notebooks on each build.
# See https://jupyterbook.org/content/execute.html
@@ -21,9 +21,25 @@ bibtex_bibfiles:

# Information about where the book exists on the web
repository:
url: https://github.com/executablebooks/jupyter-book # Online location of your book
url: https://github.com/punch-mission/solpolpy # Online location of your book
path_to_book: docs # Optional path to your book, relative to the repository root
branch: master # Which branch of the repository should be used when creating links (optional)
branch: main # Which branch of the repository should be used when creating links (optional)

parse:
# default extensions to enable in the myst parser.
# See https://myst-parser.readthedocs.io/en/latest/using/syntax-optional.html
myst_enable_extensions:
# - amsmath
- colon_fence
# - deflist
- dollarmath
- html_admonition
# - html_image
- linkify
# - replacements
# - smartquotes
- substitution
myst_url_schemes: [mailto, http, https] # URI schemes that will be recognised as external URLs in Markdown links

# Add GitHub buttons to your book
# See https://jupyterbook.org/customize/config.html#add-a-link-to-your-repository
Binary file removed solpolpy/.DS_Store
5 changes: 2 additions & 3 deletions solpolpy/alpha.py
Reviewer comment (Member): We shouldn't leave commented-out code.

@@ -27,9 +27,8 @@
x = np.arange(-x_size // 2, x_size // 2)
y = np.arange(-y_size // 2, y_size // 2)
xx, yy = np.meshgrid(x, y)
# return np.fliplr(np.arctan2(yy, xx))*u.radian
return np.flipud(np.rot90(np.fliplr(np.arctan2(yy, xx) + np.pi), k=1))* u.radian

return np.rot90(np.fliplr(np.arctan2(yy, xx)+np.pi), k=1)*u.radian

Codecov warning: added line #L30 in solpolpy/alpha.py was not covered by tests.
Reviewer comment (Member): This is not covered by tests. I notice most of the alpha.py file isn't. Let's write some tests and also make proper docstrings.



def zeros(shape):
return np.zeros(shape)
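Following the reviewer's request above, here is a minimal pytest sketch of the kind of coverage that would exercise the new return statement. It assumes the function shown in this hunk is `radial_north` (the one `core.py` now calls by default) and that it accepts a 2-element shape; names would need adjusting to match the actual module.

```python
# Hypothetical test sketch, not part of this PR; import path and function name are assumptions.
import numpy as np
import astropy.units as u

from solpolpy.alpha import radial_north  # assumed location based on the diff above


def test_radial_north_shape_and_units():
    alpha = radial_north((10, 10))
    assert alpha.shape == (10, 10)
    assert alpha.unit == u.radian


def test_radial_north_value_range():
    # arctan2 spans (-pi, pi]; adding pi (as in the code above) maps values into (0, 2*pi].
    alpha = radial_north((16, 16))
    assert np.all(alpha.value >= 0)
    assert np.all(alpha.value <= 2 * np.pi)
```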
3 changes: 2 additions & 1 deletion solpolpy/constants.py
Expand Up @@ -5,4 +5,5 @@
"Stokes": [["Bi", "Bq", "Bu"]],
"Bp3": [["B", "pB", "pBp", "alpha"], ["B", "pB", "pBp"]],
"Bthp": [["B", "theta", "p"]],
"npol": [["angle_1", "angle_2", "angle_3"], ["angle_1", "angle_2", "angle_3", "angle_4"]]}
"npol": [["angle_1", "angle_2", "angle_3"]],
"fourpol": [["angle_1", "angle_2", "angle_3", "angle_4"]]}
29 changes: 4 additions & 25 deletions solpolpy/core.py
@@ -121,10 +121,6 @@
input_data = sp.polarizers.npol_to_mzp(input_data)
input_kind = "MZP"

# Convert a set of inputs given at different polarizing angles to a common base of MZP.
# input_data_mzp = sp.polarizers.npol_to_mzp(input_data)
# input_kind = "MZP"

input_key = list(input_data)
transform_path = get_transform_path(input_kind, out_polarize_state)
equation = get_transform_equation(transform_path)
@@ -186,28 +182,11 @@
metad = input_data[keys[0]].meta
deg2rad = (np.pi * u.radian) / (180 * u.degree)

# TODO: don't use inputs inside
if len(img_shape) == 2: # it's an image and not just an array
inp = input("Do you wish to provide an alpha array?").lower()
if inp.startswith('n'):
print("Continuing with default options... Waiting for input...")
inp_ref = input("Choose the reference along the Solar: \"North\" or \"West\":").lower()
if inp_ref.startswith('n'):
alpha = radial_north(img_shape)
else:
alpha = radial_west(img_shape)
input_data.update(NDCollection([("alpha", NDCube(alpha, wcs=wcs, meta=metad))], meta={}, aligned_axes='all'))
elif inp.startswith('y'):
print("Provide the alpha matrix in FITS format")
alpha_path = input("Provide the path of alpha FITS file:")
hdu = fits.open(alpha_path)

# TODO: make the FITS file use a header keyword to specify the units of alpha
if np.max(hdu[0].data) > 2 * np.pi + 1:
alph = hdu[0].data * deg2rad
else:
alph = hdu[0].data
input_data.update(NDCollection([("alpha", NDCube(alph, wcs=wcs, meta=metad))], meta={}, aligned_axes='all'))
alpha = radial_north(img_shape)

Codecov warning: added line #L186 in solpolpy/core.py was not covered by tests.
else:
raise ValueError(f"Data must be an image with 2 dimensions, found {len(img_shape)}.")
input_data.update(NDCollection([("alpha", NDCube(alpha, wcs=wcs, meta=metad))], meta={}, aligned_axes='all'))

Check warning on line 189 in solpolpy/core.py

View check run for this annotation

Codecov / codecov/patch

solpolpy/core.py#L188-L189

Added lines #L188 - L189 were not covered by tests

return input_data

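Condensed, the effect of this hunk is that the interactive alpha prompt is gone: a Solar-North-referenced alpha cube is now attached automatically, and non-2D inputs raise a ValueError. A rough, self-contained paraphrase follows; the enclosing function's real name and signature are not visible in the hunk, so `attach_default_alpha` and its arguments are illustrative.

```python
# Illustrative paraphrase of the new alpha handling; names and signature are assumptions.
from ndcube import NDCube, NDCollection

from solpolpy.alpha import radial_north  # assumed import path


def attach_default_alpha(input_data: NDCollection, img_shape, wcs, metad) -> NDCollection:
    """Attach a Solar-North-referenced alpha cube, as core.py now does by default."""
    if len(img_shape) != 2:
        raise ValueError(f"Data must be an image with 2 dimensions, found {len(img_shape)}.")
    alpha = radial_north(img_shape)  # replaces the removed interactive prompt
    input_data.update(
        NDCollection([("alpha", NDCube(alpha, wcs=wcs, meta=metad))],
                     meta={}, aligned_axes="all"))
    return input_data
```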
60 changes: 13 additions & 47 deletions solpolpy/instruments.py
jmbhughes marked this conversation as resolved.
@@ -1,71 +1,37 @@

from typing import List

from ndcube import NDCube, NDCollection
from astropy.io import fits
from astropy.wcs import WCS


def load_data(path_list: List[str]) -> NDCollection:
"""
path_list: String of path list where group of files to be loaded is present.
default_alpha: bool, optional
If default_alpha is true the alpha matrix will be referenced from Solar North. Default is True.
Make it False to provide alpha (in FITS format) with same size as of input data
or select the alpha matrix (from the list).
Parameters
----------
path_list: List[str]
list of paths to be loaded

Returns
-------
NDCollection
The data are loaded as NDCollection object with WCS and header information available.
The keys are labelled as 'angle_1', 'angle_2, 'angle_3', ...
Alpha matrix is also added apart from the polarizing angles data and can be accessed through 'alpha' key.
The keys are labeled as 'angle_1', 'angle_2', 'angle_3', ...
"""

# create list of FITS
fits_type = []

# get length of list to determine how many files to process.
list_len = len(path_list)
assert list_len >= 2, 'requires at least 2 FITS files'

for xlist_item in path_list:
with fits.open(xlist_item) as hdul:
fits_type.append(hdul[0].header['DETECTOR'])

if len(set(fits_type)) != 1:
raise Exception("Input FITS are of different types")


data_out = []
i = 0

for data_path in path_list:
for i, data_path in enumerate(path_list):
with fits.open(data_path) as hdul:
i = i+1
wcs = WCS(hdul[0].header)
data_out.append(("angle_" + str(i), NDCube(hdul[0].data, wcs=wcs, meta=hdul[0].header)))
# size = hdul[0].data.shape
# if default_alpha:
# alph = radial_north(size)
# data_out.append(("alpha", NDCube(alph, wcs=wcs)))
# else:
# inp = input("Do you wish to provide an alpha array?").lower()
# if inp.startswith('n'):
# print("Continuing with default options... Waiting for input...")
# inp_ref = input("Choose the reference along the Solar: \"North\" or \"West\":").lower()
# if inp_ref.startswith('n'):
# alpha = radial_north(size)
# else:
# alpha = radial_west(size)
# data_out.append(("alpha", NDCube(alpha, wcs=wcs, meta=hdul[0].header)))
# elif inp.startswith('y'):
# print("Provide the alpha matrix in FITS format")
# alpha_path = input("Provide the path of alpha FITS file:")
# hdu = fits.open(alpha_path)
# if np.max(hdu[0].data) > 2 * np.pi + 1:
# alph = hdu[0].data * deg2rad
# else:
# alph = hdu[0].data
# data_out.append(("alpha", NDCube(alph, wcs=wcs, meta=hdu[0].header)))

print("Hurray!!! Data loaded successfully.")
data_out.append(("angle_" + str(i),
NDCube(hdul[0].data,
wcs=wcs,
meta=hdul[0].header)))

return NDCollection(data_out, meta={}, aligned_axes="all")
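A brief usage sketch of the slimmed-down loader; the file names are placeholders. One thing worth double-checking: with a bare `enumerate` the index starts at 0, so the key names produced by the new loop should be verified against the docstring's 'angle_1', 'angle_2', ... description.

```python
# Hypothetical usage of load_data; file names are placeholders.
from solpolpy.instruments import load_data

paths = [
    "polarizer_minus60.fits",
    "polarizer_0.fits",
    "polarizer_plus60.fits",
]
collection = load_data(paths)

# Each entry is an NDCube keyed by an "angle_<n>" label, carrying the FITS data,
# WCS, and full header as metadata.
print(list(collection))
first_key = list(collection)[0]
print(collection[first_key].data.shape)
print(collection[first_key].meta.get("DETECTOR"))
```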
52 changes: 18 additions & 34 deletions solpolpy/polarizers.py
@@ -19,16 +19,23 @@ def npol_to_mzp(input_cube):
in_list = list(input_cube)
conv_fact = (np.pi * u.radian) / (180 * u.degree)

# constants come from https://www.sciencedirect.com/science/article/pii/S0019103515003620?via%3Dihub
if input_cube['angle_1'].meta['OBSRVTRY'] == 'STEREO_B':
offset_angle = -18 * u.degree # STEREOB
elif input_cube['angle_1'].meta['OBSRVTRY'] == 'STEREO_A':
offset_angle = 45.8 * u.degree # STEREOA
else:
offset_angle = 0

for p_angle in in_list:
if p_angle == "alpha":
break
# input_dict[(conv_polar_from_head(input_cube[p_angle])) * u.degree * conv_fact] = input_cube[p_angle].data
input_dict[(conv_polar_from_head(input_cube[p_angle])) * u.degree * conv_fact] = input_cube[p_angle].data

mzp_ang = [-60, 0, 60]
Bmzp = {}
for ang in mzp_ang: Bmzp[ang * u.degree] = (1 / 3) * np.sum(
[ith_polarizer_brightness * (1 + 2 * np.cos(2 * (ang * u.degree * conv_fact - ith_angle)))
[ith_polarizer_brightness * (1 + 2 * np.cos(2 * (ang * u.degree * conv_fact - (ith_angle-offset_angle))))
for ith_angle, ith_polarizer_brightness in input_dict.items()], axis=0)

# todo: update header properly; time info?
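Written out, the loop above is the standard three-polarizer resampling relation; the only change in this PR is that each measured polarizer angle is first shifted by the instrument offset angle from the constants cited above (-18° for STEREO-B, 45.8° for STEREO-A, 0 otherwise):

$$
B(\theta) = \frac{1}{3}\sum_i B_i\left[1 + 2\cos\bigl(2\,(\theta - (\theta_i - \delta))\bigr)\right],
\qquad \theta \in \{-60^\circ,\ 0^\circ,\ 60^\circ\}.
$$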
@@ -62,9 +69,6 @@ def mzp_to_bpb(input_cube):
break
input_dict[(input_cube[p_angle].meta['POLAR']) * u.degree * conv_fact] = input_cube[p_angle].data

# if "alpha" not in input_cube:
# raise ValueError("missing alpha")

alpha = input_cube['alpha'].data * u.radian
B = (2 / 3) * (np.sum([ith_polarizer_brightness
for ith_angle, ith_polarizer_brightness
@@ -97,11 +101,6 @@ def bpb_to_mzp(input_cube):
if "alpha" not in input_cube:
raise ValueError("missing alpha")

# for p_angle in in_list:
# if p_angle == "alpha":
# break
# input_dict[(input_cube[p_angle].meta['POLAR'])] = input_cube[p_angle].data

alpha = input_cube['alpha'].data * u.radian
B, pB = input_cube["B"].data, input_cube["pB"].data
mzp_ang = [-60, 0, 60]
@@ -210,8 +209,6 @@ def mzp_to_stokes(input_cube):
BStokes_cube.append(("Bi", NDCube(Bi, wcs=input_cube["Bm"].wcs, meta=metaI)))
BStokes_cube.append(("Bq", NDCube(Bq, wcs=input_cube["Bm"].wcs, meta=metaQ)))
BStokes_cube.append(("Bu", NDCube(Bu, wcs=input_cube["Bm"].wcs, meta=metaU)))
# BStokes_cube["alpha"] = NDCube(alpha, wcs=input_cube["B"].wcs)

return NDCollection(BStokes_cube, meta={}, aligned_axes="all")


@@ -258,12 +255,7 @@ def mzp_to_bp3(input_cube):
if p_angle == "alpha":
break
input_dict[(input_cube[p_angle].meta['POLAR'] * u.degree * conv_fact)] = input_cube[p_angle].data

# alpha = alpha1([input_cube['Bm'].meta['NAXIS1'], input_cube['Bm'].meta['NAXIS2']]) #input_dict['alpha']

# if "alpha" not in input_cube:
# raise ValueError("missing alpha")


alpha = input_cube['alpha'].data * u.radian
B = (2 / 3) * (np.sum([ith_polarizer_brightness for ith_angle, ith_polarizer_brightness
in input_dict.items() if ith_angle != "alpha"], axis=0))
@@ -399,30 +391,22 @@ def btbr_to_npol(input_cube, angles):

return NDCollection(Bnpol_cube, meta={}, aligned_axes="all")


def fourpol_to_stokes(input_cube):
"""
Notes
------
Table 1 in DeForest et al. 2022.

"""""
input_dict = {}
in_list = list(input_cube)

for p_angle in in_list:
if p_angle == "alpha":
break
input_dict[(input_cube[p_angle].meta['POLAR'])] = input_cube[p_angle].data

Bi = input_cube[0].data + input_cube[90].data
Bq = input_cube[90].data - input_cube[0].data
Bu = input_cube[135].data - input_cube[45].data
Bi = input_cube["B0"].data + input_cube["B90"].data
Bq = input_cube["B90"].data - input_cube["B0"].data
Bu = input_cube["B135"].data - input_cube["B45"].data

metaI, metaQ, metaU = copy.copy(input_cube[0].meta), copy.copy(input_cube[0].meta), copy.copy(input_cube[0].meta)
metaI, metaQ, metaU = copy.copy(input_cube["B0"].meta), copy.copy(input_cube["B0"].meta), copy.copy(input_cube["B0"].meta)
BStokes_cube = []
BStokes_cube.append(("Bi", NDCube(Bi, wcs=input_cube[0].wcs, meta=metaI)))
BStokes_cube.append(("Bq", NDCube(Bq, wcs=input_cube[0].wcs, meta=metaQ)))
BStokes_cube.append(("Bu", NDCube(Bu, wcs=input_cube[0].wcs, meta=metaU)))
# BStokes_cube["alpha"] = NDCube(alpha, wcs=input_cube["B"].wcs)
BStokes_cube.append(("Bi", NDCube(Bi, wcs=input_cube["B0"].wcs, meta=metaI)))
BStokes_cube.append(("Bq", NDCube(Bq, wcs=input_cube["B0"].wcs, meta=metaQ)))
BStokes_cube.append(("Bu", NDCube(Bu, wcs=input_cube["B0"].wcs, meta=metaU)))

return NDCollection(BStokes_cube, meta={}, aligned_axes="all")
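Since the lookup keys change here from integer polarizer angles to string labels, a minimal sketch of how a caller might now assemble a four-polarizer collection; array contents, WCS, and the RNG seed are placeholders, while the "B0"/"B45"/"B90"/"B135" keys and the 'POLAR' metadata mirror the new code above.

```python
# Hypothetical construction of a four-polarizer input for fourpol_to_stokes; data are placeholders.
import numpy as np
from astropy.wcs import WCS
from ndcube import NDCube, NDCollection

from solpolpy.polarizers import fourpol_to_stokes

wcs = WCS(naxis=2)
rng = np.random.default_rng(42)
cubes = [(label, NDCube(rng.random((64, 64)), wcs=wcs, meta={"POLAR": angle}))
         for label, angle in [("B0", 0), ("B45", 45), ("B90", 90), ("B135", 135)]]
collection = NDCollection(cubes, meta={}, aligned_axes="all")

stokes = fourpol_to_stokes(collection)
print(list(stokes))  # expected: ['Bi', 'Bq', 'Bu']
```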
Binary file removed tests/.DS_Store