Enabling enhanced Ruff linting (#246)
Abinash-bit authored Nov 13, 2024
1 parent bd9ae41 commit f8c3e09
Showing 14 changed files with 122 additions and 61 deletions.
4 changes: 2 additions & 2 deletions .cruft.json
@@ -1,6 +1,6 @@
{
"template": "https://github.com/sunpy/package-template",
"commit": "2f0f5dd538a1def8d3eacd8bebb16d3bded773c7",
"commit": "75f84c4adf1753af67967930c3335bc73bca9bf5",
"checkout": null,
"context": {
"cookiecutter": {
@@ -16,7 +16,7 @@
"enable_dynamic_dev_versions": "y",
"include_example_code": "n",
"include_cruft_update_github_workflow": "y",
"use_extended_ruff_linting": "n",
"use_extended_ruff_linting": "y",
"_sphinx_theme": "sunpy",
"_parent_project": "",
"_install_requires": "",
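
Note on this change: flipping "use_extended_ruff_linting" from "n" to "y" and advancing the pinned template commit is what drives the rest of this diff; re-rendering the sunpy package template regenerates .ruff.toml, the workflows and the pre-commit config. A rough sketch of how such a template bump is applied locally with cruft (illustrative commands that assume cruft is installed; they are not part of this commit):

# Check whether the commit pinned in .cruft.json is behind the template.
cruft check
# Re-render the template non-interactively, mirroring the CI workflow further down.
cruft update --skip-apply-ask
# Inspect the regenerated files before committing.
git diff
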
23 changes: 23 additions & 0 deletions .github/workflows/label_sync.yml
@@ -0,0 +1,23 @@
name: Label Sync
on:
workflow_dispatch:
schedule:
# ┌───────── minute (0 - 59)
# │ ┌───────── hour (0 - 23)
# │ │ ┌───────── day of the month (1 - 31)
# │ │ │ ┌───────── month (1 - 12 or JAN-DEC)
# │ │ │ │ ┌───────── day of the week (0 - 6 or SUN-SAT)
- cron: '0 0 * * *' # run every day at midnight UTC

# Give permissions to write issue labels
permissions:
issues: write

jobs:
label_sync:
runs-on: ubuntu-latest
name: Label Sync
steps:
- uses: srealmoreno/label-sync-action@850ba5cef2b25e56c6c420c4feed0319294682fd
with:
config-file: https://raw.githubusercontent.com/sunpy/.github/main/labels.yml
48 changes: 31 additions & 17 deletions .github/workflows/sub_package_update.yml
@@ -21,14 +21,6 @@ jobs:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
include:
- add-paths: .
body: apply the changes to this repo.
branch: cruft/update
commit-message: "Automatic package template update"
title: Updates from the package template

steps:
- uses: actions/checkout@v4

@@ -55,25 +47,47 @@ jobs:
echo "has_changes=$CHANGES" >> "$GITHUB_OUTPUT"
- name: Run update if available
id: cruft_update
if: steps.check.outputs.has_changes == '1'
run: |
git config --global user.email "${{ github.actor }}@users.noreply.github.com"
git config --global user.name "${{ github.actor }}"
cruft update --skip-apply-ask --refresh-private-variables
cruft_output=$(cruft update --skip-apply-ask --refresh-private-variables)
echo $cruft_output
git restore --staged .
- name: Create pull request
if [[ "$cruft_output" == *"Failed to cleanly apply the update, there may be merge conflicts."* ]]; then
echo merge_conflicts=1 >> $GITHUB_OUTPUT
else
echo merge_conflicts=0 >> $GITHUB_OUTPUT
fi
- name: Check if only .cruft.json is modified
id: cruft_json
if: steps.check.outputs.has_changes == '1'
run: |
git status --porcelain=1
if [[ "$(git status --porcelain=1)" == " M .cruft.json" ]]; then
echo "Only .cruft.json is modified. Exiting workflow early."
echo "has_changes=0" >> "$GITHUB_OUTPUT"
else
echo "has_changes=1" >> "$GITHUB_OUTPUT"
fi
- name: Create pull request
if: steps.cruft_json.outputs.has_changes == '1'
uses: peter-evans/create-pull-request@v7
with:
token: ${{ secrets.GITHUB_TOKEN }}
add-paths: ${{ matrix.add-paths }}
commit-message: ${{ matrix.commit-message }}
branch: ${{ matrix.branch }}
add-paths: "."
commit-message: "Automatic package template update"
branch: "cruft/update"
delete-branch: true
branch-suffix: timestamp
title: ${{ matrix.title }}
draft: ${{ steps.cruft_update.outputs.merge_conflicts == '1' }}
title: "Updates from the package template"
body: |
This is an autogenerated PR, which will ${{ matrix.body }}.
[Cruft](https://cruft.github.io/cruft/) has detected updates from the Package Template
This is an autogenerated PR which applies the latest changes from the [SunPy Package Template](https://github.com/sunpy/package-template).
If this pull request has been opened as a draft, there are conflicts which need fixing.
**To run the CI on this pull request you will need to close it and reopen it.**
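
The reworked update job drops the single-entry matrix, captures the output of cruft update so merge conflicts can be detected, opens the pull request as a draft when conflicts are reported, and exits early when the only change would be the pinned commit in .cruft.json. The same two checks can be reproduced locally; this sketch simply mirrors the shell logic above and assumes cruft is installed:

cruft_output=$(cruft update --skip-apply-ask --refresh-private-variables)
echo "$cruft_output"
# Draft-PR condition: cruft could not apply the update cleanly.
if [[ "$cruft_output" == *"Failed to cleanly apply the update, there may be merge conflicts."* ]]; then
    echo "merge conflicts detected; the PR would be opened as a draft"
fi
# Early-exit condition: only .cruft.json was touched (the pinned commit moved, nothing else).
if [[ "$(git status --porcelain=1)" == " M .cruft.json" ]]; then
    echo "only .cruft.json changed; no pull request would be opened"
fi
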
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: "v0.7.1"
rev: "v0.7.2"
hooks:
- id: ruff
args: ["--fix"]
62 changes: 43 additions & 19 deletions .ruff.toml
@@ -14,45 +14,69 @@ select = [
"W",
"UP",
"PT",
"BLE",
"A",
"C4",
"INP",
"PIE",
"T20",
"RET",
"TID",
"PTH",
"PD",
"PLC",
"PLE",
"FLY",
"NPY",
"PERF",
"RUF",
]
extend-ignore = [
# pycodestyle
# pycodestyle (E, W)
"E501", # ignore line length will use a formatter instead
"E712", # Avoid equality comparisons to True; use if {cond}: for truth checks
"E721", # type comparison Use is and is not for type comparisons, or isinstance() for isinstance checks
# upgrades
# pyupgrade (UP)
"UP038", # Use | in isinstance - not compatible with models and is slower
# pytest (PT)
"PT001", # Always use pytest.fixture()
"PT004", # Fixtures which don't return anything should have leading _
"PT011", # except(ValueRaises) is too broad
"PT023", # Always use () on pytest decorators
# flake8-pie
# flake8-pie (PIE)
"PIE808", # Disallow passing 0 as the first argument to range
# flake8-use-pathlib
# flake8-use-pathlib (PTH)
"PTH123", # open() should be replaced by Path.open()
# Ruff
# Ruff (RUF)
"RUF003", # Ignore ambiguous quote marks, doesn't allow ' in comments
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
"RUF013", # PEP 484 prohibits implicit `Optional`
"RUF015", # Prefer `next(iter(...))` over single element slice
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
"RUF013", # PEP 484 prohibits implicit `Optional`
"RUF015", # Prefer `next(iter(...))` over single element slice
]

[lint.per-file-ignores]
# Part of configuration, not a package.
"setup.py" = ["INP001"]
"conftest.py" = ["INP001"]
"setup.py" = [
"INP001", # File is part of an implicit namespace package.
]
"conftest.py" = [
"INP001", # File is part of an implicit namespace package.
]
"docs/conf.py" = [
"E402" # Module imports not at top of file
"E402" # Module imports not at top of file
]
"docs/*.py" = [
"INP001", # Implicit-namespace-package. The examples are not a package.
"INP001", # File is part of an implicit namespace package.
]
"sunkit-image/*.py" = [
"examples/**.py" = [
"T201", # allow use of print in examples
"INP001", # File is part of an implicit namespace package.
]
"__init__.py" = [
"E402", # Module level import not at top of cell
"F401", # Unused import
"F403", # from {name} import * used; unable to detect undefined names
"F405", # {name} may be undefined, or defined from star imports
]
"test_*.py" = [
"E402", # Module level import not at top of cell
]
"__init__.py" = ["E402", "F401", "F403"]
"test_*.py" = ["B011", "D", "E402", "PGH001", "S101"]

[lint.pydocstyle]
convention = "numpy"
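
The extended configuration enables additional rule families, including flake8-blind-except (BLE), flake8-builtins (A), flake8-comprehensions (C4), flake8-no-pep420 (INP), flake8-print (T20), flake8-return (RET), flake8-use-pathlib (PTH), pandas-vet (PD), the Pylint convention and error groups (PLC, PLE), flynt (FLY), NumPy-specific rules (NPY) and Perflint (PERF), and rewrites the per-file ignores with a comment per suppressed rule. A quick way to exercise the new selection locally (illustrative commands assuming Ruff 0.7.x is installed; not part of this commit):

# Ruff reads .ruff.toml from the repository root automatically.
ruff check .
# Summarise which of the newly enabled rule families actually fire.
ruff check . --statistics
# Or run the pre-commit hook pinned above across the whole tree.
pre-commit run ruff --all-files
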
2 changes: 1 addition & 1 deletion examples/calculating_time_lags.py
@@ -14,7 +14,7 @@
The specific implementation in this package is described in detail
in Appendix C of `Barnes et al. (2019) <https://doi.org/10.3847/1538-4357/ab290c>`__.
"""
# sphinx_gallery_thumbnail_number = 4 # NOQA: ERA001
# sphinx_gallery_thumbnail_number = 4

import dask.array
import matplotlib.pyplot as plt
2 changes: 1 addition & 1 deletion examples/detecting_swirls.py
@@ -9,7 +9,7 @@
Unfortunately, currently ASDA within sunkit-image only works on arrays.
"""
# sphinx_gallery_thumbnail_number = 3 # NOQA: ERA001
# sphinx_gallery_thumbnail_number = 3

import matplotlib.pyplot as plt
import numpy as np
2 changes: 1 addition & 1 deletion examples/finding_sunspots_using_stara.py
@@ -12,7 +12,7 @@
`this <https://gitlab.com/wtbarnes/aia-on-pleiades/-/blob/master/notebooks/tidy/finding_sunspots.ipynb>`__
notebook implementation of the same algorithm using dask arrays.
"""
# sphinx_gallery_thumbnail_number = 2 # NOQA: ERA001
# sphinx_gallery_thumbnail_number = 2

import matplotlib.pyplot as plt
from skimage.measure import label, regionprops_table
2 changes: 1 addition & 1 deletion examples/tracing_loops.py
@@ -12,7 +12,7 @@
`here <http://www.lmsal.com/~aschwand/software/tracing/tracing_tutorial1.html>`__.
"""
# sphinx_gallery_thumbnail_number = 1 # NOQA: ERA001
# sphinx_gallery_thumbnail_number = 1

import matplotlib.pyplot as plt
import numpy as np
14 changes: 7 additions & 7 deletions sunkit_image/conftest.py
@@ -41,22 +41,22 @@


@pytest.fixture(scope="session", autouse=True)
def _tmp_config_dir(request): # NOQA: ARG001
def _tmp_config_dir(request):
"""
Globally set the default config for all tests.
"""
tmpdir = tempfile.TemporaryDirectory()

os.environ["SUNPY_CONFIGDIR"] = str(tmpdir.name)
astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name) # NOQA: SLF001
astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name) # NOQA: SLF001
astropy.config.paths.set_temp_config._temp_path = str(tmpdir.name)
astropy.config.paths.set_temp_cache._temp_path = str(tmpdir.name)

yield

del os.environ["SUNPY_CONFIGDIR"]
tmpdir.cleanup()
astropy.config.paths.set_temp_config._temp_path = None # NOQA: SLF001
astropy.config.paths.set_temp_cache._temp_path = None # NOQA: SLF001
astropy.config.paths.set_temp_config._temp_path = None
astropy.config.paths.set_temp_cache._temp_path = None


@pytest.fixture()
@@ -71,7 +71,7 @@ def _undo_config_dir_patch():


@pytest.fixture(scope="session", autouse=True)
def tmp_dl_dir(request): # NOQA: ARG001
def tmp_dl_dir(request):
"""
Globally set the default download directory for the test run to a tmp dir.
"""
@@ -93,7 +93,7 @@ def _undo_download_dir_patch():


@pytest.fixture(scope="session", autouse=True)
def _hide_parfive_progress(request): # NOQA: ARG001
def _hide_parfive_progress(request):
"""
Set the PARFIVE_HIDE_PROGRESS to hide the parfive progress bar in tests.
"""
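
The removed "# NOQA: ARG001" and "# NOQA: SLF001" comments suppressed flake8-unused-arguments and flake8-self warnings; neither family appears in the extended selection above, so with the RUF rules active the directives would now be reported as unused suppressions. This can be confirmed locally with something like the following (illustrative, assuming Ruff is installed):

# RUF100 flags noqa directives that no longer suppress anything under the project's .ruff.toml.
ruff check sunkit_image/conftest.py --extend-select RUF100
# Opting the unselected families back in shows what the removed comments used to silence.
ruff check sunkit_image/conftest.py --extend-select ARG,SLF
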
2 changes: 1 addition & 1 deletion sunkit_image/granule.py
@@ -76,7 +76,7 @@ def segment(smap, *, skimage_method="li", mark_dim_centers=False, bp_min_flux=No
resolution,
bp_min_flux,
)
logging.info(f"Segmentation has identified {granule_count} granules and {brightpoint_count} brightpoint") # NOQA: G004
logging.info(f"Segmentation has identified {granule_count} granules and {brightpoint_count} brightpoint")
# Create output map using input wcs and adding colormap such that 0 (intergranules) = black, 1 (granule) = white, 2 (brightpoints) = yellow, 3 (dim_centers) = blue.
segmented_map = sunpy.map.Map(seg_im_markbp, smap.wcs)
cmap = mpl.colors.ListedColormap(["black", "white", "#ffc406", "blue"])
12 changes: 6 additions & 6 deletions sunkit_image/tests/test_radial.py
@@ -268,27 +268,27 @@ def test_fit_polynomial_to_log_radial_intensity():
degree = 1
expected = np.polyfit(radii.to(u.R_sun).value, np.log(intensity), degree)

assert np.allclose(rad._fit_polynomial_to_log_radial_intensity(radii, intensity, degree), expected) # NOQA: SLF001
assert np.allclose(rad._fit_polynomial_to_log_radial_intensity(radii, intensity, degree), expected)


def test_calculate_fit_radial_intensity():
polynomial = np.asarray([1, 2, 3])
radii = (0.001, 0.002) * u.R_sun
expected = np.exp(np.poly1d(polynomial)(radii.to(u.R_sun).value))

assert np.allclose(rad._calculate_fit_radial_intensity(radii, polynomial), expected) # NOQA: SLF001
assert np.allclose(rad._calculate_fit_radial_intensity(radii, polynomial), expected)


def test_normalize_fit_radial_intensity():
polynomial = np.asarray([1, 2, 3])
radii = (0.001, 0.002) * u.R_sun
normalization_radii = (0.003, 0.004) * u.R_sun
expected = rad._calculate_fit_radial_intensity(radii, polynomial) / rad._calculate_fit_radial_intensity( # NOQA: SLF001
expected = rad._calculate_fit_radial_intensity(radii, polynomial) / rad._calculate_fit_radial_intensity(
normalization_radii,
polynomial,
)

assert np.allclose(rad._normalize_fit_radial_intensity(radii, polynomial, normalization_radii), expected) # NOQA: SLF001
assert np.allclose(rad._normalize_fit_radial_intensity(radii, polynomial, normalization_radii), expected)


@skip_windows
@@ -311,13 +311,13 @@ def test_intensity_enhance(map_test1):
radial_bin_summary.to(u.R_sun).value <= fit_range[1].to(u.R_sun).value,
)

polynomial = rad._fit_polynomial_to_log_radial_intensity( # NOQA: SLF001
polynomial = rad._fit_polynomial_to_log_radial_intensity(
radial_bin_summary[fit_here],
radial_intensity[fit_here],
degree,
)

enhancement = 1 / rad._normalize_fit_radial_intensity(map_r, polynomial, normalization_radius) # NOQA: SLF001
enhancement = 1 / rad._normalize_fit_radial_intensity(map_r, polynomial, normalization_radius)
enhancement[map_r < normalization_radius] = 1

assert np.allclose(
6 changes: 3 additions & 3 deletions sunkit_image/utils/__init__.py
@@ -1,3 +1,3 @@
from .decorators import * # NOQA: F403
from .noise import * # NOQA: F403
from .utils import * # NOQA: F403
from .decorators import *
from .noise import *
from .utils import *
2 changes: 1 addition & 1 deletion sunkit_image/utils/tests/test_utils.py
@@ -128,7 +128,7 @@ def test_calculate_gamma():
)
N = (2 * r + 1) ** 2
pnorm = np.linalg.norm(pm, axis=1)
cross = utils.utils._cross2d(pm, vel[..., 0]) # NOQA: SLF001
cross = utils.utils._cross2d(pm, vel[..., 0])
vel_norm = np.linalg.norm(vel[..., 0], axis=2)
sint = cross / (pnorm * vel_norm + 1e-10)
expected = np.nansum(sint, axis=1) / N
