🔥 Use ruff for formatting.
arafune committed Mar 18, 2024
1 parent 85ccfaf commit 7fa7231
Showing 9 changed files with 32 additions and 31 deletions.
13 changes: 0 additions & 13 deletions .github/workflows/black.yml

This file was deleted.

12 changes: 12 additions & 0 deletions .github/workflows/ruff.yml
@@ -0,0 +1,12 @@
+name: Ruff
+
+on: [push, pull_request]
+
+jobs:
+  ruff:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+        with:
+          python-version: "3.11"
12 changes: 8 additions & 4 deletions .pre-commit-config.yaml
@@ -1,5 +1,9 @@
 repos:
-- hooks:
-  - id: black
-  repo: https://github.com/psf/black
-  rev: 23.3.0
+- repo: https://github.com/astral-sh/ruff-pre-commit
+  rev: v0.3.3
+  hooks:
+    # Run the linter
+    # - id: ruff
+    #   args: [ --fix ]
+    # Run the formatter
+    - id: ruff-format
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -42,7 +42,7 @@ build-backend = "hatchling.build"
 managed = true
 dev-dependencies = [
     "colorama>=0.4.6",
-    "black>=23.3.0",
+    "ruff>=0.3.3",
     "pytest>=7.3.2",
     "pytest-cov",
     "pytest-qt>=4.2.0",
@@ -100,6 +100,7 @@ lint.ignore = [
 lint.select = ["ALL"]
 target-version = "py310"
 line-length = 100
+indent-width = 4
 
 exclude = ["scripts", "docs", "conda"]

8 changes: 4 additions & 4 deletions src/arpes/analysis/decomposition.py
@@ -181,7 +181,7 @@ def decomposition_along(
 
 @wraps(decomposition_along)
 def pca_along(
-    *args: * tuple[xr.DataArray, list[str]],
+    *args: *tuple[xr.DataArray, list[str]],
     **kwargs: Unpack[PCAParam],
 ) -> tuple[xr.DataArray, sklearn.decomposition.PCA]:
     """Specializes `decomposition_along` with `sklearn.decomposition.PCA`."""
@@ -192,7 +192,7 @@
 
 @wraps(decomposition_along)
 def factor_analysis_along(
-    *args: * tuple[xr.DataArray, list[str]],
+    *args: *tuple[xr.DataArray, list[str]],
     **kwargs: Unpack[FactorAnalysisParam],
 ) -> tuple[xr.DataArray, sklearn.decomposition.FactorAnalysis]:
     """Specializes `decomposition_along` with `sklearn.decomposition.FactorAnalysis`."""
@@ -201,7 +201,7 @@
 
 @wraps(decomposition_along)
 def ica_along(
-    *args: * tuple[xr.DataArray, list[str]],
+    *args: *tuple[xr.DataArray, list[str]],
    **kwargs: Unpack[FastICAParam],
 ) -> tuple[xr.DataArray, sklearn.decomposition.FastICA]:
     """Specializes `decomposition_along` with `sklearn.decomposition.FastICA`."""
@@ -210,7 +210,7 @@
 
 @wraps(decomposition_along)
 def nmf_along(
-    *args: * tuple[xr.DataArray, list[str]],
+    *args: *tuple[xr.DataArray, list[str]],
     **kwargs: Unpack[NMFParam],
 ) -> tuple[xr.DataArray, sklearn.decomposition.NMF]:
     """Specializes `decomposition_along` with `sklearn.decomposition.NMF`."""
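
Note: the only change in each of the four wrappers above is tightening "* tuple[...]" to "*tuple[...]" in the PEP 646 variadic annotation. A minimal sketch of that syntax (hypothetical function, not from this repository; requires Python >= 3.11):

# PEP 646: a *args parameter may be annotated with an unpacked tuple type,
# fixing the number and types of the positional arguments.
def described_args(*args: *tuple[float, str]) -> str:
    value, label = args  # type checkers see args as tuple[float, str]
    return f"{label} = {value}"

print(described_args(1.5, "offset"))  # offset = 1.5
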
2 changes: 1 addition & 1 deletion src/arpes/analysis/deconvolution.py
@@ -166,7 +166,7 @@ def make_psf(
     )
 
     if fwhm:
-        sigmas = {k: v / (2 * np.sqrt(2 * np.log(2))) for k, v, in sigmas.items()}
+        sigmas = {k: v / (2 * np.sqrt(2 * np.log(2))) for k, v in sigmas.items()}
     cov: NDArray[np.float_] = np.zeros((len(sigmas), len(sigmas)))
     for i, dim in enumerate(data.dims):
         cov[i][i] = sigmas[dim] ** 2  # sigma is deviation, but multivariate_normal uses covariant
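
Note: the reformatted comprehension (which also drops a stray comma after "v") converts a Gaussian FWHM to a standard deviation. A small check of that factor (hypothetical values, not repo code):

import numpy as np

# For a Gaussian, FWHM = 2 * sqrt(2 * ln 2) * sigma, so
# sigma = FWHM / (2 * sqrt(2 * ln 2)) ≈ 0.4247 * FWHM.
fwhm = 0.010  # hypothetical energy resolution in eV
sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))
print(sigma)  # ~0.00425
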
4 changes: 1 addition & 3 deletions src/arpes/corrections/fermi_edge_corrections.py
@@ -103,9 +103,7 @@ def apply_direct_fermi_edge_correction(
     correction = build_direct_fermi_edge_correction(arr, *args, **kwargs)
 
     assert isinstance(correction, xr.Dataset)
-    shift_amount = (
-        -correction / arr.G.stride(generic_dim_names=False)["eV"]
-    )  # pylint: disable=invalid-unary-operand-type
+    shift_amount = -correction / arr.G.stride(generic_dim_names=False)["eV"]  # pylint: disable=invalid-unary-operand-type
     energy_axis_index = list(arr.dims).index("eV")
 
     correction_axis = list(arr.dims).index(correction.dims[0])
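
Note: the collapsed line divides an edge correction in eV by the stride of the energy axis, turning it into a shift in index units. A sketch of that arithmetic (names and values hypothetical):

import numpy as np

# A correction of 0.005 eV on an axis with 0.001 eV steps is a shift of
# -5 index steps (negated, as in the diff, to undo the measured edge offset).
ev_axis = np.linspace(-0.2, 0.1, 301)  # hypothetical energy coordinates
stride_ev = ev_axis[1] - ev_axis[0]    # energy step, 0.001 eV
correction_ev = 0.005                  # hypothetical edge position
shift_amount = -correction_ev / stride_ev
print(round(shift_amount, 3))  # -5.0
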
3 changes: 1 addition & 2 deletions src/arpes/load_pxt.py
@@ -104,8 +104,7 @@ def read_igor_binary_wave(raw_bytes: bytes) -> xr.DataArray:
 
     wave_data = np.fromstring(
         raw_bytes[
-            igor_wave_header_dtype.itemsize
-            + offset : igor_wave_header_dtype.itemsize
+            igor_wave_header_dtype.itemsize + offset : igor_wave_header_dtype.itemsize
             + n_points * point_size
             + offset
         ],
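
Note: the joined slice reads as start:stop offsets into the raw wave bytes: the payload begins after the binary header plus an optional offset and spans n_points * point_size bytes. A runnable sketch with hypothetical sizes:

# Stand-ins for igor_wave_header_dtype.itemsize, offset, n_points, point_size.
header_size = 320
offset = 16
n_points, point_size = 1000, 4
start = header_size + offset
stop = start + n_points * point_size
raw_bytes = bytes(stop)          # dummy buffer just to make the slice runnable
payload = raw_bytes[start:stop]
assert len(payload) == n_points * point_size
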
6 changes: 3 additions & 3 deletions src/arpes/simulation.py
@@ -181,9 +181,9 @@ def cloud_to_arr(
         cloud_as_image[(int(np.floor(x)) + 1) % shape_x][int(np.floor(y)) % shape_y] += (
             1 - frac_low_x
         ) * frac_low_y
-        cloud_as_image[int(np.floor(x)) % shape_x][
-            (int(np.floor(y)) + 1) % shape_y
-        ] += frac_low_x * (1 - frac_low_y)
+        cloud_as_image[int(np.floor(x)) % shape_x][(int(np.floor(y)) + 1) % shape_y] += (
+            frac_low_x * (1 - frac_low_y)
+        )
         cloud_as_image[(int(np.floor(x)) + 1) % shape_x][(int(np.floor(y)) + 1) % shape_y] += (
             1 - frac_low_x
         ) * (1 - frac_low_y)
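
Note: the reformatted statement is one corner of a bilinear deposit: each point spreads unit weight over its four neighboring pixels, with modulo wraparound at the grid edges. A generic sketch of the technique (frac_low_* taken as the weight of the lower-index pixel; the repository's exact convention may differ):

import numpy as np

def splat_points(points, shape_x, shape_y):
    """Bilinearly deposit each (x, y) point onto a periodic grid."""
    image = np.zeros((shape_x, shape_y))
    for x, y in points:
        ix, iy = int(np.floor(x)), int(np.floor(y))
        frac_low_x, frac_low_y = 1 - (x - ix), 1 - (y - iy)
        # Four neighbors, weights summing to 1, indices wrapped by modulo.
        image[ix % shape_x, iy % shape_y] += frac_low_x * frac_low_y
        image[(ix + 1) % shape_x, iy % shape_y] += (1 - frac_low_x) * frac_low_y
        image[ix % shape_x, (iy + 1) % shape_y] += frac_low_x * (1 - frac_low_y)
        image[(ix + 1) % shape_x, (iy + 1) % shape_y] += (1 - frac_low_x) * (1 - frac_low_y)
    return image

print(splat_points([(1.25, 2.75)], 4, 4).sum())  # 1.0
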
