diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
index 27aa5460..828a850d 100644
--- a/docs/CHANGELOG.md
+++ b/docs/CHANGELOG.md
@@ -1,5 +1,10 @@
# Change Log
+## 0.5.2
+
+- 📝 Refactor codebase: unify type annotations and import future features
+- 🐛 Allow methods decorated by @ProcGroup.add_proc to return None
+
## 0.5.1
- 🚑 Remove remaining more-itertools
diff --git a/pipen/channel.py b/pipen/channel.py
index 8760f131..7277fec3 100644
--- a/pipen/channel.py
+++ b/pipen/channel.py
@@ -1,7 +1,9 @@
"""Provide some function for creating and modifying channels (dataframes)"""
+from __future__ import annotations
+
from glob import glob
from os import path
-from typing import Any, List, Union
+from typing import Any, List
import pandas
from pandas import DataFrame
@@ -14,7 +16,7 @@ class Channel(DataFrame):
"""A DataFrame wrapper with creators"""
@classmethod
- def create(cls, value: Union[DataFrame, List[Any]]) -> DataFrame:
+ def create(cls, value: DataFrame | List[Any]) -> DataFrame:
"""Create a channel from a list.
The second dimension is identified by tuple. if all elements are tuple,
@@ -151,7 +153,7 @@ def from_table(cls, *args, **kwargs):
@register_verb(DataFrame)
def expand_dir(
data: DataFrame,
- col: Union[str, int] = 0,
+ col: str | int = 0,
pattern: str = "*",
ftype: str = "any",
sortby: str = "name",
@@ -189,7 +191,7 @@ def expand_dir(
@register_verb(DataFrame)
-def collapse_files(data: DataFrame, col: Union[str, int] = 0) -> DataFrame:
+def collapse_files(data: DataFrame, col: str | int = 0) -> DataFrame:
"""Collapse a Channel according to the files in
,
other cols will use the values in row 0.
diff --git a/pipen/cli/version.py b/pipen/cli/version.py
index 06f7520b..ee43a0c8 100644
--- a/pipen/cli/version.py
+++ b/pipen/cli/version.py
@@ -51,5 +51,5 @@ def exec_command(self, args: Namespace) -> None:
ver = versions[key]
verlines = ver.splitlines()
print(f"{key.ljust(keylen)}: {verlines.pop(0)}")
- for verline in verlines:
+ for verline in verlines: # pragma: no cover
print(f"{' ' * keylen} {verline}")
diff --git a/pipen/defaults.py b/pipen/defaults.py
index 1954ba1b..e860be8a 100644
--- a/pipen/defaults.py
+++ b/pipen/defaults.py
@@ -59,16 +59,16 @@
# Just the total width of the terminal
# when logging with a rich.Panel()
-CONSOLE_WIDTH_WITH_PANEL: int = 100
+CONSOLE_WIDTH_WITH_PANEL = 100
# The width of the terminal when the width cannot be detected,
# we are probably logging into a file
-CONSOLE_DEFAULT_WIDTH: int = 256
+CONSOLE_DEFAULT_WIDTH = 256
# [05/16/22 11:46:40] I
# v0.3.4:
# 05-16 11:11:11 I
# The markup code is included
# Don't modify this unless the logger formatter is changed
-CONSOLE_WIDTH_SHIFT: int = 25
+CONSOLE_WIDTH_SHIFT = 25
# For pipen scheduler plugins
SCHEDULER_ENTRY_GROUP = "pipen_sched"
# For pipen template plugins
diff --git a/pipen/job.py b/pipen/job.py
index ca35a30f..06557d95 100644
--- a/pipen/job.py
+++ b/pipen/job.py
@@ -1,10 +1,12 @@
"""Provide the Job class"""
+from __future__ import annotations
+
import logging
import shlex
from os import PathLike
from pathlib import Path
import shutil
-from typing import TYPE_CHECKING, Any, Dict, Mapping, Union
+from typing import TYPE_CHECKING, Any, Dict, Mapping
from diot import OrderedDiot
from xqute import Job as XquteJob
@@ -210,7 +212,7 @@ def template_data(self) -> Mapping[str, Any]:
def log(
self,
- level: Union[int, str],
+ level: int | str,
msg: str,
*args,
limit: int = 3,
diff --git a/pipen/pipen.py b/pipen/pipen.py
index f0cbf90e..7db3acdd 100644
--- a/pipen/pipen.py
+++ b/pipen/pipen.py
@@ -1,11 +1,13 @@
"""Main entry module, provide the Pipen class"""
+from __future__ import annotations
+
import asyncio
from itertools import chain
from os import PathLike
from pathlib import Path
import pprint
import textwrap
-from typing import ClassVar, Iterable, List, Sequence, Type, Union
+from typing import ClassVar, Iterable, List, Sequence, Type
from diot import Diot
from rich import box
@@ -64,7 +66,7 @@ def __init__(
self,
name: str = None,
desc: str = None,
- outdir: PathLike = None,
+        outdir: str | PathLike | None = None,
**kwargs,
) -> None:
"""Constructor"""
@@ -215,7 +217,7 @@ def set_data(self, *indata: Iterable) -> "Pipen":
def set_starts(
self,
- *procs: Union[Type[Proc], Sequence[Type[Proc]]],
+ *procs: Type[Proc] | Sequence[Type[Proc]],
clear: bool = True,
):
"""Set the starts
diff --git a/pipen/pluginmgr.py b/pipen/pluginmgr.py
index f8c1b051..d2587f27 100644
--- a/pipen/pluginmgr.py
+++ b/pipen/pluginmgr.py
@@ -1,7 +1,8 @@
"""Define hooks specifications and provide plugin manager"""
-import signal
+from __future__ import annotations
+
from pathlib import Path
-from typing import Any, Dict, Optional, Union, TYPE_CHECKING
+from typing import Any, Dict, TYPE_CHECKING
from simplug import Simplug, SimplugResult
from xqute import JobStatus, Scheduler
@@ -11,6 +12,7 @@
if TYPE_CHECKING: # pragma: no cover
+ import signal
from xqute import Xqute
from .job import Job
from .proc import Proc
@@ -35,7 +37,7 @@ def on_setup(config: Dict[str, Any]) -> None:
@plugin.spec
-async def on_init(pipen: "Pipen") -> None:
+async def on_init(pipen: Pipen) -> None:
"""When the pipeline is initialized, and default configs are loaded
Args:
@@ -44,7 +46,7 @@ async def on_init(pipen: "Pipen") -> None:
@plugin.spec
-async def on_start(pipen: "Pipen") -> None:
+async def on_start(pipen: Pipen) -> None:
"""Right before the pipeline starts running.
Process relationships are inferred.
@@ -55,7 +57,7 @@ async def on_start(pipen: "Pipen") -> None:
@plugin.spec
-async def on_complete(pipen: "Pipen", succeeded: bool):
+async def on_complete(pipen: Pipen, succeeded: bool):
"""The the pipeline is completed.
Args:
@@ -65,7 +67,7 @@ async def on_complete(pipen: "Pipen", succeeded: bool):
@plugin.spec
-def on_proc_init(proc: "Proc"):
+def on_proc_init(proc: Proc):
"""Called before proc get instantiated.
Enables plugins to modify the default attributes of processes
@@ -76,7 +78,7 @@ def on_proc_init(proc: "Proc"):
@plugin.spec
-def on_proc_input_computed(proc: "Proc"):
+def on_proc_input_computed(proc: Proc):
"""Called after process input data is computed.
Args:
@@ -85,7 +87,7 @@ def on_proc_input_computed(proc: "Proc"):
@plugin.spec
-async def on_proc_start(proc: "Proc"):
+async def on_proc_start(proc: Proc):
"""When a process is starting
Args:
@@ -94,7 +96,7 @@ async def on_proc_start(proc: "Proc"):
@plugin.spec(result=SimplugResult.TRY_ALL_FIRST_AVAIL)
-def on_proc_shutdown(proc: "Proc", sig: Optional[signal.Signals]) -> None:
+def on_proc_shutdown(proc: Proc, sig: signal.Signals | None) -> None:
"""When pipeline is shutting down, by Ctrl-c for example.
Return False to stop shutting down, but you have to shut it down
@@ -109,7 +111,7 @@ def on_proc_shutdown(proc: "Proc", sig: Optional[signal.Signals]) -> None:
@plugin.spec
-async def on_proc_done(proc: "Proc", succeeded: Union[bool, str]) -> None:
+async def on_proc_done(proc: Proc, succeeded: bool | str) -> None:
"""When a process is done
Args:
@@ -120,7 +122,7 @@ async def on_proc_done(proc: "Proc", succeeded: Union[bool, str]) -> None:
@plugin.spec
-async def on_job_init(proc: "Proc", job: "Job"):
+async def on_job_init(proc: Proc, job: Job):
"""When a job is initialized
Args:
@@ -130,7 +132,7 @@ async def on_job_init(proc: "Proc", job: "Job"):
@plugin.spec
-async def on_job_queued(proc: "Proc", job: "Job"):
+async def on_job_queued(proc: Proc, job: Job):
"""When a job is queued in xqute. Note it might not be queued yet in
the scheduler system.
@@ -141,7 +143,7 @@ async def on_job_queued(proc: "Proc", job: "Job"):
@plugin.spec(result=SimplugResult.TRY_ALL_FIRST_AVAIL)
-async def on_job_submitting(proc: "Proc", job: "Job") -> Optional[bool]:
+async def on_job_submitting(proc: Proc, job: Job) -> bool | None:
"""When a job is submitting.
The first plugin (based on priority) have this hook return False will
@@ -157,7 +159,7 @@ async def on_job_submitting(proc: "Proc", job: "Job") -> Optional[bool]:
@plugin.spec
-async def on_job_submitted(proc: "Proc", job: "Job"):
+async def on_job_submitted(proc: Proc, job: Job):
"""When a job is submitted in the scheduler system.
Args:
@@ -167,7 +169,7 @@ async def on_job_submitted(proc: "Proc", job: "Job"):
@plugin.spec
-async def on_job_running(proc: "Proc", job: "Job"):
+async def on_job_running(proc: Proc, job: Job):
"""When a job starts to run in then scheduler system.
Args:
@@ -177,7 +179,7 @@ async def on_job_running(proc: "Proc", job: "Job"):
@plugin.spec(result=SimplugResult.TRY_ALL_FIRST_AVAIL)
-async def on_job_killing(proc: "Proc", job: "Job") -> Optional[bool]:
+async def on_job_killing(proc: Proc, job: Job) -> bool | None:
"""When a job is being killed.
The first plugin (based on priority) have this hook return False will
@@ -193,7 +195,7 @@ async def on_job_killing(proc: "Proc", job: "Job") -> Optional[bool]:
@plugin.spec
-async def on_job_killed(proc: "Proc", job: "Job"):
+async def on_job_killed(proc: Proc, job: Job):
"""When a job is killed
Args:
@@ -203,7 +205,7 @@ async def on_job_killed(proc: "Proc", job: "Job"):
@plugin.spec
-async def on_job_succeeded(proc: "Proc", job: "Job"):
+async def on_job_succeeded(proc: Proc, job: Job):
"""When a job completes successfully.
Args:
@@ -213,7 +215,7 @@ async def on_job_succeeded(proc: "Proc", job: "Job"):
@plugin.spec
-async def on_job_failed(proc: "Proc", job: "Job"):
+async def on_job_failed(proc: Proc, job: Job):
"""When a job is done but failed.
Args:
@@ -229,7 +231,7 @@ class PipenMainPlugin:
name = "main"
@plugin.impl
- def on_proc_shutdown(self, proc: "Proc", sig: Optional[signal.Signals]):
+    def on_proc_shutdown(self, proc: Proc, sig: signal.Signals | None):
"""When a process is shutting down"""
if sig: # pragma: no cover
proc.log(
@@ -239,17 +241,17 @@ def on_proc_shutdown(self, proc: "Proc", sig: Optional[signal.Signals]):
)
@plugin.impl
- async def on_job_submitted(self, proc: "Proc", job: "Job"):
+ async def on_job_submitted(self, proc: Proc, job: Job):
"""Update the progress bar when a job is submitted"""
proc.pbar.update_job_submitted()
@plugin.impl
- async def on_job_running(self, proc: "Proc", job: "Job"):
+ async def on_job_running(self, proc: Proc, job: Job):
"""Update the progress bar when a job starts to run"""
proc.pbar.update_job_running()
@plugin.impl
- async def on_job_succeeded(self, proc: "Proc", job: "Job"):
+ async def on_job_succeeded(self, proc: Proc, job: Job):
"""Cache the job and update the progress bar when a job is succeeded"""
# now the returncode is 0, however, we need to check if output files
# have been created or not, this makes sure job.cache not fail
@@ -271,7 +273,7 @@ async def on_job_succeeded(self, proc: "Proc", job: "Job"):
proc.pbar.update_job_succeeded()
@plugin.impl
- async def on_job_failed(self, proc: "Proc", job: "Job"):
+ async def on_job_failed(self, proc: Proc, job: Job):
"""Update the progress bar when a job is failed"""
proc.pbar.update_job_failed()
if job.status == JobStatus.RETRYING:
@@ -279,7 +281,7 @@ async def on_job_failed(self, proc: "Proc", job: "Job"):
proc.pbar.update_job_retrying()
@plugin.impl
- async def on_job_killed(self, proc: "Proc", job: "Job"):
+ async def on_job_killed(self, proc: Proc, job: Job):
"""Update the status of a killed job"""
# instead of FINISHED to force the whole pipeline to quit
job.status = JobStatus.FAILED # pragma: no cover
@@ -296,37 +298,37 @@ class XqutePipenPlugin:
name = "xqute.pipen"
@xqute_plugin.impl
- def on_shutdown(self, xqute: "Xqute", sig: Optional[signal.Signals]):
+    def on_shutdown(self, xqute: Xqute, sig: signal.Signals | None):
"""When a process is shutting down"""
return plugin.hooks.on_proc_shutdown(xqute.proc, sig)
@xqute_plugin.impl
- async def on_job_init(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_init(self, scheduler: Scheduler, job: Job):
"""When a job is initialized"""
await plugin.hooks.on_job_init(job.proc, job)
@xqute_plugin.impl
- async def on_job_queued(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_queued(self, scheduler: Scheduler, job: Job):
"""When a job is queued"""
await plugin.hooks.on_job_queued(job.proc, job)
@xqute_plugin.impl
- async def on_job_submitting(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_submitting(self, scheduler: Scheduler, job: Job):
"""When a job is being submitted"""
return await plugin.hooks.on_job_submitting(job.proc, job)
@xqute_plugin.impl
- async def on_job_submitted(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_submitted(self, scheduler: Scheduler, job: Job):
"""When a job is submitted"""
await plugin.hooks.on_job_submitted(job.proc, job)
@xqute_plugin.impl
- async def on_job_running(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_running(self, scheduler: Scheduler, job: Job):
"""When a job starts to run"""
await plugin.hooks.on_job_running(job.proc, job)
@xqute_plugin.impl
- async def on_job_killing(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_killing(self, scheduler: Scheduler, job: Job):
"""When a job is being killed"""
return await plugin.hooks.on_job_killing( # pragma: no cover
job.proc,
@@ -334,7 +336,7 @@ async def on_job_killing(self, scheduler: Scheduler, job: "Job"):
)
@xqute_plugin.impl
- async def on_job_killed(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_killed(self, scheduler: Scheduler, job: Job):
"""When a job is killed"""
await plugin.hooks.on_job_killed( # pragma: no cover
job.proc,
@@ -342,12 +344,12 @@ async def on_job_killed(self, scheduler: Scheduler, job: "Job"):
)
@xqute_plugin.impl
- async def on_job_succeeded(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_succeeded(self, scheduler: Scheduler, job: Job):
"""When a job is succeeded"""
await plugin.hooks.on_job_succeeded(job.proc, job)
@xqute_plugin.impl
- async def on_job_failed(self, scheduler: Scheduler, job: "Job"):
+ async def on_job_failed(self, scheduler: Scheduler, job: Job):
"""When a job is failed"""
await plugin.hooks.on_job_failed(job.proc, job)
diff --git a/pipen/proc.py b/pipen/proc.py
index 3218ef63..a1f2a046 100644
--- a/pipen/proc.py
+++ b/pipen/proc.py
@@ -303,7 +303,7 @@ def __init_subclass__(cls) -> None:
)
cls.__procgroup__ = None
- def __init__(self, pipeline: "Pipen" = None) -> None:
+ def __init__(self, pipeline: Pipen = None) -> None:
"""Constructor
This is called only at runtime.
diff --git a/pipen/procgroup.py b/pipen/procgroup.py
index b6095a4d..7d13cdcc 100644
--- a/pipen/procgroup.py
+++ b/pipen/procgroup.py
@@ -120,10 +120,15 @@ def add_proc(
a cached property that returns the process class
"""
if isinstance(self_or_method, ProcGroup):
+ # Called as self.add_proc or pg.add_proc
+ if proc is None:
+ return self_or_method.add_proc # type: ignore
+
if proc.name in self_or_method.__class__.PRESERVED:
raise ValueError(
f"Process name `{proc.name}` is reserved for ProcGroup"
)
+
setattr(self_or_method, proc.name, proc)
proc.__procgroup__ = self_or_method
if not proc.requires:
@@ -134,6 +139,19 @@ def add_proc(
@wraps(self_or_method)
def wrapper(self):
proc = self_or_method(self)
+
+ if proc is None:
+ return None
+
+ if (
+ not isinstance(proc, Proc)
+ and (not isinstance(proc, type) or not issubclass(proc, Proc))
+ ):
+ raise ValueError(
+ f"`{proc}` is not a process "
+ "(either a subclass or an instance of Proc)"
+ )
+
proc.__procgroup__ = self
if not proc.requires:
self.starts.append(proc)
@@ -144,9 +162,9 @@ def wrapper(self):
def as_pipen(
self,
- name: str = None,
- desc: str = None,
- outdir: PathLike = None,
+ name: str | None = None,
+ desc: str | None = None,
+ outdir: str | PathLike | None = None,
**kwargs,
) -> Pipen:
"""Convert the pipeline to a Pipen instance
diff --git a/pipen/progressbar.py b/pipen/progressbar.py
index 08ba9d46..3ca29cfa 100644
--- a/pipen/progressbar.py
+++ b/pipen/progressbar.py
@@ -1,4 +1,6 @@
"""Provide the PipelinePBar and ProcPBar classes"""
+from __future__ import annotations
+
from typing import TYPE_CHECKING
from .utils import truncate_text
@@ -16,7 +18,7 @@ class ProcPBar:
"""The progress bar for processes"""
def __init__(
- self, manager: "enlighten.Manager", proc_size: int, proc_name: str
+ self, manager: enlighten.Manager, proc_size: int, proc_name: str
) -> None:
self.submitted_counter = manager.counter(
total=proc_size,
diff --git a/pipen/scheduler.py b/pipen/scheduler.py
index b86f4984..79ab245c 100644
--- a/pipen/scheduler.py
+++ b/pipen/scheduler.py
@@ -1,5 +1,7 @@
"""Provide builting schedulers"""
-from typing import Type, Union
+from __future__ import annotations
+
+from typing import Type
from xqute import Scheduler
from xqute.schedulers.local_scheduler import LocalJob as XquteLocalJob
@@ -39,7 +41,7 @@ class SgeScheduler(XquteSgeScheduler):
job_class = SgeJob
-def get_scheduler(scheduler: Union[str, Type[Scheduler]]) -> Type[Scheduler]:
+def get_scheduler(scheduler: str | Type[Scheduler]) -> Type[Scheduler]:
"""Get the scheduler by name of the scheduler class itself
Args:
diff --git a/pipen/template.py b/pipen/template.py
index bee21b5e..0a84fc2e 100644
--- a/pipen/template.py
+++ b/pipen/template.py
@@ -1,6 +1,8 @@
"""Template adaptor for pipen"""
+from __future__ import annotations
+
from abc import ABC, abstractmethod
-from typing import Any, Mapping, Type, Union
+from typing import Any, Mapping, Type
from liquid import Liquid
@@ -116,9 +118,7 @@ def _render(self, data: Mapping[str, Any]) -> str:
return self.engine.render(data)
-def get_template_engine(
- template: Union[str, Type[Template]],
-) -> Type[Template]:
+def get_template_engine(template: str | Type[Template]) -> Type[Template]:
"""Get the template engine by name or the template engine itself
Args:
diff --git a/pipen/utils.py b/pipen/utils.py
index 216f4acc..ef92b84e 100644
--- a/pipen/utils.py
+++ b/pipen/utils.py
@@ -1,4 +1,6 @@
"""Provide some utilities"""
+from __future__ import annotations
+
import logging
import textwrap
from itertools import groupby
@@ -17,24 +19,8 @@
Mapping,
Tuple,
Type,
- Union,
)
-if TYPE_CHECKING: # pragma: no cover
- import pandas
-
-try: # pragma: no cover
- from functools import cached_property
-except ImportError: # pragma: no cover
- # python 3.7
- from cached_property import cached_property
-
-try: # pragma: no cover
- from importlib import metadata as importlib_metadata
-except ImportError: # pragma: no cover
- # python 3.7
- import importlib_metadata
-
from rich.console import Console
from rich.logging import RichHandler as _RichHandler
from rich.table import Table
@@ -51,7 +37,20 @@
from .pluginmgr import plugin
from .version import __version__
+try: # pragma: no cover
+ from functools import cached_property
+except ImportError: # pragma: no cover
+ # python 3.7
+ from cached_property import cached_property
+
+try: # pragma: no cover
+ from importlib import metadata as importlib_metadata
+except ImportError: # pragma: no cover
+ # python 3.7
+ import importlib_metadata
+
if TYPE_CHECKING: # pragma: no cover
+ import pandas
from rich.console import RenderableType
@@ -98,7 +97,7 @@ def get_level_text(self, record: logging.LogRecord) -> Text:
def get_logger(
name: str = LOGGER_NAME,
- level: Union[str, int] = None,
+    level: str | int | None = None,
) -> logging.LoggerAdapter:
"""Get the logger by given plugin name
@@ -299,8 +298,8 @@ def get_plugin_context(plugins: List[Any]) -> SimplugContext:
def log_rich_renderable(
- renderable: "RenderableType",
- color: str,
+ renderable: RenderableType,
+ color: str | None,
logfunc: Callable,
*args: Any,
**kwargs: Any,
@@ -348,7 +347,7 @@ def brief_list(blist: List[int]) -> str:
return ", ".join(ret)
-def pipen_banner() -> "RenderableType":
+def pipen_banner() -> RenderableType:
"""The banner for pipen
Returns:
@@ -441,7 +440,7 @@ def truncate_text(text: str, width: int, end: str = "…") -> str:
return text[: (width - len(end))] + end
-def make_df_colnames_unique_inplace(thedf: "pandas.DataFrame") -> None:
+def make_df_colnames_unique_inplace(thedf: pandas.DataFrame) -> None:
"""Make the columns of a data frame unique
Args:
diff --git a/pipen/version.py b/pipen/version.py
index 926158d3..efba4568 100644
--- a/pipen/version.py
+++ b/pipen/version.py
@@ -1,3 +1,3 @@
"""Provide version of pipen"""
-__version__ = "0.5.1"
+__version__ = "0.5.2"
diff --git a/poetry.lock b/poetry.lock
index 0b60ff3a..4394233d 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
[[package]]
name = "aiofile"
@@ -149,42 +149,37 @@ files = [
[[package]]
name = "caio"
-version = "0.9.11"
+version = "0.9.12"
description = "Asynchronous file IO for Linux MacOS or Windows."
category = "main"
optional = false
python-versions = ">=3.7, <4"
files = [
- {file = "caio-0.9.11-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:3b9b2f1d94876ee3544b78d8a33e0c0091b79fe4bdc8cc00b64d4d9c59385195"},
- {file = "caio-0.9.11-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3adc061b2005e3e3e83066b5e9ac9966634ff65db5b60abe3bf1ec7f2c7903"},
- {file = "caio-0.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b90cc66230e78fd85d94e28fd6bd73032319286f0808f60118a2e63dbf4cde5"},
- {file = "caio-0.9.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a5c0d2d7d437c13ef72061acf0f4f878ae17642b73bd1d8a5594b9c7398f98e"},
- {file = "caio-0.9.11-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b9856bcb2f523e9ac2e00e75f3bd110a927b9ed0569a9d13c029d4b3aa8a79e4"},
- {file = "caio-0.9.11-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58e184d33e94962acbc9f4e8ff9b993ca6f78794de9018a2c3060cb2c190398e"},
- {file = "caio-0.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19f8da7314bd80a8620e8e1fc65e971bbb97eaa8ead692e81d91561cc69d0a0b"},
- {file = "caio-0.9.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19255def9e395768573f002e9d01c3eaabbf6f4dda68a1c56a0862403da1faa6"},
- {file = "caio-0.9.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cd1c6d433830a7e1c2d5ff854956ea3a0a46ac6e72196824a55c00111a32d21c"},
- {file = "caio-0.9.11-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:8157587b6ee340d7510c7e37b6f5f1223ec363154b38b43b718c7cc6b77d58c2"},
- {file = "caio-0.9.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c263011c8993d4330ae93ade6973026777b23fc41dd94cbd018b0b0f4eef8a"},
- {file = "caio-0.9.11-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e81dd50f9b53db5214469f3b953bb16cfb96c05f08f334c7aaf0d9096c37689"},
- {file = "caio-0.9.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99eb9ef0543e85e49a7883b35152e216af27158ba4f5e610e784c090d550ee4c"},
- {file = "caio-0.9.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7dcd1f63e50b6a175750bf1fd92734b0a9a53108ee14afa72971b64da1d701e5"},
- {file = "caio-0.9.11-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:e02e0c241b6dc19be0f85ece23ff6861907ef1cc6a8923a81345887a79f29300"},
- {file = "caio-0.9.11-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:b60914721ebbc87d7098b55266d26ff52e7f59aa9b902eadfb185e5ca3bf913e"},
- {file = "caio-0.9.11-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22ad87a94b2062151ef78c613711c97ddcebba7399ece39e4fd88728853137d2"},
- {file = "caio-0.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e262c613d47c443c4daea03eb7c15001992136af71d4b6a344c20f0f8307f25b"},
- {file = "caio-0.9.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf65276487232bca415f62699298601537b558d8fd11b038633e555c45d0e82f"},
- {file = "caio-0.9.11-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:2086d65b157d39b0f070a3e6dfbca970c69d15547dfa9ef346506f4da968e960"},
- {file = "caio-0.9.11-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:43ed0962d712db7a7081cb300a6cf8e7924efd6508006db679e16557e60a394c"},
- {file = "caio-0.9.11-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:510f01f71361d31ce8938f691e96a7a1306c29e0475aa59a77300a61b6f0732a"},
- {file = "caio-0.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a62a1706f11661f0941bf220f56ee91279d44e4fbfdbc21f43ca2b5d7a52531"},
- {file = "caio-0.9.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e35124be09f9f26a420c279bbabb5ef0626ce0edff3e5b445e6102855453d63"},
- {file = "caio-0.9.11-py3-none-any.whl", hash = "sha256:287f7a598d8497588f0f0578eb90343b0c9ff3803b57c24e58f54793333132bf"},
- {file = "caio-0.9.11.tar.gz", hash = "sha256:33ca49789bf2d55bc3f5def36784c4624fbf16b78e9c299a2c7f6f22ac084aa6"},
+ {file = "caio-0.9.12-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df395e7e1c2025b3f32dbeff20a8b6491959fac8fbd5a9c2d452bf1f5c0ca2bf"},
+ {file = "caio-0.9.12-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:acd31b7828c6683bc46e467a32359f08c686d4c25a7e645c029a07c36685fea7"},
+ {file = "caio-0.9.12-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0b7ffb561ca5c24e7f080aaa73ebb143ae659bd69645a748b332762c389349f"},
+ {file = "caio-0.9.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6285d772ae3a55e758b1bd3bc34f095757e4af45dcc30a183becf9bbdead8ced"},
+ {file = "caio-0.9.12-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18df0caecfaa90ab9ac84ff71975fcf2342554b6f65ef69049337204b8b5af42"},
+ {file = "caio-0.9.12-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e569b83e9b41d12e094190d0e1a546610829a65609f429a1845e3250d4c5804"},
+ {file = "caio-0.9.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7833f81f58e76a3585ff59813e63fa278731c8d26fefe52ae12e783d38c8faea"},
+ {file = "caio-0.9.12-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:aa1dab77aca0b2672b9a364f14a03e764c5d811c0ae1395f661a2b8f6723f958"},
+ {file = "caio-0.9.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f366c595eda130a184f372d458d647b0ac879a46e872757648d4e29b6cea12ad"},
+ {file = "caio-0.9.12-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2759fe1957d0effb3bc38b3508b20fa37610bff9005f3926f570e6c06e901567"},
+ {file = "caio-0.9.12-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e65060717e27c702cd76a90be33250cae46be32a3009781ce382c8675fa7551"},
+ {file = "caio-0.9.12-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:a8204c6a7ea3c96057abba3da1690190b3cae0c3f03d81e9b5e3c99978b5bb6e"},
+ {file = "caio-0.9.12-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:820bb3ef23ce0d4096f822a8fea97ff2c239dd0351fa588801d2de627df9ab98"},
+ {file = "caio-0.9.12-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9a27973a03c777934decd577be577a61a188bee72272b3dc37a7cbc5eedf91"},
+ {file = "caio-0.9.12-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d6b424644ac37ce84f9dd99757d29e7cff01018d3b31f8ec0a38f2d5216165c"},
+ {file = "caio-0.9.12-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:15192a28e054cd591489af82f4b1093f15338eb201a2399a337461ff0bbd9fc9"},
+ {file = "caio-0.9.12-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:55317d2c90479a58108cfbd6816b85e584e61c48b42269c55f69cf4443857068"},
+ {file = "caio-0.9.12-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b18318d04cad7ef985fc2fb86d43f866ba34c1ee3445385f2370d6f843c05c69"},
+ {file = "caio-0.9.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:73c20d8fc7dfb140b7d57e69e6f17e1637d2ac4a9ebe0f5f8c94b56f87c5c087"},
+ {file = "caio-0.9.12-py3-none-any.whl", hash = "sha256:b81b3271478e91f18e7ac1f3e4f914ba0924364857ba8e27f03b9d0aea915ca8"},
+ {file = "caio-0.9.12.tar.gz", hash = "sha256:d2be553738dd793f8a01a60316f2c5284fbf152219241c0c67ca05f650a37a37"},
]
[package.extras]
-develop = ["aiomisc", "pytest", "pytest-cov"]
+develop = ["aiomisc-pytest", "pytest", "pytest-cov"]
[[package]]
name = "colorama"
@@ -267,14 +262,14 @@ toml = ["tomli"]
[[package]]
name = "datar"
-version = "0.11.1"
+version = "0.11.2"
description = "A Grammar of Data Manipulation in python"
category = "dev"
optional = false
python-versions = ">=3.7.1,<4.0.0"
files = [
- {file = "datar-0.11.1-py3-none-any.whl", hash = "sha256:b4014405e586b2f4361ae7a6e93aef160433d4d5e36fba1f7caa574af056a24b"},
- {file = "datar-0.11.1.tar.gz", hash = "sha256:6cb2e8f4c21908394b1ddc7569b40b036017d0a7919fc3b66da5a4216c5377e8"},
+ {file = "datar-0.11.2-py3-none-any.whl", hash = "sha256:1b1789157666869fe7e4752725b6598709d0e7846bbd385987bed88cddb82ebd"},
+ {file = "datar-0.11.2.tar.gz", hash = "sha256:8d4a31b492dd4d18d414364ea821c98bf390d31bb891c03af9328baaa364c9eb"},
]
[package.dependencies]
@@ -305,14 +300,14 @@ numpy = ">=1.17,<2.0"
[[package]]
name = "datar-pandas"
-version = "0.2.0"
+version = "0.2.1"
description = "The pandas backend for datar"
category = "dev"
optional = false
python-versions = ">=3.7.1,<4.0.0"
files = [
- {file = "datar_pandas-0.2.0-py3-none-any.whl", hash = "sha256:6af735a8c2703afe68b53ef33e9c3461142516690b34ed798693add5835b0bf4"},
- {file = "datar_pandas-0.2.0.tar.gz", hash = "sha256:5be01b58ce55fae9d9bbe4ff30ea78fd555550d1270108d09a3dcec89f6fa7f2"},
+ {file = "datar_pandas-0.2.1-py3-none-any.whl", hash = "sha256:29647a0bb11c1577a691a0304b653f8fe31382ca9f9bc92527c676952c2131a6"},
+ {file = "datar_pandas-0.2.1.tar.gz", hash = "sha256:a4887c161f08c78055ed40b2827ae7acb8267bff2b0a8afa7d8e7c2ce4689412"},
]
[package.dependencies]
@@ -873,14 +868,14 @@ plugins = ["importlib-metadata"]
[[package]]
name = "pytest"
-version = "7.2.1"
+version = "7.2.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
- {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
- {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
+ {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
+ {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
]
[package.dependencies]
@@ -1006,19 +1001,19 @@ files = [
[[package]]
name = "rich"
-version = "13.3.1"
+version = "13.3.2"
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
category = "main"
optional = false
python-versions = ">=3.7.0"
files = [
- {file = "rich-13.3.1-py3-none-any.whl", hash = "sha256:8aa57747f3fc3e977684f0176a88e789be314a99f99b43b75d1e9cb5dc6db9e9"},
- {file = "rich-13.3.1.tar.gz", hash = "sha256:125d96d20c92b946b983d0d392b84ff945461e5a06d3867e9f9e575f8697b67f"},
+ {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
+ {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
]
[package.dependencies]
-markdown-it-py = ">=2.1.0,<3.0.0"
-pygments = ">=2.14.0,<3.0.0"
+markdown-it-py = ">=2.2.0,<3.0.0"
+pygments = ">=2.13.0,<3.0.0"
typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""}
[package.extras]
diff --git a/pyproject.toml b/pyproject.toml
index 2c030c42..401835e4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "poetry.masonry.api"
[tool.poetry]
name = "pipen"
-version = "0.5.1"
+version = "0.5.2"
description = "A pipeline framework for python"
authors = [ "pwwang ",]
license = "MIT"
diff --git a/tests/test_procgroup.py b/tests/test_procgroup.py
index 865d7640..2030455b 100644
--- a/tests/test_procgroup.py
+++ b/tests/test_procgroup.py
@@ -26,7 +26,7 @@ class PG(ProcGroup):
pg = PG()
- @pg.add_proc
+ @pg.add_proc()
class P1(Proc):
pass
@@ -59,6 +59,7 @@ def p3(self):
return P3
pg = PG()
+
assert pg.starts == [P1]
assert pg.p1 is P1
@@ -67,6 +68,20 @@ def p3(self):
assert pg.procs == {"P1": P1, "P2": P2, "P3": P3}
+def test_define_proc_wrong_return():
+ class PG(ProcGroup):
+ @ProcGroup.add_proc
+ def p1(self):
+ return None
+
+ @ProcGroup.add_proc
+ def p2(self):
+ return 1
+
+ with pytest.raises(ValueError):
+ PG()
+
+
def test_as_pipen():
class PG(ProcGroup):
"""A pipeline group"""