From 2c1eb64c780bdd30da525c9d3b51345746e0b47f Mon Sep 17 00:00:00 2001 From: Ejiroghene Laurel Dafe <99752279+EjiroLaurelD@users.noreply.github.com> Date: Thu, 7 Mar 2024 11:59:12 +0100 Subject: [PATCH 1/8] Reduce flake8 max-line-length (#3187) --- .flake8 | 2 +- parsl/dataflow/taskrecord.py | 4 +++- parsl/executors/high_throughput/executor.py | 4 ++-- parsl/executors/high_throughput/interchange.py | 7 ++++++- parsl/jobs/strategy.py | 3 ++- parsl/providers/slurm/slurm.py | 7 ++++++- 6 files changed, 20 insertions(+), 7 deletions(-) diff --git a/.flake8 b/.flake8 index 4c434c7518..25b4d033d9 100644 --- a/.flake8 +++ b/.flake8 @@ -8,7 +8,7 @@ # W504: line break after binary operator # (Raised by flake8 even when it is followed) ignore = E126, E402, E129, W504 -max-line-length = 151 +max-line-length = 150 exclude = test_import_fail.py, parsl/executors/workqueue/parsl_coprocess.py # E741 disallows ambiguous single letter names which look like numbers diff --git a/parsl/dataflow/taskrecord.py b/parsl/dataflow/taskrecord.py index 34d5ef4ca5..07da5386f8 100644 --- a/parsl/dataflow/taskrecord.py +++ b/parsl/dataflow/taskrecord.py @@ -70,7 +70,9 @@ class TaskRecord(TypedDict, total=False): # these three could be more strongly typed perhaps but I'm not thinking about that now func: Callable fn_hash: str - args: Sequence[Any] # in some places we uses a Tuple[Any, ...] and in some places a List[Any]. This is an attempt to correctly type both of those. + args: Sequence[Any] + # in some places we uses a Tuple[Any, ...] and in some places a List[Any]. + # This is an attempt to correctly type both of those. kwargs: Dict[str, Any] time_invoked: Optional[datetime.datetime] diff --git a/parsl/executors/high_throughput/executor.py b/parsl/executors/high_throughput/executor.py index ee326c5f66..422ef1b50b 100644 --- a/parsl/executors/high_throughput/executor.py +++ b/parsl/executors/high_throughput/executor.py @@ -629,8 +629,8 @@ def submit(self, func, resource_specification, *args, **kwargs): """Submits work to the outgoing_q. The outgoing_q is an external process listens on this - queue for new work. This method behaves like a - submit call as described here `Python docs: `_ + queue for new work. This method behaves like a submit call as described here `Python docs: `_ Args: - func (callable) : Callable function diff --git a/parsl/executors/high_throughput/interchange.py b/parsl/executors/high_throughput/interchange.py index 0c96cc51f1..11d5ed2ee4 100644 --- a/parsl/executors/high_throughput/interchange.py +++ b/parsl/executors/high_throughput/interchange.py @@ -392,7 +392,12 @@ def start(self) -> None: logger.info("Processed {} tasks in {} seconds".format(self.count, delta)) logger.warning("Exiting") - def process_task_outgoing_incoming(self, interesting_managers: Set[bytes], hub_channel: Optional[zmq.Socket], kill_event: threading.Event) -> None: + def process_task_outgoing_incoming( + self, + interesting_managers: Set[bytes], + hub_channel: Optional[zmq.Socket], + kill_event: threading.Event + ) -> None: """Process one message from manager on the task_outgoing channel. Note that this message flow is in contradiction to the name of the channel - it is not an outgoing message and it is not a task. 
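The submit() docstring reflowed above describes the same Future-based contract as concurrent.futures.Executor.submit. As an illustrative sketch only (not part of this patch; it assumes the stock parsl.configs.htex_local example configuration is importable), the user-facing pattern looks like this:

    import parsl
    from parsl.configs.htex_local import config

    @parsl.python_app
    def add(x, y):
        return x + y

    parsl.load(config)    # starts a local HighThroughputExecutor
    fut = add(2, 3)       # app invocation returns a Future, as submit() does
    print(fut.result())   # blocks until the task completes, then prints 5
    parsl.dfk().cleanup()

Under the hood, HighThroughputExecutor.submit(func, resource_specification, *args, **kwargs) is what places each task on the outgoing_q described in that docstring.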
diff --git a/parsl/jobs/strategy.py b/parsl/jobs/strategy.py index b396d43e37..b04b4eb6c6 100644 --- a/parsl/jobs/strategy.py +++ b/parsl/jobs/strategy.py @@ -245,7 +245,8 @@ def _general_strategy(self, status_list, *, strategy_type): exec_status.scale_in(active_blocks - min_blocks) else: - logger.debug(f"Idle time {idle_duration}s is less than max_idletime {self.max_idletime}s for executor {label}; not scaling in") + logger.debug( + f"Idle time {idle_duration}s is less than max_idletime {self.max_idletime}s for executor {label}; not scaling in") # Case 2 # More tasks than the available slots. diff --git a/parsl/providers/slurm/slurm.py b/parsl/providers/slurm/slurm.py index e792bc07c6..2542b54d42 100644 --- a/parsl/providers/slurm/slurm.py +++ b/parsl/providers/slurm/slurm.py @@ -290,7 +290,12 @@ def submit(self, command: str, tasks_per_node: int, job_name="parsl.slurm") -> s else: logger.error("Submit command failed") logger.error("Retcode:%s STDOUT:%s STDERR:%s", retcode, stdout.strip(), stderr.strip()) - raise SubmitException(job_name, "Could not read job ID from submit command standard output", stdout=stdout, stderr=stderr, retcode=retcode) + raise SubmitException( + job_name, "Could not read job ID from submit command standard output", + stdout=stdout, + stderr=stderr, + retcode=retcode + ) def cancel(self, job_ids): ''' Cancels the jobs specified by a list of job ids From 6f7e38a2b1c634eee60171e545536f927f04a9ef Mon Sep 17 00:00:00 2001 From: Sobia <157086698+SobiaAman@users.noreply.github.com> Date: Thu, 7 Mar 2024 18:06:48 +0500 Subject: [PATCH 2/8] Reduce flake8 max-line-length (#3190) --- .flake8 | 2 +- parsl/monitoring/db_manager.py | 6 ++++-- parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py | 3 ++- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.flake8 b/.flake8 index 25b4d033d9..6820294264 100644 --- a/.flake8 +++ b/.flake8 @@ -8,7 +8,7 @@ # W504: line break after binary operator # (Raised by flake8 even when it is followed) ignore = E126, E402, E129, W504 -max-line-length = 150 +max-line-length = 149 exclude = test_import_fail.py, parsl/executors/workqueue/parsl_coprocess.py # E741 disallows ambiguous single letter names which look like numbers diff --git a/parsl/monitoring/db_manager.py b/parsl/monitoring/db_manager.py index 84e25619a4..0ee4d29e03 100644 --- a/parsl/monitoring/db_manager.py +++ b/parsl/monitoring/db_manager.py @@ -583,8 +583,10 @@ def _migrate_logs_to_internal(self, logs_queue: queue.Queue, queue_tag: str, kil self._dispatch_to_internal(x) elif queue_tag == 'resource': assert isinstance(x, tuple), "_migrate_logs_to_internal was expecting a tuple, got {}".format(x) - assert x[0] == MessageType.RESOURCE_INFO, \ - "_migrate_logs_to_internal can only migrate RESOURCE_INFO message from resource queue, got tag {}, message {}".format(x[0], x) + assert x[0] == MessageType.RESOURCE_INFO, ( + "_migrate_logs_to_internal can only migrate RESOURCE_INFO message from resource queue, " + "got tag {}, message {}".format(x[0], x) + ) self._dispatch_to_internal(x) elif queue_tag == 'node': assert len(x) == 2, "expected message tuple to have exactly two elements" diff --git a/parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py b/parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py index 94a20af14e..5d076b5565 100644 --- a/parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py +++ b/parsl/tests/test_scaling/test_scale_down_htex_auto_scale.py @@ -98,7 +98,8 @@ def test_scale_out(tmpd_cwd, try_assert): assert 
dfk.executors['htex_local'].outstanding == 0 - # now we can launch one "long" task - and what should happen is that the connected_managers count "eventually" (?) converges to 1 and stays there. + # now we can launch one "long" task - + # and what should happen is that the connected_managers count "eventually" (?) converges to 1 and stays there. finish_path = tmpd_cwd / "stage2_workers_may_continue" From 74a0e6e0a56d78803c41a4bfc7a3bad1217524de Mon Sep 17 00:00:00 2001 From: christailu <44705085+christailu@users.noreply.github.com> Date: Thu, 7 Mar 2024 09:45:47 -0600 Subject: [PATCH 3/8] Added NSF badge for award 150475 (#3181) --- README.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.rst b/README.rst index b3f3652090..3402b76d66 100644 --- a/README.rst +++ b/README.rst @@ -59,6 +59,10 @@ then explore the `parallel computing patterns Date: Thu, 7 Mar 2024 21:35:35 +0530 Subject: [PATCH 4/8] Reduce flake8 max-line-length (#3199) --- .flake8 | 2 +- parsl/dataflow/dflow.py | 5 ++++- parsl/executors/taskvine/executor.py | 5 ++++- parsl/tests/configs/user_opts.py | 6 +++++- 4 files changed, 14 insertions(+), 4 deletions(-) diff --git a/.flake8 b/.flake8 index 6820294264..7b7d8a1ab5 100644 --- a/.flake8 +++ b/.flake8 @@ -8,7 +8,7 @@ # W504: line break after binary operator # (Raised by flake8 even when it is followed) ignore = E126, E402, E129, W504 -max-line-length = 149 +max-line-length = 148 exclude = test_import_fail.py, parsl/executors/workqueue/parsl_coprocess.py # E741 disallows ambiguous single letter names which look like numbers diff --git a/parsl/dataflow/dflow.py b/parsl/dataflow/dflow.py index e77c0da3eb..f57c5327a8 100644 --- a/parsl/dataflow/dflow.py +++ b/parsl/dataflow/dflow.py @@ -722,7 +722,10 @@ def launch_task(self, task_record: TaskRecord) -> Future: self._send_task_log_info(task_record) if hasattr(exec_fu, "parsl_executor_task_id"): - logger.info(f"Parsl task {task_id} try {try_id} launched on executor {executor.label} with executor id {exec_fu.parsl_executor_task_id}") + logger.info( + f"Parsl task {task_id} try {try_id} launched on executor {executor.label} " + f"with executor id {exec_fu.parsl_executor_task_id}" + ) else: logger.info(f"Parsl task {task_id} try {try_id} launched on executor {executor.label}") diff --git a/parsl/executors/taskvine/executor.py b/parsl/executors/taskvine/executor.py index cb958ff624..d217cb5675 100644 --- a/parsl/executors/taskvine/executor.py +++ b/parsl/executors/taskvine/executor.py @@ -228,7 +228,10 @@ def __create_data_and_logging_dirs(self): # factory logs go with manager logs regardless self.factory_config.scratch_dir = self.manager_config.vine_log_dir logger.debug(f"Function data directory: {self._function_data_dir}, log directory: {log_dir}") - logger.debug(f"TaskVine manager log directory: {self.manager_config.vine_log_dir}, factory log directory: {self.factory_config.scratch_dir}") + logger.debug( + f"TaskVine manager log directory: {self.manager_config.vine_log_dir}, " + f"factory log directory: {self.factory_config.scratch_dir}" + ) def start(self): """Create submit process and collector thread to create, send, and diff --git a/parsl/tests/configs/user_opts.py b/parsl/tests/configs/user_opts.py index 8efc5c5074..bcab09ed03 100644 --- a/parsl/tests/configs/user_opts.py +++ b/parsl/tests/configs/user_opts.py @@ -52,7 +52,11 @@ # 'username': MIDWAY_USERNAME, # 'script_dir': '/scratch/midway2/{}/parsl_scripts'.format(MIDWAY_USERNAME), # 'scheduler_options': "", - # 'worker_init': 'cd 
/scratch/midway2/{}/parsl_scripts; module load Anaconda3/5.1.0; source activate parsl_testing;'.format(MIDWAY_USERNAME), + # 'worker_init': ( + # 'cd /scratch/midway2/{}/parsl_scripts; ' + # 'module load Anaconda3/5.1.0; ' + # 'source activate parsl_testing;' + # ).format(MIDWAY_USERNAME), # }, # 'osg': { # 'username': OSG_USERNAME, From 11d78b788c79cf7d0179378aa312ce736dda994a Mon Sep 17 00:00:00 2001 From: Racheal Date: Thu, 7 Mar 2024 18:24:21 +0100 Subject: [PATCH 5/8] Move historical documentation pages into a new documentation book (#3174) Co-authored-by: Yadu Nand Babuji --- docs/devguide/index.rst | 2 -- docs/{devguide => historical}/changelog.rst | 0 docs/{devguide => historical}/design.rst | 0 docs/historical/index.rst | 9 +++++++++ docs/{userguide => historical}/performance.rst | 0 docs/index.rst | 1 + docs/userguide/index.rst | 1 - 7 files changed, 10 insertions(+), 3 deletions(-) rename docs/{devguide => historical}/changelog.rst (100%) rename docs/{devguide => historical}/design.rst (100%) create mode 100644 docs/historical/index.rst rename docs/{userguide => historical}/performance.rst (100%) diff --git a/docs/devguide/index.rst b/docs/devguide/index.rst index 06c8b4202d..ec99d42103 100644 --- a/docs/devguide/index.rst +++ b/docs/devguide/index.rst @@ -5,8 +5,6 @@ Developer documentation :maxdepth: 3 contributing - changelog - design roadmap packaging ../README diff --git a/docs/devguide/changelog.rst b/docs/historical/changelog.rst similarity index 100% rename from docs/devguide/changelog.rst rename to docs/historical/changelog.rst diff --git a/docs/devguide/design.rst b/docs/historical/design.rst similarity index 100% rename from docs/devguide/design.rst rename to docs/historical/design.rst diff --git a/docs/historical/index.rst b/docs/historical/index.rst new file mode 100644 index 0000000000..5575a3557e --- /dev/null +++ b/docs/historical/index.rst @@ -0,0 +1,9 @@ +Historical Documents +==================== + +.. 
toctree:: + :maxdepth: 2 + + changelog + design + performance diff --git a/docs/userguide/performance.rst b/docs/historical/performance.rst similarity index 100% rename from docs/userguide/performance.rst rename to docs/historical/performance.rst diff --git a/docs/index.rst b/docs/index.rst index ec0756542c..61a8541a6c 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -108,6 +108,7 @@ Table of Contents faq reference devguide/index + historical/index Indices and tables diff --git a/docs/userguide/index.rst b/docs/userguide/index.rst index 3d667816a5..12254cd6e2 100644 --- a/docs/userguide/index.rst +++ b/docs/userguide/index.rst @@ -21,4 +21,3 @@ User guide usage_tracking plugins parsl_perf - performance From 49cca8acc8a2abef475c8b00c08731ed83bdd932 Mon Sep 17 00:00:00 2001 From: Amarachi Crystal Omereife <60928319+marameref@users.noreply.github.com> Date: Thu, 7 Mar 2024 19:53:36 +0100 Subject: [PATCH 6/8] Reduce max-length of flake8 (#3201) * Reduce max-length from 148 to 147 * Delete hello1.txt * Delete output.txt --- .flake8 | 2 +- parsl/addresses.py | 2 +- parsl/dataflow/dflow.py | 6 +++--- parsl/executors/taskvine/executor.py | 3 +-- parsl/monitoring/db_manager.py | 8 +++++++- parsl/tests/configs/user_opts.py | 8 +++----- 6 files changed, 16 insertions(+), 13 deletions(-) diff --git a/.flake8 b/.flake8 index 7b7d8a1ab5..b78c9f5bf5 100644 --- a/.flake8 +++ b/.flake8 @@ -8,7 +8,7 @@ # W504: line break after binary operator # (Raised by flake8 even when it is followed) ignore = E126, E402, E129, W504 -max-line-length = 148 +max-line-length = 147 exclude = test_import_fail.py, parsl/executors/workqueue/parsl_coprocess.py # E741 disallows ambiguous single letter names which look like numbers diff --git a/parsl/addresses.py b/parsl/addresses.py index 68df9af4b7..424075006b 100644 --- a/parsl/addresses.py +++ b/parsl/addresses.py @@ -81,7 +81,7 @@ def address_by_hostname() -> str: def address_by_interface(ifname: str) -> str: """Returns the IP address of the given interface name, e.g. 
'eth0' - This is taken from a Stack Overflow answer: https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-of-eth0-in-python#24196955 + This is from a Stack Overflow answer: https://stackoverflow.com/questions/24196932/how-can-i-get-the-ip-address-of-eth0-in-python#24196955 Parameters ---------- diff --git a/parsl/dataflow/dflow.py b/parsl/dataflow/dflow.py index f57c5327a8..00310a371c 100644 --- a/parsl/dataflow/dflow.py +++ b/parsl/dataflow/dflow.py @@ -723,9 +723,9 @@ def launch_task(self, task_record: TaskRecord) -> Future: if hasattr(exec_fu, "parsl_executor_task_id"): logger.info( - f"Parsl task {task_id} try {try_id} launched on executor {executor.label} " - f"with executor id {exec_fu.parsl_executor_task_id}" - ) + f"Parsl task {task_id} try {try_id} launched on executor {executor.label} " + f"with executor id {exec_fu.parsl_executor_task_id}") + else: logger.info(f"Parsl task {task_id} try {try_id} launched on executor {executor.label}") diff --git a/parsl/executors/taskvine/executor.py b/parsl/executors/taskvine/executor.py index d217cb5675..207675147e 100644 --- a/parsl/executors/taskvine/executor.py +++ b/parsl/executors/taskvine/executor.py @@ -230,8 +230,7 @@ def __create_data_and_logging_dirs(self): logger.debug(f"Function data directory: {self._function_data_dir}, log directory: {log_dir}") logger.debug( f"TaskVine manager log directory: {self.manager_config.vine_log_dir}, " - f"factory log directory: {self.factory_config.scratch_dir}" - ) + f"factory log directory: {self.factory_config.scratch_dir}") def start(self): """Create submit process and collector thread to create, send, and diff --git a/parsl/monitoring/db_manager.py b/parsl/monitoring/db_manager.py index 0ee4d29e03..eec2b4323a 100644 --- a/parsl/monitoring/db_manager.py +++ b/parsl/monitoring/db_manager.py @@ -103,7 +103,13 @@ def insert(self, *, table: str, messages: List[MonitoringMessage]) -> None: def rollback(self) -> None: self.session.rollback() - def _generate_mappings(self, table: Table, columns: Optional[List[str]] = None, messages: List[MonitoringMessage] = []) -> List[Dict[str, Any]]: + def _generate_mappings( + self, + table: Table, + columns: Optional[List[str]] = None, + messages: List[MonitoringMessage] = [], + ) -> List[Dict[str, Any]]: + mappings = [] for msg in messages: m = {} diff --git a/parsl/tests/configs/user_opts.py b/parsl/tests/configs/user_opts.py index bcab09ed03..979d834e49 100644 --- a/parsl/tests/configs/user_opts.py +++ b/parsl/tests/configs/user_opts.py @@ -52,11 +52,9 @@ # 'username': MIDWAY_USERNAME, # 'script_dir': '/scratch/midway2/{}/parsl_scripts'.format(MIDWAY_USERNAME), # 'scheduler_options': "", - # 'worker_init': ( - # 'cd /scratch/midway2/{}/parsl_scripts; ' - # 'module load Anaconda3/5.1.0; ' - # 'source activate parsl_testing;' - # ).format(MIDWAY_USERNAME), + # 'worker_init': 'cd /scratch/midway2/{}/parsl_scripts; ' + # 'module load Anaconda3/5.1.0; source activate parsl_testing;' + # .format(MIDWAY_USERNAME), # }, # 'osg': { # 'username': OSG_USERNAME, From 2d4bc09d8cad5b46aa7472234cf43a37bd4c23ee Mon Sep 17 00:00:00 2001 From: Ben Clifford Date: Mon, 11 Mar 2024 06:31:38 -0500 Subject: [PATCH 7/8] Change API usage to explict start/end style (#3230) Previously the message format was driven by a small "invoke me repeatedly, I'll change my behaviour based on how many times you've invoked me" state machine. 
This PR removes that state machine and relies on the DFK knowing whether it is starting up or shutting down - that information is available statically inside the DFK code. --- parsl/dataflow/dflow.py | 4 ++-- parsl/usage_tracking/usage.py | 14 +++++--------- 2 files changed, 7 insertions(+), 11 deletions(-) diff --git a/parsl/dataflow/dflow.py b/parsl/dataflow/dflow.py index 00310a371c..211bfa5e9d 100644 --- a/parsl/dataflow/dflow.py +++ b/parsl/dataflow/dflow.py @@ -95,7 +95,7 @@ def __init__(self, config: Config) -> None: self.checkpoint_lock = threading.Lock() self.usage_tracker = UsageTracker(self) - self.usage_tracker.send_message() + self.usage_tracker.send_start_message() self.task_state_counts_lock = threading.Lock() self.task_state_counts = {state: 0 for state in States} @@ -1205,7 +1205,7 @@ def cleanup(self) -> None: self._checkpoint_timer.close() # Send final stats - self.usage_tracker.send_message() + self.usage_tracker.send_end_message() self.usage_tracker.close() logger.info("Closing job status poller") diff --git a/parsl/usage_tracking/usage.py b/parsl/usage_tracking/usage.py index d13731f478..01db16bbfb 100644 --- a/parsl/usage_tracking/usage.py +++ b/parsl/usage_tracking/usage.py @@ -109,7 +109,6 @@ def __init__(self, dfk, port=50077, sys.version_info.micro) self.tracking_enabled = self.check_tracking_enabled() logger.debug("Tracking status: {}".format(self.tracking_enabled)) - self.initialized = False # Once first message is sent this will be True def check_tracking_enabled(self): """Check if tracking is enabled. @@ -176,15 +175,12 @@ def send_UDP_message(self, message: str) -> None: except Exception as e: logger.debug("Usage tracking failed: {}".format(e)) - def send_message(self) -> None: - """Send message over UDP. - """ - if not self.initialized: - message = self.construct_start_message() - self.initialized = True - else: - message = self.construct_end_message() + def send_start_message(self) -> None: + message = self.construct_start_message() + self.send_UDP_message(message) + def send_end_message(self) -> None: + message = self.construct_end_message() self.send_UDP_message(message) def close(self, timeout: float = 10.0) -> None: From e03a97b9877e9a83bfd0cfddd470bd692b54fa55 Mon Sep 17 00:00:00 2001 From: Colin Thomas <33940547+colinthomas-z80@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:27:34 -0400 Subject: [PATCH 8/8] TaskVine: add config and links to CCL documentation (#3205) --- docs/userguide/configuring.rst | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/userguide/configuring.rst b/docs/userguide/configuring.rst index 5233b30ddc..79825a303d 100644 --- a/docs/userguide/configuring.rst +++ b/docs/userguide/configuring.rst @@ -484,12 +484,12 @@ This system uses Grid Engine which Parsl interfaces with using the `parsl.provid .. literalinclude:: ../../parsl/configs/cc_in2p3.py -CCL (Notre Dame, with Work Queue) ---------------------------------- +CCL (Notre Dame, TaskVine) +-------------------------- -.. image:: http://ccl.cse.nd.edu/software/workqueue/WorkQueueLogoSmall.png +.. image:: https://ccl.cse.nd.edu/software/taskvine/taskvine-logo.png -To utilize Work Queue with Parsl, please install the full CCTools software package within an appropriate Anaconda or Miniconda environment +To utilize TaskVine with Parsl, please install the full CCTools software package within an appropriate Anaconda or Miniconda environment (instructions for installing Miniconda can be found `in the Conda install guide `_): .. 
code-block:: bash @@ -498,17 +498,17 @@ To utilize Work Queue with Parsl, please install the full CCTools software packa $ conda activate $ conda install -y -c conda-forge ndcctools parsl -This creates a Conda environment on your machine with all the necessary tools and setup needed to utilize Work Queue with the Parsl library. +This creates a Conda environment on your machine with all the necessary tools and setup needed to utilize TaskVine with the Parsl library. + +The following snippet shows an example configuration for using the Parsl/TaskVine executor to run applications on the local machine. +This examples uses the `parsl.executors.taskvine.TaskVineExecutor` to schedule tasks, and a local worker will be started automatically. +For more information on using TaskVine, including configurations for remote execution, visit the +`TaskVine/Parsl documentation online `_. -The following snippet shows an example configuration for using the Work Queue distributed framework to run applications on remote machines at large. -This examples uses the `parsl.executors.WorkQueueExecutor` to schedule tasks locally, -and assumes that Work Queue workers have been externally connected to the master using the -`work_queue_factory `_ or -`condor_submit_workers `_ command line utilities from CCTools. -For more information on using Work Queue or to get help with running applications using CCTools, -visit the `CCTools documentation online `_. +.. literalinclude:: ../../parsl/configs/vineex_local.py -.. literalinclude:: ../../parsl/configs/wqex_local.py +TaskVine's predecessor, WorkQueue, may continue to be used with Parsl. +For more information on using WorkQueue visit the `CCTools documentation online `_. Expanse (SDSC) --------------
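The vineex_local.py file referenced by the new literalinclude directive in the TaskVine section above is not itself shown in this patch. As a rough sketch of what such a local TaskVine configuration contains (class and parameter names assumed from the TaskVine executor API mentioned in the docs text; consult the real parsl/configs/vineex_local.py in the repository), it is approximately:

    from parsl.config import Config
    from parsl.executors.taskvine import TaskVineExecutor, TaskVineManagerConfig

    # One TaskVine executor listening on a fixed manager port; as the updated
    # docs text notes, a local worker is started automatically, so nothing
    # needs to be submitted externally for this configuration.
    config = Config(
        executors=[
            TaskVineExecutor(
                label="parsl-vine-example",
                manager_config=TaskVineManagerConfig(port=9123),
            )
        ]
    )

Loading this with parsl.load(config) is then enough to run apps through TaskVine on the local machine.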