From 54c69a9046446f6ea9bfa5fc8fb65cfcae88599a Mon Sep 17 00:00:00 2001 From: ddlBoJack Date: Sun, 5 May 2024 18:57:37 +0800 Subject: [PATCH] Update README and clean up files --- CODE_OF_CONDUCT.md | 80 ----- CONTRIBUTING.md | 59 ---- LICENSE | 125 -------- README.md | 329 ++++----------------- UPDATES.md | 19 -- USE_POLICY.md | 49 --- docs/logo.jpg | Bin 0 -> 98679 bytes examples/aac_audiocaps/README.md | 8 +- examples/aac_audiocaps/conf/prompt.yaml | 1 - examples/asr_librispeech/README.md | 23 ++ scripts/finetune_aac_llama.sh | 103 ------- scripts/finetune_asr_llama.sh | 104 ------- scripts/finetune_asr_tinyllama.sh | 100 ------- scripts/finetune_asr_vicuna.sh | 132 --------- scripts/finetune_avsr.sh | 107 ------- scripts/finetune_avsr_debug.sh | 104 ------- scripts/finetune_avsr_vicuna_debug_0113.sh | 53 ---- scripts/finetune_echat.sh | 102 ------- scripts/finetune_mls_aya.sh | 113 ------- scripts/finetune_mls_llama.sh | 90 ------ scripts/finetune_mls_vicuna.sh | 90 ------ scripts/inference_asr.sh | 48 --- scripts/inference_asr_batch.sh | 65 ---- scripts/inference_asr_batch_2.sh | 67 ----- scripts/inference_echat.sh | 42 --- 25 files changed, 83 insertions(+), 1930 deletions(-) delete mode 100644 CODE_OF_CONDUCT.md delete mode 100644 CONTRIBUTING.md delete mode 100644 LICENSE delete mode 100644 UPDATES.md delete mode 100644 USE_POLICY.md create mode 100644 docs/logo.jpg delete mode 100644 scripts/finetune_aac_llama.sh delete mode 100644 scripts/finetune_asr_llama.sh delete mode 100644 scripts/finetune_asr_tinyllama.sh delete mode 100644 scripts/finetune_asr_vicuna.sh delete mode 100644 scripts/finetune_avsr.sh delete mode 100644 scripts/finetune_avsr_debug.sh delete mode 100644 scripts/finetune_avsr_vicuna_debug_0113.sh delete mode 100644 scripts/finetune_echat.sh delete mode 100755 scripts/finetune_mls_aya.sh delete mode 100755 scripts/finetune_mls_llama.sh delete mode 100755 scripts/finetune_mls_vicuna.sh delete mode 100644 scripts/inference_asr.sh delete mode 100755 scripts/inference_asr_batch.sh delete mode 100755 scripts/inference_asr_batch_2.sh delete mode 100644 scripts/inference_echat.sh diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md deleted file mode 100644 index 08b500a2..00000000 --- a/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,80 +0,0 @@ -# Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as -contributors and maintainers pledge to make participation in our project and -our community a harassment-free experience for everyone, regardless of age, body -size, disability, ethnicity, sex characteristics, gender identity and expression, -level of experience, education, socio-economic status, nationality, personal -appearance, race, religion, or sexual identity and orientation. 
- -## Our Standards - -Examples of behavior that contributes to creating a positive environment -include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or - advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic - address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable -behavior and are expected to take appropriate and fair corrective action in -response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or -reject comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct, or to ban temporarily or -permanently any contributor for other behaviors that they deem inappropriate, -threatening, offensive, or harmful. - -## Scope - -This Code of Conduct applies within all project spaces, and it also applies when -an individual is representing the project or its community in public spaces. -Examples of representing a project or community include using an official -project e-mail address, posting via an official social media account, or acting -as an appointed representative at an online or offline event. Representation of -a project may be further defined and clarified by project maintainers. - -This Code of Conduct also applies outside the project spaces when there is a -reasonable belief that an individual's behavior may have a negative impact on -the project or its community. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported by contacting the project team at . All -complaints will be reviewed and investigated and will result in a response that -is deemed necessary and appropriate to the circumstances. The project team is -obligated to maintain confidentiality with regard to the reporter of an incident. -Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good -faith may face temporary or permanent repercussions as determined by other -members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, -available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html - -[homepage]: https://www.contributor-covenant.org - -For answers to common questions about this code of conduct, see -https://www.contributor-covenant.org/faq diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 0fc2e450..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,59 +0,0 @@ -# Contributing to llama-recipes -We want to make contributing to this project as easy and transparent as -possible. - -## Pull Requests -We actively welcome your pull requests. - -1. Fork the repo and create your branch from `main`. -2. If you've added code that should be tested, add tests. -3. 
If you've changed APIs, update the documentation. -4. Ensure the test suite passes. -5. Make sure your code lints. -6. If you haven't already, complete the Contributor License Agreement ("CLA"). - -## Contributor License Agreement ("CLA") -In order to accept your pull request, we need you to submit a CLA. You only need -to do this once to work on any of Facebook's open source projects. - -Complete your CLA here: - -## Issues -We use GitHub issues to track public bugs. Please ensure your description is -clear and has sufficient instructions to be able to reproduce the issue. - -Facebook has a [bounty program](https://www.facebook.com/whitehat/) for the safe -disclosure of security bugs. In those cases, please go through the process -outlined on that page and do not file a public issue. - -## License -By contributing to llama-recipes, you agree that your contributions will be licensed -under the LICENSE file in the root directory of this source tree. - -## Tests -Llama-recipes currently comes with a basic set of unit tests (covering the parts of the main training script and training loop) but we strive to increase our test coverage in the future in order to mitigate silent errors. -When submitting a new feature PR please make sure to cover the newly added code with a unit test. -Run the tests locally to ensure the new feature does not break an old one. -We use **pytest** for our unit tests and to run them locally you need to install llama-recipes with optional [tests] dependencies enabled: -``` -pip install --extra-index-url https://download.pytorch.org/whl/test/cu118 llama-recipes[tests] -``` -For development and contributing to llama-recipes please install from source with all optional dependencies: -``` -pip install -U pip setuptools -pip install --extra-index-url https://download.pytorch.org/whl/test/cu118 -e .[tests,auditnlg,vllm] -``` -The unit tests can be found in the [tests](./tests/) folder and you can run them from the main directory using: -``` -python -m pytest tests/ -``` -To run all tests of a single file you can give the filename directly: -``` -python -m pytest tests/test_finetuning.py -``` -To run a specific test you can filter for its name with -``` -python -m pytest tests/test_finetuning.py -k test_finetuning_peft -``` -To add a new test simply create a new test file under the tests folder (filename has to start with `test_`). -Group tests spanning the same feature in the same file and create a subfolder if the tests are very extensive. \ No newline at end of file diff --git a/LICENSE b/LICENSE deleted file mode 100644 index bbe189a3..00000000 --- a/LICENSE +++ /dev/null @@ -1,125 +0,0 @@ -LLAMA 2 COMMUNITY LICENSE AGREEMENT -Llama 2 Version Release Date: July 18, 2023 - -"Agreement" means the terms and conditions for use, reproduction, distribution and -modification of the Llama Materials set forth herein. - -"Documentation" means the specifications, manuals and documentation -accompanying Llama 2 distributed by Meta at ai.meta.com/resources/models-and- -libraries/llama-downloads/. - -"Licensee" or "you" means you, or your employer or any other person or entity (if -you are entering into this Agreement on such person or entity's behalf), of the age -required under applicable laws, rules or regulations to provide legal consent and that -has legal authority to bind your employer or such other person or entity if you are -entering in this Agreement on their behalf. 
- -"Llama 2" means the foundational large language models and software and -algorithms, including machine-learning model code, trained model weights, -inference-enabling code, training-enabling code, fine-tuning enabling code and other -elements of the foregoing distributed by Meta at ai.meta.com/resources/models-and- -libraries/llama-downloads/. - -"Llama Materials" means, collectively, Meta's proprietary Llama 2 and -Documentation (and any portion thereof) made available under this Agreement. - -"Meta" or "we" means Meta Platforms Ireland Limited (if you are located in or, if you -are an entity, your principal place of business is in the EEA or Switzerland) and Meta -Platforms, Inc. (if you are located outside of the EEA or Switzerland). - -By clicking "I Accept" below or by using or distributing any portion or element of the -Llama Materials, you agree to be bound by this Agreement. - -1. License Rights and Redistribution. - - a. Grant of Rights. You are granted a non-exclusive, worldwide, non- -transferable and royalty-free limited license under Meta's intellectual property or -other rights owned by Meta embodied in the Llama Materials to use, reproduce, -distribute, copy, create derivative works of, and make modifications to the Llama -Materials. - - b. Redistribution and Use. - - i. If you distribute or make the Llama Materials, or any derivative works -thereof, available to a third party, you shall provide a copy of this Agreement to such -third party. - ii. If you receive Llama Materials, or any derivative works thereof, from -a Licensee as part of an integrated end user product, then Section 2 of this -Agreement will not apply to you. - - iii. You must retain in all copies of the Llama Materials that you -distribute the following attribution notice within a "Notice" text file distributed as a -part of such copies: "Llama 2 is licensed under the LLAMA 2 Community License, -Copyright (c) Meta Platforms, Inc. All Rights Reserved." - - iv. Your use of the Llama Materials must comply with applicable laws -and regulations (including trade compliance laws and regulations) and adhere to the -Acceptable Use Policy for the Llama Materials (available at -https://ai.meta.com/llama/use-policy), which is hereby incorporated by reference into -this Agreement. - - v. You will not use the Llama Materials or any output or results of the -Llama Materials to improve any other large language model (excluding Llama 2 or -derivative works thereof). - -2. Additional Commercial Terms. If, on the Llama 2 version release date, the -monthly active users of the products or services made available by or for Licensee, -or Licensee's affiliates, is greater than 700 million monthly active users in the -preceding calendar month, you must request a license from Meta, which Meta may -grant to you in its sole discretion, and you are not authorized to exercise any of the -rights under this Agreement unless or until Meta otherwise expressly grants you -such rights. - -3. Disclaimer of Warranty. UNLESS REQUIRED BY APPLICABLE LAW, THE -LLAMA MATERIALS AND ANY OUTPUT AND RESULTS THEREFROM ARE -PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, -EITHER EXPRESS OR IMPLIED, INCLUDING, WITHOUT LIMITATION, ANY -WARRANTIES OF TITLE, NON-INFRINGEMENT, MERCHANTABILITY, OR -FITNESS FOR A PARTICULAR PURPOSE. YOU ARE SOLELY RESPONSIBLE -FOR DETERMINING THE APPROPRIATENESS OF USING OR REDISTRIBUTING -THE LLAMA MATERIALS AND ASSUME ANY RISKS ASSOCIATED WITH YOUR -USE OF THE LLAMA MATERIALS AND ANY OUTPUT AND RESULTS. - -4. 
Limitation of Liability. IN NO EVENT WILL META OR ITS AFFILIATES BE -LIABLE UNDER ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, TORT, -NEGLIGENCE, PRODUCTS LIABILITY, OR OTHERWISE, ARISING OUT OF THIS -AGREEMENT, FOR ANY LOST PROFITS OR ANY INDIRECT, SPECIAL, -CONSEQUENTIAL, INCIDENTAL, EXEMPLARY OR PUNITIVE DAMAGES, EVEN -IF META OR ITS AFFILIATES HAVE BEEN ADVISED OF THE POSSIBILITY OF -ANY OF THE FOREGOING. - -5. Intellectual Property. - - a. No trademark licenses are granted under this Agreement, and in -connection with the Llama Materials, neither Meta nor Licensee may use any name -or mark owned by or associated with the other or any of its affiliates, except as -required for reasonable and customary use in describing and redistributing the -Llama Materials. - - b. Subject to Meta's ownership of Llama Materials and derivatives made by or -for Meta, with respect to any derivative works and modifications of the Llama -Materials that are made by you, as between you and Meta, you are and will be the -owner of such derivative works and modifications. - - c. If you institute litigation or other proceedings against Meta or any entity -(including a cross-claim or counterclaim in a lawsuit) alleging that the Llama -Materials or Llama 2 outputs or results, or any portion of any of the foregoing, -constitutes infringement of intellectual property or other rights owned or licensable -by you, then any licenses granted to you under this Agreement shall terminate as of -the date such litigation or claim is filed or instituted. You will indemnify and hold -harmless Meta from and against any claim by any third party arising out of or related -to your use or distribution of the Llama Materials. - -6. Term and Termination. The term of this Agreement will commence upon your -acceptance of this Agreement or access to the Llama Materials and will continue in -full force and effect until terminated in accordance with the terms and conditions -herein. Meta may terminate this Agreement if you are in breach of any term or -condition of this Agreement. Upon termination of this Agreement, you shall delete -and cease use of the Llama Materials. Sections 3, 4 and 7 shall survive the -termination of this Agreement. - -7. Governing Law and Jurisdiction. This Agreement will be governed and -construed under the laws of the State of California without regard to choice of law -principles, and the UN Convention on Contracts for the International Sale of Goods -does not apply to this Agreement. The courts of California shall have exclusive -jurisdiction of any dispute arising out of this Agreement. diff --git a/README.md b/README.md index 1edbd792..11195570 100644 --- a/README.md +++ b/README.md @@ -1,24 +1,36 @@ -# SLAM-LLM: **S**peech, **L**anguage, **A**udio, **M**usic Processing with Large Language Model - -# News -- [Update Mar. 13, 2024] Please join [slack](https://join.slack.com/t/slam-llm/shared_invite/zt-2cxmm7fue-tEKmZcL1hB8s2R2GQdTTiA). We will sync our updates here. - +
+# SLAM-LLM
+
+SLAM-LLM is a deep learning toolkit that allows researchers and developers to train custom multimodal large language models (MLLMs), focusing on Speech, Language, Audio, and Music processing. We provide detailed recipes for training and high-performance checkpoints for inference.
+
+![SLAM-LLM Logo](docs/logo.jpg)
+
+<!-- badges: version, version, python, mit -->
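The recipes in this patch wire together a speech/audio encoder (e.g. Whisper, `encoder_dim=1280`), a linear projector with a downsampling rate (`encoder_projector_ds_rate=5`), and an LLM (e.g. Vicuna, `llm_dim=4096`), training only the projector (and optional LoRA adapters). As a rough illustration of how such pieces compose — a minimal sketch only, not the toolkit's actual API; the class and argument names below are invented for illustration — the projector maps stacked encoder frames into the LLM embedding space:

```python
import torch
import torch.nn as nn

class SpeechToLLMProjector(nn.Module):
    """Minimal sketch: stack `ds_rate` encoder frames and project them to the LLM width."""

    def __init__(self, encoder_dim: int = 1280, llm_dim: int = 4096, ds_rate: int = 5):
        super().__init__()
        self.ds_rate = ds_rate
        self.proj = nn.Linear(encoder_dim * ds_rate, llm_dim)

    def forward(self, encoder_out: torch.Tensor) -> torch.Tensor:
        # encoder_out: (batch, frames, encoder_dim), e.g. Whisper encoder states
        b, t, d = encoder_out.shape
        t = (t // self.ds_rate) * self.ds_rate          # drop the ragged tail
        stacked = encoder_out[:, :t, :].reshape(b, t // self.ds_rate, d * self.ds_rate)
        return self.proj(stacked)                        # (batch, frames / ds_rate, llm_dim)

# The projected frames would then be concatenated with prompt embeddings and fed to the
# frozen or LoRA-tuned LLM, mirroring `freeze_encoder=true` / `use_peft=true` in the recipes.
projector = SpeechToLLMProjector()
dummy = torch.randn(2, 100, 1280)                        # two utterances, 100 encoder frames
print(projector(dummy).shape)                            # torch.Size([2, 20, 4096])
```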
 # Table of Contents
-1. [Setup](#setup)
-2. [Fine-tuning](#fine-tuning)
-    - [Single GPU](#single-gpu)
-    - [Multi GPU One Node](#multiple-gpus-one-node)
-    - [Multi GPU Multi Node](#multi-gpu-multi-node)
-3. [Inference](#inference)
-    - [Batch Inference](#batch-inference)
-    - [Real-time Inference](#real-time-inference)
-4. [License and Acceptable Use Policy](#license)
-5. [Citation](#citation)
+1. [News](#news)
+2. [Installation](#installation)
+3. [Usage](#usage)
+    - [List of Recipes](#list-of-recipes)
+    - [Configuration Priority](#configuration-priority)
+4. [Features](#features)
+5. [Acknowledge](#acknowledge)
 
-# Setup
+# News
+- [Update Apr. 28, 2024] Recipes for automated audio captioning (AAC) with SOTA performance have been supported.
+- [Update Mar. 31, 2024] Recipes for automatic speech recognition (ASR) with SOTA performance have been supported.
 
-## Installation
+# Installation
 ```bash
 git clone https://github.com/huggingface/transformers.git
 cd transformers
@@ -36,265 +48,36 @@ cd SLAM-LLM
 pip install -e .
 ```
 
-**For more in depth information checkout the following:**
-
-* [Single GPU Fine-tuning](./docs/single_gpu.md)
-* [Multi-GPU Fine-tuning](./docs/multi_gpu.md)
-* [LLM Fine-tuning](./docs/LLM_finetuning.md)
-* [Adding custom datasets](./docs/Dataset.md)
-* [Inference](./docs/inference.md)
-* [FAQs](./docs/FAQ.md)
-
-# Fine-tuning
-
-We take Automatic Speech Recognition (ASR) with Large Language Models (LLM) as an example to demonstrate the fine-tuning process. The same process can be applied to other tasks in [example](./examples)(TODO) and [scripts](./scripts) folder.
-
-## Single and Multi GPU Finetune
-
-If you want to dive right into single or multi GPU fine-tuning, run the examples below on a single GPU like A10, T4, V100, A100 etc.
-All the parameters in the examples and recipes below need to be further tuned to have desired results based on the model, method, data and task at hand.
-
-### Single GPU:
-
-```bash
-export CUDA_VISIBLE_DEVICES=0
-export TOKENIZERS_PARALLELISM=false
-export OMP_NUM_THREADS=1
-cd /root/SLAM-LLM
-
-speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt
-llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5
-output_dir=/nfs/maziyang.mzy/exps/finetune-asr-whisper-largev2-vicuna-7b-v1.5-linear-ds5-proj2048-stlr-lora
-
-python src/slam-llm/pipeline/finetune.py \
---config-path "scripts/conf" \
---config-name "asr_vicuna_lora.yaml" \
-hydra.run.dir=$output_dir \
-++model_config.llm_name="vicuna-7b-v1.5" \
-++model_config.llm_path=$llm_path \
-++model_config.llm_dim=4096 \
-++model_config.encoder_name=whisper \
-++model_config.encoder_ds_rate=2 \
-++model_config.encoder_path=$speech_encoder_path \
-++model_config.encoder_dim=1280 \
-++model_config.encoder_projector=linear \
-++model_config.encoder_projector_ds_rate=5 \
-++dataset_config.dataset=speech_dataset \
-++dataset_config.prompt="Transcribe speech to text. 
" \ -++dataset_config.train_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ -++dataset_config.val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ -++dataset_config.input_type=mel \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.use_peft=true \ -++train_config.peft_config.peft_method=lora \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++log_config.log_file=/$output_dir/train.log \ -++log_config.use_wandb=true \ -++log_config.wandb_dir=$output_dir \ -++log_config.wandb_entity_name=zym22 \ -++log_config.wandb_project_name=slam-llm \ -++log_config.wandb_exp_name=${0##*/%.*} \ -++log_config.log_interval 5 \ -++metric=acc \ -# ++model_config.encoder_projector=q-former \ -# ++dataset_config.fix_length_audio=64 \ +For some examples, you may need to use `fairseq`, the command line is as follows: ``` - -Here we make use of Parameter Efficient Methods (PEFT) as described in the next section. To run the command above make sure to pass the `peft_method` arg which can be set to `lora`, `llama_adapter` or `prefix`. - -**Note** if you are running on a machine with multiple GPUs please make sure to only make one of them visible using `export CUDA_VISIBLE_DEVICES=GPU:id` - - -### Multiple GPUs One Node with DDP: - -```bash -export CUDA_VISIBLE_DEVICES=0,1,2,3 -export TOKENIZERS_PARALLELISM=false -export OMP_NUM_THREADS=1 -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -output_dir=/nfs/maziyang.mzy/exps/finetune-asr-whisper-largev2-vicuna-7b-v1.5-linear-ds5-proj2048-stlr-lora - -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ -src/llama_recipes/pipeline/finetune.py \ ---config-path "scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -hydra.run.dir=$output_dir \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ -++model_config.encoder_projector_ds_rate=5 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.prompt="Transcribe speech to text. 
" \ -++dataset_config.train_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ -++dataset_config.val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_dev_other.jsonl \ -++dataset_config.input_type=mel \ -++train_config.model_name=asr \ -++train_config.enable_fsdp=false \ -++train_config.enable_ddp=true \ -++train_config.use_fp16=true \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.use_peft=true \ -++train_config.peft_config.peft_method=lora \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++log_config.log_file=/$output_dir/train.log \ -++log_config.use_wandb=true \ -++log_config.wandb_dir=$output_dir \ -++log_config.wandb_entity_name=zym22 \ -++log_config.wandb_project_name=slam-llm \ -++log_config.wandb_exp_name=${0##*/%.*} \ -++log_config.log_interval 5 \ -++metric=acc \ -# ++model_config.encoder_projector=q-former \ -# ++dataset_config.fix_length_audio=64 \ +git clone https://github.com/pytorch/fairseq +cd fairseq +pip install --editable ./ ``` -If you want to run with FSDP, you can set `++train_config.enable_fsdp=true` and `++train_config.enable_ddp=false`. - -### Flash Attention and Xformer Memory Efficient Kernels - -Setting `use_fast_kernels` will enable using of Flash Attention or Xformer memory-efficient kernels based on the hardware being used. This would speed up the fine-tuning job. This has been enabled in `optimum` library from HuggingFace as a one-liner API, please read more [here](https://pytorch.org/blog/out-of-the-box-acceleration/). - -### Fine-tuning using FSDP on 70B Model - -If you are interested in running full parameter fine-tuning on the 70B model, you can enable `low_cpu_fsdp` mode as the following command. This option will load model on rank0 only before moving model to devices to construct FSDP. This can dramatically save cpu memory when loading large models like 70B (on a 8-gpu node, this reduces cpu memory from 2+T to 280G for 70B model). This has been tested with `BF16` on 16xA100, 80GB GPUs. - -### Multi GPU Multi Node: - -```bash - -sbatch multi_node.slurm -# Change the num nodes and GPU per nodes in the script before running. +# Usage +## List of Recipes +We provide reference implementations of various LLM-based speech, audio, and music tasks: +- **Speech Task** + - [Automatic Speech Recognition (ASR)](examples/asr_librispeech/README.md) + - [Text-to-Speech (TTS)](examples/vallex/README.md) +- **Audio Task** + - [Automated Audio Captioning (AAC)](examples/aac_audiocaps/README.md) + +## Configuration Priority +We provide hierarchical configuration inheritance relationships as follows: ``` -You can read more about our fine-tuning strategies [here](./docs/LLM_finetuning.md). - -# Inference - -Once you have fine-tuned the model(for example, whisper + vicuna + linear + lora), you can use the following command to run inference on the fine-tuned model. 
- -## Batch Inference - -```bash -export CUDA_VISIBLE_DEVICES=0 -export TOKENIZERS_PARALLELISM=false -export OMP_NUM_THREADS=1 -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -output_dir=/nfs/maziyang.mzy/exps/finetune-asr-whisper-largev2-vicuna-7b-v1.5-linear-ds5-proj2048-stlr-lora -ckpt_path=$output_dir/asr/2 -decode_log=$ckpt_path/decode_log_test_clean_beam4 - -python src/llama_recipes/pipeline/inference_batch.py \ ---config-path "scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -hydra.run.dir=$ckpt_path \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ -++model_config.encoder_projector_ds_rate=5 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.prompt="Transcribe speech to text. " \ -++dataset_config.val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_test_clean.jsonl \ -++dataset_config.input_type=mel \ -++dataset_config.inference_mode=true \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.use_peft=true \ -++train_config.peft_config.peft_method=lora \ -++train_config.batching_strategy=custom \ -++train_config.num_epochs=1 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++ckpt_path=$ckpt_path/model.pt \ -++peft_ckpt=$ckpt_path \ -++decode_log=$decode_log \ -# ++model_config.encoder_projector=q-former \ -# ++dataset_config.fix_length_audio=64 \ +command-line (shell file) > Hydra configuration (yaml file) > dataclass configuration (Python file) ``` -## Real-time Inference - -```bash -export CUDA_VISIBLE_DEVICES=0 -export TOKENIZERS_PARALLELISM=false -export OMP_NUM_THREADS=1 -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -output_dir=/nfs/maziyang.mzy/exps/finetune-asr-whisper-largev2-vicuna-7b-v1.5-linear-ds5-proj2048-stlr-lora -ckpt_path=$output_dir/asr/2 - -python src/llama_recipes/pipeline/inference.py \ ---config-path "scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ -++model_config.encoder_projector_ds_rate=5 \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.use_peft=true \ -++train_config.peft_config.peft_method=lora \ -++ckpt_path=$ckpt_path/model.pt \ -++peft_ckpt=$ckpt_path \ -++decode_log=$decode_log \ -# ++model_config.encoder_projector=q-former \ -# ++dataset_config.fix_length_audio=64 \ -``` - -# License -See the License file [here](LICENSE) and Acceptable Use Policy [here](USE_POLICY.md) - -# Citation - -``` -@article{ma2024embarrassingly, - title={An Embarrassingly Simple Approach for LLM with Strong ASR Capacity}, - author={Ma, Ziyang and Yang, Guanrou and Yang, Yifan and Gao, Zhifu and Wang, Jiaming and Du, Zhihao and Yu, Fan and Chen, Qian and Zheng, Siqi and Zhang, Shiliang and others}, - 
journal={arXiv preprint arXiv:2402.08846}, - year={2024} -} -``` +# Features +- Easily extend to new models and tasks. +- Detailed recipes for training and high-performance checkpoints for inference. +- Mixed precision training which trains faster with less GPU memory on NVIDIA tensor cores. +- Multi-GPU training with data and model parallel, supporting [DDP](https://pytorch.org/tutorials/intermediate/ddp_tutorial.html), [FSDP](https://pytorch.org/tutorials/intermediate/FSDP_tutorial.html) and [deepspeed](https://github.com/microsoft/DeepSpeed) (still need to be improved). +- Flexible configuration based on [Hydra](https://github.com/facebookresearch/hydra) and [dataclass](https://docs.python.org/3/library/dataclasses.html) allowing a combination of code, command-line and file based configuration. + +# Acknowledge +- We borrow code from [Llama-Recipes](https://github.com/meta-llama/llama-recipes) for the training process. +- We borrow code from [Fairseq](https://github.com/facebookresearch/fairseq) for deepspeed configuration. +- We thank the contributors for providing diverse recipes. \ No newline at end of file diff --git a/UPDATES.md b/UPDATES.md deleted file mode 100644 index ffffa733..00000000 --- a/UPDATES.md +++ /dev/null @@ -1,19 +0,0 @@ -## System Prompt Update - -### Observed Issue -We received feedback from the community on our prompt template and we are providing an update to reduce the false refusal rates seen. False refusals occur when the model incorrectly refuses to answer a question that it should, for example due to overly broad instructions to be cautious in how it provides responses. - -### Updated approach -Based on evaluation and analysis, we recommend the removal of the system prompt as the default setting. Pull request [#626](https://github.com/facebookresearch/llama/pull/626) removes the system prompt as the default option, but still provides an example to help enable experimentation for those using it. - -## Token Sanitization Update - -### Observed Issue -The PyTorch scripts currently provided for tokenization and model inference allow for direct prompt injection via string concatenation. Prompt injections allow for the addition of special system and instruction prompt strings from user-provided prompts. - -As noted in the documentation, these strings are required to use the fine-tuned chat models. However, prompt injections have also been used for manipulating or abusing models by bypassing their safeguards, allowing for the creation of content or behaviors otherwise outside the bounds of acceptable use. - -### Updated approach -We recommend sanitizing [these strings](https://github.com/facebookresearch/llama#fine-tuned-chat-models) from any user provided prompts. Sanitization of user prompts mitigates malicious or accidental abuse of these strings. The provided scripts have been updated to do this. - -Note: even with this update safety classifiers should still be applied to catch unsafe behaviors or content produced by the model. An [example](https://github.com/facebookresearch/llama-recipes/blob/main/examples/inference.py) of how to deploy such a classifier can be found in the llama-recipes repository. \ No newline at end of file diff --git a/USE_POLICY.md b/USE_POLICY.md deleted file mode 100644 index 4299e1d1..00000000 --- a/USE_POLICY.md +++ /dev/null @@ -1,49 +0,0 @@ -# Llama 2 Acceptable Use Policy - -Meta is committed to promoting safe and fair use of its tools and features, including Llama 2. 
If you access or use Llama 2, you agree to this Acceptable Use Policy (“Policy”). The most recent copy of this policy can be found at [ai.meta.com/llama/use-policy](http://ai.meta.com/llama/use-policy). - -## Prohibited Uses -We want everyone to use Llama 2 safely and responsibly. You agree you will not use, or allow others to use, Llama 2 to: - -1. Violate the law or others’ rights, including to: - 1. Engage in, promote, generate, contribute to, encourage, plan, incite, or further illegal or unlawful activity or content, such as: - 1. Violence or terrorism - 2. Exploitation or harm to children, including the solicitation, creation, acquisition, or dissemination of child exploitative content or failure to report Child Sexual Abuse Material - 3. Human trafficking, exploitation, and sexual violence - 4. The illegal distribution of information or materials to minors, including obscene materials, or failure to employ legally required age-gating in connection with such information or materials. - 5. Sexual solicitation - 6. Any other criminal activity - 2. Engage in, promote, incite, or facilitate the harassment, abuse, threatening, or bullying of individuals or groups of individuals - 3. Engage in, promote, incite, or facilitate discrimination or other unlawful or harmful conduct in the provision of employment, employment benefits, credit, housing, other economic benefits, or other essential goods and services - 4. Engage in the unauthorized or unlicensed practice of any profession including, but not limited to, financial, legal, medical/health, or related professional practices - 5. Collect, process, disclose, generate, or infer health, demographic, or other sensitive personal or private information about individuals without rights and consents required by applicable laws - 6. Engage in or facilitate any action or generate any content that infringes, misappropriates, or otherwise violates any third-party rights, including the outputs or results of any products or services using the Llama 2 Materials - 7. Create, generate, or facilitate the creation of malicious code, malware, computer viruses or do anything else that could disable, overburden, interfere with or impair the proper working, integrity, operation or appearance of a website or computer system - - - -2. Engage in, promote, incite, facilitate, or assist in the planning or development of activities that present a risk of death or bodily harm to individuals, including use of Llama 2 related to the following: - 1. Military, warfare, nuclear industries or applications, espionage, use for materials or activities that are subject to the International Traffic Arms Regulations (ITAR) maintained by the United States Department of State - 2. Guns and illegal weapons (including weapon development) - 3. Illegal drugs and regulated/controlled substances - 4. Operation of critical infrastructure, transportation technologies, or heavy machinery - 5. Self-harm or harm to others, including suicide, cutting, and eating disorders - 6. Any content intended to incite or promote violence, abuse, or any infliction of bodily harm to an individual - - - -3. Intentionally deceive or mislead others, including use of Llama 2 related to the following: - 1. Generating, promoting, or furthering fraud or the creation or promotion of disinformation - 2. Generating, promoting, or furthering defamatory content, including the creation of defamatory statements, images, or other content - 3. Generating, promoting, or further distributing spam - 4. 
Impersonating another individual without consent, authorization, or legal right
-    5. Representing that the use of Llama 2 or outputs are human-generated
-    6. Generating or facilitating false online engagement, including fake reviews and other means of fake online engagement
-4. Fail to appropriately disclose to end users any known dangers of your AI system
-
-Please report any violation of this Policy, software “bug,” or other problems that could lead to a violation of this Policy through one of the following means:
-
-* Reporting issues with the model: [github.com/facebookresearch/llama](http://github.com/facebookresearch/llama)
-* Reporting risky content generated by the model: [developers.facebook.com/llama_output_feedback](http://developers.facebook.com/llama_output_feedback)
-* Reporting bugs and security concerns: [facebook.com/whitehat/info](http://facebook.com/whitehat/info)
-* Reporting violations of the Acceptable Use Policy or unlicensed uses of Llama: [LlamaUseReport@meta.com](mailto:LlamaUseReport@meta.com)
diff --git a/docs/logo.jpg b/docs/logo.jpg
new file mode 100644
index 0000000000000000000000000000000000000000..0e31204c500b0a6f2aef2762ca470ed86f71d990
Binary files /dev/null and b/docs/logo.jpg differ
zM=aZ|b?57ep46a}38M2OBj)z5nxQ?FmG-3qLS6fwW}7r1?ZXlSd=R(mNOP5zD);bH z4Xrhup~3yt-)@J=tKk>!dR`eXYueg~7C_@h(5TooQZerttc3vOqvGIe;-Ip%6X^V~ zGRJ5(;>)&gs`=2Q18oqLcU=dou%&L49l-Y$Ui1Z&kp~V3pyqcc`qnXnLdI?{gTjmv zO&wsu+C)*p}9KHb{UCEonFK-`ZN+@42U7Vwtp5O;s1Uk;PJUUcqAlP`4-74hb z7JOW)bu{UV1sW+leRnem)PQ(n_efF54x%xz{;u$hKm1G--g@PyjltOQ@fSmB<+P9H z*PIWo&m;bfRwmy9~HBE2XuPeH~z)Pu|p99WA7Euc?q=; zyRK$gGH0_M+^+%JO^4KNn)fQ7KBoPE7vE*5#jF`%r zm(i=kI0R3v?~c-#@-yH5@7dz;+fNhUTfJSPnxzhB(3aqE4&1QbFBOLQ{Njp#()B2{w%-2-Zbu>7{S+?H9v zIlR2yqG=;CbF6;y>b@F1RZs!PTeD6dcd*M>q2rZKJm_p;CE*yQ{H*b7w5LnQ_P7LO zp~RQnCGxa%Ci>52s~?s?pRn06*cfSWWb$3*)LVSTLk6e`{OeeM&NNHNb~x&pwUh9% z-)a%$0Sc+1NP|8t&FMj=8D;BKd1mOFa7p{Fu8iP!@MfL413-VsRn(whSX2K#a)t3M z3$vuj-(ThWGwnL+iQ^0Qs@CnVY9B|iW& zZLi~Td%*ec4k%Z|QAv21x*rIHcYKOEfdSGOTbFy+cRK?83?YPSpANDIhCRxPzS}SqQ2S2dN0+AO^$F%Wh z%~F=J*Ha5NJO(DccL&sBBF{U2oiiDxdxekGsq}neV3dt3gDQMgydM_$>R!QL=TxTB9xj{hZp)A84nH86SCVyB-M5!_^7|&8=Shl`s3c$+cV* zzQ{#?FMK|L)~g0+uzZ%8df@y1qLvjQ0VDv2G}jRqe55%u{(m4M-%*ge0{wl_?8~Co zwtujOi4A_Lb#9!(c1yUFan?#Mg9bLnG+0-fT(%e>HtgVMv*gNI8#UC$to$Y`okUfP z4Xcebpbsq~NxqvGy+a@U2TaIa!~LAQd|1XavRmUK9CLxg!I#4#83$W4f`=a|l-Qj} zOb2;Pu@Uc_VRAelbDKML&!(QS7}|IYpYct+fr(IeiR-4ueMpxK|1De447M%hKF+&L zF1We+ylnXMUF}D@sD*rnqh47~okF8_776%|Hfh<<~5f(Y861IoA5(MlTLgJ zyp6_}LV^=n3g9;%tDo2-pZy>gjsd7dJ`9aIF$Dx!h20n3x4`9-_SK`{nTS)gM!j2>}*nnKb5ePdVBZ@*lPUyOAO3>;jw5XM7G@RYhD+W zfxX20FAqMLi_)eD(3w#%7Qip|`qSZB&6?Ph$u9qZ$|d|~cpJ0`KDInZWX)q#+h0*l zG!P8)EwR-cw;Aw`?MylE1lY~X23vd8{Wd@aTZFmugk}8z3F`ldzwfbpp3WU}QJd>2 z_-KRoz+h7j3^e_MUI8H?1h^HQ^wxVboNL<60fxyE#dO zuCld(xHQ=+!<^Gn=2!U(AuzroiCt5B<(9pQMK~r)2yKs}@*BDIm920?O6u-YwIdde zE`O;+z{P$YsOOK5)bsx|F$%K~lZb}%m37orI7&OS#lbe?c5I$=)h;r_C;*N90*yjs zG^;1*sCk=d$l9sv4y}CFi(wQ=^IBs@29oCn>#e905BZA>rY$e=9j6ay*$DaYW&jfL zQ3ffvlWvTWQbbYbyH~GrRaF0Q{yr{#>!-Z>q|LH!*^*YYpK;6ha5J~ai&+?bZ+BpT zO;bfmD|~v5R?R@|;{7k2w8ThgADoj!g*WL&ZM<1db6Aoeqa>Xpg0Q2FQAIme5W_8x zQw5c*$Y>THwvcIerLQ*9qRZIJgq-(^LugAmwG>6k6 zb8pfE@~wDWtx9gf8{9H4>jOLaVUW%d*Ic}l4{w+|=e1-Trtb48V)ZHZax=q!sZyXk zeoqAesm`(Rc;_3xcz0v0Pl*2Rd-`+JV9g_@%>#Q-+p&bqZ>JBdp#46RC;kWp!kDf- z>d$kVjGF}%z9~}p{-&v|LdLdst5!j|#ClN%3LvGKw1r3);ZaW@H(EuHeLWmcra)a~ z$J*ProAT~rSQ9$|t zI@A|iH2^Otnbhb1Hj*NSuh`frlsF>W(g{+3^EeO$k&S$799`GJe@bA@MCz<141@QF zWus``F zEy(?ok{~Vqj~UR95A9S6x%(Wp4gQ;)qbGjaMMQ z_!QS)X1JF5oW{OM;;1N}Q6zAaf3DTu0eq9Nr8~Uy+I}VZgYpQ|B^vyNr(fH?3~iWf z5;XU#3tX~AZszE3zDpaq1F@xlJY2JxMoZAl+iYY4wy##2sk2*}xrw$tL*Nj1O42@A z`L!LV(&|8;!4-oi&SNyphnSYVQ-(u0@x%Z}&zMKj@ks=tQvv)?^rdlktK7>X8$VVA z=tdEE=6|>bc!xZ&Vx-|?J+Pj!6cR`SdE*h}KP*Arz`Nurch!+dh&WA3z_Y!8)qFVJ z>srh4zl)0mD)EVwEdX&?Hu{R3LHhwyT56edxp1IVKxEf8H(lwOSU@sq%gK&l$>RU; zgzmRGaG2&=2OBVcCHFr#(XGmt??it3T1> zvQh$c*##fw*gPw6aHJg!ZQfRPeRT4TwZy_Z<;eRA?@_*oZgIW#TfWj%D_#%i{`&rA zJJ$A1%3+SFuR=W;8A}lu1p=QWJjVl80UX`ob{fDXhWvGm7e^3oBvTy|cK_O(|x$tJ$CDybXdJhpu22Nog1Xk>DtQlw9zBP z@Md%H+E>zM4QtDz<{*=zZMzj%l+WJrUep6an`<$xFGW7B%gW_86q?|TSK4D;ZzaX# z41dw(oI2+^x3ys+l1UKNjA#`sL~)j}^zR zsW_N%|LEKz5J!8OS(3yt;&~iqZG39un8BXQgD=^;6~fF+51ReS(Ah$7xwvq5sm!m@ zDcsDC-FaVnFG}TzFh5g-UgXdY;^?>BjMo!XW=1Vk<4bJjjt*{9Lij73{YHux2xTQU z)9~&d;FM1fhR!PY!){`hRYx#epD6DeE%(WV8>lZ?6ut&+N(1oJsrBJMLLT`2DtoTD z^ZLdb8g^swOBvKPYE(RcwFAMv0q|qM;(yQpUnI=U`wmcsfsaM;6WqCEY=9Eg7>pb@ zR^q3}PGa=|Zy_lmXrV=AHw!vIwhG) z*}zS?|8-&FFiagtrp>vdQz1%--3*26n%8aM?EfFsu^7OEpzs?5*F)17BJ0n*;m!8$ z?>7!58m~T(dDlTnHK*7a^LDt+)(lNt!JCzyDp z0LH@mK}ckYTF90N{oTTKV^Ena32f_h+S8HP6;zfCZ%2?Dig*AN;GQ!&_dyJkXgj%>RAzLs5|OuHO_ zj(5jvx28#q+E>U@U#{@lvtYQuFef>6jXCDJW+-*;hRHWiIqwLihgY)cUcDGlr2g8|tF5ly zQ%m*BLk?qbK+onUH;AsXY($HX`Cl4VxJdSuSTbqRoHlH83buUSL>zscRuvsN86N=i zP+LD0fDs@i+D)YUZD+>` 
zJz-R_FW&8An0;hmqtb#_p>FgysFV&LR$r~7YXh~pF-TdW5U~f@Qqd@%Hoe7NvuSS7 z2JAK5JkMVa+Nv zR5>#Wi0L(dJg2h9 zoj*C5UsH5hqccAZM)r%)+pfvH!>s4Srspf_snz zo^T}76}I}-o`6CExh@Z3W`OITxmF(&q+^y~3})CUcZI)H8p?nN(+}%5cZwWBPH-4O z76Xt)k${Gj@6?atomjtLrMfU}4FT@twEbUC*g$GwS=?xM3J=b*U|~HoK{Fi8uTUYQ zQAg81CVO8$eW2@&houeT>6x!2Z?%ePRQ5LoG_#T_Z+c+>4DAM@H|vP302sWy;5=7- zow_n)34zzB-~QX0_Aimfmw1L3skq=SAF zvdxV&)yd}0Z8Dj3i@Bl~6?k6Om#spZ>5Nl+Pq*ewHcm--SJ`wcZ8&C#wRpC}8_z!? zqdaf-*kzjf`wa6OXMD9Yz;oInk8SXblUq^TVu*YtCi9Eog~IN--zWNlD?o9N$jZvLxEQ0({#s4p&lB&=m={r9t~vLi9( z)=94}*xi8sFXkP#2^2*TaW+q+!d<_}+U z<4+g%1Y)CF$R>IBcpt0AMx!0&2lGY+{qh@iO^gA@v_(7ZRudKt{*IkD)1ns_0)>oH zoDr^yqbsvh`pZBCHKT^i6&O0as)7xCdglE`#Xmr`uQQ;h*)zOwK^DxW8#tNRx5;#ocQ7Y>VELKiBY!5#k}tA_Bb3Yf&%fV3 z;kk{qIq%SM_j#WvVbGyH?)_$WJeS;8vq-mM2)%riBY-zh`2Y?9>Y}Y%&&=>{vuga_ zG2GP`2)~rzcLW$;hO&)>qI+OOmR$23+f6R3az6zS9uUtRHI02{_aR>G5LornBf)(b zP3}D=7bw^zSqhG5W-P$Z&v}JR?B4zUj9Ux(tylNiIOhen!9CFydezA0GQ7oiNJJ`0q~(1 zN(vX`g%<~OpvE0XC^bU31i@t6mlx1gJcR^tUJ@>vI>E_IJ@j$KSxKBW>PZ zB_|8#Khz9^XCSM?Yh~T>-6!iActFYwwBhvvZrml8+oDI-G0Z~&IRM#XG3=?c9TvZ^v(fc8;jmUy}1fR8ySUWRYP0y)cEUmfCe0pg}H030)bH;mR8{(ujz0R$N)v1Drg zFu76r=z8Ho*g_V^0Nf@EsTJ4 z?J9GxX3oD8A=`=9pEQHnUu+cQ5gPjpGgvMse0^5~+IiRxsRjqzLGTjIwpR^jja8(D zCjL0CAx7(ys=XE0{a=G_vnuk|jB2tw(v_zchyNK8Q$<@h4sga*>h;KJKRdU4a;0xz z_cbKXXLRUTBRd;Mp4nx0%zsb6DFDFB;9d=0y#$CfOX6M1Ufy8UBAzE%sBb&8KYW?# zjk1_p(f2RQ1wnZdt_Ah@`=*yasUy3R7p4*gh0MQON+&*{c_(T#Z1=ql+%!p&s5(I~ z7An(qR`tO{t$%~G_!qP^HhXLr%-9@bxWf6Ke#EPf)*(3f03GtLXNtWKwpBLyLsGp2 zt&qrgr%Tpa5nAjK5#_Xe>4$}-m$=Y+6vqc6Q{zKV>vHO<=#f91T4u{#RmV%94)1rE zXdixMR)&J{LP+uu6u}&^C=jpi%;jx{z!xM}IM^#)cfYBmmw@Va465!aiw<&!p^Aah zWV5;j`7ag8ZS2Cr*2-{&m&2RL&-gN~pRKM(7sNcO$rS_J5E?bDn*L3>`+~tAhCn+K z1gK2U(4SX9_lx8Cg5zZ`@dmq8w_l!x5QX2Wq?0oO(#MP z$07B4UJ#X|S?Yc{&sAJj^ph?s;1R)2rE2aS?}!6}FXNSS;FK!|hwnMU7{V)1e?-Q# zL=~Rjy=J7opRXCL$jPky4oqBacqPa=$;)f2gSrsc!);T zudyXN{qG_&ia+?8rmw8lFncMDMUpc(PFbsUit%lUD6Y~fH#eiGf zCYG*(vi~8irA4e9t0Qp}*;gYJx>4R%2|FGF_FqZ&;%BRt9?anYI2AG_o<*6TSf*w> zhKDL}T`l7Zp9Ajm;0AFF<#o}l`IgxP=8*e!o&xe=!qc^NZ%|YgHr$;Xz+ag3f&dV} z7cmDUQ@VjD63v#}eg(;BOG8P9%WD&E%6L@)j|uQB>ko=a51v|31``q|NhTVU8(-l= zIu!TCn=Lyg8+d&OTu~M!iI);=BJs#%M0k9&y-xQD31vHj4NY|Cggu1ZM6Q`?=r*h$;j|Jvn#{Yh?1CP=mknD&?|$}3tW9#a3sXFy|nJ=$A++q zR3`g&k_CPe=93Qvz(gX5cvX+4`3DJe8+G;uyw5yDGGjj!fDuoD7c`H1nJ^RVGD_AW zZ&>^b->Z}l>*XTmXUy~~&oqKFzTH%zt5JeK;ZH$G#iksF*R*xhq@N~V0lL*^ZbL%V zOVzzztXa>_%{r{s7;7t44~boVT(UIrq$gu|{?lyj;%w@}92XH?^UO>G`u1leKB1&( z9417m*m}&R=g+6`pZUu%TRLJ5Y)i^WbBxUE0M+MBavlAHwc}l0{QaK`>{&G+#p+Bq zdl2{E{Mi7_z6(A7lX|s*x9Lw_%U8G42rCNlX^x;4js;Rwi88}pee>&vm)HoQyhZ8W z*rbnzPv?!jU2ts6c4Ql2(h}r;GT>k|rl6-Barr0u)cnLsb$;(keU1?NW%G5jpubeN>wJBo=|6+1 z-cd=Nn~V{Deh0ttT2>%Lqtd^fE0V9}J3Gf3!jSD!2MJ&;UNEmsip3Gpn?TQD9oU>J zw{GtXa{uv+KTsQg!ECrt_?DBA404>50D;FOpOHict6ul_KeX!3xR#wQ|aI*1G zb#$oK?Bp#3X}^^NFSQJjfpbuSd60HB0Pr2C0*`^vQx`;jC~8k0CIt(6dpxCJ^GIvh^COtG6y!-KzyV3KY6y~;xf!Fg z$T7x^TaJoke(P|CvwAHTM>n^HQl}j00;7HUup8xsTu4mviR#preyz^6+J*>k`%lgw z$Kzn%>?p@ozbOgNzK8kt>NrMRR^pF(I=w4q&jm60j!eYbe8`L1$3*U9sN&_Fyh834lOy%L&=ZEjMq z{-yKIRgQx{wH(t+tEJrD4_o;n!A1&ar|>y&%bqc^8Tk4a-QB<&pMbCz>?0|TnL+F` zojFs1>vTwbafjm_^q=z7{JNl4aU>_m;SMi(0*H6};G3NsQZJ+U&tvz~!B`xssN@m= zC5sgu^LzaofTZ2XUcEzWqyKBAkL@FspH!E+LOh7ez%9;YK5n?4zmQqz`?8N-VbBph z^O{at{o}Op>nTl|O;y0eMHOX*!?xIiTQVHnWfM+X8o}=D@L$qP!xUXlK=ZMzH+c#8 zN_pQnIHXK9kFRjU8W4r>b+IAa;>6Dvh9rq&)PjYOcn~B(JEG1yv*U?Fk>e$=a5|74 z2De&uC-{D~Z-k_gsj<;=iLdL>+`UTQ_vPpY>K4Nwo|qSarih-1mW7qARA@VWt#wv71JiI&Frg^fStOOGgQ zqwX?-b&Jx;`L)|GoKJ7kY-r>s=0mK?P>Z0OQJlW$6F5HZu7S3>Vf@figL 
z?%i%N5w0HG1z8mL4J$kDo!gh#RCM#POt#6H`tm4)0fh%Gg0Of8O|3mq20oRxR$g*7AAPu`AbqJ~tv>&!)sb`MIpiK^s^n4ZC7D zFatAT`N5K2EzDRqAdg*(G2TpVx!34A;ILcI`m+F|^j;#OG8qs%+RrkN zu?%k4jaw~AeQo@w{oTxIy8EG?i`}i_(dYc}l**6HRVxh;wV4s+L^kI8JEne$o*zq5 zCPlJ^DY(@-y6JnzZNEOviW_$Z&KU9Dyw_4qWrRY8jr#+Pokh@1&NT^Hy%+`GAv zP(9%hgE=wU;XYVIbru@5C#O+({!-awGuzfFbNt$Iv$F6lly{)-{RU<`aQrgMOX(Z5 zOhn#>QgxiSib<33r!nZk)3kC5_HsUg?NnWS_ks5v5s53&Rv$bPV%p@^8P)%x;FOm| zugDp!kiSwh722#6-JtBHbfSriFY@&KT0vA?=;@L>R4!ktIzqi3njM2y8W!_!N-j8f z$shou;bOCj%kLSOw7JB$xg-4rQfqMbwbSfT_b6NHKMu~mOT&L~pX$uGOQZ9VX&}I|Bz$L5V{NpPC z*Z77sW2J4E`#*5>CG_{nsy}O$OxhtH)&vL1Sd~vDh`pSi68bkisuZ+%VY*G)y)Oov zrQfxjIl@}n{pBg4;kA+=v1h(oK`9m)?ssx-B`zXw$bmKk=q(aMCo3ipyCphK^e6i8 zD`a|GInc~x7^U|AIJjf?F+E=rJk zP{WZW;BMwzH@2n(j8HIHk*s-nDr3RY`;4o?^atqZ@xKG|q|kJA-D79WjUTS&fpoX? zwQzKc;F|jKU2~|KhBz`y)qx{wxr}gGf@s)8;DbRuxA z$X^NEVEp_0^wAZq7iM55J`#VPbfNxQ^ZY~2LM*G%0W8cKreDlF% zl1?P=2lQnaxk-;O zHO+;=!dUy+r=?7`lS@UQU9R07t1HgIU;nFk#IrY6F zi=AefP$4pTggG= zvk8RO^Nd?#UK1R)Eg4kjA0BMjv9_F36Z6j#uaevg)7-;N2XY#NQ@uI97q5>Ta%A4C zv-r&wHJI6kJh?{-W=M@MvJd(_H2FReMJSO4TXMd^o zV$^cS-Clw|=B@>VFzpkalKfU_T2-pN^tLvUM7FVS$sQB^P>jktO&`r7|Kriwuv47^ zY5jFnP@a}wpFvzU3+@l-5U4W$8Vma&NfGL5{CFzUIAu%s9eL0PK3mpg)lbp8$z06Q8Nyp#N1{C|1ds(?Q822Mr}2%HVcOojr0U;skT6LeSS|v zDM{di;s)63P_6QBSH}2S_D`6DaorTfFMwr27F=qxe6o6ZGPdma>Ajz(i#g+Xva5Qs zO1<@iizppGrzO%6K%h6t5Z;ebfKxPsEen6k_x>PalFG*Q3}rmGumx@W*KODTmkL;f z8Ek_-ZK6HDHKSy7hG7cn78l>`3k2d3A=i9S+lSY9e z7o_q1f+N*tak^`a;#vyED%jfx=C9F#q?jB zQC8uqZk8s6HDauQeum+ctsrGgd5J7mKWIJ?w*Bt|sg*wk8SkZ;brav3sgCDtyoHd5~H*BR9v!4yf zZev8z1$t+Fd5a)Y1QfoDAYXTY@KYsi%d^dTR0>0%8DGn^djgWM-cKgXfnaBRYL08 za?`7?n7J@4&9o0&%_8D$J4&?7rpa1)#R$|1gmequPmA!R^Mnjv15-}M9jtrc8^ zVtKZPX^_A=?jd_d?Rvkx-pvE2K79pm&}CV7>3U*`N`p7frTTRIuIfYhz&9|IE`he7 zmb2!D(D~TC0Y}VMHL$D|t+dU`&$&ajc^6;yH1av7CY(GZ^_to+G8Pyt$DR?+fU#); z>Wf)BcvtvXEnL*-I97tBQItSQ;|6}aO)&IPj!+Rsd&T~chZWG>PgpcOdW3h^>D#VNyQWWXzt(Hv}@4)3IAt&(3;J$8_;F$F`%GqfSh!Q=`8TwAL)raqIOA z))|+94SsYADBuA*T*G4ENf>>x#GQWx?T{an>->%kkeh-w?fx9q{Dap(ZyM|$L;EXK z8}l^w*s01apJ2OEulA&atbTIBT!+`h=S}@9_HP(0n{_* zSo?(u4hYxptkSxLFBJPaL}5dV5M*Hp`6BdV=UwwHlamYp{g?e;^q=}V$U?0@A!G3b z_=<9{lry;0R^v@ojDEeztO~f;NunH{qw(*jJ92c5?uB#26BwB`usyER*BV1{+I9#$Le|hz==3v*IfVhgXL9^EFL+fj=bVmx(G8m zF^*TUoBro;)Qi%{V7=0Id9Melpz(x9U?nE*WVsB6r7Syu9X<|sH*|j0D?2Sk{ zz3E0z+28;*9iv?FPsInbJw1jVh4FPg5w>vNyo>_hR;)1wz5P^_K4~Ql31PKz`kLV( z8_eB#%~I^Uq5`+QQJOxHc7w!oo`>{_vXAIa0!*gMWz|MbZE9Y&LFhMK-OvvyL&D5C z7W18R)SDL1effGlS~ZQ;A~l!P+4YQHd)M=tSLQx_3|QK;mPk?h)CInOes@6#;ygZ9 z00l3S`HUz1Gw+_QdoR_V6r}RlWqlBQqM4d~Xq94H%M()cqdv%S=N_&FUv!0Zl|izy z;jXd2^5$N=XGaSzepNf<|s#=^g9pmqUKN_()LNR-Ny7uNJu4%?EXIx~r z136lEKqafg=F&v9bRH>OlQq8TC${j#?W^DeQ?Vg;GZJ_gQ0;x_rB!EF=BH5@7XDLM z%=PeF>s&j#o&V@jn}U6)NRKh$_23JWndM4H#pM8{@?4xkm+)9?UWFeex9&w;hCiJ^ zlxPZIikhqctMk6)s^T)MHx*gHUx%-Wlm7)eYk~aC_V#9UO_-b&(I-GG=jrlxq`-*I zf`|1w;NakNHDc5vin4i~eo0SAhMl#uA_Y}mVpk9a*t~p{qV@OuDN>n-mC-e^%k^IA z2zI~!;qv$A(iwn}x{*zl9%gGN`?+-d(g8}V18wQ`k@xsR+sF2yx5Js=`Zsc-@|WWf zQsXw86=yTwzjnyBhB0Z$wOQSv>quW~FdBR`xaod6yu;1yRdiQPBiF}WAis6fPcZpW z2;h5D?B`lMopTZ<#$JsX!FoRW=^yb8oa#@9M=bJ$hLB{PGuBU31yLR$5B8%2dV)+7 z8`wF{jbX;tu-Em;zX*(!VsFQElx>`W0a7UJXZ$i&t@OKo(c`R}&(1$>x=5-%-xyrU zd-mWwL=Gs6--SI3E2ZKDG4J=lpjL-%=Wej#v@d4?q3KyiQf1R~&^?k0u_ zW);E<2EtvCK)XfCP)LUoc($nfu@hkhK|V5|Fr^?FY)Am!GvC`Y8c3>?0X^{3&V`RP z!=>(52xqu!a@F9OkG)Km&+;fC0-aGEog~=Fhz|-FM9S;{IRmg?MzPmgV#n!8rZgwZ z+)148^qOO+=wB+}`OY4DB^z;O#SV=&WUf&Z;b-puV(ahhWTK{qgUkS$&xf@r^??jR19Un% zTJ_U#qtXQ;9jwDjM5^s`-flGr`BGA8&#pv@i&14gV&P_{MwiTOJqfLk<;S?+Fon*6 zmXfgJCQzxrROlrRsURp|*|u9KUY0M``+Jh+a_Uzfe}B8-MlSfDJgY-TfI^E*b2X{M 
z9q-;9nwqq*xY#Gf~dlEvpmdhC@ABA>YYQlpz6xq)cc$7|g2aj@U*nh3MX`Q1qS+**f9KWG8Rb#5pwH({Y+wg&)_5j(c z=L-&b4q51ZcU=nVifjIzJ$%2;6!86n*Tab;?oY#hCk(lZTuHTxtT!#E?@7#_1D!&v1djS<&gkPT6vK`F5Q_H;<9d*RqtA-L>>;FG&{by8D-4{Iy zqo}BWh=S6if*`$v)QCtIA|Sm)q=SI;PDG@45Re*`UL(B|dhb;_gdS=jKp-K{J^uc0 zxntb>ff5)aIXNSH?YZWjYtBsxpH4n6Kc(m(xw1&DBhyODr`P`uXQuJNREPaZb)XlY z>j>sfM`hKz{Dmi4Z8MRTkuUmOrT^q;4YBrs78PAyU0(MpC;|+>emPMjs#}ZL(hWR% z{qpou_a-7tWIjFUhBjRIB}pM;OOj@W&z)87W@Cdq$%)WgMaIi>b7eaR&)#}0kOy2Z z8bL8jF3pVCf;RElwhPYNob^xT$#nh68*=9iY)=+?KhtW+^sU@o=dCvCs~YgNEwiw3 zXrnHc;A+|moGY@steRWsr++I};8K1ryS4Q5!G{kl-1NdrxpG>0zh(+^x2$o&FH)8& zlU03s?X;i0kETs%hOY=8j-7(e9Lb1*rZmq*_klWeF&th9zx0A$UH_fe{lm*;coWps z=YeZO?k<8;%bb+bC5IvaSOpvkVFiuA3{dN+`K3j0sk5L7Z3p*0YkmQ1vuOE%B&@~d zL>3@`2oXM^SI(j<>R?)!4(6<}*cSSsu`c@e2ikr`Qf0D_u=e`pDhv0Vq_~erUs-#W ztE1?1oDi@o)h&qbTLEDIEP+YcVqhIb90pLRAlqKRb}9ibd#hXoeusBW0! zP4ToH79){vNbjCwfLs6Rfm7YX1Lt_ZO1;Uxu#H4jAusp9@qi$$3NQWG3pNL6 z85MceaB-T^e-f0UF6t19xEJ_?Q^+|0sAr4&qb>j?Cq4V_koFwV`=;wA$v%Yh1sAi6 zuW_##RDiJm(5TUF6>bji-UtL50M}7G^5&xJzDIZr`{PcP3%H!ujjc8YXQ0M{CugGV z$6=j#V2L79VF0Pnb+otDYG?rbkK^C?2#VzyoXEusA+~J%D++HVGgH5L|GK&E0lov3 zGgBw?J7bTgG#iL01;CtRv@%&Fg@~ldAiCHh0!{0d#d^9D4uxClG(? z#<2>%r5Y<9dU4c}39!-S02_V1#UTN@I5W-yl=C#`m5;(deW&X}NsDxO{K@`aePcSY zLHwj_Yvp^E6%s(p#$>>Wo-<7R64-Z6rvM$ne-7mmZ7LJ(nJO?kc8{v*{ zbrVWe0A1d@*kQpUhicYdyXd@h!dJiqO%*oZW5vE370n5rQgS9Jdfn2S82?#4EoveO zW}hZ*E(y4nKmQ<%9p+Yghg7Edoq6EHZ+2jR*V>k@zDG6*h~)P5@{}@FLIe!YiFPAV z#ie#c13qny$lK%PPD@BqdMpXieK-01R*#M?*-O`WG^H!FlJ>XK3S)iTM->E!W4q|4 zTUu3s4k3v7#8iWOF+rvTbjW{)*`e&0)93UA%`CJ}gym+j)2C+7`t2{r_K_?-G~0a! z*El$y1XK?fi`etrjj5ITEADNNJap*0oXC{tqmoylUSxo4YMC?XS#2NJNHCjsbSh66 zB6=49iQ<1GhUF~XZ=?$eirnK|oMyp*{h(s$DQ#3iSLP&UA|_UQM5|wsl^kxl*}zNn znpz-h1H33t7;} zWi!hx&$(}WdKS#{rWrX5UpxSJ|0z~j+TLzQ?bWk=pvnGFJYp;4kM|@*tK3R)1uI8T zo#DD%@e*fJxnX%p2k#2sIp52R z+@hC}y!U()ZflVhZgZE7S8Z3}ote}|To=;n>NJB%?tU&U9E%c@C_-85sodh`Thmpu zM^+9UW8&;}>kc1??GmLZ7`Xb{OsL3Jbl}{Quv(+N#ZAPP@p-uWo4P2Y`OFT9Pd>U& zU*;tax%3w&hh{S|*~(fSK&Pa^ps=QE1v?eL59)#=Kq_P}V7IQfXsD}c)(+bGy5Acs zkGC|dWmYo8Tfd1?%71ekEevJwTAKRQWL|3Yq`k|VD>iSpTV2iuJ=)`}L_E^-t<1sL|fmwJ{MvYtmv{PvG3LTtp;S0;7(cPxCq4oL5=*c3qVxajke! 
zJoNG$sBzJsyUmS252WR7%XTB#Gd7ir9D$bPfnaa^VRiy~ES7K(!behpL^4pKa zb(K;CsQVSLr+_P+Ef|$OiQENnYo^cgR}H_%x$sL5$cCvPDiw^%n*>;}O*z08ogvLc zV{lk{G#q&}d$L9jIR{$SQQ#UY@X}&E#I$^XW55})`&nCo|0e!Zr!V=LNlCN!5YstM za`~f*fYSNV<`^p5T)Vn667-3x1fVtmMLbxnd3UvbBoMoN&iHTIRY#IKSt(fyE1li# zV%{Yg2Nczwi@PbU71mv8Hoybq=rk+_c;$$W_n^fL0M}~~z=^WL6X0D0Rup2M^_@2y zGKdBI7{JHeYCxZF4hS2xM^0v{2_O8ksDP)N0GVWu*B7PcnR%Y z4uAlJ+95px&@nT#R!lEWv@7*fq5O%`#VJBE0qXN*Xu3@IWAaCBt5%9Bq*W_FiNqgct-zR$uWHm@Tzp2(64No)M_8g$=X!>TgYUwKE2S3 zF}>A*e}0&&Mmvb%iKH2S);AFR@!BN{0qB}!N?@asYebv~Fh~RZDz*ju+MJ`JFS%m#(WA=c#plf> zRw_&@*G#8YP3Ya!*||S>Dl6_VPEMLK9t|Z2-Q%6{8tVyKQP02;CblPHT&@vARFxGZvd76VoqUGJ&X!w z?MmWLz7 z-*(Qy|DYG*4et8IOf?8#2siBq-K_!PKEP>~AqYdBpqrw_yo@@N2d7eD`=#SE&lmsP zqmo1WH;!?*N#3s-8kwWTIOoEuVhLSdS=oNiSd6XteE$^*dlVas;N(XW$=P33)f8a| z0k*=+%)u_EG^2b39Td6Av(BO+m(E*`5hQGhxVw)c_ONDhoF++Fe=fY~l-Zw&CkdMe zE2}z6vg=u`p6sQ#iH<~^cIRUT(aA!Q4Tfb`h~vIMk%EClq8{iT34TXs-I{HxI!Z>m)UQ!McNwxYlByC6w!IMrYmS-5yTE z8+@e>cG%yp?!={N8QI9rOPNuO{p#yk`jCAwb(MYK3+O=4XNz$wbi`&9rdqFDzoZyK=vOi@3qlnf2wKJ&ON6lo?`z zY~cXg)+NYcd6a>jdjR7yjn_!&Akt?HFfH>=&0UQ*kIDFtY?z9#?u(xKf1K+VjumY+ z-)`0?lIRtyyS%}Gnub%l!1tbyVXR~`v8r1lr7ETR zit801f=5fu(fhN@m~i%JPX=`w!3!&ncRJb9BuX`crkt4KRTIQF-bAA@7LW2YmxT8J zNCbjdxq~KSZg0(}(NsUneg5*ek10)q?Vb6jAgHbZ)AoJXOa;*N(PAufvB^V85~iq@ ze4Nu7Gq{sT8^LY#V`!tgrtc@my&MW_=AOD@YdF`vta%n%5oAjM3^>P>w2Avzr2g~- z4qgHer?I`v^Bzw1Q-eIDUaIU2oz5|k*%mreng}43I8sdTchJ7j3-xMv>y@svMKlFq z>u9uZ=)b|`D%jJgY8sCKlv#jzLo6rkM|DRh@~cHWHAh+LIU#dM|Mfers>8kRq5 zi}(UyIsZ3qbOQMXfwSzaU4_H2q^60OlD#A2P~fbK=$76Tp>|Hm@^lkFGYX~9@T1Iw z)%_oH!Ad+3ASL& z>mdLmqJXzpyp^1gCYIryIV*cPa}?+p3-L8@BQJ#@c`2 zyWE9*xFd|yEG^~VKgg|5y33|Px5C_+;fA0)x!M&`(KwMo`{)<)dAkauXSRTT{|p7j zn)o$YUKFEAffGI0RUK?!@qH+5{ifnJ1);;C0nX2VpJ#+kiYb{@qx?H#DLh~9zDwy+9AAI0&e3dROQ)ZK zG_`H4uxt5`#AouTI{z%I+0r^H-0I!(!6`UbXyKC&S+w%FEZv+@(shFbn&De6ife?T zk-e@Q1$Pv)l6IQR<}^us{obHYtlurdF8Nx(PlmG|b@||Mioxuu7_x2dDX=vq_nz$S zg#ER-bJq-z&N~ufHLXef#A``ApX4#MDy<0axa|sxyDxu7ko|wXs{Z*^kszL3<+>$$ zuNv8}s}DJdjVoE25ufOl&B$QL)rfzOtctA)8x#Yx1c~E>jtd=y*}#+KHv3bzNs(EN z_XR&lcg~HlU7sbdB5fO{HWH~$g@vf{yueBJII4G&kf{AgLLuwL11hVv3siNB@DAJx zb3+_0Yt-9fPMrG;d#mTTyRckns$#gMp&RnBMtr1XWk=0WbFLhwiR&FIbSMMX5v zBZZ`dEX3ja&hJOq61A`V+;GEqi_;?R=gJjY<%xkD39v}s%a~%K%Emb!*j~505UKlo>(zBJs9T(DZ<+Ny%tm9o8=wye z@))Xck@fN^KJ8xJg#j8fzjnz7zQmxp7!$|Wrn`!dQ~w#LpLhbn57v)Q>@XiLME8~} z1Fz0%iF;7I=mQ_>+jDO~Rh)p@?<^hYNJ;_NG?R3Zga%asYdZW4SkM3GZ{IhA%Y|Z_ zM)HZYAY5&O(v>s#l2LDDiObI`AAn#8K!Yx9dUf=3YjrakKyJWu$=h9Qb&bT*?1E15 z;=~Xe`LxaQSwC_Zt_-<92iY-+-M{%C$rEy_%L`yY8Nqaf4C8B(D3vSJ-xpFyvT$mA z7XLMW7ufn@iBFcidX2hxD-7o66P64G&Fi8p638}QFYq25XSAhnfaRfDxF=g}m*Q96 z0JQu$(}&0F<%6PCvT%CC{VD=w);C$^!;`ND_dP z1q{Vb=WX5(Y_NweTSp{R-Sx3^q`I~Bm?%TBjBwhR*z2wFog}Y%l3Q6asM?!nR{4Vd zBe}iRu_!b)VKJUR$8pL>AO?{h6=pO~xJAP5i-Ia=$zP;DfDC{XKx?LED3j;zr3|Rn z6=6QjW+wmT=>{iib=hLX@E^{&VsD%5xhkNqx-Iq7cZZODHMvf7PZ(WV+K>GnT(1@q z;_X$bl<&?~w`a5=I)+6sH9S*<808MdH8q$ktFof6QsLGHH? 
zOLaM~r$;(>D=kBbwW_m*Mp=%0C4Y{#BI<{1S!gFk$-_|kbYw(!Ivtjrne5nWwxO?m zX`)02NEG@&0!~hH`$j$jWP^U!TD$D-GRS@`vp3B_Jho>7g-F}NR(3+7QOXv{#=kiY|dY!-?%p*J3*ATqlXMR;`fxO=ZTo~ofxnX_kf>zfnCc> zgF_avs)qtum)n{>s$y9xRR`v357T>AZVOlrN?#Dal9rqQsQFIR;v2~32U2b4G$iJyODS$Q3rQ0LlOh^CPbRB;focdFf%%`&&$CSr=b?=#i|0A)v^p~0z z+)OW@EF8~1ltYR}( zPP^oyRLAB_Hnw^??#uI)$#MHfYro30mQWHXgAeh~qeJ9>^@8QK05T)^LicX2(yyF9 zRW019;Kxn+ZirjZ-UM%#1^(Her#uS}WP{jSK9BP_y>%+km&!?wD!B_!m(GaIlwzFWu_<;`-T9?-=WsdYEm?uHOda^S{+!^d7u+ze|Gj$dKYxYr>{VAFo!&wv*bs@H6OyBTCEFv%Fc>Dv>w-5)&|K> zYNQl)xi;RuH%nHORlVr7r2u*8C8ar`ai{Lfj}g3l($%XnQq742w+q$9ZoRScHiG>q zfziVeds{rC$$S7DELSpyamf94?h~Ou-)&JJ-&7U*%X%*xgUYRJXdbJ6$ipw1J4vAz zRm^#Co0v3BFdJKLUw+^e&`rV3R_sf9Z@zwQyLG1AF6;J~EH4fMnOQl;UKyfDmTjVi z>&w5Ez3e4JY8utE#Sc$mW&J}4#+gHzo!C<#y`o$l6B<;FxP`toV^CwE3vR5FZ;&7> zs>Mpjpejod;NW5ZoxIc@-PrebB}q@5-zS~)N)$d)^^jne0%;Ro`V()BA$Mn$P(qY` z=ctP*f=*EdY-FL1_3A{YYME|#&NOS9As}D+QE5hZY3oRS{;C1H)_P;+U{A)WDCdnC zAtmo4=aaQ7sSLtDfPSl4d@T?lNLb{7rk?sAhoW<2a87tV@gK<^ zxo1Y?oSnLyqtgJ4s-1@IZiF5e20U)@gJlEY(Hz(gMP0e*kkczfcSyyf@!GB2T9vwsp zSO0$X2>p^Yy`^naL633K5ink8zp^M`v-D_u@9vimwXc>BsS_^~pya+Wd~_{PX{DW4#7E_@8|~r=XewiJvD1A`iQV zJGflD)jPQKi4Er7Vs>GTEkkzSZQ;P#I~6`z6m8(?j0Jq9h{#|k;=~g3GG$Ej1B;+D z+j3k5Xp=h$Cs&~4n+x5pHU2qp4IL%+2NXlKW>U2{Heleu*ON4+dm1gP&aYcv{@tu} zgOhkfhCoRag%ltK0pp=pfX@!4B&89wro3@^eXvuko%HjawI`Rl3U07_;C1Ie|E7tt z5A{D9y-!!teQ?+Py}{leKiZ?HXjZ|>aOa$&wrE1jAsKD0HwSRiLq$os- zoT-3=jYMi@N>Xm|mmG1!uM6WuRFwtVK9k45C%n|lK*6Wl`ni}>crIP3RK_1b*y$*! z0#)|zM4vvfE6U4aNQzoeGsDsBUlcXsXij~2tX2#ZyL_R0v**>_XQ6<65OGft;dGS- zc(Lrl&wBpFM8q@IcybcQVgXg)A_uYA2mqW2PO3xGY`hYZ0C9`GJOy7edn;Zlr#wVM zzlDOSM4*>|K_8>uaF&1*uGl^Hf5AiYqFAMN@CApD_?7EFPm3*n`dWfcL(f$JuoQl! zQF9s(@yEP45u7egZrv)?OA8R7)Y20R=1y2N*}wR>yV%V2A4w2_qr|I2$B2^QZ~wB7 zfzh{cSkuszWLE2RZ3iUO2Oi@t@J!nw|4r+HNj%WlSG1J)OWr#@)eG^H$i+cbVj*Mu z$xAdFvAXe%mEtaT*0LtlGjuss6918eASvBqZ_M+=zi5~;5#(Kq@kZZ=-6gi;z0ZD_&oy=&@cU?wH*4O8MIK>QV>Q z!}{@&8Sw68k*a6*_TL%;pMTk)v%Xrkn?n2PO5Hv>6`4)Qs`{>}ylP(XQ}b<5DyH^Z z_RM}~^2W!;C^I!TvmH~2gKH7a`n|K0^t8BULTPpRMNCL@c2@&ImSrq!=V+pS_J(z~ zc;OkdeUC^ZIfB}5kLC4vzgkoDunZsvDfRF#Q`2ZxE@)|d?C9r;TGR{}Vn=*l!GyJH z!GNGnH}H*RC>S2__O>?8@$fOK0hQLi(ePC`_Em&p+N0ZI*Rl$IjcuuLIgDsbEt!Qsnfvxo7;8 z5h)cWV$zBuIv9@NjpKa)xqkt>!M_ukDH)K_1oa{#AWu%$Be><5^DpCWl!acIS}rd^ zL4JPV&T1{?0<6Jz<{IX4dtsh3AJ02)1*L(QmVDAEz-&0peMiH%*fsHS%5a8vHg}HP ziW9laLZPRml{6*`uDx$sZ2v%xE;OHavtm%+4LH!oC(ESc^MOergnAin zJHS2_uVZu$e+pDC>trCl_7#!${y@ES7+c~WW2!;~9cH-kDmUOmVC-xuj&6Hs0dS0E z?6PMN4USw5e(s-g1S+MTN?1Ld?U_@t z8_~MTB+z`?OYf5Wrk}M=x7R^c2k!)qHz9fUb6nhBX4SP3b915*bnlcH_!Zs>kPx_3 zw;cD*roS4?Qw~~5tQpXpnWYtG+jgim#XDfm42TY)B@X*&uI;s4spgw3^7wZ)Ae*tO zD-W6hL*)Rsl@w>FbaT~19o{jn?e-y4qDQv zfE|5H=j^jk{?P~S4XV(~Ae$_~Kt7>J)^vl=JPq*cn>N+D&*tDJyN0rvh@A?3bWQo< z+@ORK{pEe1pabsB*a+kue!`P+w4t)^;$sQkX*HM?a}Qn~(*ye{&0J75=lW((&8TJE zKbDoio&8KItpAU$F5Q!u+Xd=fLE%Tu&BGH>7(J~IYtNb^QOj)*?xE=>MylRx-q=&7 zSY4p_h7+2(JD#~Ru3${4Qv^pORnmM5Jx_F!U-`Tbtj}{*eI#}G6#|H?8G&yu|BFO` zGeZbQ7qDG4|E7JP>>V({*g@(Fpbw1nN(i1nwz)6(%CCzsq_)sM2bVg?J17wM9cw5k z({fqR(>clo4rf+umQN=EWJPIgYqu-=5J@oRWh6EN;+V;DRR-fTcuWtGccbUZGZNq; z5ou~O<4~4Zg3$ot@{o!c3>8kvrFAJNE>1`(7?`x&A=gb7*95i1)iT@hPCDj6IiEVh z?nb4lPp0PovSSVWoe*K%%R{57Ao$oVl2epfb#9nCl_MVDh%~q-Wcinr@@>Qzm<#6^ zZ+V<*n%h(^BrxefT3|5o^D9@i)9=Qobrp zu8(3T`xmBo|DICK2ECGX%q>&RzWaNL{rn{x@s(c7-Srk48}L!A*JJfc%XyygtpVbR zYi#R>0NUw@)x<_AGQf8G3$Qx=hcNzcKVP{LWk&%!wFi?T-NsGj4?&eNdZ)C_4xP?5rVyBvrZqGEX&;#Lfy z9{TX+ra$+}q8wjUQV>}BO(EHY1T9du82m_!m_M#{8h&zccM{MRaV|4_M#J}6vW!e~ zyp{)%SCOISxS{XP9POUw6uJ82_Ca^|={}&#L8QOuTIFh}I)(hgdhHe2*2|d)Z~V4F z{FHs-Cp_V&rMkR)dDY+5oJ9WAZI9J!fvL5o1{WX(SHBOQCU*L`3HFwPTl$R 
z#lHXiy_0le|G4V03ag93-*S$HG)>jcm+<$5XTaKU&;CDoplKJ#?obx~Y4Glu(-j2} zvvM2wgqJcB>p{SyAybJIBG;lh=;Y!b?-o%n5WH>y5jQjOY(EU0X_1`zx^E*gUb->p!;&Uf1k3LEsAtneu%~W0K^n%FT@`|!JmJPiToqDU!7DIHw-f8Y zR#2(@bp6l+fhMSrgGfTe=My*&$T6?Q3YS}ORoSVysG&P8kOQmo29N61pun0=L_-cdJ}#Qf92{l8~aR-Ac19#?L9-pv&Yy2teTF^W_Z73)~~>IhV5_l z@dIIiz-1#ID=m~W7S8&izvD@Szc=iwv?UuwO|4VUD+ne)CKl;~o%hbJ4^w++3E9ja zof7d2;HH$8-aC8FY&-ddVM~^ES6=yOx8$uZYF8jn@{7_Sj1SCkr(N@`T1$GS=W=KI zbe6&?-N?l}l3749Z99Oaw7E&6(bbtN;SLr{yTz2{Z$Bn2@no{;MeQT67yzNWSk6za ze$*FlnP&5+#VYkBf|iiLHO1{DqecJpSX*vHY_HL}QOUO6X6XE)ILY-gpm! zgBfl;i!qud)B(tS$LK7%(Ek2=koNAOX#mHg@%2dQ+?3Vr_gh~ljnph^qS!uAxXbjF z00Ee@U7ooRru|yd&m2F&R{=>r$7n_=k2H^KG!}_O?=muz>E~0%UsPN3H=s}-Ap)<; zkd80ov1?cTC-0h{aP!G^ef{X@Hs<2lCf1U&+TUF+$|rj3-(gkYPFGXG5{+7hjDyU1 z@`MYgw|kY2`vK(Z)~{AQY&`?W;x>H z1h3)rV(r@t`6?oO?LwPii>j+CkZ$y3vA3Xb2Lr~+7d|ubri(+~pC<{KCfTJ^`#|1^=m91|CKC zX)mV%c~=1;eqam`1u?+*w8^qD)QQ9qznn-4)#=1r(5@|4vxKpaVTU}}I*e2uHe`#| zSWn4LibGcH2>jDS=)c2*g8sMff#*$;1)@7o=Y*5M!G3E;zBsfC-v_DU%oTHt(uR6r8zr{zdlmNq7UnNPmyck7wWela)y@M6G z63nj>mq)L>Fuj)Z$7wqn+wIQv+t3@a1mkF7V2~LIu|~v#jx~TqiU>nydQ-IlT9C)A zfY_r3dC6Nm(;^M)d-+dYfq`x7@VmyS0q#Mi8Fy{{CiExn6p3rHEt)vBYs14N#jgN3 zGOO;KTGqJA3FyZHf#M6h`ab?Kt;%43nFY3 zMIJYB{ti?y7=14JV#!-_$kf0 zaz)-IHo$$OzL|BaA>MoBH_Y>5CNrXkOLk9Zc2WM0WFzZf6Yr6=>C_aTC`YVXt)o&J zxv~;IJ0GRsdFj+fMQOxw=`ggon}vKC4SPDWYoxct7q{WS+Ik9J)t|6W`-VI|8o<&a z*qvyhs1*KR2b#TTHNE`8Vo*Ee1__OAaPyqEFacc+h$0}jjo0TD+^u}+@)j=zMANVG zeE}0RAz+xJF$Y(n|66d$N)ezc&;iwb(z1LE69vnI4YNEe2xf>3hQBpaaf-o-YB~4v zKavr8rDPy2ahPf?;<#C{{amfd*F z8K3vOv~KSN0(z(SE!18P1kJT{HE){gCdk$x?5}oKKt3s^C_w5+0oW}N5q-o&o2_%t zyOOcM4BAiPVA^oHWeIfWT=QE~vyQ-U&sQ=4v^^YIHN7k;l27AE;*#w*ge#Dcw6184 zDY$;A(o`S;VB!Dci*^%8WRwe7L_Wunz^~=rz*uwwS9I7rEDm-gvtcY3|zfwj^M9(4r z*2tO)hPxvH2kzF>jSqRFd*ZN+@6~u8>pX$xuEJag9A~8?-vyGI%m+OoE!&`TC)9(2 zJ-QZ|63(pywqnNQ=tQ)|?oBnXI9o!rRXOCI3|MNxG zE$5W(Tn^n-)axKM71Liw*A!^p33lZ2dFMqKzNu2BqFJ$fVk`enA@+9_l(uaZ{8g7B z{l4|_61NS$DmUF}MG|fXl`=6_gL(2CCnxxZlb0V1YCurxvZ!N}uUF}MUHodOtg&U^ z`wE0JG?zQ~cjrU{_RX(l z|F2HO8o|^3MPtaM#QpClV)*sZq#@SkSP@)&AtQuxk$JhPURGH)=k)5uRho!pGG1qT zo#|Di1HEkNiTF0zmhnv2{6cf|kNpO1Lg;CkwIU_}`CDH!CZ%x5-fqyiJ5ZU&_n152 z+lcEPqj%GA0x&lhc(;55dCt%R2HkIZkMz`!^Jj$|760+m$8>^SjS`7h0AbcS>2h~# zT0t4j$-SXZ6!yYU5ke}7Sb&hmh`z+luOTn*cH!nZa9ZpB*S98NhLxXwFpjKZ5g;5t zil4!2OEdZ0p!xmS-Y{c-@Vf=IS>c#3Oc}oSPWKXfX+F3Y{vped9?HfbGQl>7H-Jww zx)^05%VY!<8;HyPmR<>K4MofBZZn;vl_G4#ve-tQL^9uyQjXI&X)XwylchO(eDG6Z zO!^?#1~^ka?pn4=g1+QN8kS#_I`|*__}!JH>r8@x^Pu* zB+k8jau8jw$lT?AQ0DT{W}=iu1up+;SLl}3y;nQE5(oxdIv@_Q#l^2i*UryTamw5w zvR?d0!ev6*Bi^8x=Tu;b2bxDqu3C$@5sBD za~$*o+yw%d3zsNo7(23_L~P9XQM}ay?q7iLdFUyD`{%@dMCZLFWPd%JMxW8XQYeZo zA1uG1qo^m|x(2VYyUZSSDxHv<>U@Vx%{MJbTaxSL@}`qrl>JfqGTYYTb`XyjOH$~H zRBw~os!A3447s~Ge%=7`E(c2h64+#?>R0`>$VKeAmlsSsRg(jvWTL%sXxGo1`Sq7E z4rlPgqxdjSxMr@zk*xRn`f|8l+P8^MW5pT$5#$2VTzl~#Fn57zzs3>@dD0Qqp>EKylVcfb%g*L z#?_XrmX7a9))N}yTu)4})!)iaZWlG)sC|WON`F^xdqCHF0OL2?zG-;W66%hU2^dkC zm=Rv+z0cAwn9%e0p1Oj+BaiZz%o)0?O-Vj)@>3w=5@`*-$;|o&_P|b%+Owe0$lZPO zrRxm3(|Vh%lye|S|LkLOFLseix$0;-)H-Je{fN<~osW!TDz;uaZOWvA!!U25#ulSe z{)MpnW7HzDx;N)Pk}uP+=oCYs#QSVIk@puCcD7h>_0p-t6bCH;%vbsrZf312K5ki# zT1%dPHs#4B)085g(Y4ciGKFs)Vz@UrTZC}O8V%DU=TD+ws@dQ!euj~Ag1qO*xd-By z);P2E5~cQoH$|x7fOx2F9NYN<65+>Lxbu|k@E1FEj9=G9$|>#1`z2Y7G29p0yfua) z6bY`uXkep0KveTXhO$@uenQRo#}Vd%!w)tLA5fh)#7^psw~#fTlw^&KjNIwx|98TJ zfU@Z?4%{oGT$4C?VEH|9GR_sMWGg6}m_^8e-f^oaww6A|_g zF-A><)4>Aoat)`O%VvAimKLQX>)tMsO4KO^gjlxDa~)!(k{dk-t0QPw9ATH@&BD`z zgApgbwkCBWHzohoA+dorLxy>yFXt6eBQKaLVG6Vw2M0#=;;OR+WgNB(l}wx23)}<2 z&oOGc{=~;O?^2K1sVLeB&6cGoA8F(l87X~ci@0q;uQjN`y3OsV1~`)Dep8u5yVmEH 
z5$9J+%x~>##y4lbLXL>$JFM_B@OXbU2qO~Us}a?=ylI8e2oexkfgZzq_n*&Ev&CWMEN$Dz&*(4t(9Xlpdu)1>3Xz*3`A2;+^kFo9q?N_9y%l~&iOE`-{%+p)`|q)^bWNtJ#yi7O zh%F9^n#6d9JZ-erZ4CRAL3QXu6rI)^fL8)=V_pFhVW83FaEScenkD`p31Du6796sF zZsqp^z-!7kftKk1?TbYn|Hexj$Y+d4pB1J@OUFO$RskfEfkdmO>{XV^Gk;YF_SH(- z{)1OOPNea;*bV4=)448RThY??&J2lG#_sY=kuzzUoahXK8AKm z(S*WIKGnE*ZSnqgz|$;Ry4sZUQTQ3esUz2QYPW^IHnN-LcBEeQJ4c82eSwoyYmnBc zh-1DCAKk$)wbYxAUfj_h%U9@*QUtK-daXd`$nHEG#EHt|txpS`5`e{! z&y~L&v5;?(eZTVf2OJol6dgsF_7f#&u+p)b3wpMwfvM2faZfk>xUQ(!e?62JJWKVP zJXh_>%u?rlem%tRwecW#P0bM+`Ng_X2U;a~63VnM9~EE}t$8#Oe$443PYayeDu;7$>W;K`ChkS^oV~$!IOpkh<8$4 zXe38z%cy*DBj|*`pvT$xeR-9J`Dgi{uXk5s+Z#Ejv<+Dvagb|M@fk#Ci)hYOOWXnc zAe$R&tE%MLPU<2=ng3>7yO^2Hx_-2X{N~4NozoHGrRR2hjaZ>Q02;}!;qCxn3j9u4 zE@Hte z$Im2}o4N3gv?zbKY4AY-E4KLT+*YjAGS{{>H{xw>tatBvrLXgNGl^e!dJ=Fv?#(u* z(0+9#5jwpLnEvU+qynYHC?6Oe3m^gK`f@-}dCIS1T^)3QFviFlhy;qE%7-`j+qq!~ z+o?Tj=gKiAtT{PzNGzH98St?t--_2jqx=+AN8l7L{P)QMn@sr0Js%M@Zney}Y1yx53rbHz;4Q{xCnzLRSmV?E=_{6@#T>q`5oGggIs?XgZrfKEZF_II3G;_8h*VdP-Jih{_ z03vk+Z8OlN`j0#ARsP$J;)EN-(77h3ju~o``R3pyvD4SSJ`s2_3q7%8BjewF6h)e2 zp%he_*e<_l3~7fjjm^Q&k8hAs2q@K^7D|IZeBg1eRwBWtJ^y>mjhf4SfedH#c*O3l zMYDgNK60<1R}>YY*E9F4w-BouoFf0pt^QwvpVFLO>_ zHR>$HwJa8E4YcdcRB%0cH7M?~NY-qOe!*Rd^?T5z@#y#8X$<$nt~$at3p?!M3_3WS zLf{@!h7G8}@Ox8moX%xW|B@wgupT5)W*wc&JiGG}Fo|*XNv=CjCMriM-s$TBguXFR z$KrGJUPO?qj- z`tvr&%jR+Nr^gXq175-Y}`0BoTt{d`<>9c1A^2{Y5$G*CZ~R(45DZoTKC7v zz6Jf9GT}vYY*O1t!!#G^{+DUis0Lpt0whRpO1HHP5;I2_! zjz|P?%#HUcxFIMgW`CK$c_QZjT#~rt5z?K~!B4Kf2BYh+7BifJz`Z$gJ{-&D9Ld)2 zLyaqxXsFvh{bCJ%T@>@>+844_WAQfVtynUgn1tind?A_yBCEu8`{Lmdvr?hxoK<;7 z*@y}^x!muF_+;K3;;o&z7Qd_C0U%1rL*j#I_WQh!biAl1Cr8y}_=jmuX2x{i)d-cFVfJ6z4}&*t4*&F>~Y^EsA`2|YP2 z>k`=otxS}{Rs{HQdOvUj9hb2kv5a&M>z}6&w`PsaLt~{u+e&4=dgCA}KOJk-A*Eb_ zkrIt|?BYT@^0;4a(w|Cm&t8SQ`1kj*_O4uhLd8T1k(7k^wX`Q^2{ynh>x82KUD=)T zi1NwoN|m2vnX@OlQ^U={KN)vYu}72eb3@Bjhd>G?vmKH{4jq z>lYoJcSS{pB(33`f9qWEwV)UXlbRX2HheJA{4Fju)P(RiPkQB<`5k} z(JTlO^z%D%!P$3h631m~B=and?>&H^OnR6MDCXd9Ia#UOgy$O(UYa>!!#LU*0viB{{!wVHM8$eifMSDOykHRjHyCEe zrT_`dTE+ZF0^qfgD?dOe^-FIs2|EL{trzSw;L|10)!i1$qVOcDK&oK!;o@D>p1f{e zkzykYx>-8j1Bl_VIixyPRMXAhUi7jg4QWa!;|~~e>Yt0zi&%YEq~wgLT$gQF z^VBPa+HG~T+^lKR_dDdeO?>rP<1@I_8=QD7E>k?_2kYs055PK2*tVM9Z38%OhQj;K z@ozPsHn(soB@kjBqgN>YO+Bc;1jpZLSeB1!)ITU&jIVtVw4og=6AJCFds!Z6qSOJo zVEWXO02rp0R4M*lpuGFiR+U<>yc+Ws=)=+T#QNG3+Fqx!4%F{0Ltt^0IS{7n4tc|O z4f-k3F$?)ugJOCQHC}`Kb4=qe%bvQmgyg~ zx-aK$@@V)<(zv}Mz#Kl^qKNy}=-ja?}<4->VM&7`SVTkqaOa*&Tiuu#N zunaxdGJu%0aQR0P|Gy!kJElqiBvQ8XVc7ME-$V_5N+C_8--ca>8J_lI4x>TT4y7WE znT+c0Ne47t+%cc2-hsQz_=Z=!l{)yTJMxSnd1zrC-HMH=^-o-NbWg4nTmo1lhM8S3 zZnL`f5P=`FJ`U1rN5kBD9j14u32Lc44yvvH7h&%i)?^!O3!|tg3K0aPN2G~>2vVd* zM7l^PbVQIUAku3@qzj=42ndlTozRi4(nNZV^xj)S4UpvhZr@$bKKuOme&oV%d6DGF z{miVHS+kaJH{V1H5%IIZPbg?e)DkV06OGi5nSBk7up|^Ztk6B*8qay{)`ZdhRcXQw z))mE(#EpXp5=C3l8TvZa?6wIac#O3LatOILqwMPDW$Sv``I^r4+2bxUl2OT>g@Q59 z=zisM?+dOydFlK$$i;Ub80jH?IV^4x`5O%`U@k5oPpC`-l=tY@^!^+_j5Hm|BJ?bs zc-H1xj8@x@7)*=%xVADrs_T0uoZ6RFCMTvB|BU~6fs?a!WnIj|)cnA*s+Lk<*sT#g z9fu{wy$$3O>~Wit4+Gl^jxK0(xD`|%1mNEda~t19V!wX~WxI?<`C$;Y3oSz5`I{5+$%2R`{oiNG0=vWjCv)_ zY!z%RQbVp29#t6WdXykft;d-toW+stktC)f=ijSR%7g!qT_ZfqxPetd<+s zNzjoHJaK3LZqS3S>pCow(5!xZEbEZ#TI*>{2?o3Jrn@oDfxUShy62SzBK_VfKO{UX zjr=AAo%X5EG58&5c%2qDAp=@2`9+CrzniZ__2TIG$LZ4!fZ2yCSOK&26#{g!wy@iM zP=VSrTW7?*#0}A<;u}+Hdyol=PQot_`O|Sk72%*he*ZK+PzaumN%x&vzZP% z+E?N!ZwRO9%y{W`3@01`Y2+{V#MJy#{-PtTNj3A&rNO}!ZEAhLe4=9{ zesKO0*5yO`wpK~Vi3CF%;#_|ZD%kz#b_49f#-h>Si*JG{SwB^zch~@}z{E_GK7*m2R7~E(^W5Sp;3jWAuQP^PLTpGmfKL>9|? 
zwau3}MvFr&fJ1#M2}Myio_V;(bZ(Ca-Wa!Bh#%eP&D9jq)BcA{kQz9G;lFq0zOFGo z7@p}Uq7|P_`6hGCqtCIjGA>ojcIO=WbH{L^q=T9F!=|T@@Cm-rMqcmo()E(g3C_;K zL*fH-M{K3R8GGH4Dm7;vyJ7!v10rnEe9q_B2E^fHvrU}V+p+zaVQ@LHU(ybFCJR4k z_pQ@+_I~(av9jlG5ycA9r5HkMR0r-(jy6<-<&IQfVo|O+PiCIHd)V!l2B)-=(4d8{ zpKN5H$+JUEp2%(vCZ#-HvlwW*#FZ~=BwIq`?_52vu3fyo%Ucpejt}QMJImn&dwSdF z3aje9z$eG6D{`%Do1bujxf0=x3QOZ*i84PfHad1$eZJCdAKfdkW15sG0V^x|ZbJNwl^q^otwu3*aBW`;z7UeHk}MN|vVl!d4X!^Qam-k9EFrj{o=t1HuL2rC2<_-nS0~tP&6T+IQ4YPD z)!uck$1q{}3Sah4MMX!EsaCqKW9vt)IC??(1Q1I;bsKEg2~Q#Jp71VOgu><-amx#j z)0|u&6La6^F2O{Q*|ymB0E??zvFy|Qy)b(ym|ZyF#l-A%AFPYGyrAIW`R?*-8}`Uy zo?pahpKntmS2S_Vntse5v~2zxd`PX6n9Ss@7E9O{cJxE(>dg8G=Q2bzEqkBV@F2*Q z8~lMbPP_|f-k1Rr%s|nKyoEYx;3f@kYeBY`zJ2L!nj()kEWGTEfN8;aoW-RT{Uyrx zfz!wfxervn73C9hRwtO)$+{SK%T4oa%h&f_WhTP{-UL>=8dvUmJ69|x%H>XXj*#3# zVx&-gSzy7eSuemmXGnPb7^!J|2n~nsJ;jjf{(>MuA62VLtiz#2IAZT91<@5WvK||E zs12!{d>GG*%1L(Xa_axZUxc?tEL4K_hbIuK#Bg3Yx#)qq+KgZUJw}Y=m7^0!wa7u~ z2TDoIsz>6@f>3WyQMRTo*Z1y9P+NiuMu$wn0`WO|4dR2y@M zMg#FP_x`KvN%!t!O@J~^r&zZ|7VKV!CW=PVCrwma(d{!ZM%Vf~=lNd`X0m-mbcVPb z(-6bZV7c3y%UtaGj%@_COz8Cu+_XbaKVAHDe#A}8LLp>5mkuf1SxIoSrdUt0CvU6>*8)75Rm6*VMCM zu_5A3z5m@J;_4FVab2&Y(YMh&ksg%~(CJPhD7npSZLtMEU+S$Z?Za&c2Y&p#yFZ#b zjE~YwlXF@^VWG(<`d4`D4Xa{$G`$SLw5B6a&Uik2Vv<|cRNN620^6#k!5KKqdE-Ugm@B`i&obTV7z1&?q z#lt(weR7309H_XZbP5W~g1ip{KV@YLV;8lVy`bZ!>E>yOR3VkLdoe)$Q$@kGlq;3Co z975YNPlLHuB3T3FaRG%b;V8N*<=_sj9#VL zbeZ~KBEOu5$O91tcxgAKCQJMMX_f*5_Aeipx$6X{QNsSOacBb9%x!T7+EtBV!z`B|5wvS8#>xVr1o@Fh7MWO%Go@tMjS0yU%`C^ob>3ItKxd4&?H; z;3B=1L_}rWuwe4OyTBOBviZnaFr0N)`!|UbS<1gE6q-6XRE@6eYg2GNtEDL4Lde0G zPgkaox~ld{EbjYzU=zG8I$Zp0LSA{?whb?w<4QTf?60mls>p?kt=}zs9iS=STyG-H z5>?Q=b=j13zL9;ZCbv*kCQRr-67Lko3GMya!vA4J($lHlTQvzIiA$)q7j>|!Kk98_ zk@XKu6t8mpc%!f7=;_|@_$`GZBfVg5RdRgz0uyh+7DW?xqloEm&JWhda*3ns;;xgA zb%gtIwI!C}#aVS$b*C7STsBW=o?)ls^(UaWDS;lV&kr5Z{&P*YO6ZE2gJ>&U8 zrG+4}{(uNsH;;S0+%Lk!khIYhc9j^B*bJs8+~!>O+EBAg<&r45nmilFkiSdOFYlu6@EVPeb( zCWj5>Z+a!m-NPB@X5I8lVYq6yzwtU7$D4X;wa@rmr@g+!{xl;D@MNUVbWIc5Y6o|! 
zhtr&zcaXwth`GCNhnz#r?^C*4OoP537tda?6DQGObCZyE6gyfJmSf5?m4Upl5Q|bD z>B>LyigbD(|IYXS5|6LW&U_ikDjR#XQ)RMjWR-C{9{9;GqOGQU^{GtYCNJ$ujGjhb zvE_Xa@R-n0qr!qjyXLfJ4qj(&;b}?V7nbBhLt>Lj!FH#ZHvENc#)j(@UZ^91C(XA) zLRhvfacXlb?n9HY;r|2K%+_gBDFu`g+r$TEMn{d_>9Y|x|3GxO1vdd*}%ol5T?G5^4&|w*K+L-D*o@K%~HGz#zewvK`4VNi6y~ zQ3c%}8Upuf4D-qEyyw?rDUXiy2QN#~d~HjIM*=BMFhjM%SGIHBAH-2Y+g>!wV*|SF z&F^2AoIrCe`ZAVS@U zj+w;~egG8fk`)M*aE!VaMe)bg^M6X^Wp8^98vHR|t6!;B9k{GMg3{uH)ldo;7x$vf z2qzvpWHW>U$2!V<1emtq=qbjUUsu%$7U(@+HZBOFSHOK zVWBF6@shY3<>o4oCH$Ux`5p3A#$DDS?MIZ3->_!KX%1ssfz~I<>oeaX>vAk)Ll)G+ z$k+pjilSRPzI8T28^BGKJ&B6Lx5(ayZM43d=GX0yEv{xP*$4XYiQ(0}kt1l>%#}){ zW|;2CqDfT5fD~t`0k`bKLz2GV!q9uH;za(>x1Y#~hsCICgr`F;m&Dp zDKW7>Ypjml@nU-%lzkwu!bae%c@9+=7iWaawK+ zj>CU|vot5@jqHd8D4A^5U5S78)}6czs#eM+S3P)nLfm#>&i;4E<>92Y>gFN~vy#Nm z7y<7Ow6gs!V;^PTu-_Zi=>1;#cI9DzFLJ?wYwL~8pLQjrfO+u%UOP(zD5n@@n&FC@ z{g;q)6ukTE78d#6kJ0=@lt25#&CoRcCgjc3w-@zd4DQXMEWYxOI(8S_vxlN;y(^RRk+kX(>()_4K{H+~8aRCP1=-pV zCCW{w{q%g~oi7&wr}W$=qEa&qR^@LbUi~^H>?!itI!2~*E>MA|`9qV_&_!sgY1j`J zYM^^1hCS-C(WsTM+ICwlT_t-P7<|4ytG(H3f@JroS(E{x9V>+KN+#ZJ>sO9){w#}+ng~ox;P_* z6<|(I@yg9InTau%i{b!WIgu?$B$5;$KVc zD4(Z#-uk zG&YfUasVzJUpBO~fB91XgLT3mEzwf>{8`1kiK=~eifQw$>9?gW3(ta}ND@@_$gb7= zNpnCM+mk{`i_SPB=9RCfCBQn^Fd-ZRbN5C0PGYGGmZ!wtm+Yx|DR0f75=FO6Lc_YQ z?R~kgVtDa|8hh^mMt9y(IaRQn+LXBa)OT#Pv*ijnDH4nJLQaeAp#^F0zkfG+u#aBt z9~knc$x6^b2#|0p*C6ICzpDx)7|nUnaY{lrd{~YQ;*2|Jg_B_Zrx^M9vrkUbUhS-( zBsos+9~4`*OZ1Inuox@nc`ib@YS}geqSxyOudu^1Auqm_W6Yypt%_ zI(`~d_adLqkvi*0`fY;kha(&1PX}?qWpSPU3pD~ILt|4)O%$fb#jcD|$_WMY+P^u% zinb-uOKQ3vie9gh87k#({Z?mDQZ8pCRX=Axj3{*||3UXDpny?e_n_2YP*N{Hb#qxug{ux+F^(J{S< zS&ad%G)kwf(sL&()FXl^=cT@R&cHb$e=1HQQc!eLlFRSoy9uGggaqV|0L|fqi82z0 z|D&>j?G^-w+1!a12XSczfMulH=>a-}ps^qeZ?BI`Ii~O9s77+b*Q8N_UrQ8&Di?Rd zlg)2=RSZW>+WY)qI$1q-`{OILnyPupo54)sb@|-vd1V`& zV<7UJ8zz1RbI*MU&lxBEQE2g7o}}kf+J`&zdpU zmlEefwAU!yNE*J@+Tm7)or+r7*sO`mPL_tx<(Y-ZC_N9N=iX?YGnttJLGBj)_L55OW5gqVV=!2_cK$*%_hlP0ZPh?7Pm0d2Ix9t z@gHm<{=qnSD+(KhU)3pl!tdO_Iti*?i6uyD9Nypn^_VM0C-P27PuqX)18@#hB?hctb7}e;=AyQY9^o7`=kp?7})~HTHiSFqx`{+*`L|mh1}YlzuH&n z3i#!xQ+=@A&dyc?0Z}<7^#wC|v2=&q^?FpX#loxKNHkm0sGn7G z4E_xDBV1AXI8#kWhs*9YGO9vfcXyB@n1LhDfeDk&i&g4$G@7s*;g12YM;*v(Pt#{%==qo&;P&m^?%pf6cRYdPXp3@?7RrS3o|LQxt9VW~Q^ZqGw;t zIFQ4Q(#%kRd7O6N=sy(t5#ZBkz3uTiuiK=i@Er$8eg_RokK;ezf!f zNL%XI9T)&(Ufx`D8Ar6DY=#aY*Qa=x*PHv#$;SCu9S8=2<<{brt;mQiuP3*;wBKg4 zQ8=JShF1diZakHBGyR&W$OH;kJtjmhH}~?0ugB+PUE=Jsr6cmGLCajX6nu067ej`U z>vCo~@ht`YUVqp&*BBVUJIH%BHbk?KE-b?On(`J5@^0g8^_xDUq&)V5`aPvx@sx9N z=hqj&ww*!7PF-X#SG^l?y6`dTY^}syK_z&abEeVlO`%iq^OGl5KIFA8p|Pcb9Y>B# zZAYb?mYwBCoZoWCoTHBlWKQODq~+2n*7f-roRh*b*HYVA~#ZaOvLuD}!n`$?A)!#_F|-g7i&t0|%8Hj773#ZoKdBlk7w zykB2%qdT>vzS(8uqG9fn(Sr8Tu_EnFQmHfjNb}6RqbeOWHU0-xA z$5QK#hq;uz!jms}X3lrrt{O>IFQS8Sh7y-JiSfQjm)}ibKY>2u2bUdY+u-T_AE7Gj zAEYDqpmg~2OcGOTG2y;xo)0OAL%so!RTJfz8%VtXig`4x;pUOHnXx8o*RA=*wa$T< z!kLa*;W!Hx;8G;j|K5WqzC)LW;?>xm|F6ZtiBouE1{9E#OGS@8t00vzK4GunzUjp1EEArQ zW#o**=h6qguVL7#k_LO7f5>LRFu85yCW6+$`>6Ff&KD(mgHY5RV`lKCG2Ewv=}?+V zQw#VCX`zY)eRiV{*~&{%EdKow+^2}kq^bj4Zi}BSFA-;&+uqgKWM67wpIH0AsSieZ zREB@4%5MHLMX8GJP80iTDXBMuiZnQyixb=OsV$cltfgjtA?bp@Of;EjB5$84AN6>A z+vUJpPNlP82LW9+1-iNDOKX9U+JP#e+76Bn%1tG2`+RI4%Bu427YpGHNnnX20|UiF zKo%=uPVkbsESf$(I#N?hYus1a)^7Gpo*u~2{A|Up5(r@BCXhK7**U|4I0^IstbJZ^ zc_f)?tD>fkN^ZiV;58zUc)jm&vXJ~Eu}pK$uLOmGSEUSo0s1sSYlGjYwgQcJxbU@e zheOR{9(Eautl#OQR{vO`_J{HkKdBb2dAdETS#LT!Adg2-;#?wT8Ro0vwnP#4Hny)^ z40a9$WkhA|n(Sr_J_u*4hvQcVUNK3`e*VZL;#T%R+%agnxd(D#c;}L)jFBZO}-caytoy%>&w|wiOFZz8Whf`)2W7q_Gc3EWc zbH*=e$OtJhdp@1_S#^voOv#S1g{wHy=pV8PD-G*%Cv&H3IqoMO>Uo$M%xWOrTckaoER~bq 
zRoF)@2u$_UR|(2?hcBAH@xxFpz^?b^mxs;@PsFqq`Ju3d6U{|#g-^|G+N^fItnoh_ zaEO0$>M?9I)9%6U_2ftShIHgA!hZ6c;Mb7iCn6=y)5wua(6O2gq3}3_vp-jJG;;T# zz$(S95B{)!sV$H`hdoEbLlW{=WmyO>+`9F#CuI+lsHTsbQ4M^@FzNDk>I;53MxOxi3TpT>(j-e9u`iCwB|Gy_4`pn5{>H=IGZPiQ=!c-c$;d zcF;iVDqq#v`qoae_1K=MbMuG1OGR*FFA>li{d5u3yXQ7U6uo?TnN4Aymg>Y;0)i*4kYgpieOhFBh(=Yuuxs80vYjw@BtTHTq*bgQOXi7n=5YR^dxryr>}m zd->6bb<>)H(&|>R=FgPq$J3MR>l!s-#yIDkBu|FDXFGw0Sxn?pz$eM?_~&e}ryMKI zhTN?yeg!m%A3k(~+k-A(Q!23m0!{6|eqxm+!#?|yT`-cVo!RLj7XP5jJIKhcog2_L^s{G|5N$w-;=SgmoBV1t99G!m9*mv?h&^`k z5N(aMZPld0KNU#we7CZN=x$qdBPab@YY7t@*VbB@S!(hi%X~z3je0~|(dO?E<2>jf zU;7^$*#GxNcKiRBbXbV{d*Qt&#B)wplHUI@6SvlS_)b&vSW|nqJ%CrQI$9iKFq~J` z5@A#G?Qu->6m(Oc=}m=oDKdq`f(P@N9|XbD1(lq~u@5q`&r-IOwNHHEOp8L)n)6LY z+d18jPVBfyVBso!&Cz5*DA=VDvcO5bdY?hWcdRsl?^nGZS3*|SHW#@TufpyBc8#-EAq z0H2E`6*p6T7(}(F5`rx3z5P^>tr<7Vh6Hgn6BnailXyCN5H}L=7*HKtsTRDdh>LD6 zSx_CnRiPBmr83?V?{1!v0`{uf3;hw;A>lLbq-?jK&R_X39Ymtr$TQ8ygX?oTm{133 zm2`&aF68c6|C?v9i{vL5*?fNwoLPrr;h*tf;x8{~PimqD^3$m%bZaPxxI@uGzxON? zC*9#SzVC5XMN#z-P)y~|mpLEy8sigu3T8Ajnyt4GDTPP!Jpo9s;&pMd>YZF$=ZLO- z-baYrj{S|=xEqfe>nS~=EH5Z8T|TQoGkwR3>9tE=Bk=UuCY^>u{9f%K4LcacYJjbSYGN_J(XB9XePilLzC z#sKs<`N*^2hX>YXpK|5-EL0q6RWAl*m3KXLYVa@(~i)}PU;NJ%5{G+zgGpL5@o zxO(i&W08%*6=aW!UB)fXPG|c@d_ou($gf0EuN0eeVY1Frt7jL5VSOdaiwGJ(>JpYT zAubt^UX$IrCHCC=xtBbX0L9xPr=Ods(&|8qw|VSF^`NcJI`Blqjv zr;}8SC8deSWw9wIT7#ME+>(VJ|NvD4MB(>BTt;;^TspXpyoz| z$Jl@s86=IN^^uP_V$HCmNCW1D8M7zIn5OUKv~MBDlf*|v-wK>QC*N6vWUtb1oC zV6E9IxQY!eoyErD}1$K4-lLU8JL`2l*MOPV$Zae?(4`}>pok9Q7`8~~t#m~rR ziERdP^;3VpFS}c%VL#ZJliN~%niTM#_uua#wgFr>R3SRabjbg!#EDZxlmEgKwl1{k zNM1GGcAmbndi-92yBtZX#SmS)UWzX1K=<{~tjW_vh>LLm1ZNA?zb2qfzD6+w)R-{(?I%_4= zZFs|ROIG|hmDJCx{3>mpN4W4st%sDCF1n3m)F_o1C_{Mx}+R<)+j%b37h z@u^Rp$YI=8H)Qy?_xrKxzbnY_YI|o=NRmAqpRmRGint# z;5W9UN3yAg&cQ!qSER03S7`Dv5z56^UR|4eZY9RL<|#82dAXBLKc1UzgYWIfzrA(% z0JIVvDk^LH6At$+O?LE?G8f_RWqfhd0oUt0C3ge6#kl7#C@D;ww1y6qc@%HK`EWdA zE)7Yil>9s42TgL2Lve=kf5;jrkxTsFTkK$Aw|}l^(%Z!@jOminD5-CQcO^)6AWMYE zu0`v0TD7fZMS@}4ot(e%q$Sf{B6X$2>8pJ?lXK~3r6@wz0ofbPT&i(XJJe%rUoE=0 zB-=h&qMPPik8l#D=A(d3T@rn5G;Gae1hzXJU^J(B)d&x3O`6b%4=!^~HFXmR+Z84S zmwhjNSC?)8-YSZ~@4}22sLe;r)~`5o#Lw7sq^POhp!1N`5BfoWoaisT(0{@Vw7*CI zJ1PN}qL{}H9cFaeCh_3p-TFKK{rg~uyp#D>+rrpi4vP#EB3#Mr^~wCB$yMW8+3-F0 zQ`~0c2x>krnCEPUrJQgB`n&9?hW{geCh~Lz5!`$!ne;pYdKUkXRK3?3Q9M6PP%Rjr zy!XdxCnxg$v)7s7ti-#E_bkP%#N(6V^X1i5)%CQhwACVq?myA#YrC7jW5=Z?{UY~U z&9`c&WbwV9Gx2;Dt6Kje46=W}LGUfU^0sZe8OYk;*KpzO@6F|%B-u0h&B(AespZ*l zF^Z;^=hhx%7v}Y-zk3#Ms8Lw`k)yyLw`R~*CXF5ao-?18vuU`fG9kCIm`{EP3-MD# zwxKD_K@Ub$N%k|%DW+2)kFG!_KhTIVP%PnViYm&(cxKXEiG6H=FLsaeA?$ZCyj=?V z!6SfunWv^9PA#K1@~DN(-&uY9MFnQ$lC#Y6B2;^(B2gSH)Be88@00y41aOS9fzabz zWKsnq&`u!x|9$9*MccG0)krWW_PkbtHTuBhM=rj$ZX|tQ zm;o*nU0^C&IV~-`{cTJX<8`Clhg(wa&|+QD-qO@3w$^|r4X>?uA%sqMdp>Ewd;Xnx zk`d$G+t(^@44iv(Evq4bb6zOA+ALR8RZ#1|FOLTKYyT8(l@r*P+Bp9VU2Hr-y?I_V zZ~q~ELVlcSUX-?lg5xZ?zhtJjvTn0u(Pnppi9L^!7iGh{gyTn~>iyn_-JdiJL2fPw zv#0E7(9e*}U!_vb z5yX5M$)j|JZi|oy+-WBd?t{PYG=KuO*e4SWe81QJrY6I8pyfp|1hnps3E{^rho`fr zlo&KTmef7F#|bbtW)rc-Z{B4303Ur_-*HpL#fhi7KEmeR%R6hW96_v+m5M6;F(f`b zJK_R96<82D$t{XZs42Z^ndTjFho!rY9@(~`E=}MkTixt2EIJNh1G)BkqOiFI7WO@_eTMUF~$+h_iYzjX7hb+;34$c}SpNfy_nB9|bv^XRJ zO&`bY3Glq-|2#LARph*-{b>DBX~O-iV8B{1$IcQy7~2v9f!IN2WH!7}7A*N+N&yRr zu86f4SqjQim+q{5@G0kfCnXYWcp4VmD3NbHV<+XFb13*Kg@8+)irNtx`bfFtl;qo5WXw#cCUnJpbk^r`{rsZYRECxL%(fG;$GwR-T$CWabzHDAqcgFe zN&)B6g!XUIK$aKz67(mi{I9@}F1rHTF4-Dd8l{H09eolIi zyRGja4r(RekD>1@ZZOZR2(lP_I-!ltZzFcdLI#e}nHT*zU%np0@QJ+sEl+=R^Cy}H zNsE^nTF-&FUf=tA8WV8z@>Cv9CRYntpX*?o7~h|NX^X0NBFOEY=|$x_cdAWR!EILcHHzb@M}C 
zjp1?wy6O(=2}an=P489`xygi3)PbNQ`LhwrL?kvL_=Fk%Ney(YT>6S99lSh0FkeYg zIS`$`Rd;OQrfwlbfZ;of?1tK%3RGMKIxku~KRR-r{oNdioedWSyRUzun#5gi19=<- zE%Y%zpM2(5M{p>~|0|RI`*TT-1HaP!)7b3+4fi5sx^sQUQL`hEBMgX;)Q!BHFuTO5 zKzQnlhz9La@P}r+gLRV@c9dT9XbKwZhTTtrMK#(M;3H3sc6O|tI=}itd`dWnEuFol znju82h~wY^bbMAdNdK_cU}3wV^L#jFe`!RjT+#5PugbjzvMJSYE_WTC?6dYFX~sHxxt{xIR`%PKIv(qCcwkFF8{NE4gZj)BUg$X1?Cq z=XU#?W)!V0Cr}x1w^&|A90;2py2HTOe#vhDwn+loS_XT6XpU+f?rbZ*PI8;2fh{Rw zV4EE-fOpw$o_K_A(>$T^=s(4!{dGIT!E;@dUvPnN==|uv5}wWwr7_vM9J4oqM2lcv zueUU98@gh16fNn}&q#3GS{u~X*M401flTV0Iop|#x64Zqqryu$SKN+F-Y8>ZOpyiO z3H;OiG1(2GDr5^uk#{MD8?CYw7lf!BnDy|J9#?PiR$48?g3uAWg1MEAsbkA(8dpC} zYuZp;YQ~{Ig5nfGoqn^k!u&~t)1m&^<5L&bhlCS?qrneNCbgJ=&qBZXvoY+yfAja#(Ql5n%l2wj>FC&0GFC63s+8X%ssxac?%&+J?7lT9 zZs3rM$NYrm#BC4OL9W3Hm7u{N2D*bm8+RA^g~iuqjR^6>h(6{~R2(`_671&Ox^{I7zf@&jcKp-vurk zx?SUq!`E7TmEH8wFic|)otL zCh5twl0=zzd$>cwSxrxqH3e$_oi}XV_;CuWp zYRXN3(eGMoA9?*mpYR$}5PkCYO25{*gjno7tAd>FYc88@!!OtJ`*2FX52KtdjeTU9Uk5!h2D)e$LYVe^1~?@b>AqKt`@5L^HP#eT7J2=J%yA6{K$Ho|wbz;Tr5f*hv46-KjF2 zcBHWB*Svm^|xu_6Qu`wXG`vlS2pCQ=IeA|AcC?yp^>cI!orirSMp z68mh#rnz4e?rkU3*q*oPseMlCC&s0Oe$k0Qhq1V?=8nwo4Vax8rlxv4bL>PkPYz3j zo$sxcMDHmZ=xH-;t2) zxw>_q_1xAD5X?EjRIFvN54p2=geeV|6c1br+_-<`G$RjJBIBM1=S;8(msh0IAw9e>JRgl1e@9V$wW881@Ol-79@xs{X{}2)VkT3vk_M*e^_x zg9X%|?bYR)Gmm-bejn8rl7ON&yj_#Q7~kKnQ8aGS>vH39+=XP8X4Cz4M-9;(o;UOZ z1UV3Hb^+!odk;HhEDfxOgw5{Bu0fBkhy6YKgS=njK-{HbE1+^^043)C6?yglGw{xy z?j`>lc=i9ExT`njS!q7q>i8;TB||ZS&g;eNiwZy~TFTpQlU;%|lrCXnue$0sm@9LO zAPEb3G95d3;1YbG(kMCKK*d8wIq@^!5Yjl?2HKh?bnlYBO*V*fe5;R~QHeL8eCbq_ zFfvj--vX6!y%)VeMG5=~K*LaIS$m2+L>KQ&*c?9ycuWOXuWqSw8klOU$UM5879ARu zxMEQy5vo)-2XBnNYHF`}6w;}a&KhV+qF=A3T5k069GZNVxsY~pgKwh}kk--S zeRhjw7|$0=(P*Nt6VsbHi1nlXf<-EU!{={+*6NrNELbkPwsxqg zIl;h6Lr zsz_!o{gTVkX_%0$ivQc3Qrm9SEFJfDQK%oS<{fu=71CeC{#&*S=z!poBZOp(7OtjS zXv1VWX*wV@x6TUXf)7_p@6H8eW?i#8vN-$!i!G|w1R@==ipfM*P2#0e>O^u%yc_6| z?L7bi?+ixz;BXex%!-2%K1|dS1N(t1A#+V`oruUL{Ac||OvK@)8T*e#^|Y+VOjoY{ zOp?6C{(Vf}3wj>lfx(F2 zniWrw-)%ojQD^Ioe|XE6`82IJf#ln=;@H<>*I|QqVP6p>r9pMKMzs@OX+T8*R4nHm zpQUVB`n(1`T!rqHX4N77D~%yf;M%4is_4P~=m*4-^B7|D^smF?krYlXhyksCYmo>l=$p>to9pVT&?$>tLDxzJ zNGIxlR1cOGukib%CrvJ-Mw`E$R{l@3YE;npbP8^sX&5?Ma8ii-!gZE^@>`pi4gx|y5gt#=>1+x(BgXJ!e-6A zwcbonaa+fmv*bGp8zZk6N z7^b#DI!_RmGS&7*nb)>&MWv~*x>2GQZ-F?*CQDI=b}V(MY#122qmk7J2p7i@I%+a+1uOz z2vWT2d1GA@-6BTctM6be3L188U;d`7QmX|p+{E87NO}-9|(rh&oLu?_EezykV2c@{G? 
zBdu=el%+*WXNXz5bkW}lTgRXo-V8Nhf;xRzT<;A)nO2+UexawUko7kWHfO@U)*=*b5UU7Uw+3yTd;uq0vDIY<9VQVqI>xpMi ziVXzL&UHh6H&&YB+ZUaT!lk}cBF~b_fuf6fOUY~ZCdWe2A?}LyqwIiu73-G9Z*^9E zHnA=HVkYFnbijJHg6saB1uz6L*lk?|wstc?0IY#~VC_X&ksJ6h1f(y7i1?4Ayrm;I zkt14_*k;u4{{)=Kn`Yod@&T_<3wroO%fCqqvH9ELZ+W0U_`hob_!x|gAW00GyK2v) zQEMD=rdPKad`v;y55{A9GtOhr@Y(IB&aq#%GakL3M0{UZ-XU`aiX6RzXD8n=L zl^;!A%7U= zYT3w`R>~EjBL0stknqNr1rCvIZF`APq&oD~(`%S-q+U60(jazZ;@WOX6=wQ6Z}HPN z!UwM~3C4umxY6}VP!eVS)msf3o_K z(JwYOADzIWI6!$q)1b@bZrd%k&s-d-8nZY^jOq`=`0fcGf=$-WtzvW-qvZ%(gU`&c zPX*kOQwi(nzj;r+<&=n6;vd`WgxONO#Cv&m?E!=6TG(5(@{L~PVTtL3sZq{1&98|0 zAci}D__^8Ja$t$}+Ft?TwZxq2>=`RQL6Nr!_?`^(nz6MDLb!)_d53p78Y>brztsYd zq`ZrUticw~2?Dqrc5!T*S5dVPM;qJ~GM~Vve8!tkXn=P#A&OvzJ?mi*LkkqV1>bcso&Ptgb6=@Kqt$1FKe z%rjXy+u`3ux9@U=^mJM_OfIo6YqH!ngW!cbCU8xR4~cfU<^GXmNzNH^7d&92Sa9sl z&#I@bD3_4i82mQ4)v_6hSi4;sli2?g99?n4>3_)B2f@EYGbf4Y9t@w-HYj}%|CT-k zP|qoe1tgG5q(f&mH!WDa^FRdz|DP35;v|9UA-*6Jb+iOMxw08v0=wo|YtD=*5sSJQ zUFyVG_2QA*Qh{#Vfmqd1ImP$`L`lLjiIcFNxPMF#y%_}|)UTtCms{|K7I-@$21I`b zXha2qmyIlz{T%i5^iq7|MVOr?9fBeKs|2wULAsj*ye~p=yf}L)O9B-9zwGhyspIUS z#^b+cAn`WMm^j4NXgi|yi6huP%eL>tA#kOL&K8D4O=dd}Jqm1aQm!pJJ`KqbAitN=!t2%r(Og-YF*xZunY|hyJy`Gtu7QOSv6+=WqWF#Lf~Z z=oeiO0{8V>m>FPRbS5hI=6^(qK zmlMT5^ltb`+xaSGL||XGob4G!4=Q|dGxCHf$lz}41y-7M3txD*MHTNmNSMqs4?o7E z^@*aJi#cu{FA0HILIA(tmPteR{0{4jbhY&B zTu%$>$i9$WB4aBU(KcN^_f=LdbyNisa@cZ0WdW{kPs=GyAz~3O_J5u;f#+u7T{{8@ z{{YABT?)jn0&tu#ITojv(c$15HEuCJL;L_J@b0?B;rdO|JN~=vZDZhye*TZNN4{90 zLOSep0Q-+8h%wm6@I@~s4=IeY8_Q2^X!DZeA z!nhW1@B7I^i6dTxbl@y?zw}*F`BU3|2}C7m{iFtVp(m>X8YkaC)-)cqRECzBtYC`2 zAB7+f(b;x|tMMBTR$cJ6RJx>477GG}3oid2%vQtc=KW1i9jH=tGQn}}^v>#Y|9}N_ zh@2pHgv!ms@+eLJY}n*}?483~Zz!7iqtH!`%ZLezpA0iBI8%7)D|M=juhG0o*_82o zCs=9n*hi;hBOBsdU3U9-r?C(Mk{M(_g}*?qmA-BcdcrOWJth5%^KxpqrBRM97P}>) zz1IP`E_lB zQ7)hb{mR-~AfN*R4T{c1w1CyYZ0fz1bi#nv_hZfRyhjXUQppj^rd=FoYz3U9_YIMq zn4d8lFkVyfeKE z?h}Mqx#&0xZFn+i(D&SkKfi6s&y+NcwlAs^ccggl%hrcHcBld*tP=^Z$V4t~de&Y-jUBBA1 z5Saf+L4xg^zrf{Ke;{i+@6w*%C^+BH`@kS%N3k%)eZK3Y>mM=$&6oYXD0CTGbWH63 ztM1FAq2S-Qm5K^e*%ebL%Y?Ed>m*G=nGj=Fmh5ETO(olq2_cky7lZ73_Q-A!Wl#2f z8M8g_$Mbx@zw>?1d4KQuy??$Ohnc~d?Y^)3zOU(g!`tP||EFE#4INIJI1ae;dQQ7_DZ(ielrA6m=mVXAEtjY{va(Dq;`g=^~|7sd{ zc&<(c6kX@R>&shStBZWyIDdEZ55-FbY$d~ttQkGTGgW{+D<8lq0@kgKh{kz`e>WLp z^lDYl#ulo7DbMu1+Pj$GD7t%9;$hpod(`k39@S|gUOOIks>m-+5SOzaz(8P{Tc0pB zs3b{%$JYNvKOPiT0@7q(JFW=$2CLH|%+i2=8!9tkvgb;Y*M|~zx8xu;m+1~~6p77B zH>;&S)39t7u`D<>aB!}bFc3sTE_<`R;+1KZQ z#h2Wcab<+Ogj4>VCu;Y<%|W+&fzgy4)*QIWKZd8w)U!Dm=5P?N;N%HHb8(wamg&76?yemBz%mkf*ojLt>oY{+?7wWQ(jPnM4AZZh9H=Fz|IoCZjh1^bL;abLF0S{peOM#9t@78G<{Gm;=bX~ zDbs@jfCb>$EyzOu8w*G))IIp{_CZH7SAGEr{Yv~|K*S+#O=z5GRqEnBLJVn$Xe;kl+b$x1Ag@UtnJ zgHEiV=4EWStVbZW&sjE(mBWlAM@ZYBa|rK=9`Y8@Lifub-Ql0i@EGZX+~qrDyJom; zk-a24bZ7io3gOIXK`c&?lHE9O(2ri31s%@F`-UqD*yC%40bo87HK7CoW)To5dVyIE z1ZJv=!&z!$8VJrdH>4S_KiV6Zq(}SP@@Y+?JLXdiLPyVoXi-mLu(PAA$)YL88>DgN zB`jgrrEea->$!M;WjW`y@Y`VEb|qP@${bo+tSts|h~* z{U#%P-)~EPO!}aLO`Uq^0>6R%)A|C++7!Q=1h&E^{LPM z_zFRsYo6~(vyq6zV1_hyVM9u+t4rcde)a}DBu71M8Zs`@MrOXUgY9aU}s>9lM) z<1lgC6jQT7<`015H)1RAT)UwhnZ*42guio2d$qyhTPX)s`+X8_ock}D-`y?F5aNmq zyi-@FZ;f^qq-U(Z^rOI``RCN~`La5^%Vh=ihlHis>=6g3X5!e7u=qYudUjbY{E=BQ zz5DY6xy`Fu^~r-wx69k~I1FsrG>{0I6jO-30cy%U8w6&T3jeDEBxMKFzqI z{b7jVAg(Ma>Vw!*Yb^KaOVB2KkoMx#K@a@fM^?AD9RVTbmf)CtrDj86<-+Ozpno$V#q#W6!905ie5F~8azeNKxfd%J`Z&S2 zd+d9~`QzdepVBMde(GFdpKq?_Ot)as?kSu5j6Brx7N_mHO=e`{ZXZCf^3j_3lJ26v ziJOw>kI=i^hZThI+G!wEXk0>r8rn8(>#|jkRMrgckYg7|B!_^WQpD=a&}Jh!q6SF` z00EN0R->P2^@BZSYfE5;9E2FpZUAqzyd$&>FLz~Ak$n7X!r0)a({|A_YeOre^MOQE zqKV(B&EaYi+ZVJ$^7s6?!!x@BZ7f1k#XJINlDw&lTYbRbvmehR(#Y+0iNg)_UbCId 
z+;7-lqWkKWT?v=Ch<860tqgRMZR;~%9o@ znjF+bxTkn2_D1n8Z-X@@T~mA3zyEOo6?=Lv(qp7J@B6B{%12ZswL7fgXo>46N60dM z_HO;?{u5~37KBs`Abogw^}JOT!j{2b+?MW(iA*fArkgJqXlmBs$0o?R_M!)D#R&| zTk!)9L3#*KPK2Lq0PGp8LW}%hvxW*Lef&*xe&$qzE2OS~^oGjvx{Bel6$iDpzzO5_ zt18@I|Ljo9-#(bo%r;af1uY&DN01TnR**MGUKt>{MK?Zmu2&mon+41%l>u%8O(^Bl{wbJK;()#RK}`xX3V^Gr7C;=GuaHb{bV!>pO;31 zpb!T^@jMUZ9mD+(umedGy0Z`fJ1Cn#lgIZrus6;?4q3}mhD68T z#KvbpvlXTjR5TmHD!QW1g$!V65Buo=1yCl9nI0vpFb1Xb87{TG>!SOc-vl5z>FLq9bR>NaY^2);U1{ApowR+F? zMNaM5Eba)rZ%i_)t*6}&z+Z(4gh{D8C`iPxKb%{OPC-fL`u@ZB_*v1+FO>jMJq3;JM{`a@7@pE5aN-X zL3qCqM+=Ig-|65zy?VsFVUjLji^bq`%cckUd|Jlnj^UJzCsvh%RqE%g%1>c;+SEjH z(ACTV@{dnEnHfoP8QE6`uvrl}ViT*fVnj{bqWkJ&^UQr#UBz;_nEA?IG#yI$nodt; zuN;7Bx2ZVU%ZX5v*jNe5l=P8$lfP)@uz?~j4!pxorp0>ALS$8FR~@Va=)qaNKwqMd zx%Nsl!J~ctLaLz21ETJ3>dEvugz`nNPq559Vwt&f))@6t@ro_=lp%i5 zyfVshe_j9mJNjOFnn>?9m-n$EM%KFpxX1>n6h)n1nY7e|2?*swVrLDZd@ejfNduA;Bw&J>U3*<{K0JTEV@~1v+m9e|_TclP|gO z`&9x)*%*!SljnPBY_k#@cDH8kBNHw9t&5aofpGp}oyLW1F2675h91c#+PS_v>d}RK zwY2(=A!JxHpXQ4{^7Nq#o_-{%>Ca~J+j)aySusSOnq1yt%6i81{Nl_FR%g`wZDF<1 zQ8!qju+ck{3jz8x(-i*|zIrFMOj}XwFLC9Z1M&R$51{zYd-;!EhV`;qj5-g4i|qMd zdYEb_+zK0ETgqccXMQW?4+V)e;i$MFhOaZfcyDiU-TZHyn&`yl% zMK$gl%z{G(o(i&H!%4(+$A^7s_}(A`dXZnZ9JhA1Ai^JacPM*r?0d*wkJs2NMuu-p z)VB?{cG}DtI2d&*Jd_3`%?;Sa+4wdG_I48{S>2Rp-gfZhn1Ed&BO2FEfziRbfVeCr zlix-fdwDZXhn#LHM9Sab_gqsK|E+p|w^;Q$mfz4qx1FAEFwNRhbLT#ojo8gye56M0 z47Bleby)iS@EiH98|TV#9mk)iHc*6 zChY{s;hN(q^hPu+wV0BoH*K5D`5LMF+NFw25ARJIdS|qcIqBV-veoys@;1AN&)ZiD zda3UMp2iSSDmmnPn;L1@FJ94;;yzcqKJ+c0MDlSC1d><&K*q(WYOzxkUjWsqXNY$P z)|^n^iyO*oc#1m`g)#m|`jtmM>~b2bC9mm@bcZ8;S1vdd~?TBk&TUxshV9Wh*Zjrjsa|=pjE4+$R-w=-_g5|z^>aSK@sXS<`c|8LEqIpVa+&yo4 z2{lG#1Hc470w#KJjUImS$s?v%h49Lc1=STh9n-dF;`*qHs6}+tXLUs}06cQJ=+WUu zB4@sGYC+8(a)=a!cnHv3S?Yls>tuzv`7EjXsGIy7ba4Lyoh=5_6>Z8g$+sE44YNV- ztw6!FF}cK9Du{*YJxT_2{*R9tv3Fd2AJ~La>OB!zKtX4Dmj9kA3qa4%8ln5TKozLN z*mUTkoS;WcwDNIv&ps$>VK)CdQfxw~RH>+oPpyFC=cj8WsMr}}_zJHR)w|I5 z!S&B5$CR8OdN5mIpH+pcO%bxT9MQ~5*P>EYL?WX*F334w?c8-j0C`IzT5acoUHW?& z;~$KnFzx2}$NF9=>b12o-LKE)&PQ!(;2BOY^;lwHy49f%iB{T#(di^lkFbpm<4BcQ zwt>SE*6+*kGVU4+8^uf4eHDfQKGKp`nz07L3Ln*~m$^bZuz%!gsp3J3vc! 
zt=bAYPsk-|=eEgRW-h;a zd+?miyIU9!!463lN$tF^Heaoe%L-e*N+K8(8Z4nat=LAkWp;H04zd zL9GVFG7rw(g3Igne%kbHuZ*N0m^`8IaGTeBM<&np&Wqa_4!(>jK(jT=`BIwQ!{c!I zY+T1YObha3&=p&@vV|*q97FCwmMO~%@C?U}fzUpcGyUm~ipz<8m%nIot7(sQyihSy zfOE}lJ^8eM45aL*EGVT6f`NlZzfqej(-2-&hHkKo`KuTyNX|B#m&^F|DD^(ke{xi1V z24_V2hgqJ21+2Ej>=sS751th~!IsOaqWzPU;!_ey!$;%RVN1tN9K{;43ioL*zxW{R zWlahU<`Ucc@KJqi!le~gy|;A=Wifu%Ur&+cKWxW zwo?Y{9T6xFcU?MCixlLC!(TLi-k2UV;nZY4M&o%dm2yb0cW&c^k?V=_3Gf;_d_cF; zUo?ta=&9q_(yxEf$n%0^a}u?mv5MS3PBrhU%To~k9f|ez8+*+=nNv)*8w}`%khjCB z1XjU|UQmy8?~mhUnpyI}iJl)L9zj|>hyhnWy@_!@!NfuW4nMy%BZr4HPStgNiB<8I zn95LN;p={+$A2&Oj2dRqCzzA9({A$oSG%_oQNyi1*tDW1?I0blo%HpD9!#@#D@@(B z<9FDsOSF*grE>`JdS$%<5h=tq#P-6Pa3mUP&<=F?*C2;vC(=HgkbfYw@Ywm_K+&v1 zMPL0V{t|-qhx+t~D4zSnar{bgR#+PJ9RAy{4i=OccU`;J@Yc0o)nr&htQUSnASSfA zkLmEaGMF~zT-F-N2`*z)ZcVJ?3W#;w?h~3m)*QKtd@7fCa1rujyi3l4^t@aGS?2VE zexkNP+!YfdiTSPQlU_dumStO6^+Z;l(;R;g3I}FyAVyQpUGpov-{sU6e^_`Xok_bR z7l*7Tu+FZyiLjjORk29$CT|&0`kFVmCT?D^X1~X@C=F&Fc;PTi2UOiKo`-RZv0|gl zhCRw*rGX7PSn7CvU1aQYf6e*fa|kVK3rVe2r}e~J(g!xtw}AZPh^65_sz0JnXT3UT zxgI5*e^Z;NyX&Uu@+9hXgeNZ%aQ6Sq= zudLdL8|2^(P^~YRQt0t;wdx{JtUCgS#=i^ww_sAbBT!WW)`J|4H$fK!ZY?|_9slWr z%t&)9rj7&^D#AgGT5k?gAHsD+^jg&tkjDHAe8k!(?$r>`e!bP(YMt;K@Ip_WUNWN8 z?k}1Xwww3G7k(wE)a+1%(^s1aHIDpG6>MU!cCjSjYhEK%jDxQ=zJ|}cf@Wg%3LQ1V zfOKYus%Sw3!Yx8Jr^RdQg`n>)_rKzYw=-Sn$#%nhgu~hSvIi#&e7iDEc?4BTS6?hi zX)gUFEs&-sb#IhnjlKsg!}jHvJ`L<81c4~SfIVN5hxi!sciKaEMAVQ272eY)KO>w*>{tY zUTRI0!jm912Mi@*zrRsV`t9HM-dMTAQOL?4qZuYIQtq_(3Lc60~>PZxklFY7kD(c-?Rk zsITFpq`sso!wcOLty-Mum-5r6?p2t{b=7_Y3GR#f#Fa4uaR^yc_wH(WU1?={npC~j z=0;m(73o%>&-B5~SZG3W7{5_67YF^aAHxNyw2kRhF%>%gXQ#(e z&yRW2xP(zE^S~bdr)&-T&DjKt5cOb_S%K1enKVI47d?a`gVGt$w|eX+ei%$J7#bn~ zYxwLgI0I9HfR%*Om5Z=w9kWiyoU-)7Dh-hlS=~9d{nz)mBR>uAU*6|!FeSuqHfnCV zcN^U=bkDA){9Kmxy|Ic`HZ{Qe-|d>vEQ=o8XGxsYET40J*PJiz%!+*@h|?vsZ066k zNJH`$7k8E%UB)=A{gWp8sqVk^TFh$ZuJWrRehECO9$Y_B(R&B(4Y|+r29>%v-Uj?< z7DG|A3!%v>ja4{_JdvEZLCeHnU9umyjPyQVvV#?JE>F7(e?VD%M(Tm|3JoDEF=eL< zg+?yA(`Asbn}uD6HDy_9V1LQP%$DTVxj%ttWVJdjexETnZXGGies_v%9>lqY8~M>Q zA;F{_&i#rQW9jwb)%8EAh3C_;Vqr<&50(t-gx5@HR)v&4`xiX-8E^Dr7jWFey8CLg9+6+zH zy0gPa-K3`Hj~eS&jvKjJ&a^uEdcbE;T=g7Atyx)#LYMuc^XPWTO9_Q>S;mgHw-s#b zDXVPfIM*}j19t@anHCC`iS-q+P76kts-gUL2Vh#Qv~1VA=Wp=mN!5<%#othOIs^}X zdFF*^PCZ}KcL^O;S+`12LSU=J@KW)++kor%xuM-q`EM8F%@6pF*6J2QLK;pElWp?P`reN47>N4wv-t2FT6=A350_uZ zJtHF4%%%0i=Ehntm%g6gwsZk>UiM6{Yei_JR!U3bahnFZLEXuodzCK7_QTT^Z6K+; znqw&aJEMq>*NdlWC~dM%EJLTV8b-M zOT@eEhi1j)Vu8tL8^6igH6gvOSyC-98`e4Bo3_=D2-OxBbZ%XJIE8|voOCtFSzU55 z`Xp`e24@+^o3nFbw4X(L!5Z?xDd3n_`bkCY2_y5=$cM~_uZ-G&AasLMulBSXEL(>v zIO;`_Fg4dig23GDp$6qG{b7$oo#RQV55?`|D{4b$Y)Q>tyj724TBF4>a(PRf{8(p_ zW*?1&Sr&-=uGkm56#Qxt#8n6{O z4Iu=9mlw-4)LY)yGBW(pap_&c>#C z0C1}3koNvoPFR^1{EK_{;uANdBp519i9``mc9~?~E%)S;+hfbZr{KO!?c`zXZ+%~Z z{vz{UDcnEWlGYTUEeX!7Nhos6u6jYM&L=~g%D<3N6s@;NB~t9x`hc{g;7D5XH`<1~ zg`VuqVBoiMZu75BHw-uC={+_msDr+I)bNS@7p16efr#R~WU2QwP$T`N;tfJfYR?Q% z8Jhg~UUgPz%>T(Y1h@qGBexo7R+C{v2x<(6g9aY?ZPId=XKruF^oFVaPraY=(vR!J zuZ-)o(@Tw>cICTP4tp${C|LM7#0>LrKx0}|FpC~+e)drblt&ayFO4I9H76@ySAl*~ zk2Z{KXXUoT?;>9&Uu!`4Bi45J9BZe1Ax-GFJ_5ni<6geV?J1pPhx>^6B9ChUfzfcT zZ}EIS-3E%txfdmOwfBPK?upJvEO;a zu(>yxX;@icD_SV;On6+)A0tutjtPlrE0)e8f+TUcX61Bt63Mz!dESmXjw7i7Z%W z-i-`7apNo95bG#s-Z}gmU5lrk5BSnpCkX1T6$cU`*_AgVqTVE>Dt`C%w+l`$!377o z+i>5uw|ppBo!*yLI2|e2^pd7(S#G$h&FkPzot=03T1EA>h3@RZ01N19%92T8maT4L zR(_wAY*$DX@H6Fp>lZ0@QY&y&=O^hp&odlA7lV4Qiskl$eI%iJsCK*DEioi=g%by^Uw-dR8z}lw(fazK4zpLo;L3GxDF(~ z<9V-Vj*8>_=xEH8k>r6EyX(rt7*lh>pjEvv<4+OaEexhq(c{s#UzlS8w1t2IV4$)u zo7}854D&Qt5}c~5de%+MXZkahy0K);-VA-iyb>&8)vBp_D)GiZa{#IeKHyITcA2{l 
zZ!_6$4NMp`ziT-+9+ldpsi%RvcbcV6F3o20(h%QJS-558D+Sq-b-e?Dk#fQG>R@2_ z{qWg;9Me7a-~4yTmUPb`fTBh@_e~gy#r4l9)t^#LyKxKp4Je# zkm;1yyeU1@-Z%}CiD$)x20EHA06x1gwGk>%9u7~wn-p4 zhIyXjR*i@{jD%TAek!30-}wWx)!1)ZM&Ad(X?0!m|Kcg<>YO(xl(u|0#je~s_%OS`1DpwErRVzUJtVa? zCXVgI6_DhXx4Nc&j2C9L8rQW6PGL2lqytlF_1cB%5~XDect2|Bi4IBiOh?~1YIVhR zfi`dDasGjx`E|4eW@UHdC6&42Yprzr8+3qiD)kI#BJwoVlaD2MYQ4 z2ocYcc4VY#@^jNP@6f)av-M|XtoYaQEy1H?^Z)dPyGh8}OhRs%spl@)R@79S78$rk zBtp@>^|Bb7iKmL3qmtok%#Y)`H7mWYh0esb3kUhf%>6|}V@;JR-QGw)Z`=-dbqQQR znlCF9{O;o+z;rePaz}^}{P$G17c}3$^9iKLVwTl{nm*#!yDtY+szPD7vIEAywt^djin;7Ki*l6t3JLEwLDXuH%;29MSE5qa}uX# zuUhQVK2or;Gi?dr878aex8*G9;v@%ox|<_^WAD)A7408tE+kZp4F3EcW!S#`U|u*~ z=xW&FDcM9K`waAJRoPxRYRd>F@~s77@iUZimYCw0+z>nBCqXdHCMfo9ahf4P$UPcDnQLm3u*-oYIQ z?Q;b3yio9D38R27o2akj;p~wkb^;1BG1$(9dM=Qcu6rq#ljP|*2g=?14jh@GKoWmu zAscvOzOmXn}ZZR=FeIT({bfavSH2+ zjwiE7*87p5T%~J`cbm=qZsqhXfrxU27iDJ1FXJhPmgn15NGf?nzBmD3OuXFuUZ?<} zw{836lZtmO#zttfb-PM$xR&{4MjpbH~XN1B=Mz#B5(X4;@aI^{(7L| zAFHSg9y#%PQVJv3j=#r2!_@Qy-yu{j9$j3fiMRt0BlY|j85=736092{&oM;O&~Kbxsg!&9o!g@H??3`Tlk zwS$1;7xnB&ow1k5uCIza-9b!Mt20(>YN*gxC6G$9w4WvFr{cX|Et}*O@PurL`;FOW zl+!3165%T!S~D+yeKY96D9sZM+_R7H7IYAo^qcdd3rha^=f7`@>Pk?X*7jCR!XBIw zLOh-DjW7^o(fXg6_54o^eg5~~Iri6&pUh=~G~h!iAvDIqC((B}aHsQI`HJZO0ty{$ Af&c&j literal 0 HcmV?d00001 diff --git a/examples/aac_audiocaps/README.md b/examples/aac_audiocaps/README.md index 4eeaddcd..07a760b0 100644 --- a/examples/aac_audiocaps/README.md +++ b/examples/aac_audiocaps/README.md @@ -1,7 +1,7 @@ # AAC_Audiocaps ## Performance and checkpoints -We use [EAT](https://github.com/cwx-worst-one/EAT) and [BEATs](https://github.com/microsoft/unilm/tree/master/beats) as the main audio encoder for SLAM-AAC. Be sure to set up the corresponding environments based on the instructions provided in each repository. Here are checkpoints and performance for training only the linear layer and training the linear layer with LLM tuning via LoRA. +We use [EAT](https://github.com/cwx-worst-one/EAT) as the audio encoder in this repo. Be sure to set up the corresponding environments based on the instructions provided in each repository. Here are checkpoints and performance for training only the linear layer and training the linear layer with LLM tuning via LoRA. 
Audio Encoder | Projector | LLM | PEFT | METEOR | CIDEr | SPICE | SPIDEr |---|---|---|---|---|---|---|---| [EAT-base (fine-tuned)](https://drive.google.com/file/d/1aCYiQmoZv_Gh1FxnR-CCWpNAp6DIJzn6/view?usp=sharing) | [Linear](https://drive.google.com/file/d/1xyhgx8cUKSIKpYgPlEWjHL-jLgSnhfGJ/view?usp=sharing)(~16.26M) | [vicuna-7b-v1.5](https://huggingface.co/lmsys/vicuna-7b-v1.5) | x | 0.2508 | 0.7532 | **0.1853** | 0.4692 @@ -11,8 +11,8 @@ Audio Encoder | Projector | LLM | PEFT | METEOR | CIDEr | SPICE | SPIDEr ## Data preparation Prepare your `jsonl` data in the following format: ```json -{"key": "Y7fmOlUlwoNg_1", "prompt": "", "source": "/root/data/AudioCaps/waveforms/test/Y7fmOlUlwoNg.wav", "target": "Constant rattling noise and sharp vibrations", "target_len": 6, "source_len": 6, "text-type": "Transcribe", "audio_language": "english", "text_language": "english", "task-type": ""} -{"key": "Y6BJ455B1aAs_1", "prompt": "", "source": "/root/data/AudioCaps/waveforms/test/Y6BJ455B1aAs.wav", "target": "A rocket flies by followed by a loud explosion and fire crackling as a truck engine runs idle", "target_len": 18, "source_len": 18, "text-type": "Transcribe", "audio_language": "english", "text_language": "english", "task-type": ""} +{"key": "Y7fmOlUlwoNg_1", "source": "/root/data/AudioCaps/waveforms/test/Y7fmOlUlwoNg.wav", "target": "Constant rattling noise and sharp vibrations"} +{"key": "Y6BJ455B1aAs_1", "source": "/root/data/AudioCaps/waveforms/test/Y6BJ455B1aAs.wav", "target": "A rocket flies by followed by a loud explosion and fire crackling as a truck engine runs idle"} ``` Ensure your data aligns with this structure for consistent results. @@ -29,4 +29,4 @@ To perform inference with trained models, you could use this command: ```bash bash scripts/inference_eat_audiocaps.sh ``` -Ensure your environment is set up and data paths are correct for accurate results. \ No newline at end of file +Ensure your environment is set up and data paths are correct to reproduce results. \ No newline at end of file diff --git a/examples/aac_audiocaps/conf/prompt.yaml b/examples/aac_audiocaps/conf/prompt.yaml index 23be6aeb..c874afe6 100644 --- a/examples/aac_audiocaps/conf/prompt.yaml +++ b/examples/aac_audiocaps/conf/prompt.yaml @@ -1,4 +1,3 @@ dataset_config: # we put prompt here, because the hydra override in shell script only supports a small subset of chars - # prompt: "Describe the audio you hear. Output the audio caption directly without redundant content. Ensure that the output is not duplicated." prompt: "Describe the audio you hear. Output the audio caption directly without redundant content. Ensure that the output is not duplicated." diff --git a/examples/asr_librispeech/README.md b/examples/asr_librispeech/README.md index a5a14118..c0890817 100644 --- a/examples/asr_librispeech/README.md +++ b/examples/asr_librispeech/README.md @@ -34,3 +34,26 @@ Whisper takes mel as input. Pay attention to the key `dataset_config.mel_size` f bash finetune_wavlm_large_linear_vicuna_7b.sh ``` WavLM takes raw waveform as input. Pay attention to the keys `dataset_config.normalize` and `model_config.normalize`, since different versions of the SSL models differ in these keys. + +**Note**: +- If you are running on a machine with multiple GPUs, please make sure to make only one of them visible using `export CUDA_VISIBLE_DEVICES=GPU:id`. +- If you want to run with FSDP, you can set `++train_config.enable_fsdp=true` and `++train_config.enable_ddp=false` (a minimal sketch follows this list).
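+Below is a minimal sketch of the two notes above, assuming the fine-tuning script forwards any extra Hydra-style overrides appended on the command line (if your copy of the script does not, add the overrides to the launch command inside it instead):
+```bash
+# Single-GPU run: expose only one device before launching (GPU 0 is just an example id).
+export CUDA_VISIBLE_DEVICES=0
+bash finetune_wavlm_large_linear_vicuna_7b.sh
+
+# Switch from DDP to FSDP by passing the overrides from the note above.
+bash finetune_wavlm_large_linear_vicuna_7b.sh \
+    ++train_config.enable_fsdp=true \
+    ++train_config.enable_ddp=false
+```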
+ +### Flash Attention and Xformer Memory Efficient Kernels + +Setting `use_fast_kernels` will enable using of Flash Attention or Xformer memory-efficient kernels based on the hardware being used. This would speed up the fine-tuning job. This has been enabled in `optimum` library from HuggingFace as a one-liner API, please read more [here](https://pytorch.org/blog/out-of-the-box-acceleration/). + +### Fine-tuning using FSDP on 70B Model + +If you are interested in running full parameter fine-tuning on the 70B model, you can enable `low_cpu_fsdp` mode as the following command. This option will load model on rank0 only before moving model to devices to construct FSDP. This can dramatically save cpu memory when loading large models like 70B (on a 8-gpu node, this reduces cpu memory from 2+T to 280G for 70B model). This has been tested with `BF16` on 16xA100, 80GB GPUs. + +## Citation +You can refer to the paper for more results. +``` +@article{ma2024embarrassingly, + title={An Embarrassingly Simple Approach for LLM with Strong ASR Capacity}, + author={Ma, Ziyang and Yang, Guanrou and Yang, Yifan and Gao, Zhifu and Wang, Jiaming and Du, Zhihao and Yu, Fan and Chen, Qian and Zheng, Siqi and Zhang, Shiliang and others}, + journal={arXiv preprint arXiv:2402.08846}, + year={2024} +} +``` \ No newline at end of file diff --git a/scripts/finetune_aac_llama.sh b/scripts/finetune_aac_llama.sh deleted file mode 100644 index fc85a941..00000000 --- a/scripts/finetune_aac_llama.sh +++ /dev/null @@ -1,103 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -# speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt -audio_encoder_path=/nfs/maziyang.mzy/models/BEATs/BEATs_iter3_plus_AS2M.pt - -llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5/vicuna-13b-v1.5 - -output_dir=/nfs/maziyang.mzy/exps/debug - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name aac \ ---freeze_encoder \ ---freeze_llm \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name beats \ ---encoder_ds_rate 2 \ ---encoder_path $audio_encoder_path \ ---encoder_dim 768 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset audio_dataset \ ---audio_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---audio_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 1 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ -# --log_file $output_dir/test.log \ -# --use_wandb \ -# --wandb_dir $output_dir \ -# --wandb_entity_name zym22 \ -# --wandb_project_name slam-llm \ -# --wandb_exp_name test \ -# --log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt 
"/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ -# --use_peft --peft_method lora \ - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 2 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name aac \ ---freeze_encoder \ ---freeze_llm \ ---enable_fsdp \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name beats \ ---encoder_ds_rate 2 \ ---encoder_path $audio_encoder_path \ ---encoder_dim 768 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset audio_dataset \ ---audio_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---audio_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file /$output_dir/train.log \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name zym22 \ ---wandb_project_name slam-llm \ ---wandb_exp_name test \ ---log_interval 5 \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --use_peft --peft_method lora \ -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} \ No newline at end of file diff --git a/scripts/finetune_asr_llama.sh b/scripts/finetune_asr_llama.sh deleted file mode 100644 index b8452cbd..00000000 --- a/scripts/finetune_asr_llama.sh +++ /dev/null @@ -1,104 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0,1,2,3 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf - -output_dir=/nfs/maziyang.mzy/exps/llama-2-chat-hf-finetune-asr-ds5-proj2048-lr1e-4-whisper-prompt-padding30-20240111 - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_encoder \ ---freeze_llm \ ---use_fp16 \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---llm_dim 4096 \ 
---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ -# --log_file $output_dir/test.log \ -# --use_wandb \ -# --wandb_dir $output_dir \ -# --wandb_entity_name zym22 \ -# --wandb_project_name slam-llm \ -# --wandb_exp_name test \ -# --log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ -# --use_peft --peft_method lora \ - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_encoder \ ---freeze_llm \ ---enable_fsdp \ ---use_fp16 \ ---llm_name llama-2-7b-chat-hf \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 6 \ ---val_batch_size 6 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file /$output_dir/train.log \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name zym22 \ ---wandb_project_name slam-llm \ ---wandb_exp_name test \ ---log_interval 5 \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --use_peft --peft_method lora \ -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. 
How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} \ No newline at end of file diff --git a/scripts/finetune_asr_tinyllama.sh b/scripts/finetune_asr_tinyllama.sh deleted file mode 100644 index a38a6243..00000000 --- a/scripts/finetune_asr_tinyllama.sh +++ /dev/null @@ -1,100 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=4,5,6,7 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 - -output_dir=/nfs/maziyang.mzy/exps/TinyLlama-1.1B-Chat-v0.4-finetune-asr-ds5-proj2048-lr1e-4-finetune-whisper-large-v2-prompt-padding30-20240115 - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_encoder \ ---freeze_llm \ ---llm_name vicuna-13b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 5120 \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ -# --log_file $output_dir/test.log \ -# --use_wandb \ -# --wandb_dir $output_dir \ -# --wandb_entity_name zym22 \ -# --wandb_project_name slam-llm \ -# --wandb_exp_name test \ -# --log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ -# --use_peft --peft_method lora \ - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ ---master_port=29501 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_llm \ ---use_fp16 \ ---enable_fsdp \ ---llm_name tinyllama-1.1b-chat-v0.4 \ ---llm_path $llm_path \ ---llm_dim 2048 \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path 
/nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file /$output_dir/train.log \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name zym22 \ ---wandb_project_name slam-llm \ ---wandb_exp_name test \ ---log_interval 5 \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --use_peft --peft_method lora \ -# --freeze_encoder \ -fi \ No newline at end of file diff --git a/scripts/finetune_asr_vicuna.sh b/scripts/finetune_asr_vicuna.sh deleted file mode 100644 index 839ed6fe..00000000 --- a/scripts/finetune_asr_vicuna.sh +++ /dev/null @@ -1,132 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0,1 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/tiny.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/base.pt -# speech_encoder_path=//nfs/maziyang.mzy/models/Whisper/small.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/medium.pt -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-intermediate-step-1431k-3T -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/nfs/maziyang.mzy/exps/vicuna-7b-v1.5-finetune-asr-qformer64-steplrwarmupkeep1e-4-whisper-largev2-prompt-padding30-20240125-test - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---config-path "/root/SLAM-LLM/scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -hydra.run.dir=$output_dir \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=q-former \ -++dataset_config.fix_length_audio=64 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.train_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ -++dataset_config.val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=true \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ 
-++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++metric=acc \ -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.use_peft=true \ -# ++train_config.peft_config.peft_method=lora \ -#++log_config.log_file=/$output_dir/train.log \ -#++log_config.use_wandb=true \ -#++log_config.wandb_dir=$output_dir \ -#++log_config.wandb_entity_name=zym22 \ -#++log_config.wandb_project_name=slam-llm \ -#++log_config.wandb_exp_name=${0##*/%.*} \ -#++log_config.log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ - - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 2 \ -src/llama_recipes/pipeline/finetune.py \ ---config-path "/root/SLAM-LLM/scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -hydra.run.dir=$output_dir \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=q-former \ -++dataset_config.fix_length_audio=64 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.train_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ -++dataset_config.val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ -++dataset_config.input_type=raw \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=true \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++train_config.enable_fsdp=false \ -++train_config.enable_ddp=true \ -++train_config.use_fp16=true \ -++metric=acc \ -# ++log_config.log_file=/$output_dir/train.log \ -# ++log_config.use_wandb=true \ -# ++log_config.wandb_dir=$output_dir \ -# ++log_config.wandb_entity_name=zym22 \ -# ++log_config.wandb_project_name=slam-llm \ -# ++log_config.wandb_exp_name=${0##*/%.*} \ -# ++log_config.log_interval=5 \ -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.use_peft=true \ -# ++train_config.peft_config.peft_method=lora \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --master_port=29501 \ -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. 
How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} \ No newline at end of file diff --git a/scripts/finetune_avsr.sh b/scripts/finetune_avsr.sh deleted file mode 100644 index 278fe777..00000000 --- a/scripts/finetune_avsr.sh +++ /dev/null @@ -1,107 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0,1,2,3 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -# speech_encoder_path= TODO! - - -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/nfs/yangguanrou.ygr/vicuna-13b-v1.5-finetune-avsr-20230115 - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python src/llama_recipes/pipeline/finetune.py \ ---model_name avsr \ ---freeze_encoder \ ---freeze_llm \ ---llm_name vicuna-13b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name moco_wav2vec2 \ ---encoder_ds_rate 2 \ ---encoder_dim 512 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset avsr_dataset \ ---avsr_dataset.file src/llama_recipes/datasets/avsr_dataset.py:get_audio_dataset \ ---batching_strategy custom \ ---num_epochs 20 \ ---batch_size_training 6 \ ---val_batch_size 2 \ ---num_workers_dataloader 2 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file "/root/SLAM-LLM/log/second_try.log" \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name yanghaha \ ---wandb_project_name slam-llm \ ---wandb_exp_name avsr \ ---log_interval 5 \ - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name avsr \ ---freeze_encoder \ ---freeze_llm \ ---use_fp16 \ ---enable_fsdp \ ---llm_name vicuna-13b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name moco_wav2vec2 \ ---encoder_ds_rate 2 \ ---encoder_dim 512 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset avsr_dataset \ ---avsr_dataset.file src/llama_recipes/datasets/avsr_dataset.py:get_audio_dataset \ ---batching_strategy custom \ ---num_epochs 20 \ ---batch_size_training 2 \ ---val_batch_size 2 \ ---num_workers_dataloader 2 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file "/root/SLAM-LLM/log/second_try.log" \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name yanghaha \ ---wandb_project_name slam-llm \ ---wandb_exp_name avsr \ ---log_interval 5 \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --use_peft --peft_method lora \ -# --master_port=29501 \ -fi - -# {"key": 
"1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} - - - -# 没用 encoder_ds_rate - -# 1.15 - -# 7b batch size 开到2 ok的 - -# 6 2 0 可以 \ No newline at end of file diff --git a/scripts/finetune_avsr_debug.sh b/scripts/finetune_avsr_debug.sh deleted file mode 100644 index ca9f8780..00000000 --- a/scripts/finetune_avsr_debug.sh +++ /dev/null @@ -1,104 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/nfs/maziyang.mzy/exps/vicuna-7b-v1.5-finetune-asr-ds5-proj2048-lr1e-4-whisper-prompt-paddingr-20240112 - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_encoder \ ---freeze_llm \ ---llm_name vicuna-13b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 4 \ ---val_batch_size 4 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ -# --log_file $output_dir/test.log \ -# --use_wandb \ -# --wandb_dir $output_dir \ -# --wandb_entity_name zym22 \ -# --wandb_project_name slam-llm \ -# --wandb_exp_name test \ -# --log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ -# --use_peft --peft_method lora \ - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name asr \ ---freeze_encoder \ ---freeze_llm \ ---use_fp16 \ ---enable_fsdp \ ---llm_name vicuna-7b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path 
$speech_encoder_path \ ---encoder_dim 1280 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset speech_dataset \ ---speech_dataset.train_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_train_960h.jsonl \ ---speech_dataset.val_data_path /nfs/maziyang.mzy/data/librispeech/librispeech_dev_other_filtered.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 6 \ ---val_batch_size 6 \ ---num_workers_dataloader 4 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file /$output_dir/train.log \ ---use_wandb \ ---wandb_dir $output_dir \ ---wandb_entity_name zym22 \ ---wandb_project_name slam-llm \ ---wandb_exp_name test \ ---log_interval 5 \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --use_peft --peft_method lora \ -# --master_port=29501 \ -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} \ No newline at end of file diff --git a/scripts/finetune_avsr_vicuna_debug_0113.sh b/scripts/finetune_avsr_vicuna_debug_0113.sh deleted file mode 100644 index ab135502..00000000 --- a/scripts/finetune_avsr_vicuna_debug_0113.sh +++ /dev/null @@ -1,53 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=1 -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -# speech_encoder_path= TODO! 
- - -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/nfs/yangguanrou.ygr/vicuna-7b-v1.5-finetune-avsr - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5679 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name avsr \ ---freeze_encoder \ ---freeze_llm \ ---llm_name vicuna-13b-v1.5 \ ---llm_path $llm_path \ ---llm_dim 4096 \ ---encoder_name moco_wav2vec2 \ ---encoder_ds_rate 2 \ ---encoder_dim 512 \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset avsr_dataset \ ---avsr_dataset.file src/llama_recipes/datasets/avsr_dataset.py:get_audio_dataset \ ---batching_strategy custom \ ---num_epochs 1 \ ---batch_size_training 2 \ ---num_workers_dataloader 2 \ ---lr 1e-4 \ ---output_dir $output_dir \ ---metric acc \ ---log_file "/root/SLAM-LLM/log/first_try.log" \ - - -# --avsr_dataset.file src/llama_recipes/datasets/avsr_dataset.py:get_audio_dataset \ - - -# --encoder_path $speech_encoder_path \ #TODO! -# --encoder_dim 1280 \ #TODO! \ No newline at end of file diff --git a/scripts/finetune_echat.sh b/scripts/finetune_echat.sh deleted file mode 100644 index 866df6b2..00000000 --- a/scripts/finetune_echat.sh +++ /dev/null @@ -1,102 +0,0 @@ -#!/bin/bash -#export PYTHONPATH=/root/whisper:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0,1,2,3 -export CUDA_LAUNCH_BLOCKING=1 -# export OMP_NUM_THREADS=1 -# export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:128 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -cd /root/SLAM-LLM - -speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt -llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -output_dir=/nfs/maziyang.mzy/exps/llama-2-hf-finetune-echat-ds5-proj2048-debug - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---model_name echat \ ---freeze_encoder \ ---freeze_llm \ ---use_fp16 \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset custom_dataset \ ---custom_dataset.file src/llama_recipes/datasets/echat_dataset.py:get_audio_dataset \ ---custom_dataset.data_path /nfs/zhifu.gzf/data/IEMOCAP_full_release/datalist.jsonl \ ---batching_strategy custom \ ---custom_dataset.max_words 1024 \ ---num_epochs 100 \ ---batch_size_training 2 \ ---val_batch_size 2 \ ---output_dir $output_dir \ ---run_test_during_validation \ ---run_test_during_validation_file /nfs/zhifu.gzf/data/IEMOCAP_full_release/Session5/sentences/wav/Ses05M_impro04/Ses05M_impro04_M040.wav \ -# --ckpt_path "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1" -# --use_peft --peft_method lora \ - -# train -# {"trans": "Well, do you have your passport?\n", -# "emotion": "xxx", -# "wav": "/nfs/zhifu.gzf/data/IEMOCAP_full_release/Session1/sentences/wav/Ses01M_impro01/Ses01M_impro01_F009.wav"} -# {"trans": "No, I don't have a passport.\n", -# "emotion": "neu", -# "wav": "/nfs/zhifu.gzf/data/IEMOCAP_full_release/Session1/sentences/wav/Ses01M_impro01/Ses01M_impro01_M010.wav"} - -# val -# {"trans": "Yeah, well 
thanks for your help.\n", -# "emotion": "ang", -# "wav": "/nfs/zhifu.gzf/data/IEMOCAP_full_release/Session5/sentences/wav/Ses05M_impro04/Ses05M_impro04_M040.wav"} -# {"trans": "I'm sorry. Good luck, man.\n", -# "emotion": "xxx", -# "wav": "/nfs/zhifu.gzf/data/IEMOCAP_full_release/Session5/sentences/wav/Ses05M_impro04/Ses05M_impro04_F038.wav"} - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 4 \ -src/llama_recipes/pipeline/finetune.py \ ---model_name echat \ ---freeze_encoder \ ---use_fp16 \ ---use_peft --peft_method lora \ ---enable_fsdp \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---encoder_name whisper \ ---encoder_ds_rate 2 \ ---encoder_path $speech_encoder_path \ ---encoder_projector linear \ ---encoder_projector_ds_rate 5 \ ---dataset custom_dataset \ ---custom_dataset.file src/llama_recipes/datasets/echat_dataset.py:get_audio_dataset \ ---custom_dataset.data_path /nfs/zhifu.gzf/data/IEMOCAP_full_release/datalist.jsonl \ ---batching_strategy custom \ ---num_epochs 100 \ ---batch_size_training 8 \ ---val_batch_size 8 \ ---output_dir $output_dir \ ---run_test_during_validation \ ---run_test_during_validation_file /nfs/zhifu.gzf/data/IEMOCAP_full_release/Session1/sentences/wav/Ses01M_impro01/Ses01M_impro01_F009.wav \ ---run_test_during_validation_prompt """ -Please provide an emotional response based on the emotional speech you hear. -Remember to format your answer as follows: <|EMOTION|><|REPLY|>. -<|EMOTION|> is a standalone adjective. -<|REPLY|> is a reply based on a the speech. -""" \ ---metric acc \ -# --ckpt_path "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1" -# --freeze_llm \ -fi diff --git a/scripts/finetune_mls_aya.sh b/scripts/finetune_mls_aya.sh deleted file mode 100755 index 8dc81e80..00000000 --- a/scripts/finetune_mls_aya.sh +++ /dev/null @@ -1,113 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -# export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0,1 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -code_dir=/work/SLAM-LLM -cd $code_dir - -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/tiny.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/base.pt -# speech_encoder_path=//nfs/maziyang.mzy/models/Whisper/small.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/medium.pt -speech_encoder_path=/host/model_ckpt/whisper/large-v3.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-intermediate-step-1431k-3T -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf -llm_path=/host/model_ckpt/CohereForAI/aya-101 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/work/exps/aya-finetune-asr-linear-steplrwarmupkeep1e-4-whisper-largev3-$(date +"%Y%m%d")-test - -hydra_args=" -hydra.run.dir=$output_dir \ -++model_config.llm_name="aya-101" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ 
-++dataset_config.fix_length_audio=64 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.train_data_path=data/mls/polish_train.jsonl \ -++dataset_config.val_data_path=data/mls/polish_dev.jsonl \ -++dataset_config.mel_size=128 \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=true \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.output_dir=$output_dir \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++metric=acc \ -" -# ++log_config.log_file=/$output_dir/train.log \ -# ++log_config.use_wandb=true \ -# ++log_config.wandb_dir=$output_dir \ -# ++log_config.wandb_entity_name=zym22 \ -# ++log_config.wandb_project_name=slam-llm \ -# ++log_config.wandb_exp_name=${0##*/%.*} \ -# ++log_config.log_interval=5 \ -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.use_peft=true \ -# ++train_config.peft_config.peft_method=lora \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -# --master_port=29501 \ -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ ---config-path "${code_dir}/scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -$hydra_args -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.use_peft=true \ -# ++train_config.peft_config.peft_method=lora \ -#++log_config.log_file=/$output_dir/train.log \ -#++log_config.use_wandb=true \ -#++log_config.wandb_dir=$output_dir \ -#++log_config.wandb_entity_name=zym22 \ -#++log_config.wandb_project_name=slam-llm \ -#++log_config.wandb_exp_name=${0##*/%.*} \ -#++log_config.log_interval 5 \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5/model.pt" \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-lora-prompt/asr/5" \ - - -else -torchrun \ ---nnodes 1 \ ---nproc_per_node 2 \ -src/llama_recipes/pipeline/finetune.py \ ---config-path "${code_dir}/scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -$hydra_args -hydra.run.dir=$output_dir \ -$hydra_args \ -++train_config.enable_fsdp=false \ -++train_config.enable_ddp=true \ -++train_config.use_fp16=true \ -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. 
How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} diff --git a/scripts/finetune_mls_llama.sh b/scripts/finetune_mls_llama.sh deleted file mode 100755 index db6a77ac..00000000 --- a/scripts/finetune_mls_llama.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=2,3 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -code_dir=/work/SLAM-LLM -cd $code_dir - -speech_encoder_path=/cxgroup/model/whisper/large-v3.pt - -llm_path=/cxgroup/model/Llama-2-7b-chat-hf -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/work/exps/Llama-2-7b-chat-finetune-asr-linear-lora-32-steplrwarmupkeep1e-4-whisper-largev3-$(date +"%Y%m%d")-test - -hydra_args=" -hydra.run.dir=$output_dir \ -++model_config.llm_name="llama-2-7b-chat-hf" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.train_data_path=data/mls/polish_train.jsonl \ -++dataset_config.val_data_path=data/mls/polish_dev.jsonl \ -++dataset_config.input_type=mel \ -++dataset_config.mel_size=128 \ -++train_config.use_peft=true \ -++train_config.peft_config.r=32 \ -++train_config.model_name=asr \ -++train_config.num_epochs=12 \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++metric=acc \ -" -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.peft_config.peft_method=lora \ -# --peft_ckpt "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -#++log_config.log_file=/$output_dir/train.log \ -#++log_config.use_wandb=true \ -#++log_config.wandb_dir=$output_dir \ -#++log_config.wandb_entity_name=zym22 \ -#++log_config.wandb_project_name=slam-llm \ -#++log_config.wandb_exp_name=${0##*/%.*} \ -#++log_config.log_interval 5 \ - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then - python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py 
\ - --config-path "/root/SLAM-LLM/scripts/conf" \ - --config-name "asr_vicuna_lora.yaml" \ - $hydra_args -else - torchrun \ - --nnodes 1 \ - --nproc_per_node 2 \ - --master_port=29501 \ - src/llama_recipes/pipeline/finetune.py \ - --config-path "${code_dir}/scripts/conf" \ - --config-name "asr_vicuna_lora.yaml" \ - ++train_config.enable_fsdp=false \ - ++train_config.enable_ddp=true \ - ++train_config.use_fp16=false \ - $hydra_args -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} diff --git a/scripts/finetune_mls_vicuna.sh b/scripts/finetune_mls_vicuna.sh deleted file mode 100755 index 24c81acc..00000000 --- a/scripts/finetune_mls_vicuna.sh +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash -# export PYTHONPATH=/root/whisper:$PYTHONPATH -export PYTHONPATH=/root/fairseq:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=2,3 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 -export OMP_NUM_THREADS=1 - -# debug setting for multiple gpus -# export NCCL_DEBUG=INFO -# export NCCL_DEBUG_SUBSYS=ALL -# export TORCH_DISTRIBUTED_DEBUG=INFO - -code_dir=/work/SLAM-LLM -cd $code_dir - -speech_encoder_path=/cxgroup/model/whisper/large-v3.pt - -llm_path=/cxgroup/model/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/work/exps/vicuna-7b-v1.5-finetune-asr-linear-lora-32-steplrwarmupkeep1e-4-whisper-largev3-$(date +"%Y%m%d")-test - -hydra_args=" -hydra.run.dir=$output_dir \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=linear \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.train_data_path=data/mls/polish_train.jsonl \ -++dataset_config.val_data_path=data/mls/polish_dev.jsonl \ -++dataset_config.input_type=mel \ -++dataset_config.mel_size=128 \ -++train_config.use_peft=true \ -++train_config.peft_config.r=32 \ -++train_config.model_name=asr \ -++train_config.num_epochs=12 \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=false \ -++train_config.batching_strategy=custom \ -++train_config.warmup_steps=1000 \ -++train_config.total_steps=100000 \ -++train_config.lr=1e-4 \ -++train_config.validation_interval=1000 \ -++train_config.batch_size_training=4 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++metric=acc \ -" -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# ++train_config.peft_config.peft_method=lora \ -# --peft_ckpt 
"/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4" \ -# --ckpt_path "/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-5-whisper-prompt-padding30-20231228/asr/4/model.pt" \ -#++log_config.log_file=/$output_dir/train.log \ -#++log_config.use_wandb=true \ -#++log_config.wandb_dir=$output_dir \ -#++log_config.wandb_entity_name=zym22 \ -#++log_config.wandb_project_name=slam-llm \ -#++log_config.wandb_exp_name=${0##*/%.*} \ -#++log_config.log_interval 5 \ - -# -m debugpy --listen 5678 --wait-for-client -if [[ $CUDA_VISIBLE_DEVICES != *","* ]]; then - python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ - --config-path "/root/SLAM-LLM/scripts/conf" \ - --config-name "asr_vicuna_lora.yaml" \ - $hydra_args -else - torchrun \ - --nnodes 1 \ - --nproc_per_node 2 \ - --master_port=29501 \ - src/llama_recipes/pipeline/finetune.py \ - --config-path "${code_dir}/scripts/conf" \ - --config-name "asr_vicuna_lora.yaml" \ - ++train_config.enable_fsdp=false \ - ++train_config.enable_ddp=true \ - ++train_config.use_fp16=false \ - $hydra_args -fi - -# {"key": "1001-134707-0000_ASR", "prompt": "", "source": "/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0000.wav", "target": "1 little recks the laborer. How near his work is holding him to God, The loving laborer through space and time, after all, not to create, only or found only.", "target_len": 157, "source_len": 1581, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} -# {"key": "1688-142285-0005", "prompt": "", "source": "/nfs/beinian.lzr/workspace/datasets/data/16k/opendata/librispeech/test_other/wav/1688-142285-0005.wav", "target": "YOU WHO WERE ALWAYS ACCUSING PEOPLE OF BEING SHOPPY AT HELSTONE", "target_len": 11, "source_len": 220, "text-type": "Transcribe", "audio_language": "en", "text_language": "en", "task-type": ""} diff --git a/scripts/inference_asr.sh b/scripts/inference_asr.sh deleted file mode 100644 index 35ae0d43..00000000 --- a/scripts/inference_asr.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/bin/bash -#export PYTHONPATH=/root/whisper:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 - -cd /root/SLAM-LLM - -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/tiny.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/base.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/small.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/medium.pt -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt - -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-intermediate-step-1431k-3T -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 - -output_dir=/nfs/maziyang.mzy/exps/vicuna-7b-v1.5-finetune-asr-qformer64-steplrwarmupkeep1e-4-whisper-largev2-promptshort-lowergt-padding30-20240126 -ckpt_path=$output_dir/asr/2 -# peft_ckpt=/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-4-whisper-lora-prompt-paddinglr-20240102-renew5/asr/1 - -# -m debugpy --listen 5678 --wait-for-client -python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/inference.py \ ---config-path "/root/SLAM-LLM/scripts/conf" \ 
---config-name "asr_vicuna_lora.yaml" \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=whisper \ -++model_config.encoder_ds_rate=2 \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1280 \ -++model_config.encoder_projector=q-former \ -++dataset_config.fix_length_audio=64 \ -++ckpt_path=$ckpt_path/model.pt \ -++wav_path="/cpfs01/shared/Group-speech/beinian.lzr/data/open_data/librispeech_audio/audio/se_librispeech_1001-134707-0032.wav" \ -++prompt="Transcribe speech to text. Output the transcription directly without redundant content. Ensure that the output is not duplicated. " \ -++train_config.model_name=asr \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=true \ -# ++model_config.encoder_projector=linear \ -# ++model_config.encoder_projector_ds_rate=5 \ -# --peft_ckpt $peft_ckpt \ -# --use_peft --peft_method lora \ \ No newline at end of file diff --git a/scripts/inference_asr_batch.sh b/scripts/inference_asr_batch.sh deleted file mode 100755 index c2e8b41a..00000000 --- a/scripts/inference_asr_batch.sh +++ /dev/null @@ -1,65 +0,0 @@ -#!/bin/bash -#export PYTHONPATH=/root/whisper:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 - -cd /root/SLAM-LLM - -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/tiny.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/base.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/small.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/medium.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/wavlm/WavLM-Base.pt -speech_encoder_path=/nfs/maziyang.mzy/models/wavlm/WavLM-Large.pt - -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-intermediate-step-1431k-3T -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 -# llm_path=/nfs/maziyang.mzy/models/phi-2 -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf -llm_path=/nfs/maziyang.mzy/models/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/nfs/maziyang.mzy/exps/vicuna-7b-v1.5-finetune-asr-ds5-proj2048-steplrwarmup1e-4keep-WavLM-Large-promptshort-lowergt-20240127 -ckpt_path=$output_dir/asr/3 -# peft_ckpt=/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-4-whisper-lora-prompt-paddinglr-20240102/asr/4 -val_data_path=/nfs/maziyang.mzy/data/librispeech/librispeech_test_clean_filtered.jsonl -decode_log=$ckpt_path/decode_log_test_clean_beam4_repetition_penalty1 - -# -m debugpy --listen 5678 --wait-for-client -python src/llama_recipes/pipeline/inference_batch.py \ ---config-path "/root/SLAM-LLM/scripts/conf" \ ---config-name "asr_vicuna_lora.yaml" \ -hydra.run.dir=$ckpt_path \ -++model_config.llm_name="vicuna-7b-v1.5" \ -++model_config.llm_path=$llm_path \ -++model_config.llm_dim=4096 \ -++model_config.encoder_name=wavlm \ -++dataset_config.normalize=true \ -++model_config.normalize=true \ -++model_config.encoder_path=$speech_encoder_path \ -++model_config.encoder_dim=1024 \ -++model_config.encoder_projector=linear \ -++model_config.encoder_projector_ds_rate=5 \ -++dataset_config.dataset=speech_dataset \ -++dataset_config.prompt="Transcribe speech to text. 
" \ -++dataset_config.val_data_path=$val_data_path \ -++dataset_config.input_type=raw \ -++dataset_config.inference_mode=true \ -++train_config.model_name=asr \ -++train_config.batching_strategy=custom \ -++train_config.num_epochs=1 \ -++train_config.val_batch_size=4 \ -++train_config.num_workers_dataloader=4 \ -++train_config.output_dir=$output_dir \ -++ckpt_path=$ckpt_path/model.pt \ -++decode_log=$decode_log \ -++train_config.freeze_encoder=true \ -++train_config.freeze_llm=true \ -# ++model_config.encoder_projector=q-former \ -# ++dataset_config.fix_length_audio=64 \ -# --peft_ckpt $peft_ckpt \ -# --use_peft --peft_method lora \ \ No newline at end of file diff --git a/scripts/inference_asr_batch_2.sh b/scripts/inference_asr_batch_2.sh deleted file mode 100755 index e17fe097..00000000 --- a/scripts/inference_asr_batch_2.sh +++ /dev/null @@ -1,67 +0,0 @@ -#!/bin/bash -#export PYTHONPATH=/root/whisper:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=0 -export TOKENIZERS_PARALLELISM=false -# export CUDA_LAUNCH_BLOCKING=1 - -code_dir=/work/SLAM-LLM -cd $code_dir - -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/tiny.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/base.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/small.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/medium.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt -# speech_encoder_path=/nfs/maziyang.mzy/models/wavlm/WavLM-Base.pt -speech_encoder_path=/host/model_ckpt/whisper/large-v3.pt - -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-intermediate-step-1431k-3T -# llm_path=/nfs/maziyang.mzy/models/TinyLlama-1.1B-Chat-v0.4 -# llm_path=/nfs/maziyang.mzy/models/phi-2 -# llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -# llm_path=/nfs/maziyang.mzy/models/Llama-2-7b-chat-hf -llm_path=/host/model_ckpt/vicuna-7b-v1.5 -# llm_path=/nfs/maziyang.mzy/models/vicuna-13b-v1.5 - -output_dir=/work/exps/vicuna-7b-v1.5-finetune-asr-linear-steplrwarmupkeep1e-4-whisper-largev3-20240301-test -ckpt_path=$output_dir/asr/4 -# peft_ckpt=/nfs/maziyang.mzy/exps/llama-2-hf-finetune-asr-ds5-proj2048-lr1e-4-whisper-lora-prompt-paddinglr-20240102/asr/4 -val_data_path=data/mls/polish_tem.jsonl -decode_log=$ckpt_path/decode_log_test_clean_beam4_repetition_penalty1 - -# -m debugpy --listen 5678 --wait-for-client -python src/llama_recipes/pipeline/inference_batch.py \ - --config-path "${code_dir}/scripts/conf" \ - --config-name "asr_vicuna_lora.yaml" \ - hydra.run.dir=$ckpt_path \ - ++model_config.llm_name="vicuna-7b-v1.5" \ - ++model_config.llm_path=$llm_path \ - ++model_config.llm_dim=4096 \ - ++model_config.encoder_name=whisper \ - ++model_config.encoder_path=$speech_encoder_path \ - ++model_config.encoder_dim=1280 \ - ++model_config.encoder_projector=linear \ - ++model_config.encoder_ds_rate=2 \ - ++dataset_config.dataset=speech_dataset \ - ++dataset_config.fix_length_audio=64 \ - ++dataset_config.val_data_path=$val_data_path \ - ++dataset_config.input_type=mel \ - ++dataset_config.mel_size=128 \ - ++dataset_config.inference_mode=true \ - ++train_config.model_name=asr \ - ++train_config.batching_strategy=custom \ - ++train_config.num_epochs=1 \ - ++train_config.val_batch_size=4 \ - ++train_config.num_workers_dataloader=4 \ - ++train_config.output_dir=$output_dir \ - ++decode_log=$decode_log \ - ++ckpt_path=$ckpt_path/model.pt \ - ++train_config.freeze_encoder=true \ - ++train_config.freeze_llm=true \ - # 
++dataset_config.normalize=true \ - # ++model_config.encoder_projector=q-former \ - # ++dataset_config.fix_length_audio=64 \ - # --peft_ckpt $peft_ckpt \ - # ++ckpt_path=$ckpt_path/model.pt \ - # --use_peft --peft_method lora \ \ No newline at end of file diff --git a/scripts/inference_echat.sh b/scripts/inference_echat.sh deleted file mode 100644 index f34726d2..00000000 --- a/scripts/inference_echat.sh +++ /dev/null @@ -1,42 +0,0 @@ -#!/bin/bash -#export PYTHONPATH=/root/whisper:$PYTHONPATH -export CUDA_VISIBLE_DEVICES=1 -export CUDA_LAUNCH_BLOCKING=1 - -cd /root/SLAM-LLM - -# speech_encoder_path=/nfs/zhifu.gzf/ckpt/Whisper/base.pt -speech_encoder_path=/nfs/maziyang.mzy/models/Whisper/large-v2-qwen.pt -llm_path=/nfs/zhifu.gzf/ckpt/Llama-2-7b-hf -output_dir=/nfs/maziyang.mzy/models/llama-2-hf-finetune - -# -m debugpy --listen 5678 --wait-for-client -#python -m debugpy --listen 5678 --wait-for-client src/llama_recipes/pipeline/finetune.py \ -python src/llama_recipes/pipeline/inference.py \ ---model_name echat \ ---freeze_llm \ ---use_fp16 \ ---quantization \ ---llm_name llama-2-7b-hf \ ---llm_path $llm_path \ ---encoder_name whisper \ ---encoder_path $speech_encoder_path \ ---encoder_projector linear \ ---dataset custom_dataset \ ---custom_dataset.file src/llama_recipes/datasets/speech_text_dataset.py:get_audio_dataset \ ---custom_dataset.data_path /nfs/zhifu.gzf/data/IEMOCAP_full_release/datalist.jsonl \ ---batching_strategy custom \ ---custom_dataset.max_words 1024 \ ---num_epochs 1 \ ---batch_size_training 2 \ ---output_dir $output_dir \ ---ckpt_path "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1/model.pt" \ ---wav_path "/nfs/zhifu.gzf/data/IEMOCAP_full_release/Session5/sentences/wav/Ses05M_impro04/Ses05M_impro04_F035.wav" \ ---prompt """ - Please provide an emotional response based on the emotional speech you hear. - Remember to format your answer as follows: <|EMOTION|><|REPLY|>. - <|EMOTION|> is a standalone adjective. - <|REPLY|> is a reply based on a the speech. - """ \ -# --peft_ckpt "/nfs/maziyang.mzy/models/llama-2-hf-finetune/echat/1" -# --use_peft --peft_method lora \ \ No newline at end of file