diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..d0c3cbf --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..dc1312a --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..517d8ac --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,12 @@ +sphinx_autodoc_typehints +toml +sphinx-panels +sphinx-autobuild +sphinx_book_theme +sphinx-toolbox +sphinx-copybutton +sphinx_design +jinja2 +sphinx +setuptools +textgrad \ No newline at end of file diff --git a/docs/source/assets/analogy.png b/docs/source/assets/analogy.png new file mode 100644 index 0000000..a94b3bd Binary files /dev/null and b/docs/source/assets/analogy.png differ diff --git a/docs/source/assets/logo_full.png b/docs/source/assets/logo_full.png new file mode 100644 index 0000000..d24f129 Binary files /dev/null and b/docs/source/assets/logo_full.png differ diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 0000000..997da48 --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,72 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + + +# -- Project information ----------------------------------------------------- + +project = 'TextGrad' +copyright = '2024, TextGrad authors' +author = 'TextGrad authors' + +# The full version, including alpha/beta/rc tags +release = '0.1.3' + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = ['sphinx.ext.duration', + 'sphinx.ext.doctest', + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.napoleon', + 'sphinx_book_theme' + ] + +autodoc_default_options = { + 'members': True, + 'special-members': ',__call__', # Add other special methods as needed + 'undoc-members': True, + 'show-inheritance': True +} + + +autodoc_typehints = 'description' +autosummary_generate = True # Enable autosummary +autoclass_content = 'both' + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'sphinx_book_theme' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] \ No newline at end of file diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..1d7613f --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,306 @@ +.. TextGrad documentation master file, created by + sphinx-quickstart on Sat May 4 17:35:54 2024. + You can adapt this file completely to your liking, but it should at least + contain the root toctree directive. +Welcome to TextGrad's documentation! +==================================== + +TextGrad is a Python package that provides a simple interface to implement LLM-"gradients" pipelines for text optimization! + +Check out the :doc:`usage` section for further information, including how to +:doc:`install ` the project. Want to directly jump to the optimization process? +Check out the :doc:`quickstart` guide! + + +.. image:: assets/logo_full.png + :align: center + +An autograd engine -- for textual gradients! + +TextGrad is a powerful framework building automatic ``differentiation'' via text. +TextGrad implements backpropagation through text feedback provided by LLMs, strongly building on the gradient metaphor. + +We provide a simple and intuitive API that allows you to define your own loss functions and optimize them using text feedback. +This API is similar to the Pytorch API, making it simple to adapt to your use cases. + +.. image:: assets/analogy.png + :align: center + +QuickStart +========== + +If you know PyTorch, you know 80% of TextGrad. +Let's walk through the key components with a simple example. Say we want to use GPT-4o to generate a punchline for TextGrad. + +.. 
code-block:: python
+
+    import textgrad as tg
+    # Step 1: Get an initial response from an LLM
+    model = tg.BlackboxLLM("gpt-4o")
+    punchline = model(tg.Variable("write a punchline for my github package about optimizing compound AI systems", role_description="prompt", requires_grad=False))
+    punchline.set_role_description("a concise punchline that must hook everyone")
+
+Initial `punchline` from the model:
+> Supercharge your AI synergy with our optimization toolkit – where compound intelligence meets peak performance!
+
+Not bad, but we (gpt-4o, I guess) can do better! Let's optimize the punchline using TextGrad.
+
+.. code-block:: python
+
+    # Step 2: Define the loss function and the optimizer, just like in PyTorch!
+    loss_fn = tg.TextLoss("We want to have a super smart and funny punchline. Is the current one concise and addictive? Is the punch fun, makes sense, and subtle enough?")
+    optimizer = tg.TGD(parameters=[punchline])
+
+.. code-block:: python
+
+    # Step 3: Do the loss computation, backward pass, and update the punchline
+    loss = loss_fn(punchline)
+    loss.backward()
+    optimizer.step()
+
+Optimized punchline:
+> Boost your AI with our toolkit – because even robots need a tune-up!
+
+Okay, this model isn't really ready for a comedy show yet (and maybe a bit cringey), but it is clearly trying. But who gets to the maximum in one step?
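+
+The snippet above takes a single TGD step. Nothing stops you from iterating; the loop below is a rough sketch (not part of the original example) that only reuses the ``loss_fn``, ``optimizer``, and ``punchline`` objects already defined above.
+
+.. code-block:: python
+
+    # Rough sketch: take a few more optimization steps and watch the punchline evolve.
+    # Only objects defined in the snippets above are used here.
+    for step in range(3):
+        loss = loss_fn(punchline)   # re-evaluate the textual loss on the current punchline
+        loss.backward()             # backpropagate textual feedback to the punchline
+        optimizer.step()            # rewrite punchline.value using that feedback
+        print(f"step {step}: {punchline.value}")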
+
+We have many more examples of how TextGrad can optimize all kinds of variables -- code, solutions to problems, molecules, prompts, and all that!
+
+Tutorials
+---------
+
+We have prepared a couple of tutorials to get you started with TextGrad.
+You can run them directly in Google Colab by clicking on the links below.
+
+.. |primiti| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/github/zou-group/TextGrad/blob/main/examples/notebooks/Tutorial-Primitives.ipynb
+   :alt: Open In Colab
+
+.. |code| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/github/zou-group/textgrad/blob/main/examples/notebooks/Tutorial-Test-Time-Loss-for-Code.ipynb
+   :alt: Open In Colab
+
+.. |promptopt| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/github/zou-group/TextGrad/blob/main/examples/notebooks/Tutorial-Prompt-Optimization.ipynb
+   :alt: Open In Colab
+
+.. |solut| image:: https://colab.research.google.com/assets/colab-badge.svg
+   :target: https://colab.research.google.com/github/zou-group/TextGrad/blob/main/examples/notebooks/Tutorial-Solution-Optimization.ipynb
+   :alt: Open In Colab
+
+
++------------------------------------------------+--------------+
+| Name                                           | Link         |
++================================================+==============+
+| Introduction to TextGrad primitives            | |primiti|    |
++------------------------------------------------+--------------+
+| Code Optimization and New Loss Implementation  | |code|       |
++------------------------------------------------+--------------+
+| Prompt Optimization                            | |promptopt|  |
++------------------------------------------------+--------------+
+| Solution Optimization                          | |solut|      |
++------------------------------------------------+--------------+
+
+Installation
+============
+
+You can install TextGrad via pip:
+
+.. code-block:: bash
+
+    pip install textgrad
+
+Examples
+========
+
+Minimal Instance Optimization Example
+-------------------------------------
+
+TextGrad can optimize unstructured variables, such as text. Suppose we have an initial (imperfect) solution to a math problem that we want to improve. Here's how to do it with TextGrad, using GPT-4o:
+
+.. code-block:: python
+
+    import textgrad as tg
+    tg.set_backward_engine(tg.get_engine("gpt-4o"))
+
+    initial_solution = """To solve the equation 3x^2 - 7x + 2 = 0, we use the quadratic formula:
+    x = (-b ± √(b^2 - 4ac)) / 2a
+    a = 3, b = -7, c = 2
+    x = (7 ± √((-7)^2 - 4 * 3(2))) / 6
+    x = (7 ± √(7^3) / 6
+    The solutions are:
+    x1 = (7 + √73)
+    x2 = (7 - √73)"""
+
+    # Define the variable to optimize; set requires_grad=True to enable gradient computation
+    solution = tg.Variable(initial_solution,
+                           requires_grad=True,
+                           role_description="solution to the math question")
+
+    # Define the loss function, via a system prompt to an LLM
+    loss_system_prompt = tg.Variable("""You will evaluate a solution to a math question. Do not attempt to solve it yourself, do not give a solution, only identify errors. Be super concise.""",
+                                     requires_grad=False,
+                                     role_description="system prompt")
+
+    loss_fn = tg.TextLoss(loss_system_prompt)
+
+    # Define the optimizer and tell it which variables to optimize
+    optimizer = tg.TGD(parameters=[solution])
+
+    loss = loss_fn(solution)
+
+Output:
+
+    Variable(value=Errors:
+    1. Incorrect sign in the discriminant calculation: it should be b^2 - 4ac, not b^2 + 4ac.
+    2. Incorrect simplification of the quadratic formula: the denominator should be 2a, not 6.
+    3. Final solutions are missing the division by 2a., role=response from the language model, grads=)
+
+.. code-block:: python
+
+    loss.backward()
+    optimizer.step()
+    print(solution.value)
+
+Output:
+
+    To solve the equation 3x^2 - 7x + 2 = 0, we use the quadratic formula:
+    x = (-b ± √(b^2 - 4ac)) / 2a
+
+    Given:
+    a = 3, b = -7, c = 2
+
+    Substitute the values into the formula:
+    x = (7 ± √((-7)^2 - 4(3)(2))) / 6
+    x = (7 ± √(49 - 24)) / 6
+    x = (7 ± √25) / 6
+    x = (7 ± 5) / 6
+
+    The solutions are:
+    x1 = (7 + 5) / 6 = 12 / 6 = 2
+    x2 = (7 - 5) / 6 = 2 / 6 = 1/3
+
+Minimal Prompt Optimization Example
+-----------------------------------
+
+TextGrad can also optimize prompts in PyTorch style! Here's how to do it with TextGrad, using GPT-4o for feedback and optimizing a prompt for gpt-3.5-turbo:
+
+.. code-block:: python
+
+    import textgrad as tg
+    from textgrad.tasks import load_task  # load_task ships with textgrad and provides the BBH tasks used below
+    llm_engine = tg.get_engine("gpt-3.5-turbo")
+    tg.set_backward_engine(tg.get_engine("gpt-4o"))
+
+    _, val_set, _, eval_fn = load_task("BBH_object_counting", llm_engine)
+    question_str, answer_str = val_set[0]
+    question = tg.Variable(question_str, role_description="question to the LLM", requires_grad=False)
+    answer = tg.Variable(answer_str, role_description="answer to the question", requires_grad=False)
+
+Question:
+
+    I have two stalks of celery, two garlics, a potato, three heads of broccoli, a carrot, and a yam. How many vegetables do I have?
+
+Ground Truth Answer:
+
+    10
+
+.. code-block:: python
+
+    system_prompt = tg.Variable("You are a concise LLM. 
Think step by step.", + requires_grad=True, + role_description="system prompt to guide the LLM's reasoning strategy for accurate responses") + + model = tg.BlackboxLLM(llm_engine, system_prompt=system_prompt) + optimizer = tg.TGD(parameters=list(model.parameters())) + + prediction = model(question) + +Prediction: + + You have a total of seven vegetables: two stalks of celery, two garlics, one potato, three heads of broccoli, one carrot, and one yam. + +.. code-block:: python + + loss = eval_fn(inputs=dict(prediction=prediction, ground_truth_answer=answer)) + +Loss denoting accuracy: + + Variable(value=0, grads=) + +.. code-block:: python + + loss.backward() + +System prompt gradients: + + ... 2. **Encourage Explicit Summation**: - The prompt should encourage the model to explicitly state the summation process. This can help in verifying the accuracy of the count. For example, "Explain your calculations clearly and verify the total.".... + +.. code-block:: python + + optimizer.step() + +New system prompt value: + + You are a concise LLM. Think step by step. Prioritize accuracy in your calculations. Identify and count each item individually. Explain your calculations clearly and verify the total. After calculating, review your steps to ensure the total is correct. If you notice a discrepancy in your count, re-evaluate the list and correct any mistakes. + +.. code-block:: python + + prediction = model(question) + +New prediction: + + Let's count the number of each vegetable: + 1. Celery stalks: 2 + 2. Garlics: 2 + 3. Potato: 1 + 4. Broccoli heads: 3 + 5. Carrot: 1 + 6. Yam: 1 + + Now, let's add up the total number of vegetables: + 2 + 2 + 1 + 3 + 1 + 1 = 10 + + You have a total of 10 vegetables. + +Resources +========= + +Inspiration +----------- + +Many existing works greatly inspired this project! Here is a non-exhaustive list: + +- 📚 `PyTorch `_ The one and only. We owe a ton to PyTorch, hard to do justice here. +- 📚 `DSPy `_ is a pioneer in writing LM-based programs in many different ways! Has been a huge inspiration for us. +- 📚 `Micrograd `_: A tiny autograd engine greatly inspired our simple design! +- 📚 `ProTeGi `_: We owe the term "Textual Gradients" to ProTeGi! +- 📚 `Reflexion `_: A self-reflection that showed us the power of text-based reflection! + +Citation +======== + +.. code-block:: bibtex + + @article{yuksekgonul2024textgrad, + title={{TextGrad: Automatic ``Differentiation'' with Text}}, + author={Mert Yuksekgonul and Federico Bianchi and Joseph Boen and Sheng Liu and Zhi Huang and Carlos Guestrin and James Zou}, + year={2024}, + } + +Contents +-------- + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + usage + textgrad + quickstart + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` \ No newline at end of file diff --git a/docs/source/quickstart.rst b/docs/source/quickstart.rst new file mode 100644 index 0000000..84544f2 --- /dev/null +++ b/docs/source/quickstart.rst @@ -0,0 +1,46 @@ +QuickStart +========== + +What can TextGrad do? TextGrad can optimize your prompts from a language model in an automatic way. + + +.. code-block:: python + + import textgrad + + # Set the backward engine as an External LLM API object. + See textgrad.config for more details. 
+    textgrad.set_backward_engine(llm_api)
+    basic_system_prompt = "You are a language model that summarizes \
+    a given document"
+
+    system_prompt = textgrad.Variable(basic_system_prompt, requires_grad=True)
+
+    api_model = textgrad.model.BlackboxLLM(llm_api)
+
+    # This tells the model to use the system prompt defined above
+    api_model = api_model + system_prompt
+
+    big_document = "This is a big document that we want to summarize."
+
+    # Since we will not need the criticisms for the document,
+    # we will explicitly set requires_grad=False
+    doc = textgrad.Variable(big_document, requires_grad=False)
+    # Get the summary
+    summary = api_model(doc)
+
+    # Compute a loss
+    evaluation_prompt = "Evaluate if this is a good summary \
+    based on completeness and fluency."
+
+    loss_fn = textgrad.ResponseEvaluation(engine=llm_api,
+                                          evaluation_instruction=textgrad.Variable(evaluation_prompt,
+                                                                                   requires_grad=False))
+
+    loss = loss_fn(summary)
+    loss.backward()  # This populates gradients
+
+    optimizer = textgrad.TextualGradientDescent(engine=llm_api,
+                                                parameters=[system_prompt])
+    optimizer.step()
+    print(system_prompt)
\ No newline at end of file
diff --git a/docs/source/textgrad.rst b/docs/source/textgrad.rst
new file mode 100644
index 0000000..c4d578e
--- /dev/null
+++ b/docs/source/textgrad.rst
@@ -0,0 +1,14 @@
+Documentation
+=============
+
+.. autosummary::
+   :toctree: modules
+   :recursive:
+
+   textgrad.variable
+   textgrad.loss
+   textgrad.optimizer
+   textgrad.engine
+   textgrad.model
+   textgrad.autograd
+
\ No newline at end of file
diff --git a/docs/source/usage.rst b/docs/source/usage.rst
new file mode 100644
index 0000000..63bfee4
--- /dev/null
+++ b/docs/source/usage.rst
@@ -0,0 +1,13 @@
+Usage
+=====
+
+.. _installation:
+
+Installation
+------------
+
+To use TextGrad, first install it using pip:
+
+.. code-block:: console
+
+   $ pip install textgrad
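+
+The engines used throughout these docs read their API keys from the environment; for example, the OpenAI-backed engines (``gpt-4o``, ``gpt-3.5-turbo``) expect an ``OPENAI_API_KEY`` variable (assumption: adjust this for whichever provider you actually use):
+
+.. code-block:: console
+
+   $ export OPENAI_API_KEY=...
+
+If you also want to build this documentation locally, the ``Makefile`` and ``requirements.txt`` added under ``docs/`` should be enough. A minimal sketch, run from the repository root (``sphinx-autobuild`` is optional and gives a live-reloading preview):
+
+.. code-block:: console
+
+   $ pip install -r docs/requirements.txt
+   $ make -C docs html
+   $ sphinx-autobuild docs/source docs/build/html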
diff --git a/examples/notebooks/logs/2024-07-07_23-34-38.jsonl b/examples/notebooks/logs/2024-07-07_23-34-38.jsonl
new file mode 100644
index 0000000..063daf1
--- /dev/null
+++ b/examples/notebooks/logs/2024-07-07_23-34-38.jsonl
@@ -0,0 +1,48 @@
+{
+    "name": "textgrad",
+    "msg": "MultimodalLLMCall function forward",
+    "args": "()",
+    "levelname": "INFO",
+    "levelno": "20",
+    "pathname": "/Users/zhihuang/Desktop/Projects/textgrad/textgrad/autograd/multimodal_ops.py",
+    "filename": "multimodal_ops.py",
+    "module": "multimodal_ops",
+    "exc_info": "None",
+    "exc_text": "None",
+    "stack_info": "None",
+    "lineno": "78",
+    "funcName": "forward",
+    "created": "1720409722.431386",
+    "msecs": "431.38599395751953",
+    "relativeCreated": "46288.64908218384",
+    "thread": "8607539776",
+    "threadName": "MainThread",
+    "processName": "MainProcess",
+    "process": "77228",
+    "text": "System:None\n[Variable(value=b'<raw PNG image bytes elided>', role=image to answer a question about, grads=set()), Variable(value=Hint: Please answer the question and provide the correct option letter, e.g., A, B, C, D, at the end. \nQuestion: Find $RS$ if $\triangle QRS$ is an equilateral triangle. \nChoices: (A) 0.5 (B) 1 (C) 1.5 (D) 2, role=question, grads=set())]",
+    "message": "MultimodalLLMCall function forward"
+}
+{
+    "name": "textgrad",
+    "msg": "MultimodalLLMCall function forward",
+    "args": "()",
+    "levelname": "INFO",
+    "levelno": "20",
+    "pathname": "/Users/zhihuang/Desktop/Projects/textgrad/textgrad/autograd/multimodal_ops.py",
+    "filename": "multimodal_ops.py",
+    "module": "multimodal_ops",
+    "exc_info": "None",
+    "exc_text": "None",
+    "stack_info": "None",
+    "lineno": "240",
+    "funcName": "forward",
+    "created": "1720409737.029258",
+    "msecs": "29.258012771606445",
+    "relativeCreated": "60886.521100997925",
+    "thread": "8607539776",
+    "threadName": "MainThread",
+    "processName": "MainProcess",
+    "process": "77228",
+    "text": "System:You are an evaluation system that evaluates image-related questions.\n['Evaluation Instruction: Does this seem like a complete and good answer for the image? Criticize heavily.', 'Question: Hint: Please answer the question and provide the correct option letter, e.g., A, B, C, D, at the end. \\nQuestion: Find $RS$ if $\\triangle QRS$ is an equilateral triangle. \\nChoices: (A) 0.5 (B) 1 (C) 1.5 (D) 2', b'<raw PNG image bytes elided>
\\n\\n`\\x12\\xf6\\x8e\\x00\\x005jIDATx\\x01\\xed\\x9d\\x07\\x98\\x14\\xc5\\xd6\\x86\\x8f\\xe4\\x9ce\\xc99\\xe7\\x8c\\x0b(?\\xa8\\xa0\"` )`\\xce\\tA\\x05\\x01\\xc9\\x88\\x80\\x08\\xe25\\xeb5\\x12\\x04\\x0c\\x80((WA%\\xe7\\x9cs\\x94\\x9cs\\xd8\\xbf\\xbe\\x9a\\xe9\\xa1w\\x99\\xdd\\x9d\\xd9\\xed\\x99\\xee\\x9e\\xf9\\xce\\xf3\\xc0LwWWU\\xbf\\xd5{\\xa6\\xbb\\xea\\x84\\x1b\\xe2\\x94\\x08\\x85\\x04H\\x80\\x04\\\\J \\x8dK\\xfb\\xcdn\\x93\\x00\\t\\x90\\x80&@%\\xc6\\x1b\\x81\\x04H\\xc0\\xd5\\x04\\xa8\\xc4\\\\=|\\xec<\\t\\x90\\x00\\x95\\x18\\xef\\x01\\x12 \\x01W\\x13\\xa0\\x12s\\xf5\\xf0\\xb1\\xf3$@\\x02Tb\\xbc\\x07H\\x80\\x04\\\\M\\x80J\\xcc\\xd5\\xc3\\xc7\\xce\\x93\\x00\\tP\\x89\\xf1\\x1e \\x01\\x12p5\\x01*1W\\x0f\\x1f;O\\x02$@%\\xc6{\\x80\\x04H\\xc0\\xd5\\x04\\xa8\\xc4\\\\=|\\xec<\\t\\x90\\x00\\x95\\x18\\xef\\x01\\x12 \\x01W\\x13\\xa0\\x12s\\xf5\\xf0\\xb1\\xf3$@\\x02Tb\\xbc\\x07H\\x80\\x04\\\\M\\x80J\\xcc\\xd5\\xc3\\xc7\\xce\\x93\\x00\\tP\\x89\\xf1\\x1e \\x01\\x12p5\\x01*1W\\x0f\\x1f;O\\x02$@%\\xc6{\\x80\\x04H\\xc0\\xd5\\x04\\xa8\\xc4\\\\=|\\xec<\\t\\x90\\x00\\x95\\x18\\xef\\x01\\x12 \\x01W\\x13\\xa0\\x12s\\xf5\\xf0\\xb1\\xf3$@\\x02\\xe9\\x88\\x80\\x04\\xb6o\\xdb&3\\xff\\xf7\\xbf\\x80@\\xa4O\\x9f^\\xf2\\xe6\\xcd+\\x85\\x0b\\x15\\x92\\x9a\\xb5jI\\x9a4\\xfc\\x1d\\x0c\\x08\\x1c\\x0b\\x85\\x8c\\x00\\x95X\\xc8\\xd0\\xba\\xa7\\xe2\\xa9?\\xff,\\xc3\\x86\\x0e\\r\\xba\\xc3\\xf9\\xf3\\xe7\\x97\\'\\x9exB^x\\xf1\\xc5\\xa0\\xcf\\xe5\\t$`\\x15\\x01\\xfe\\x8cZE\\xd2\\xc5\\xf5,_\\xbe\\xfc\\xe0\\x83\\x14\\x9d\\xcf\\x93H\\xc0\\n\\x0270y\\xae\\x15\\x18\\xdd]G\\x8d\\xea\\xd5\\x05\\n\\t\\xd2\\xb4iS\\x89\\x8d\\x8d\\xf5{AW\\xaf^\\x95\\x13\\'N\\xc8\\xdf\\x7f\\xff-k\\xd6\\xac\\xf1\\x95\\xb9\\xe1\\x86\\x1bd\\xe6\\xcc\\x99R\\xa9re\\xdf>~!\\x81p\\x11\\xe0\\xebd\\xb8H;\\xb4\\x9d\\xbd{\\xf7\\xfa\\x14\\x18\\xba\\xf8\\xf0#\\x8f\\xc8\\xed\\xb7\\xdf\\x9edo\\x914\\xfe\\x95W^\\x91\\xef\\xc6\\x8f\\xd7\\xe5\\xb0\\xbdL=\\xcdQ\\x89%\\x89\\x8d\\x07CD\\x80\\xaf\\x93!\\x02\\xeb\\x96j\\x13\\xbeJ\\xd6\\xacY3\\xd9\\xae\\xe3\\xc9\\xabS\\xa7N\\xf1\\xcaa\\xc2\\x9fB\\x02v\\x10\\xa0\\x12\\xb3\\x83\\xba\\x83\\xda\\\\a\\x9a\\x0f+Z\\xb4\\xa8\\xe4\\xcb\\x97/\\xa0\\xdee\\xca\\x981^\\xb9@\\xcf\\x8bw\\x127H\\xc0\\x02\\x02Tb\\x16@ts\\x15\\xcb\\x96-\\xf3u?\\x90\\xa70\\xa3\\xf0\\xf4\\xe9\\xd3\\x8d\\xaf\\x82\\xa7\\xb0Z\\xca\\xdc\\x82B\\x02v\\x10\\xa0\\x12\\xb3\\x83\\xbaC\\xda\\xbcr\\xe5\\x8a\\xacZ\\xb5\\xca\\xd7\\x1b\\xd8}\\x05\"\\xe7\\xcf\\x9f\\x971c\\xc7\\xfa\\x8a\\xder\\xcb-\\x92;wn\\xdf6\\xbf\\x90@8\\tP\\x89\\x85\\x93\\xb6\\xc3\\xda\\xda\\xb4q\\xa3\\x9c={\\xd6\\xd7\\xab@\\x9e\\xc4\\x8e\\x1f?.\\x1d\\xda\\xb7\\x97\\x03\\xff\\xfe\\xab\\xcf\\xc3SX\\xcf\\x9e=}u\\xf0\\x0b\\t\\x84\\x9b\\x00W\\'\\xc3M\\xdcA\\xed%\\x9c\\xd4_\\xab\\xcc&\\xb6m\\xddz]\\x0f\\xf1\\xc4\\x06\\xe5\\xb5v\\xdd:\\xf9\\xfd\\xb7\\xdf|\\x8a/m\\xda\\xb42l\\xd80\\xaeJ^G\\x8c;\\xc2I\\x80J,\\x9c\\xb4\\x1d\\xd6VB%\\xd6\\xbbw\\xef\\x80{\\x88\\x89\\xfc\\x0f\\x94\\x91\\xeb\\xcd\\xeaU\\x92B\\x02v\\x12\\xe0\\xeb\\xa4\\x9d\\xf4mn\\x1b\\xb6])\\x15\\xcc\\x8be\\xcc\\x94)\\xa5\\xa7\\xf3<\\x12\\xb0\\x8c\\x00-\\xf6-C\\xe9\\xae\\x8a0\\x17V\\xbe\\\\9\\xc1\\xab\"\\xe4\\xce;\\xef\\x942e\\xcb\\xfa\\xbd\\x88\\xd3\\xa7O\\xcb\\xc6\\r\\x1bd\\xe5\\xca\\x95r\\xe6\\xcc\\x19_\\x99\\xda\\xb5k\\xcb\\xcf\\xd3\\xa6\\xf9\\xb6\\xf9\\x85\\x04\\xec 
\\xc0\\xd7I;\\xa8;\\xa0\\xcd\\xd5\\xabW\\xfb\\x14\\x18\\xba\\xf3Z\\xf7\\xeeR\\xa1B\\x85${\\xb6y\\xf3fis\\xff\\xfdr\\xe8\\xd0!]n\\xab\\x9f\\xf9\\xb3$+\\xe0A\\x12\\x08\\x01\\x01\\xbeN\\x86\\x00\\xaa\\x1b\\xaa4\\xdb\\x87e\\xcd\\x9aU\\xca\\xa9\\xa7\\xb2\\xe4\\xa4\\xaczRk\\xd1\\xa2\\x85\\xafX\\x8e\\x1c9|\\xdf\\xf9\\x85\\x04\\xec\"@%f\\x17y\\x9b\\xdb5[\\xeaWW\\x0e\\xe0\\x81\\xc6\\x05\\xcbnR\\\\9s\\xe6\\xb4\\xf9*\\xd8<\\t\\x88P\\x89E\\xe9]`~\\x12\\xab\\x11\\x80\\xbf\\xa4\\x81\\tf\\x18\\x86\\xe0\\xc9\\x8cB\\x02v\\x13\\xa0\\x12\\xb3{\\x04lh\\x1fsZ\\x88^aH0.C\\x98K3$\\x90WP\\xa3,?I T\\x04\\xa8\\xc4BE\\xd6\\xc1\\xf5\\x9a_%\\xd1\\xcd@,\\xf5Q\\x0e\\xb1\\xc4\\x8cI}l\\x97/_\\x1e\\x1f\\x14\\x12\\xb0\\x95\\x00\\x95\\x98\\xad\\xf8\\xedi\\xdc\\xfc*\\x19\\x13\\x13#\\x05\\x0b\\x16\\x0c\\xa8#\\xbbv\\xed\\x8aW\\xaet\\x992\\xf1\\xb6\\xb9A\\x02v\\x10\\xa0\\x12\\xb3\\x83\\xba\\xcdm._\\xb1\\xc2\\xd7\\x83@\\x9f\\xc2p\\xc2\\xe1\\xc3\\x87}\\xe7\\xe1\\xcb\\xd6-[\\xe2ms\\x83\\x04\\xec @%f\\x07u\\x9b\\xdb\\xdc\\xa8\\x1c\\xbf\\r\\tF\\x89\\x9d;w\\xce8M\\x7f\\xbe\\xff\\xfe\\xfb\\xf1\\xb6\\xb9A\\x02v\\x10\\xa0\\xc5\\xbe\\x1d\\xd4\\xd9&\\t\\x90\\x80e\\x04\\xf8$f\\x19JVD\\x02$`\\x07\\x01*1;\\xa8\\xb3M\\x12 \\x01\\xcb\\x08P\\x89Y\\x86\\x92\\x15\\x91\\x00\\t\\xd8A\\x80J\\xcc\\x0e\\xea.hs\\xef\\xb13\\xf2\\xc7\\xdak\\x06\\xb1.\\xe82\\xbb\\x18\\xa5\\x04\\xa8\\xc4\\xa2t\\xe0\\x93\\xbb\\xec\\xd7\\',\\x94\\x17\\xbe\\x99#\\x97\\xae\\\\M\\xae(\\x8f\\x93\\x80\\xad\\x04\\xa8\\xc4l\\xc5\\xef\\xcc\\xc6\\x17l9 c\\xe7m\\x96\\r\\xfb\\x8e\\xcb\\x073\\xd7:\\xb3\\x93\\xec\\x15\\tx\\tP\\x89\\xf1V\\x88G@%\\xf3\\x96.c\\xe6\\t>!\\xdd\\xbf[ \\x87N\\x9d\\xf7l\\xf0\\x7f\\x12p \\x01*1\\x07\\x0e\\x8a\\x9d]\\xfav\\xee&Y\\xb4\\xf5\\xa0\\xaf\\x0bx\\x9d\\xec\\xf3\\xfdb\\xdf6\\xbf\\x90\\x80\\xd3\\x08P\\x899mDl\\xec\\xcf\\xe9\\xf3\\x97\\xa4\\xe7\\xc4E\\xba\\x07\\xb9\\xb2d\\x9021\\x9exa\\x9f\\xcf^/+w\\x1d\\xb1\\xb1gl\\x9a\\x04\\x12\\'@%\\x968\\x9b\\xa8;2d\\xear\\xd9\\xa7V%!\\xfd\\xef\\xab#c\\x9em*7\\xdc 
r\\xe5j\\x9c\\xbc\\xac^1)$\\xe0D\\x02TbN\\x1c\\x15\\x1b\\xfa\\xb4\\xed\\xe0I\\x195\\xc3\\x93\\r\\xbcb\\xa1\\xdc\\xf2\\xdcm\\x95\\xa5~\\xe9\\xfc\\xd2\\xb9\\xa1\\'l\\xf5\\xec\\xf5\\xfb\\xe4\\x87\\xc5\\xdbl\\xe8\\x19\\x9b$\\x81\\xa4\\tP\\x89%\\xcd\\'j\\x8e\\xbe6~\\x81\\x9c\\xbf\\xe4\\xc9|4\\xaaS\\xac\\xa4O\\xeb\\xb95\\xdejWO\\xb2eJ\\xaf9\\x98\\xcbD\\r\\x18^\\xa8\\xe3\\tP\\x899~\\x88B\\xdf\\xc1?\\xd7\\xed\\x95\\x1f\\x97l\\xd7\\r\\xdd]\\xb3\\xb84\\xafZ\\xd4\\xd7h\\xa1\\xdcY\\xa5g\\xcb\\x9az{\\xfb\\xa1S\\xf2\\xce\\xaf+}\\xc7\\xf8\\x85\\x04\\x9c@\\x80J\\xcc\\t\\xa3`c\\x1f0\\xdf\\xd5u\\xcc|\\xdd\\x83\\x0c\\xe9\\xd2\\xc8;\\x0f\\xc6^\\xd7\\x9bnwV\\x93\\x927f\\xd7\\xfb\\x87N[!\\xb0\\xe6\\xa7\\x90\\x80S\\x08P\\x899e$l\\xea\\xc7\\xa7\\xb3\\xd6\\xcb\\xaa\\xdd\\x9e\\x95\\xc7\\x17\\x9bU\\x91r\\x05\\xae\\xcf`\\x94)}Zy\\xfb\\x81\\x9bt\\x0f\\xf5\\n\\xe6\\x04\\xcf\\n\\xa6M]f\\xb3$\\x10\\x8f\\x00\\x95X<\\x1c\\xd1\\xb5q\\xec\\xcc\\x05\\xe9\\xfb\\x83\\xc7\\x06,\\x7f\\x8e\\xcc\\xd2\\xa7u\\xedD\\x01\\xdc_\\xb7\\x944\\xa9TH\\x1f\\x1f3o\\x93,4\\xd9\\x92%z\\x12\\x0f\\x90@\\x18\\x08P\\x89\\x85\\x01\\xb2S\\x9b\\x18\\xf0\\xd3R9\\xec\\xb5\\xc6\\x7f\\xb3m=\\xc9\\xa9l\\xc3\\x92\\x92w;5\\x90\\xb4in\\xd0\\xd6\\xfc]\\xbe\\x9d\\xeb\\xb3\\xeaO\\xea\\x1c\\x1e#\\x81P\\x13\\xa0\\x12\\x0b5a\\x87\\xd6\\xbf~\\xdf1\\xf9\\xf0\\x7f\\x1e\\xbf\\xc8\\x9a\\xc5\\xf3\\xc9c\\x8d\\x93\\xcf\\\\T\\xadh^y\\xf2\\xff*\\xea+\\xc2\\x93\\x18\\xac\\xfb)$`7\\x01*1\\xbbG\\xc0\\xa6\\xf6\\xbb\\x8d\\x9d\\xaf#T\\xc0\\x98\\x15OXi\\xf0%\\x00\\x19\\xd4\\xa6\\xae\\xe4\\xce\\x9aQ\\x97\\x84u?\\xe6\\xc8($`\\'\\x01*1;\\xe9\\xdb\\xd4\\xf6\\xb4\\xe5;e\\xc6\\xaa\\xdd\\xba\\xf5\\xb6\\xf5J\\xcb-\\x15\\x02K\\xd9\\x86\\x13\\xf2e\\xcf$}\\xef\\xf1\\xcc\\x9d\\xc1\\xba\\xff\\xad\\x9f\\x97\\xdbt\\x15l\\x96\\x04<\\x04\\xa8\\xc4\\xa2\\xecN\\xb8x\\xf9\\xaa\\xbc2\\xcecR\\x919C:\\x19\\xde\\xa1~\\xd0\\x04\\x9e\\xbf\\xbd\\xb2\\xc0\\xaa\\x1f2r\\xfa*\\x81\\xfd\\x18\\x85\\x04\\xec\"@%f\\x17y\\x9b\\xda\\xfd\\xcf\\xefkd\\xd3\\xbf\\'t\\xeb\\xaf\\xb5\\xa8.\\xc5\\xf3y\\xec\\xbf\\x82\\xe9\\x0e\\xac\\xf9Gv\\xf4\\xd8\\x93\\xc1\\xca\\x1f\\x96\\xfc\\x14\\x12\\xb0\\x8b\\x00\\x95\\x98]\\xe4mh\\xf7\\xe0\\xc9s2h\\xcaR\\xddr\\x91\\xa5\\xe2{\\xf5\\xf2f\\xf1\\xce\\x93-\\xa3\\x0c\\xb8\\xbfN\\xd8{\\xf1`lY\\x89-\\x13\\xa3\\xdb\\xfd\\xe2\\xaf\\r\\xb2b\\xa7\\'\\x86\\x7f\\xd8;\\xc2\\x06\\xa3\\x82\\x00\\x95X\\x84\\r\\xf3\\x9bS\\x96\\xc9\\xfe\\xe3g\\xf5U\\xf5\\xbf\\xb7\\x8e\\xe4\\xcd\\x96)\\xecW\\x88\\xf8\\x8a\\xf0\\n\\x80w\\x80\\'{\\xf8\\xdc\\xb0\\xf7\\x81\\rF\\x0f\\x01*\\xb1\\x08\\x1a\\xeb\\xad*\\x8b\\xf7\\xbb3V\\xeb+\\xaa\\\\8\\xb7<{k%\\xdb\\xae\\xaen\\xa9\\x1b\\x05\\xde\\x01\\x90\\xbf\\x94\\xb7\\xc0\\xa4E\\xcc\\x1en\\xdb`Dx\\xc3Tb\\x114\\xc0\\xaf*\\xb3\\x86\\x0b\\x97=Y\\xbc\\x11a\\x02\\x93\\xfav\\n\\xbc\\x03\\xe0%\\x00\\xe9\\xaeb\\x8e\\xc1{\\x80B\\x02V\\x13\\xb0\\xf7.\\xb7\\xfaj\\xa2\\xb8\\xbe?\\xd6\\xee\\x95\\xc9Kwh\\x02\\xadj\\x95\\x90fU\\x8b\\xd8N\\x03\\xde\\x01\\xbdZ\\xd5\\xd2\\xfd\\xd8\\xa1\\xbc\\x06\\xdeQQ`)$`5\\x01*1\\xab\\x89\\xdaP\\xdfe\\xe5\\xea\\x83(\\x15\\x10\\xc4\\xf7\\x1a\\xe1MtkCW\\xaek\\xb2\\xeb\\x9dU\\xa5\\x94\\xf2\\x16\\x80\\x0cU\\xf1\\xf8\\xe1E@!\\x01+\\tP\\x89YI\\xd3\\xa6\\xba>QY\\xbc\\xd7\\xec9\\xaa[\\xef\\xd2\\xbc\\xaa\\x94\\xf5\\x93\\xc5\\xdb\\xa6\\xaei/\\x01C\\xa9\\xc2{\\xe0\\xf5\\t\\x1egt\\xbb\\xfa\\xc3v#\\x8f\\x00\\x95\\x98\\xcb\\xc7\\xf4\\xe8\\xe9\\x0b\\xd2\\xef\\x07O\\xf8\\x9b\\x98\\x9c\\x99\\xe5\\r\\xe5\\xe4\\xed4\\xb9\\xb7NI\\xb9Uy\\r@\\xc6\\xcd\\xdf,\\xf3\\xb7\\x1cpZ\\x17\\xd9\\x1f\\x17\\x13\\xa0\\x12s\\xf1\\xe0\\xa1\\xeb\\xfdU\\xfc\\xae#\\xa7\\xcf\\xeb\\xab@\\x16\\xef\\x1c\\x993
8\\xf2\\x8aFy\\x17\\x1a\\x10\\xa0\\x11\\xd9\\xc3\\xe1U@!\\x01+\\x08P\\x89YA\\xd1\\xa6:\\xd6\\xed=&\\x1f\\xfd\\xb1N\\xb7^[\\x05:|\\xf4\\x96\\xe4\\xb3x\\xdb\\xd4U\\xa9Z4\\x8f<\\xa5\\xbc\\x07 \\xf0&\\xf8v\\xcef\\xbb\\xba\\xc2v#\\x8c\\x00\\x95\\x98\\x8b\\x07\\x14q\\xbb0\\xa9\\x0f\\xe3\\xd2`\\xb2x\\xdbu\\xc9\\x03\\x95\\xf7\\xc0\\xb5\\xec\\xe1\\x0b\\x05\\xde\\x05\\x14\\x12H-\\x01*\\xb1\\xd4\\x12\\xb4\\xe9\\xfc\\x9fU\\x16\\xef\\xdfW\\xef\\xd1\\xad\\xb7\\xaf_F\\x1a\\x95+`SO\\x02o\\x16\\xde\\x03\\xfd\\xef\\xf3\\xb8A\\xc1\\xab`\\xc8\\xd4e\\x81\\x9f\\xcc\\x92$\\x90\\x08\\x01*\\xb1D\\xc08y\\xb79\\x8bw\\x16\\x15\\xbfkX\\n\\xb2x\\xdbu}\\xcf)/\\x82J\\xca\\x9b\\x002j\\xfaj\\xd9\\xa6\\xbc\\x0c($\\x90\\x1a\\x02Tb\\xa9\\xa1g\\xd3\\xb9\\xa3\\x7f[-\\x9bMY\\xbc\\x8b\\xe5\\xcdfSO\\x82oV\\x87\\x06R\\x93\\xfc\\x10x\\x17\\xbc\\xca\\xec\\xe1\\xc1C\\xe4\\x19\\xf1\\x08P\\x89\\xc5\\xc3\\xe1\\xfc\\x8d\\x03\\'\\xce\\xc9`\\xe5\\xe4\\r)\\xaa\\x94W\\xf7\\xbbS\\x9e\\xc5\\xdb\\xae\\xab\\x857A\\xcb\\x9a\\xc5u\\xf3?-\\xd9.\\x7f\\xae\\xdbkWW\\xd8n\\x04\\x10\\xa0\\x12s\\xd9 \\xf6\\x9e\\xb4HN\\x9e\\xf3d\\xf1\\x1e\\xae^#\\xf1:\\xe9FyG\\x85\\xcbf\\xf6p7\\x8e\\x9c\\xf3\\xfaL%\\xe6\\xbc1I\\xb4GK\\xb7\\x1f\\x92/\\xff\\xde\\xa8\\x8fc\"\\x1f\\x13\\xfan\\x15x\\x15\\xbc\\xd4\\xcc\\x93\\xb8d\\xf5\\xee\\xa3\\xf2\\xc9\\x9f\\x1eS\\x11\\xb7^\\x0f\\xfbm\\x1f\\x01*1\\xfb\\xd8\\x07\\xd52lC\\xcdY\\xbcaR\\x01\\xd3\\n7K\\x9f{j\\t\\xbc\\x0c }\\x95\\xd7\\xc1\\xb13\\x17\\xdc|9\\xec\\xbbM\\x04\\xa8\\xc4l\\x02\\x1fl\\xb3\\x13\\x16n\\x919\\x9b\\xfe\\xd5\\xa7=\\xa2\\x8cZa\\xdc\\xeav\\x81w\\xc1\\xe06\\x9ed\\xbe\\xf0:\\xe8\\xff#\\xb3\\x87\\xbb}L\\xed\\xe8?\\x95\\x98\\x1d\\xd4\\x83l\\xf3\\xac\\x8a\\xc3\\xd5\\xc3\\x9b\\xc5\\x1b\\x7f\\xf8o\\xb6\\xad\\x1bd\\r\\xce-\\xfeX\\xe3\\xf2R\\xabD>\\xdd\\xc1\\x0f\\x95\\xf7\\x01\\xbc\\x10($\\x10\\x0c\\x01*\\xb1`h\\xd9T\\xf6\\xed_V\\xca\\xae#\\xa7u\\xebp\\xf0.\\x90\\xd3\\xfa,\\xde6]\\x9a\\x0eam\\xbc\\x1a\\xc3\\xfb\\xa0\\xdb\\xd8\\xf9vu\\x85\\xed\\xba\\x94\\x00\\x95\\x98\\xc3\\x07n\\xb7R^\\xc3\\xa7\\xad\\xd0\\xbd,\\x13\\xa3&\\xc3\\x9bWqx\\x8f\\x83\\xef\\xde\\xcd\\xe5\\x0bJ\\xbb\\xfa\\xa5\\xf5\\x89\\xbf\\xad\\xde-\\xf0F\\xa0\\x90@\\xa0\\x04\\xa8\\xc4\\x02%eS\\xb9\\xee\\xea5\\x12\\xaf\\x93\\x90w\\x1e\\xbc)\\xe4Y\\xbcm\\xbaL\\x19\\xde\\x81\\xd9\\xc3\\xedb\\xef\\xf6v\\xa9\\xc4\\x1c<\\x82s\\xd5D>&\\xf4!\\xb7W)\"\\x08;\\x1d\\xa9\\x02\\xaf\\x83\\xd7ZT\\xd7\\x97\\x07o\\x84\\xf7~\\xf7$<\\x89\\xd4\\xeb\\xe5uYG\\x80J\\xcc:\\x96\\x96\\xd6\\x84x[]T\\xc8i\\x98V\\x84;\\x8b\\xb7\\xa5\\x17\\x12De=\\x94\\xf7\\x01\\xbc\\x10 
\\xf0J\\x80w\\x02\\x85\\x04\\x92#@%\\x96\\x1c!\\x9b\\x8e\\x7f\\xa5\\x8cZa\\xdc\\nA\\xea5\\xa4`\\x8bt\\xd1\\xce\\xec\\xf1\\xb2\\x87/\\x8a\\xf4K\\xe6\\xf5Y@\\x80J\\xcc\\x02\\x88VW\\x01\\xb7\\xa2\\xde\\x93\\x16\\xebju\\xf8\\x1a\\x95\\x047Z\\xa4\\xc3Me\\xa4\\xa17\\xac\\xd0\\x17\\x7fm\\x94e;\\x0eG\\xcb\\xa5\\xf3:SH\\x80J,\\x85\\xe0By\\xda\\x9bS\\x96\\xcb\\xbf\\'\\x192d\\x88,[\\x16\\x19\\t~\\x7f\\xff\\xed7}\\xdf?\\xf1\\xf8\\xe3!\\x1a!\\xfb\\xaaug\\xaa\\x9c\\x14\\xf2\\xc2M\\xbah\\x91\\xc7\\x1f\\xaf\\\\\\xf9\\xf2)\\xac\\xc5\\xba\\xd3\\xa6,\\xdd!\\xff[\\xebIW\\xf6@l\\x19iP6\\xc6\\xba\\xca#\\xac&(0(\\xb2\\x17\\xbf\\x99\\xab\\xbd\\x19\\x06O^fY\\xd2\\xe0\\xd5\\xabW\\x0b\\xfe\\xb8w\\xef\\xde\\x1d\\x8f\\x1a\\xee\\x15\\xdc3=z\\xf4\\x90\\x17_z)\\xde1\\xb7m\\xcc\\x9c9S\\xd6\\xaf\\xf7<\\xf1\\xbb\\xad\\xef\\xc9\\xf57j\\x9e\\xc4\\xf6\\xed\\xdb\\'\\x1f\\xa8\\x1b\\xd2)\\x82\\xc4\\xb1\\xaf\\x8c\\xf3D1\\x85\\xe3\\xf3\\xd0v\\xf5\\x9d\\xd25\\xc7\\xf6\\x03\\xaf\\x94\\x86#<\\xbc\\x1a\\xe0\\xdd\\x90Z\\xb9r\\xe5\\x8a\\xbc\\xf0\\xfc\\xf3>\\x05\\x96?\\x7f~i\\xa5\\x9e\\xd6\\x1b\\xdd|\\xb3\\xa4I\\x93F\\xae^\\xbd*o\\xbd\\xf5\\x96\\xcc\\x9a5+\\xb5M\\xd9v\\xfe\\xd6\\xad[\\xe5\\xe7i\\xd3lk?\\xd4\\rG\\x8d\\x12\\x1b4h\\x90\\x9c?\\x7f>\\xd4<\\x03\\xae\\xff\\xdd\\x19\\xabe\\xeb\\xc1\\x93\\xba\\xbc9\\x04M\\xc0\\x15DaAsH\"\\xab\\xb2\\x87O\\x992E6o\\xde\\xaci6l\\xd8P\\x96\\xaa\\xd7\\xc7\\x8f?\\xfeX&N\\x9c(_}\\xfd\\xb5\\xca(\\xe5I)5r\\xe4H\\xd7\\x10\\x8fS+ \\xc7\\x8e\\x1d\\x93\\x8d\\x1b7\\xeaki\\xd7\\xb6\\xad\\x9c<\\x91z\\x85\\xefT\\x00Q\\xf1:\\xb9x\\xf1b\\x992y\\xb2d\\xce\\x9cY0/f\\xb7\\xc0\\xb9\\xfbMo\\x16\\xef\\xe2\\xf9\\xb2\\xfb\\x82\\x01\\xda\\xdd/7\\xb4\\x8f\\xe0\\x900\\xbb\\xc0\\xab\\xb8\\xf1:\\x9e\\x1a\\xcf\\x86\\x85\\x0b\\x16\\xf8.\\xfb\\xad\\xa1C%m\\xda\\xb4\\xbe\\xed\\xdbn\\xbbM\\x1a6j$s\\xfe\\xf9GV\\xaeX!xj3\\x1f\\xf7\\x15t\\xd8\\x97]\\xbbvI\\xecM79\\xacW\\xa1\\xebN\\xc4?\\x89\\xe1u\\xa0\\xcf\\x1boh\\x82\\xcf\\xa9\\xd7\\x86\\x9c9=\\x16\\xe0I!\\xfdZ\\xfd\\x02\\x0fS7\\xf4\\xb8\\xb1c\\xfd\\x16;~\\xfc\\xb8\\x0c\\x1f6L\\x97\\x99>}\\xba\\xdf2I\\xed4OL\\x0fS\\xf1\\xb32\\xbb4\\x8bwR\\xd7\\x18\\xcac0\\x80\\x85!,\\xc4X\\x18Ii{\\xdbw\\xec\\xd0\\xa7\\xe6\\xcd\\x9bW\\xca\\x94\\xb9>\\x19q\\xe5J\\x9eU\\xd1\\xcb\\x97/\\xcb\\xde\\xbd\\x9e\\xf9Ks[\\xa1\\xbeW\\xccm\\xf1\\xbb\\x7f\\x02\\x11\\xff$6I\\xbd\\x16\\xacZ\\xb5J\\n\\x16,(\\xcf=\\xf7\\x9c|\\xfe\\xd9g\\xfeI\\x98\\xf6.V\\x13\\xba?\\xfe\\xf8\\xa3\\xa4O\\x9f^\\x9a5o.\\xf9\\xf2\\xe5\\xf3\\x1d\\xc5\\xcd\\xfc\\xd4\\x93O\\xca\\x9c9s$\\x87R\\x883\\x82Tbf\\x13\\x01s\\x82\\x0c_\\x03\\xfc\\x92,\\x01\\xb8\"\\xc1%i\\xf8/+\\xc40Qy\\xfe\\xb6\\xca\\xc9\\x9e\\xe7\\xaf\\xc0\\xe9S\\xa7\\xf4n\\xcc\\x85\\xf9\\x93t\\xea\\x1e0\\xe4\\xecYOx$c\\x1b\\x9f\\xa1\\xb8W\\x0e\\x1e<(\\x17/^\\x94\\x98\\x98\\x18}\\x0f\\x9a\\xdb\\x0b\\xe4{\\xd1\\xa2Ee]\\x82I\\xfc\\x0e\\xed\\xdb\\xeb\\xbf\\x83@\\xcew[\\x99\\x88~\\x12;}\\xfa\\xb4\\x9e\\x94\\xc5\\xa0\\xbc\\xa1\\x9e\\xc6\\xf0:\\x19\\x88t\\xe8\\xd0A\\x17\\xbbt\\xe9\\x92|7~|\\xbcS\\xf0T\\x07\\x05\\x86I\\xdf\\x8f>\\xfaHJ\\x94,\\x19\\xefxR\\x1b0\\xd6\\xec\\xf2\\xed\\\\A\\x9c,\\x98R\\x18\\xa9\\xca\\x92:\\x87\\xc7\\xfc\\x13\\x80s\\xb8\\x91\\xba\\xcel,\\xec\\xbft\\xe2{\\x1f}\\xec1\\xe9\\xdd\\xbb\\xb7<\\xfd\\xcc3~\\x0b\\xed\\xd8\\xbe]\\xef\\xc7\\xdcX\\xa1B\\x85\\xae+\\x13\\x8a{\\xe5\\xb1G\\x1f\\x95zu\\xeb\\xca7\\xea\\x8d %\\x82{3W\\xae\\\\\\xf1\\xfee\\xca\\x94)%U\\xb9\\xe2\\x9c\\x88Vb\\xa3G\\x8f\\x16\\xfc\\xaa\\xd5\\xae][\\xee\\xb9\\xf7\\xde\\x80\\x07\\x04\\xf3 E\\x8a\\x14\\xd1\\xe5\\xc7\\x8c\\x19\\xa3W\\xa8\\xb0\\xf1\\xd5W_\\t^\\x1f 
={\\xf6\\x94&M\\x9a\\xe8\\xef\\x81\\xfe7~\\xfe\\x16\\x99\\xb7\\xf9\\x80.nN\\x1a\\x1b\\xe8\\xf9,w\\x8d\\x809\\x89\\xb0\\xd9m\\xebZ\\x89\\xc0\\xbe\\xb5U\\x93\\xde\\xcf\\xbf\\xf0\\x82\\xb4k\\xd7\\xee\\xba\\x13\\xf6\\xec\\xd9\\xe3[\\x95,\\xa9~\\xacr\\xe4\\xb8\\xde\\x9f5T\\xf7\\n:sY\\xcd\\xc1Q\\x92\\'\\x10\\xb1Jl\\x87\\x9a\\xeb\\xf8\\xec\\xd3O\\xf5\\xea\\xd2\\x80\\x81\\x03}\\xabL\\xc9#\\x11]\\xb6\\xbdz\\xfc\\x86`\\x92t\\xf6\\xec\\xd9\\xfa\\xe9\\xabo\\x9f>z\\x1f\\x96\\xe0q\\xe3\\x07#p`~}\\xe2B}J\\xce,\\xc8\\xe2]/\\x98\\xd3Y\\xd6\\x0f\\x81Gn)/\\xb5U\\x00E\\x88\\xd9\\x81\\xdeO\\xd1\\xa0wa\\x01\\xe8\\xd1G\\x1e\\x11\\xe3\\x15\\xf2a\\xf5\\xdd\\x9f\\xe0\\t\\xcd\\xea{\\xc5_;\\xdc\\x978\\x81\\x88Ub\\x03\\x07\\x0c\\xd0\\xf3\\n\\xf7\\xddw\\x9f\\xd4\\xaa\\x15\\xbcCu[\\xf5\\xcbl,\\xaf\\xbf\\xf9\\xe6\\x9bz\\x1e\\x0c\\xf3a\\x95*W\\x96Q\\xa3F%N4\\x91#\\xe6P2\\xc8\\xe2\\x9d?G`\\xaf\\xb6\\x89T\\xc7\\xdd\\x8a\\x00^\\xc9G\\xab\\xc0\\x91\\xeaC{=\\x18\\xa1\\x8cR\\x0b\\x07\\xd3\\x08\\xcf\\xab\\xf9\\xd3\\xb5k\\xd7\\xea\\xaabcc\\xe5\\xe1\\x87\\x1fN\\xb4Z\\xab\\xef\\x95D\\x1b\\xe2\\x01\\xbf\\x04\"R\\x89aI|\\xc6\\x8c\\x19z\\x0e\\xac\\xb7we\\xd2\\xef\\xd5\\'\\xb1\\xb3X\\xb1b\\xd2\\xa0A\\x03]b\\xfd\\xbau\\x82\\x15\\xc9\\xdc\\xb9s\\xcb\\x97_~\\x19\\xf0\\xdc\\x9aQ\\xbd9\\xa8_\\xb9\\x02*\\x8bw\\xb3\\xaa\\xc6!~\\xa6\\x92\\x00\\x92\\x8a \\xb9\\x08\\xc4\\x1cT2\\xa5\\xd5bB\\xfdI\\xb5p\\x83\\xfb\\x07RQ\\xadN~\\xa9\\xa6\\x112d\\xc8\\x90h\\x95V\\xde+\\x896\\xc2\\x03\\x89\\x12\\x88\\xb8\\xd5I\\xd8\\xf2\\xf4\\xeb\\xd7O_0^\\xf9\\n\\x14(\\x90\\xe8\\xc5\\'w\\xa0\\x9dz\\xa5\\x9c;w\\xae.\\x86\\xc9\\xd2O>\\xf9D\\xb0\\xf2\\x13\\xac\\x98\\xc3+#\\xd8a\\x86t\\x11\\xf9\\xdb\\x11,\\x16\\xcb\\xca\\xc3L\\x056cxeGx\\xef\\xe6\\xd5\\x8a\\xfaL0\\x82i\\x04\\xc6\\xd0\\x98T\\xc7\\xf4\\x01\\x04\\x8b6\\xe3\\xc6\\x8d\\xf3;\\x17\\xa6\\x0b\\x98\\xfeK\\xe9\\xbdr\\xe0\\xc0\\x01\\xc1\\xdc\\x9bY\\xce\\x9c9\\xa37a\\xac\\xbat\\xe9R\\xf3!\\xa9R\\xa5\\x8ad\\xcc\\x981\\xde\\xbeh\\xdf\\x888%\\x06\\xbb-\\xc3G\\xec\\x92\\xfaU\\xc5\\xe4\\xbeY.\\\\\\xf0\\x84\\xbc\\xd9\\xa4n\\x10\\x1c\\xbb]\\x194\\xe2\\x15\\xd1\\x9f\\xcc\\xf3*0\\x1c\\xc3\\xa4\\xeeM\\xea\\xb5\"X1\\'\\xbah^\\xb5\\xa8\\xdc]\\xb3x\\xb0U\\xb0|2\\x04\\x90p\\xb7\\xbbJ\\xbc\\xdb_\\xc5d3\\x12\\xadtoQ#\\x99\\xb3\\xe2\\x1f\\x86\\xe2x\\xf8\\xa1\\x87d\\xde\\xbcy\\xfa\\x00\\x94\\xc5X\\xa5\\xc0n\\xbc\\xd13\\xe7\\x16\\xbf\\xf4\\xf5[)\\xbdW`\\xca3H\\xcd\\xd9\\xfa\\x13\\xac\\x8c\\'\\\\\\x1d\\x9f\\xa3\\xee\\xc9R\\xa5J\\xf9+\\x1e\\xb5\\xfb\"N\\x89\\x99\\xdd+\\x12*0\\xf3(C\\xd1\\xe1\\x1f\\\\4\\xfc)\\xb1/\\xbf\\xf8B&L\\x98\\xe0;\\x05\\xaf\\x93\\xb3\\xfe\\xfcSnW\\x11\\x0e\\x02\\x15s\\xca\\xb1\\xf4i\\xd3\\xc8\\xa8N\\xc1+\\xc1@\\xdb\\x8a\\xf6r\\xaf\\xb5\\xa8._\\xfc\\xb5A\\xf0\\xea\\x8e\\x94w\\x0f5*\\xe73\\xc1H\\x8e\\r&\\xf1;u\\xec(\\x0b\\x17z\\x16^\\xe0~\\xf4\\x85\\x9a6\\xc8\\x9e={r\\xa7\\xea\\xe3V\\xdc+\\x015\\xc4B~\\tD\\xdc{M\\x96,Y\\xb4a+\\x8c[\\xfd\\xfd3&\\xebA\\x03\\xc7\\xb3g\\xcbv\\x1d\\x18\\xfc\\x1a\\x1b\\xaf\\xa4-Z\\xb4\\xf0=\\xbeO\\x9a4\\xe9\\xba\\xb2I\\xed0\\'\\x7fE\\x16\\xef\\x8a\\x85r\\'U\\x9c\\xc7RA\\x00N\\xf4\\xc3;x\\\\m\\xcc\\xc9\\x87\\x03\\xa9\\xf2\\xd5W_\\xf5)0\\x8c7\\x9e\\xc0\\x02U`\\xa9\\xbdW\\x9eQ\\xf6i\\xbb\\xd5\\xeb\\xa4\\xf9_\\xcd\\x9a5u\\xb7\\xfb\\xf4\\xed\\x1bo?\\xca\\xf0)\\xcc\\xcf\\x88\\xaa\\'\\x91\\xa8\\x92\\n\\xe5\\xcb\\xc7\\x15,P 
N\\xdd<~\\xaf[\\x85c\\x89\\xabR\\xb9\\xb2.\\xd3\\xb2e\\xcb85\\xd1\\x1b\\xf7\\xf4SO\\xe9\\xed\\xe2\\xc5\\x8a\\xc5\\xa9\\'2\\xbf\\xe7%\\xdcy\\xfc\\xcc\\x85\\xb8\\x98\\xe7\\xbf\\x8e\\x93N\\x1f\\xc7\\xe5{\\xf6\\xab\\xb8\\xa3\\xa7\\xcf\\',\\xc2m\\x8b\\t\\\\U\\x8f\\xbe\\x8d\\x06N\\xd6\\xcc\\xd3t\\xfe$n\\xc9\\xb6\\x83\\xc9\\xb6\\xa0\\xc2\\xf0\\xe8\\xb1\\xc5=\\xd1\\xb6M\\x9b8\\xb52\\x99\\xec9F\\x01\\xab\\xee\\x15\\xa3>\\xe3\\xb3\\xc5]w\\xe9>)GtcW\\xaa?[\\xb7j\\xa5\\xebl\\xda\\xa4I\\xaa\\xebrZ\\x05\\x11\\xf7$\\xe6GO\\x07\\xbc\\xcb\\x98\\xd8=r\\xe4\\x88^\\x10\\xf8\\xfc\\xf3\\xcf\\xb5\\xdb\\x07&m!X\\xb9\\x9a:uj@\\xf5\\rV\\x0e\\xde\\x07Nx\\x9c\\xcd\\x07\\xde_Wrg\\xe5dl@\\xe0RQ\\x08\\xa6\\x16F`I\\xbc\\xcawQ\\x01\\'\\xe1%\\x91\\x94 \\xce\\x16\\x04\\x0b7\\x1f|\\xf8\\xa1\\xa4K\\x17\\xd8\\x0c\\x8b\\x95\\xf7JR\\xfd\\xe3\\xb1\\xe4\\t\\x046b\\xc9\\xd7\\x13\\x11%\\xbau\\xeb&k\\xd6\\xac\\xd1\\xaf\\x8f\\x98\\x131\\xfc\\xe9\\x1a7n\\xac\\xfd\\xd8\\xb0\\x92\\x84W\\xca\\xce\\x9d;\\'y\\xbd\\x9b\\xff=!\\xef\\xfd\\xee\\xc9\\xe2]\\xb5h\\x1ey*\\n\\xb3x\\'\\t(\\x84\\x07k\\x95\\xc8\\'\\x8f*#\\xd8\\xff\\xaa\\xf91$\\x15Ar\\x11\\x04\\x9cLL\\x8c\\tyD\\xa70V%\\x13+[W\\xb9\\x02\\x95(QB\\x1f\\xb6\\xea^I\\xac-\\xee\\x0f\\x9c\\x00\\x95\\x98\\x97\\xd5\\x87\\xeaWx\\xf2O?\\xe9\\xad\\xb7G\\x8c\\x90\\x1a5\\xae\\xadn\\xe1\\x06\\xbf\\xff\\xfe\\xfb\\x05e\\x96\\xa8\\xb0>\\xf0\\xa7K\\xcag\\xb2\\x9b\\nvx\\xf1\\xf2U]\\x17\\xfc#\\xd3\\xa6Q\\x8f\\x08\\x94\\xb0\\x11\\x807\\xc4\\xa4E\\xdb\\x04sc=T\\x9a7\\x84\\xee\\xc1\\x9c\\x99?\\xd9\\xb7\\x7f\\xbf\\xde\\r\\x03\\xd7.\\xc9Do\\x1d\\xf5\\xee\\xbbZ\\x89Yy\\xaf\\xf8\\xeb\\x13\\xf7\\x05G j_\\'3\\x98\\xa2\\x13,_\\xbe\\\\\\xdeR\\xf1\\xd4!O?\\xfd\\xb4\\xb4i\\xd3\\xe6:\\x8a\\xb0\\xca6\\xe4\\x97_~1\\xbe^\\xf7\\xf9\\xdb\\xea\\xdd2m\\xf9N\\xbd\\xff\\xde:%\\xa5i\\xa5\\xc2\\xd7\\x95\\xe1\\x8e\\xd0\\x12\\x88\\xc9\\x99Y\\xe0\\x15\\x01A\\xc2\\xdd\\xe1*\\xf1nbrJ\\xc5\\xd6\\x0fTp\\xcfXy\\xaf$\\xd6nR\\x86\\xb5\\x89\\x9d\\x93\\xdc~#\\x0e\\x1a\"\\xb3D\\x9a\\xdc\\x80I\\xbaH\\xbb(\\xbb\\xae\\xe7\\x92\\xca\\xe2]\\xa3\\xf7\\xf7\\xb2N%\\xc1\\xcd\\x94>\\xad\\xac\\x1d\\xdaNJ1\\t\\xae-\\xc3\\x81\\'\\xe1*=\\'\\n^\\xed\\xf1\\x14\\xb6~x{)\\xa6\\xec\\xc9(\\x91G 
j\\x9f\\xc4B1\\x94\\x1f\\xfd\\xb1N+0\\xd4\\xdd\\xf5\\x8ejT`\\xa1\\x80\\x1c`\\x9d\\xf0\\x8a\\x80w\\x04Dg\\x0f\\xff\\xcec\\x03\\x16\\xe0\\xe9,\\xe6\"\\x02Tb\\x16\\r\\xd6\\xe1S\\xe7\\xb5\\xc58\\xaa+\\x98+\\x8b\\xf4l\\xe5\\xb1\\xf5\\xb1\\xa8zV\\x93\\x02\\x02-\\x95wD\\xb3\\xaa\\x9e\\x90J\\x13\\x16n\\x919\\xcc\\x1e\\x9e\\x02\\x8a\\xce?\\x85J\\xcc\\xa21\\xea\\xfb\\xc3b9v\\xc6\\xe3\\xd2\\xf4\\x96\\xca\\\\\\xc4,\\xde\\x16\\x81Me5\\xa3:6\\xf0e\\x0f7\\x02R\\xa6\\xb2J\\x9e\\xee0\\x02Tb\\x16\\x0c\\xc8\\xea\\xddG\\xe5So\\x16\\xefz\\xa5\\xf3K\\xe7Fe-\\xa8\\x95UXA\\xa0\\x92\\xca\\x1e\\x0eo\\t\\xc8\\xb2\\x1d\\x87\\xe5\\xcb\\xbf7ZQ-\\xebp\\x10\\x01*1\\x0b\\x06\\xe3eeTy\\xe5j\\x9c\\x8ek\\x05\\x93\\nf\\xf1\\xb6\\x00\\xaa\\x85U\\xf4\\xbf\\xb7\\x8e\\xe4\\xcd\\xe6\\t\\xcf\\xdc{\\xd2\"mzaa\\xf5\\xac\\xcaf\\x02Tb\\xa9\\x1c\\x80\\x9f\\x96l\\x97?\\xd7\\xed\\xd5\\xb5tlPVb\\xcb\\xc4\\xa4\\xb2F\\x9en5\\x01d\\x0f\\x1fx\\x7f\\x1d]-\\xbc(\\xe0MA\\x89\\x1c\\x02Tb\\xa9\\x18Ks\\x02\\xd7\\xac\\x19U\\x16o\\x15\\xd7\\x8a\\xe2L\\x02\\xf0\\x9a\\x80\\xf7\\x04\\x04\\xd9\\xc3azA\\x89\\x0c\\x02Tb\\xa9\\x18\\xc7Q\\xd3W\\xcb6o\\x16\\xef\\xd7[\\xd6\\x94\\xc2\\xb9\\xb3\\xa6\\xa26\\x9e\\x1aJ\\x02:{\\xb8\\x9a\\xe4\\x87\\xc0\\x86\\xec\\x15\\xe5UA\\x89\\x0c\\x02Tb)\\x1c\\xc7\\xfd\\xc7\\xcf\\xca\\x90\\xa9\\x9e\\xd7\\x92\\x12*\\x8b\\xf7+wVKaM<-\\\\\\x04n\\xad\\\\X\\xeeQ.H\\x90\\x9f\\x95W\\xc5\\xef\\xab\\xf7\\xe8\\xef\\xfc\\xcf\\xdd\\x04\\xa8\\xc4R8~=U\\xe6\\xa2S\\xe7/\\xe9\\xb3\\x87\\xab\\x8c\\xd4\\xcc\\xe2\\x9dB\\x90a>m\\x842\\x80\\xf5e\\x0f\\x1f;O\\'\\x18\\ts\\x17\\xd8\\x9c\\xc5\\x04\\xa8\\xc4R\\x00t\\xd1\\xd6\\x83\\xf2\\xed\\x9c\\xcd\\xfa\\xcc\\xc6\\x15\\nJ\\xdbz\\xa5RP\\x0bO\\xb1\\x83@i\\xe5\\x06\\xd6\\xf5NO\\xa2\\x16\\xb8\\x87\\xc1\\xcb\\x82\\xe2n\\x02TbA\\x8e\\x1f~\\x81\\x9cS\\x19\\xa6!#;\\xc6\\n\\xf2HR\\xdcO\\x00^\\x16\\xf0\\xb6\\x80\\xbc\\xaa\\x0c`\\xe1\\x85Aq\\x17\\x01\\xfe%\\x060^\\x7fm\\xd8\\xafc\\xb6\\xa3\\xe8\\x9d\\xd5\\x8b\\xca]\\xd5\\x8b\\x05p\\x16\\x8b\\xb8\\x81@\\x91\\xadk\\xebK\\x83w\\xc6\\xb0i\\xcb#\\xf12#\\xea\\x9a\\xa8\\xc4\\x12\\x0c\\'\\xb2xw\\x1b\\xe7qAA\\x16\\xef\\xb7U\\xac0Jt\\x11\\x807F\\xb9\\x029\\xf5E\\x8f\\xf8u\\x95\\xecT\\xde\\x1a\\x14\\xe7\\x12\\xa0\\x12K06\\x1f\\xcc\\\\+\\x1b\\xf6\\x1d\\xd7{\\xbb\\xa9h\\xad%o\\xf4\\xd8\\x0f%(\\xc6\\xcd\\x08&\\x80\\xec\\xe1#\\xbd\\xa1\\xac\\xe1\\xa5\\xd1\\x9d\\xd9\\xc3\\x1d=\\xdaTb\\xa6\\xe19\\xa4\\xb2x\\x0fP\\xf1\\xa5 \\x85T\\xbc\\xfc\\x9e*n>%:\\t\\xb4\\xa8QL\\xee\\xa8VT_\\xfc\\xa4E[\\xe5o\\xe5\\xb5Aq&\\x01*1\\xd3\\xb8\\xf4U6a\\xc7\\x95m\\x18dh\\xfbz\\x92M\\xc5\\x9d\\xa2D/\\x01\\xc3G\\xd6\\x08\\x84\\t\\xef\\r\\x8a\\xf3\\x08P\\x89y\\xc7d\\xe5\\xae#\\xf2\\xd9\\xec\\xf5z\\xab\\xbe\\xca\\xe2\\xdd\\xa9A9\\xe7\\x8d\\x16{\\x14V\\x02\\x15\\x0b\\xe5\\x96\\xe7n\\xab\\xac\\xdb\\\\\\xbe\\xf3\\xb0|\\xa1\\xbc7(\\xce#@%\\xe6\\x1d\\x93\\xae*i\\x84\\x91\\xc5{tgO\\x9c)\\xe7\\r\\x17{\\x14n\\x02\\xfd\\xee\\xad-\\xf9\\x94\\xb7\\x06\\xe4\\r\\xf5\\xa4\\x0e/\\x0e\\x8a\\xb3\\x08P\\x89\\xa9\\xf1\\xf8a\\xf16\\x99\\xb5n\\x9f\\x1e\\x19<\\x81\\xe1I\\x8cB\\x02 \\x90;kF_\\x04_xo\\x0c\\x9a\\xb2\\x94`\\x1cF \\xea\\x95\\xd8\\xf9KW\\xe45\\x15+\\x0c\\x8290\\xcc\\x85QH\\xc0L\\x00\\xde\\x1a\\xd5\\x8azr)\\xfc\\xe7\\xf75\\xb2\\x89\\xd9\\xc3\\xcdxl\\xff\\x1e\\xf5Jl\\xa4\\x8a\\xd6\\xba\\xfd\\x90\\xc7\\x0e\\x08\\xab\\x91X\\x95\\xa4\\x90\\x80\\x99\\x80\\'\\xab\\x95\\'\\x04\\x13\\xb3\\x87\\x9b\\xc98\\xe3{T+\\xb1}\\xc7\\xce\\xc8[*n>\\x04\\xf6`\\xb0\\x0b\\xa3\\x90\\x80?\\x02M*\\x15\\x92\\xfb\\xea\\x94\\xd4\\x87\\xa6\\xa9\\xec\\xe13V\\xed\\xf6W\\x8c\\xfbl 
\\x10\\xd5J\\xec\\xf5\\t\\x8b\\xe4\\xb47\\x8b7,\\xf3a\\xa1O!\\x81\\xc4\\x08 {\\xb8q\\x8ft\\x1b;_\\xe0\\xddA\\xb1\\x9f@\\xd4*\\xb1\\x85*\\x8b\\xf7\\x98y\\x9b\\xf4\\x08\\xe0W\\x16>\\x92\\x14\\x12H\\x8a\\x00\\x9e\\xd6\\xbb\\xde\\xe1yZ_\\xbf\\xef\\x98|\\xf8\\xbf\\xb5I\\x15\\xe7\\xb10\\x11\\x88J%\\x06\\xe3\\xc5.\\xdfz\\xe2Ea\\xbec\\x94\\xd7\\xc5$L\\xcc\\xd9\\x8c\\x8b\\t\\xf4jum\\xdet\\xc0OK\\xe5\\xb0\\xf2\\xf2\\xa0\\xd8K *\\x95\\x18\\x9e\\xc0\\xf0$\\x06\\xc1\\xca\\x13\\xe2\\x85QH \\x10\\x02X\\xc1~\\xcb\\x9b=\\xfc\\xd8\\x19d\\x0f\\xf7D\\xfe\\r\\xe4\\\\\\x96\\t\\r\\x81\\xa8Sb\\x98\\x03\\xc3\\\\\\x18\\x04\\x91Z\\x072\\x8bwh\\xee\\xac\\x08\\xae\\xb5s\\xc3rR\\xcfkK\\x88\\xec\\xe1\\xabv{r0D\\xf0%;\\xfa\\xd2\\xa2N\\x89a5\\x12\\xab\\x92\\x10\\xc4\\xccG\\xec|\\n\\t\\x04C\\x00Y\\xc3Gwj\\xa0\\xb3\\x87{\\xb2ayB7\\x05S\\x07\\xcbZG \\xaa\\x94\\x18\\xec\\xc1`\\x17\\x06A\\xd6\"d/\\xa2\\x90@J\\x08\\xdcT&F:6\\xf0d\\x0f\\x87\\xb7\\xc7\\x8fK\\xb6\\xa7\\xa4\\x1a\\x9ec\\x01\\x81\\xa8Rb\\xb0\\xcc\\x87\\x85>\\x04\\xf9#\\x99\\xc5\\xdb\\x82;(\\x8a\\xab@\\xf6p#\\xd2\\x89\\xf9\\xde\\x8ab$\\xb6\\\\z\\xd4(\\xb1\\xd9\\xeb\\xf7i\\x1fIPF\\xac(d\\xf2\\xa6\\x90@j\\x08\\x14V\\xde\\x1d\\xaf\\xdf]CW\\xb1\\r\\xd9\\xc3gx\\x9e\\xf2SS\\'\\xcf\\r\\x9e@T(1\\xf3\\xbc\\x05\\xa2v\\xbe\\xa3\\x8c\\x16)$`\\x05\\x81W\\xee\\xaa.%|\\xd9\\xc3\\xaf\\xcd\\xb7ZQ7\\xeb\\x08\\x8c@T(\\xb1\\xcfU\\x9c0\\xc4\\x0b\\x83\\xbcp{\\x15)_\\x90Y\\xbc\\x03\\xbb=X*9\\x02\\xe6<\\x0cX\\xf9\\xee\\xc9\\xec\\xe1\\xc9!\\xb3\\xfcx\\xc4+1Dj5\\xb2x#\\x93M\\xdf{j[\\x0e\\x91\\x15F7\\x816\\xf5\\xaee\\xc4\\xfav\\xee&Y\\xe4\\xb5A\\x8cn*\\xe1\\xbb\\xfa\\x88Wb\\x88\\x99\\x8f\\xd8\\xf9\\x90A\\xa6\\x9c\\x82\\xe1C\\xcc\\x96\\xa2\\x81\\x80\\x91=\\\\{\\x83\\x8c\\x99\\xc7\\xec\\xe1a\\x1c\\xf4\\x88Vb\\xc8Z\\x84\\xecE\\x90\\x1a\\xc5\\xf3\\xca\\xe3\\xde\\xec\\xcea\\xe4\\xcb\\xa6\\xa2\\x84\\x00\\xbc>\\x9e\\xf0f\\x89_\\xb0\\xe5\\x80\\x8c\\x9d\\xb79J\\xae\\xdc\\xfe\\xcb\\x8ch%\\x86\\xfc\\x91F\\xa4\\x81w;5\\x14\\xf8IRH T\\x04\\xf0\\xa4\\x0f/\\x10\\xc8\\xeb\\x13\\x162{x\\xa8@\\'\\xa87b\\x95\\xd8\\xaf+w\\t2yC\\xda\\xaa9\\x8b\\xc6\\x15\\n&\\xb8tn\\x92\\x80\\xb5\\x04\\xe0\\xfd\\xd1W\\xc5\\xe4\\x87\\xec5\\xc5\\xaa\\xb3\\xb6\\x15\\xd6\\x96\\x90@D*1\\x9d\\xc5[\\xc5{\\x82d\\xce\\x90N\\x863\\x8bw\\xc2q\\xe7v\\x88\\x08`\\xf5\\x1b\\xde \\x90w~]);\\x98=\\xc1\\xf8\\x10q\\x9f($`\\x17\\x01\\xacT\\x1a\\xf1\\xea\\xe05\\x02\\xef\\x11\\x8a\\xf5\\x04\"F\\x89MRs\\x0f\\x7fm\\xd8\\xaf\\tunT\\xd6\\x17y\\xd3zd\\xac\\x91\\x04\\x02#\\x00\\x9b1\\xd8\\x8eA0W;P\\xc5\\xe4\\xa7XO \"\\x94\\xd8\\xb9\\x8b\\x97\\xa5\\xbb7\\x8bwv\\x1d\\x03\\xbd\\xbe\\xf5\\xa4X#\\t\\xa4\\x80\\x00\\xac\\xf8\\x8d\\x1c\\x0e\\xef\\xcf\\xbc\\xb6j\\x9e\\x82\\xaaxJ\"\\x04\"B\\x89\\xbd\\xa3\\xa2\\xb5\\x1a\\xf68=U6\\x9a\\x82\\xb9\\xb2$r\\xb9\\xdcM\\x02\\xe1%`\\xce\\x1en\\xb6_\\x0co/\"\\xbb5\\xd7+1XF\\x0f\\xf5f\\xf1.\\x95?\\x07\\xb3xG\\xf6\\xfd\\xea\\xca\\xab\\xfb\\xbf\\x8a\\x85\\x04\\x91. 
fO\\x12W^\\x8c\\x03;\\xedz%f\\xf6Q\\x1b\\xa1,\\xf33\\xa6c\\x16o\\x07\\xdegQ\\xdf%s\\x86y\\xb3Oo\\xd4\\x83\\xb1\\x00\\x80\\xab\\x95\\xd8|S\\xb4\\x80\\xa6\\x95\\n\\xcb\\xbduJZ\\x80\\x84U\\x90\\x80\\xf5\\x04\\xe05\\x82(\\xb0\\x10DW\\xc1\\xfc\\x18\\xc5\\x1a\\x02\\xaeUbWU\\xe0&\\x18\\x11\"~\\x93y\\xde\\xc1\\x1a,\\xac\\x85\\x04\\xac\\'\\xd0Sy\\x8f .?\\x04+\\x95F\\x9c;\\xeb[\\x8a\\xae\\x1a]\\xab\\xc4\\xbe\\x9d\\xb3\\xd9\\x17A\\xf3\\xa9&\\x15\\xa5j\\xd1<\\xd15r\\xbcZ\\xd7\\x11\\x80\\xf7\\x082$A\\xcc\\x11\\x87]w!\\x0e\\xeb\\xb0+\\x95\\xd8)\\x1d\\xcb|\\xa1F\\x99;kF\\x19x\\xbf\\xc7\\x16\\xc7al\\xd9\\x1d\\x12\\xb8\\x8e\\x00rU\"g%\\xc4\\x9c\\xfb\\xe1\\xba\\x82\\xdc\\x110\\x01W*\\xb1\\xb7\\xa6.\\x97\\xfd\\xc7\\xcf\\xea\\x8b\\xec\\x7f_\\x1d\\xc9\\xc7,\\xde\\x01\\x0f8\\x0b\\xdaK\\x80\\xd9\\xc3\\xad\\xe7\\xef:%f\\xce\\xefW\\xa9pny\\xf6\\xd6J\\xd6Sa\\x8d$\\x10B\\x02\\xf5J\\xe7\\x97\\xce\\r\\xcb\\xe9\\x16\\x10\\xe1\\x02\\x91.()\\'\\xe0:%\\xf6\\xaa9\\x8bw\\xc7Xf\\xf1N\\xf9\\xd8\\xf3L\\x1b\\t\\xbc\\xd5\\xae\\x1e\\xb3\\x87[\\xc4\\xdfUJ\\xec\\xcfu{\\xe5\\xa7%\\xdb\\xf5\\xa5\\xdf]\\xb3\\xb84\\xaf\\xca,\\xde\\x16\\xdd\\x07\\xac&\\xcc\\x04\\n\\xa9U\\xca^\\xca\\xbb\\x04\\x02o\\x13D\\x81\\xa5\\xa4\\x8c\\x80k\\x94\\x18\\xb3x\\xa7l\\x80y\\x96s\\tt\\xbd\\xa3\\x9a\\xc0\\xcb\\x042t\\xda\\n\\x1d\\x97\\xdf\\xb9\\xbdun\\xcf\\\\\\xa3\\xc4>\\x9d\\xb5^V\\xef>\\xaaI\\xbe\\xd4\\xac\\xaa\\x94+\\x90\\xd3\\xb9T\\xd93\\x12\\x08\\x80\\xc0u\\xd9\\xc3\\',\\n\\xe0,\\x16IH\\xc0\\x15J\\xec\\xd8\\x99\\x0b\\xd2\\xf7\\x87\\xc5\\xba\\xef193\\xcb\\x1b\\xadk%\\xbc\\x0en\\x93\\x80+\\t\\xdc\\xa7\\xbcL\\x9aT*\\xa4\\xfb>f\\xde&A\\xceJJp\\x04\\\\\\xa1\\xc4\\xfa\\xff\\xb8D\\x0e{\\xb3x\\x0fnSOrzs\\xfb\\x05w\\xa9,M\\x02\\xce$`\\xce\\x1enx\\xa18\\xb3\\xa7\\xce\\xec\\x95\\xe3\\x95\\xd8\\xfa}\\xc7\\xe4\\xa3?\\xd6iz\\xb5J\\xe4\\x93\\xc7\\x1a\\x97w&I\\xf6\\x8a\\x04RH\\xa0Z\\xd1\\xbc\\x02\\xaf\\x13\\xc8\\xc2\\xad\\x07\\xe5\\xdb\\xb9\\x9bRXSt\\x9e\\xe6x%\\xd6u\\xcc|\\x9d\\xc5\\x1bF\\x82\\xf8\\xc5B\\xe8i\\n\\tD\\x1a\\x01x\\x9d\\xc0\\xfb\\x04\\xd2s\\xe2\"9\\xad\\xbcR(\\x81\\x11p\\xb4\\x12\\x9b\\xb6|\\xa7\\xfc\\xb6\\xda\\x93\\xc5\\xbb]\\xfd\\xd2rsyf\\xf1\\x0elXY\\xcam\\x04\\xe0u\\xd2\\xcf\\x9b=|\\x1f\\xb3\\x87\\x075|\\x8eUb\\x17/_\\x95W\\xc6]\\xcb\\xe2=\\xcc\\xeb8\\x1b\\xd4\\xd5\\xb10\\t\\xb8\\x88\\xc0s\\xb7U\\x96\\x8a\\x85r\\xeb\\x1e\\x8fT\\xd1\\x8a\\xb7\\x1f:\\xe5\\xa2\\xde\\xdb\\xd7U\\xc7*\\xb1\\xf7~_-\\x9b\\xfe=\\xa1\\xc9\\xbc\\xd6\\xa2\\xba\\x14W\\xf1\\x98($\\x10\\xc9\\x04\\x90=\\x1c\\xf9*!\\xc8\\x1e\\xfe\\xaa\\xf7G<\\x92\\xaf\\xd9\\x8aks\\xa4\\x12;x\\xf2\\x9c\\x0c\\x9e\\xb2L__\\xd1\\xbc\\xd9\\xa4\\xc7\\xdd5\\xac\\xb8V\\xd6A\\x02\\x8e\\'pG\\xb5\\xa2\\xd2\\xa2\\x86\\'{\\xf8\\x8f\\xca;e\\xd6:f\\x0fOn\\xd0\\x1c\\xa9\\xc4z\\xab,\\xde\\'\\xbc9\\xfa\\x86\\xb6\\xab/Y20\\x8bwr\\x03\\xc9\\xe3\\x91C`d\\xc7\\x06\\x92!\\x9d\\xe7O\\x93\\xd9\\xc3\\x93\\x1fW\\xc7)\\xb1\\xe5;\\x0f\\xcb\\x17\\x7fm\\xd4=oP6F\\x1e\\x88-\\x93\\xfcU\\xb0\\x04\\tD\\x10\\x01x\\xa3\\xbc\\xd8\\xcc\\x93=|\\xd5\\xee#\\xf2\\x19\\xb3\\x87\\'9\\xba\\x8eRb\\x085\\xdd\\xe5\\xdby\\x82\\xd0\\xd30\\xa5\\x18\\xdd\\xb9!\\xb3x\\'9|<\\x18\\xa9\\x04\\xfa\\xb4\\xae-\\xf9sd\\xd6\\x97\\x87\\xec\\xe1\\xf0Z\\xa1\\xf8\\'pC\\x9c\\x12\\xff\\x87\\xc2\\xbfw\\xe2\\xc2\\xad\\xd2\\xfe\\xfd\\xff\\xf9\\x1a6\\x06\\xd1\\xb7\\x83_H 
\\x8a\\x08`n\\xd8\\x90.\\xcd\\xabj;Ic\\x9b\\x9f\\xd7\\x088F\\x89!\\x8bw\\xc5\\x1e\\x13e\\xa7\\nKB!\\x01\\x12\\x88O\\x00+\\x97+\\x87\\xb4\\xf1\\x99`\\xc4?\\x1a\\xdd[\\x8e\\x991_\\xb0\\xe5\\xa04,\\x17\\xa3\\xffE\\xf7\\x90\\xf0\\xeaI\\xc0?\\x81\\x7f6\\xfeK%\\xe6\\x07\\x8dc\\x9e\\xc4\\xfc\\xf4\\x8d\\xbbH\\x80\\x04H Y\\x02\\x8e\\x9a\\xd8O\\xb6\\xb7,@\\x02$@\\x02\\t\\x08P\\x89%\\x00\\xc2M\\x12 \\x01w\\x11\\xa0\\x12s\\xd7x\\xb1\\xb7$@\\x02\\t\\x088fb?A\\xbf\\xb8\\x99\\x0c\\x81\\xcb\\x97/\\xcb\\xf8\\xf1\\xe3e\\xc1\\x82\\x05\\xb2m\\xebVI\\x9f!\\x83\\xc4\\xc6\\xc6J\\x9b6m\\xa4l\\xd9\\xb2\\xc9\\x9c\\xed\\x8e\\xc3\\xbf\\xff\\xf6\\x9b\\x0c\\x1b6LJ\\x96,)\\x9f\\xff\\xf7\\xbf\\xee\\xe84{\\x19v\\x02\\x8eTbW\\xae\\\\\\x914i\\xd2(CW\\xc6\\x0e\\xf3wG\\x1c>|X\\xda\\xb5m+\\x1b6l\\x88wx\\xc9\\xe2\\xc5\\xf2\\xf9g\\x9f\\xc9\\xfb\\x1f| w\\xdeyg\\xbccn\\xdc\\x989s\\xa6\\xac_\\xbf\\xde\\x8d]\\x0fI\\x9f\\xf1w\\x916m\\xda\\x90\\xd4\\xed\\xe6JmWb\\x17.\\\\\\x90?\\xff\\xfcS~\\x996M6m\\xda$\\xff\\xfe\\xfb\\xaf\\x1c9rD+\\xb1\\\\\\xb9rI\\xcd\\x9a5\\xa5a\\xa3F\\xd2\\xae];\\xc9\\x9d\\xdb\\x13\\xa6\\xc4\\xcd\\xc0S\\xdb\\xf7\\xabW\\xaf\\xca\\xe3\\x8f=\\xe6S`e\\xca\\x94\\x91\\xf2\\xe5\\xcb\\xcb\\xc1\\x83\\x07e\\xb1Rb\\xe7\\xce\\x9d\\x93n]\\xbbJ\\x83\\x06\\r$gN\\xf7&S\\xd9\\xaa\\x9e.\\x7fV\\xf7D4\\xcb\\xf2\\xe5\\xcb\\xf5\\xdf\\xc5\\xe6\\xcd\\x9b\\xf5\\xdf\\xc6\\xee\\xdd\\xbb%]\\xbat\\x92\\'O\\x1e)R\\xb4\\xa84i\\xd2D\\xee\\xbb\\xef>)^\\xbcx4c\\x12\\xdbL,.]\\xba$\\x9f}\\xfa\\xa9\\x8c\\x1e=ZN\\x9dJ\\xde\\xc05\\x87\\xfa\\x83|\\xfd\\xf5\\xd7\\xe5\\x91G\\x1e\\x89\\xea\\x01\\xfbm\\xc6\\x0cy\\xf4\\xd1G5\\x83;\\xee\\xb8C>Q\\x0c\\xd3\\xa7O\\xaf\\xb7\\xdfx\\xe3\\r\\xf9\\xc2\\xfb\\xda\\xf5\\xe6\\x90!\\xbern\\x00\\x06\\xc7\\x91\\xe3\\xc7\\x8fke\\xa1\\xfc~\\xf4\\xe8Qy\\xe4\\xe1\\x87}\\n,S\\xa6L\\xf2\\xdcs\\xcfI\\x8b\\x16-\\xa4b\\xa5J\\xbai\\xbc\\xc1\\xfc\\xfd\\xf7\\xdf\\xf2\\x9ez\\x83\\xc1\\xf4\\x81!\\xf8;\\x89\\x84yP\\xe3z\\x82\\xfaTZ=l\\xa2\\xde\\xe9\\xe3\\xca\\x96)\\x13W\\xb0@\\x01\\xdf\\xbf\\xa7\\x9fz*NMT\\'\\xda\\x87\\xbd{\\xf7\\xc6\\xdd\\xdc\\xa8\\x91\\xaf<\\xceU\\xbfT\\x89\\x96\\x8f\\xe4\\x03\\xe0d\\xb0\\xfb\\xcf{\\xef\\x05}\\xa9j\\x15\\xd3w~\\x9f>}\\xae;\\x7f\\xf6\\xec\\xd9qE\\n\\x17\\xd6ez\\xf5\\xeau\\xdd\\xf1P\\xeeP\\x93\\xd6q\\xc7\\x8e\\x1d\\x8b\\xf7\\xafy\\xb3f\\xba/M\\x9b4\\te\\xd3\\x8e\\xa9\\x1b\\x7f\\x0b\\xc6\\xf8\\xd6\\xa9];n\\xdd\\xda\\xb5\\x89\\xf6M\\xcd}\\xc6\\xb5j\\xd9\\xd2W\\xbeL\\xe9\\xd2q\\xea\\xa9,\\xd1\\xf2\\x91| 
lvb\\n\\xa2tU\\xaf\\x81\\xa7O\\x9f\\xd6J\\x16\\xab,\\x1f\\x7f\\xfc\\xb1|\\xfc\\xc9\\'\\x82\\'\\xb1\\xc4\\xa4P\\xa1B2y\\xca\\x14\\xc1\\xa7!\\xcb\\x96-\\x93\\xf3\\xe7\\xcf\\x1b\\x9bQ\\xf3\\xb9v\\xcd\\x1a\\xdf\\xb5\\x96\\xf1\\x9aQ\\x9c9sF0\\x87\\xb4J=\\xa1\\xe1W:))Y\\xaa\\x94\\xd4\\xabWO\\x17\\x998qb<\\x86\\xdb\\xb6m\\x93\\xa7\\x9f~Z\\xb0\\x02\\x86\\'\\xdd\\x01\\x03\\x06$U\\x95\\xe5\\xc7\\xb0\\x1a\\x8d\\x85\\x1c\\xf3?<\\x89D\\x8b`\\xc5y\\xfa\\xf4\\xe9\\xbe\\xcb\\xed\\xd9\\xab\\x97\\xef\\xe9\\xcb\\xb7\\xd3\\xf4\\x05l\\xcc\\xf3\\xc3\\xb8\\x0f\\xf0j\\x19\\x8d\\x126%\\xf6\\xe5\\x17_\\xc8\\xdc\\xb9s}\\x8c_{\\xed5i\\xd5\\xba\\xb5o;\\xa9/X\\x8dim*\\x0b\\x1b)\\xe3\\x95\\'\\xa9\\xf3\"\\xed\\x98\\xf9\\xb5\\xba`\\xc1\\x82\\xf2\\xcc3\\xcfH%5\\xe9\\xddQ\\xbd\\x1a\\xde\\xd1\\xbc\\xb9\\x94S\\x8a\\xed\\xb5W_MR\\x99=\\xf0\\xc0\\x03\\x1a\\x0b\\xe6\\x18\\xa7L\\x9e\\xec\\xf9\\xae\\xe6_\\x1e\\xea\\xdcY\\xb0\\xafp\\xe1\\xc2\\xf2\\xa92\\xd3\\xc0*\\x18%|\\x04~\\xfc\\xf1G\\xdf\\xb8a\\x0cZ\\xa9y\\xb1\\xe4\\xa4\\x9cZ\\x956\\x04\\xe3\\x859\\xb3h\\x94\\xb0(\\xb1\\x1d\\xdb\\xb7\\xcb\\xe0\\xc1\\x83}|\\x1b7n,/\\xbe\\xf4\\x92o;\\x90/w\\xa9y\\x01\\xb3`\\xce,\\xda\\xe4\\x9c\\xe9\\xe9s\\x88Z}\\x9c\\xaa\\x9eP\\xcdO_0W\\x19;v\\xac\\xb4W\\xe6(P\\xf4\\xfe\\xe4\\xee\\x96-%K\\x96,\\xfa\\xd07\\xdf|\\xa3\\x9f\\xbc\\xd4k\\x8c\\xe0I\\x0c\\xbf\\xee_|\\xf9e\\x92O\\xc6\\xfe\\xea\\xe4\\xbe\\xd4\\x13\\xd8\\xa2\\xcc(\\x0c\\xc9\\x9f?\\x7f@?\"0\\x02~W\\xcd\\x8d\\xe1\\x1f\\xdeh\\xa2U\\xc2\\xa2\\xc4\\xf0\\xc7b\\xbc\\xfea\\x95\\xe5\\xbd\\xff\\xfc\\'hC\\xd6Zj\\xd2\\xda0%\\xc0`\\xc1\\x96,\\xda\\x04J\\xca\\x90\\x7f\\xd4\\xe4n\\xdd\\xbaue\\xfcw\\xdf\\xc9\\xbc\\xf9\\xf3\\xf5M\\x1c\\x13\\x13\\xa3\\x0f\\xc3\\x8a\\x7f\\xb2\\xf7)\\xcb(o|\\xe2\\xd7\\xba\\xa5Rd\\x10\\xd8!uh\\xdf^\\xfe\\xfa\\xeb/\\xbd=\\xe2\\x9dw\\xa4j\\xd5\\xaa\\xfa;\\xff\\x0b/\\x01\\x98\\x97\\x18\\x82q\\xf9\\xf0\\xc3\\x0f}\\x7f3\\xc6\\xfe\\x84\\x9f\\xf81\\x82\\xfd$\\xfe\\xddu\\xd7]\\t\\x0fG\\xcdv\\xc8\\xdf\\x19`\\x9ci\\xfe\\x83\\xc2\\xaa\\xda\\x8d7\\xde\\x184`\\x0c\\x86\\x93\\xc9\\tV?\\xe7\\x99\\xa6\\x05\\x92+o\\x1c\\xc7\\x1cO\\xed\\xda\\xb5\\x8dM~& P\\xae\\\\\\xb9x{\\x06\\x0f\\x1a$\\x1f\\x7f\\xf4\\x914m\\xdaTnR\\xa6\\'\\xf8WB=yQ\\xae\\'\\x10r%\\xb6p\\xe1Bm\\x85o4}\\x7f\"\\x7fX\\xc6\\xf1\\xc4>a\\x1fc\\x96hTb\\xf0\\x8f4\\x04s&\\x86\\x023\\xf6\\xe1\\x0f\\x01\\x93\\xf2PL\\xb07J\\xccM\\x05\\xe6)\\xb0\\xf2\\xde\\xb9s\\xa7>\\xb5\\xd1\\xcd7\\xcb\\x1b}\\xfa\\x18\\xd5$\\xf9\\t\\xaf\\x8ay\\xf3\\xe6%Y\\xc6\\xdfA\\x18pR\\x12\\'\\xf0\\xcc\\xb3\\xcfj\\xe3VcLP\\x12\\x93\\xfdX\\x80\\xc1?\\x08\\x9e\\xb41v7\\xa91\\xbe\\xfb\\xee\\xbb%\\x1a\\xff\\x064\\x88\\x04\\xff\\x85\\\\\\x89\\xfd\\xfa\\xeb\\xaf\\xbe&\\xe1\\x06\\x83\\'\\xb1\\x94\\xc8!\\xe5Vc\\x96\\xa4V4\\xcd\\xe5\"\\xe9\\xbby\\xe2\\xb6t\\x02\\x1b1\\xe3:a\\x1f\\x06%\\x86\\xa7V,\\x04\\xc0\\x082\\xa1@\\x11\\x1d:t\\xc8\\xb7\\xfb\\xd6[o\\r\\xd8\\'\\x0f\\x0b,\\xb0\\xa0\\x0fV\\xf8\\x9a\\x9a41\\xfc }\\xf3\\xed\\xb7za\\x06\\xb6\\x94\\xfe\\xe4\\xc0\\x81\\x032u\\xeaT\\xfd\\x0f\\x06\\xae\\x18\\x0bL\\x01\\xc0j?\\x9a%\\xe4Jl\\xdf\\xde\\xbd>\\xbe\\xb8\\x91S\\n\\x1c\\x83g\\x08\\xac\\xd5\\xcd+3\\xc6\\xfeH\\xff4&\\xe4q\\x9d\\xc8\\x06\\xe5O\\xd2\\x9bV\\x15\\xfdM\\xee\\xe3\\x89\\x16nKg\\xcf\\x9e\\xf5\\x9d\\xfe\\xa3ro\\xc1\\xebd \\x02\\x83\\xca\\xa85\\xaa\\x0c\\x04P*\\xca 
\\xfa\\x08\\xcc\\x89\\x94}\\x98\\xf6\\'\\xc6\\xdc&\\x0cZ\\xfd\\xb9\\xe5\\xe1G\\xea\\xfb\\xef\\xbf\\x97Kj\\x9a\\x05\\xf3g\\xd1\\x1c,!\\xe4\\x13\\xfbx$6$\\xa5\\x0e\\xc9\\x98\\xe3\\x99\\xe4\\xb54G]U\\xaaT\\x11\\xac\\xe0D\\x9b`\\xe9\\xdd\\x10\\xf3D\\xb0\\xb1\\x0f\\x9f\\xb0\\x17\\x82`\\xc9=\\xe1S\\x188>\\xab\\xcc2\\xb0Z\\x8c_\\xfe\\xdb\\x9b5\\xd3eW+\\x0b}\\xbc~R\\x9cA\\xa0R\\xe5\\xca\\xf2\\xc2\\x8b/\\xca\\x18\\xb5\\xd2\\xbcNE\\xf1\\x98\\xa1B\\x12\\xe1u\\x1f\\xaf\\x92\\t\\xa3X\\xc0Lf\\xbc\\x9a\\x1b\\x8df\\t\\xbd\\x123\\xad\"\\xe6T\\xc6\\x8c)\\x91\\xb9s\\xe6\\xc8\\xbe}\\xfb|\\xa7\\xde\\x1b\\xc0\\x04\\xb4\\xafp\\x04}AD\\x0fc\\x85v\\xcb\\x96-~\\xafl\\xa9r\\xd5\\x82@\\xe1%\\xb4\\xf5\\x82Y\\xc6\\xec\\xd9\\xb3\\xf5\\xaf\\xf6\\x07*\\\\\\x0f\\x1c\\xc6\\r1\\xffH\\x18\\xfb\\xf8i?\\x01(-\\xb8\\xe3\\xc1\\xfd\\xe8\\'\\xa5\\xb0\\x16\\xa89\\xe6\\xe6\\xca\\xf1\\xdf,xb\\x8bf\\t\\xb9\\x12;\\xeb}2\\x00\\xe4S\\t\\x9cZ\\x03\\x05o\\x0e\\x88\\x07_\\xbe\\x8e\\x1d;\\x06zjD\\x95\\x83\\x1dW\\x8d\\x1a5\\xf45\\xe1\\xf5:\\xe1\\xd3\\x18<\\x19\\x8c\\xf8[\\r\\x1a6\\x8cw\\xed\\xf8\\xc5\\xfeP).\\xc8k\\xdd\\xbb\\xeb\\xa70\\xbc\\xbe@1B\\xf0J\\x89\\'5J\\xf8\\x08\\xc0d\\xe6}en\\x84\\x15\\xdfQ\\xa3F\\x05\\xd4\\xb06FV\\x91K\\xccO\\xd9\\xf0\\x86\\x89f\\t\\xb9\\x123;\"#nXR\\xae\\x11XM\\xeb\\xad\"3\\x18vK\\x18\\x18\\x0c\\xf2\\xcc\\xdf\\x7f\\xf7\\x8d\\xd1\\xe3*\\xfaE4;%wR\\x96\\xf5\\x10\\xc4\\rk\\xabVzW\\xae\\\\\\xa9\\xe7L\\x10@\\xf0)o8\\x16<\\x81\\xbdd2&^\\xab\\xe6X\\xbau\\xeb\\xa6\\xcf\\xc3\\xaaV\\x97.]\\xf4w\\xfc\\xd7V\\xd9\\x18A\\xb0z8\\xe7\\x9f\\x7f\\xf4w\\xfe\\x17\\x1e\\x02x\\x85\\xc7\\xd3\\xf1\\x88\\xb7\\xdf\\xd6&1\\x81\\xb6\\x8a\\xa7q\\xb3\\xf3~JL\\x96\\x02m\\xcb\\r\\xe5B\\xae\\xc4\\x8c\\'\\x07\\xc0\\xc0|\\xcd_\\xeau\\xc6\\x9f`\\xa2\\x12\\xf35pO\\x82\\x0b\\x0c\\xa2) <\\xf1\\xd0\\xa1C}\\xc5\\x11\\xb5\\xa1K\\x14\\x87\\xe1\\x01\\x88\\xb6*\\xa2\\xeb\\x83\\xde\\'Q(\\xa7;\\xd5\\xabEyeZ\\x81\\xc8 
\\xc6+\\xf7\\x0b/\\xbc\\xe0\\x0b\\x94\\x87\\xc8\\x08\\x8f\\xaa\\x18lPz\\x88\\x84\\x00\\xebn\\xf3$\\xf0=*|\\x8f\\xb1\\xd8b,\\xe5\\xfb\\x80\\xf3KH\\t\\x1c?v\\xccW?\\xfe6\\xcc\\x8b-\\xbe\\x03~\\xbe`\\x9c\\x8di\\x03\\x1c\\x86-Y4K\\xc8\\x95XM\\xef\\xeb\\x8f\\x01\\x19\\x8e\\xc5{\\xf6\\xec16}\\x9fp{\\xf9\\xe3\\x8f?\\xf46\\x14\\x1a\\\\a\\x1eS\\x11L\\x8dW\\x1c\\xfc\\xfa\\x8cz\\xf7]\\xc9\\x981\\xa3\\xef\\x9ch\\xfd\\xf2\\xa6\\n\\x98w\\xcf\\xbd\\xf7\\xea\\xe8\\xb7f\\x06`3T\\x85*\\xea\\xde\\xa3\\x87o7\\x9c\\xee\\xc1\\x1bQq\\xbfT.E\\xe6\\x15N\\x14\\x82\\xc3u3\\xef\\x04\\xffo\\xeaG\\x03O\\xc3N\\x11c\\x12\\xdb\\x98\\x07tJ\\xbf\\xac\\xea\\xc7\\t\\xd3\\xf4\\nV\\x92\\xe1\\x85\\x11\\x88\\x8c\\x181B\\x0c\\xef\\r\\xcc\\x97\\xc1\\xce/\\x9a%\\xe4\\x91]\\xf1\\x04\\xd0P\\x85J6\\x1b;\\xc2y\\x19+/\\t\\xed\\x8d\\x10#\\x1eV\\xe6\\t_9q\\x13\\x7f\\xa6\\x9c\\x92\\x9b)\\'g\\xca5\\x02\\xf8E\\xc6\\x1c\\x98\\na\\xa3\\x9f\\xbc\\xaa\\xabXl\\xc6S\\xd5\\xb5R\\xfc\\xe6T\\x02X\\x9c\\xb9\\xc5\\xa4\\x80\\x10\\xbd\\x18\\xbe\\xaf\\x89y6\\xc0\\xb6\\xaf\\xb7\\xf2|\\x98f\\n\\xdb\\x8d\\xc8\\xbe\\x86\\x1b\\x99S\\xaf3\\xd4\\xfd\\n\\xb9\\x12\\xc3\\x05`R\\xf9Ye\\x91\\x9cP\\x8a\\xaa8\\xe1\\xb5\\x94+J\\te=\\x0e\\x03L8u\\x9b#5\\x18\\xe5a\\n\\xf0\\xf5\\xd7_\\x1b\\x9b\\xfc$\\x81\\x88!\\x00\\xbf\\xc7\\x84s\\x910\\xa5@\\xfc|X\\xe8\\xe3M\\xe4_e\\xe4:_\\xf9\\xc7.R+\\x93\\xe6\\x1f\\xf8\\x87U\\x00E\\x84!G\\x18\\xa3h\\x96\\xb0(1\\x00\\xfeT\\xfdb\\xf4\\xef\\xd7/\\xc5\\xac\\x11C\\xbc\\xbfz\\x155\\xcf\\xe7\\xa4\\xb82\\x9eH\\x02\\x0e!\\x80W\\xfd\\x96j\\xb1\\x05\\xd6\\xf8\\x81\\n\\xdeL\\x06\\xab)\\x85\\xce\\xdeE\\x9e@\\xcf\\x8b\\xd4raS\\xe1O\\xa99\\xae\\x9f\\x7f\\xfeYj\\xd7\\xa9\\x93$K\\xfc\\xaa\\xc0\\r\\x06\\x91.\\x8c\\x90\\xbc8\\x01\\xaf\\x93\\x83\\x06\\x0eL\\xf2\\\\\\x1e$\\x01\\xb7\\x11\\x80\\xa9\\xc44\\xb5\\x88\\x85\\xa4/\\xc9=Qa\\x15\\xf2\\t\\xf5c\\x0e\\xe3W*\\xb0k#\\x1d\\xb6\\'\\xb1kMz\\x96\\xf3\\xe1\\xdf\\x87\\xa4\\xaf\\'U\\xa6#L6c0a\\x03SY\\xad\\xa0\\xc5\\xa8\\x08\\r\\x10$K\\xe8\\xdc\\xa9\\x93v\\xbd\\xc0\\x044\\x92d\\x98W;\\xcdu\\xf2;\\t\\xb8\\x9d\\x00V\\x921\\xb9\\xbfO\\xf9\\xbc\\xe2;\\xc2W\\xdd\\xa8\\xa2\\xb5\\xe0\\xef\\x01\\x0e\\xfb\\x08\\xbdd,v\\xb8\\xfdZ\\xad\\xec\\xbf-J,\\x98\\x0b\\xc0\\xc2\\x00\\x9c]\\x1fS\\xf6a\\x085C!\\x01\\x12 \\x013\\x01\\xc7+1sg\\xf9\\x9d\\x04H\\x80\\x04\\x12\\x12\\x08\\xdb\\x9cX\\xc2\\x86\\xb9M\\x02$@\\x02V\\x10\\xa0\\x12\\xb3\\x82\"\\xeb \\x01\\x12\\xb0\\x8d\\x00\\x95\\x98m\\xe8\\xd90\\t\\x90\\x80\\x15\\x04\\xa8\\xc4\\xac\\xa0\\xc8:H\\x80\\x04l#@%f\\x1bz6L\\x02$`\\x05\\x01*1+(\\xb2\\x0e\\x12 \\x01\\xdb\\x08P\\x89\\xd9\\x86\\x9e\\r\\x93\\x00\\tXA\\x80J\\xcc\\n\\x8a\\xac\\x83\\x04H\\xc06\\x02Tb\\xb6\\xa1g\\xc3$@\\x02V\\x10\\xa0\\x12\\xb3\\x82\"\\xeb \\x01\\x12\\xb0\\x8d\\x00\\x95\\x98m\\xe8\\xd90\\t\\x90\\x80\\x15\\x04\\xa8\\xc4\\xac\\xa0\\xc8:H\\x80\\x04l#@%f\\x1bz6L\\x02$`\\x05\\x01*1+(\\xb2\\x0e\\x12 \\x01\\xdb\\x08P\\x89\\xd9\\x86\\x9e\\r\\x93\\x00\\tXA\\x80J\\xcc\\n\\x8a\\xac\\x83\\x04H\\xc06\\x02Tb\\xb6\\xa1g\\xc3$@\\x02V\\x10\\xa0\\x12\\xb3\\x82\"\\xeb \\x01\\x12\\xb0\\x8d\\x00\\x95\\x98m\\xe8\\xd90\\t\\x90\\x80\\x15\\x04\\xa8\\xc4\\xac\\xa0\\xc8:H\\x80\\x04l#@%f\\x1bz6L\\x02$`\\x05\\x01*1+(\\xb2\\x0e\\x12 \\x01\\xdb\\x08P\\x89\\xd9\\x86\\x9e\\r\\x93\\x00\\tXA\\x80J\\xcc\\n\\x8a\\xac\\x83\\x04H\\xc06\\x02Tb\\xb6\\xa1g\\xc3$@\\x02V\\x10\\xa0\\x12\\xb3\\x82\"\\xeb \\x01\\x12\\xb0\\x8d\\x00\\x95\\x98m\\xe8\\xd90\\t\\x90\\x80\\x15\\x04\\xa8\\xc4\\xac\\xa0\\xc8:H\\x80\\x04l#@%f\\x1bz6L\\x02$`\\x05\\x01*1+(\\xb2\\x0e\\x12 
\\x01\\xdb\\x08P\\x89\\xd9\\x86\\x9e\\r\\x93\\x00\\tXA\\x80J\\xcc\\n\\x8a\\xac\\x83\\x04H\\xc06\\x02Tb\\xb6\\xa1g\\xc3$@\\x02V\\x10\\xa0\\x12\\xb3\\x82\"\\xeb \\x01\\x12\\xb0\\x8d\\x00\\x95\\x98m\\xe8\\xd90\\t\\x90\\x80\\x15\\x04\\xa8\\xc4\\xac\\xa0\\xc8:H\\x80\\x04l#\\xf0\\xffN\\x8fL\\xd7\\x0c~lM\\x00\\x00\\x00\\x00IEND\\xaeB`\\x82', 'Answer: Since \\\\( \\\\triangle QRS \\\\) is an equilateral triangle, all its sides are equal. Therefore, we can set up the following equations based on the given side lengths:\\n\\n\\\\[ 4x = 6x - 1 \\\\]\\n\\\\[ 4x = 2x + 1 \\\\]\\n\\nFirst, solve the equation \\\\( 4x = 6x - 1 \\\\):\\n\\n\\\\[ 4x = 6x - 1 \\\\]\\n\\\\[ 4x - 6x = -1 \\\\]\\n\\\\[ -2x = -1 \\\\]\\n\\\\[ x = \\\\frac{1}{2} \\\\]\\n\\nNext, solve the equation \\\\( 4x = 2x + 1 \\\\):\\n\\n\\\\[ 4x = 2x + 1 \\\\]\\n\\\\[ 4x - 2x = 1 \\\\]\\n\\\\[ 2x = 1 \\\\]\\n\\\\[ x = \\\\frac{1}{2} \\\\]\\n\\nNow that we have \\\\( x = \\\\frac{1}{2} \\\\), we can find the length of \\\\( RS \\\\):\\n\\n\\\\[ RS = 2x + 1 \\\\]\\n\\\\[ RS = 2\\\\left(\\\\frac{1}{2}\\\\right) + 1 \\\\]\\n\\\\[ RS = 1 + 1 \\\\]\\n\\\\[ RS = 2 \\\\]\\n\\nTherefore, the length of \\\\( RS \\\\) is \\\\( 2 \\\\).\\n\\nThe correct option is (D) 2.']", + "message": "MultimodalLLMCall function forward" +} diff --git a/logs/2024-07-07_23-03-41.jsonl b/logs/2024-07-07_23-03-41.jsonl new file mode 100644 index 0000000..e69de29 diff --git a/readthedocs.yaml b/readthedocs.yaml new file mode 100644 index 0000000..ac3d7ab --- /dev/null +++ b/readthedocs.yaml @@ -0,0 +1,35 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need +build: + os: ubuntu-22.04 + tools: + python: "3.12" + # You can also specify other tool versions: + # nodejs: "20" + # rust: "1.70" + # golang: "1.20" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/source/conf.py + # You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs + # builder: "dirhtml" + # Fail on all warnings to avoid broken references + # fail_on_warning: true + +# Optionally build your docs in additional formats such as PDF and ePub +# formats: +# - pdf +# - epub + +# Optional but recommended, declare the Python requirements required +# to build your documentation +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html +python: + install: + - requirements: docs/requirements.txt \ No newline at end of file diff --git a/textgrad/__init__.py b/textgrad/__init__.py deleted file mode 100644 index 24aad83..0000000 --- a/textgrad/__init__.py +++ /dev/null @@ -1,28 +0,0 @@ -import os -import logging -import json -from datetime import datetime -class CustomJsonFormatter(logging.Formatter): - def format(self, record: logging.LogRecord) -> str: - super(CustomJsonFormatter, self).format(record) - output = {k: str(v) for k, v in record.__dict__.items()} - return json.dumps(output, indent=4) - -cf = CustomJsonFormatter() -os.makedirs("./logs/", exist_ok=True) -sh = logging.FileHandler(f"./logs/{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.jsonl") -sh.setFormatter(cf) - -logger = logging.getLogger(__name__) -logger.setLevel(logging.INFO) -logger.addHandler(sh) - -from .variable import Variable -from .loss import TextLoss -from .model import BlackboxLLM -from .engine import EngineLM, get_engine -from .optimizer import 
TextualGradientDescent, TGD -from .config import set_backward_engine, SingletonBackwardEngine -from .autograd import sum, aggregate - -singleton_backward_engine = SingletonBackwardEngine() \ No newline at end of file diff --git a/textgrad/autograd/algebra.py b/textgrad/autograd/algebra.py deleted file mode 100644 index c000c58..0000000 --- a/textgrad/autograd/algebra.py +++ /dev/null @@ -1,156 +0,0 @@ -## Operations over variables. -from typing import List, Set -from textgrad import logger -from textgrad.variable import Variable -from textgrad.engine import EngineLM -from .reduce_prompts import construct_reduce_prompt, REDUCE_MEAN_SYSTEM_PROMPT -from .function import Function, BackwardContext - - -def _reduce_gradients_mean(gradients: Set[Variable], backward_engine: EngineLM) -> Variable: - """A function to reduce gradients by taking the "mean" of the gradients. - In this case, we use a summarization model to summarize the gradients. - This can be helpful in batch training, where we want to average the gradients over a batch. - - :param gradients: Gradients to summarize together - :type gradients: Set[Variable] - :param backward_engine: The engine to summarize gradients. - :type backward_engine: EngineLM - :return: The reduce(summarized) gradients - :rtype: Variable - """ - gradient_reduce_prompt = construct_reduce_prompt(gradients) - reduced_gradients = backward_engine(gradient_reduce_prompt, system_prompt=REDUCE_MEAN_SYSTEM_PROMPT) - gradient_descriptions = set([g.get_role_description() for g in gradients]) - gradient_descriptions = ", ".join(gradient_descriptions) - reduced_gradients_variable = Variable(reduced_gradients, role_description=gradient_descriptions) - logger.info(f"Reduced gradients", extra={"reduced_gradients": reduced_gradients_variable.value}) - # TODO: We need to add context for these gradients - # Otherwise, .get_gradient_and_context_text() will return an empty string - logger.info(f"Reduced gradients", extra={"reduced_gradients": reduced_gradients_variable.value}) - return reduced_gradients_variable - - -class Sum(Function): - """ - Represents a sum operation on a list of variables. - In TextGrad, sum is simply concatenation of the values of the variables. - - :param variables: The list of variables to be summed (concatenated). - :type variables: List[Variable] - :return: A new variable representing the sum of the input variables. - :rtype: Variable - """ - - def forward(self, variables: List[Variable]) -> Variable: - """ - Performs the forward pass of the sum (concatenation) operation. - - :param variables: The list of variables to be summed. - :type variables: List[Variable] - :return: A new variable representing the sum of the input variables. - :rtype: Variable - """ - concat_values = "\n".join([v.get_value() for v in variables]) - role_descriptions = set([v.get_role_description() for v in variables]) - role_descriptions = ", ".join(role_descriptions) - - total = Variable( - value=concat_values, - predecessors=variables, - role_description=f"a combination of the following: {role_descriptions}", - requires_grad=any([v.requires_grad for v in variables]), - ) - - total.set_grad_fn(BackwardContext(backward_fn=self.backward, - summation=total)) - - return total - - - def backward(self, summation: Variable, backward_engine: EngineLM): - """ - Performs the backward pass of the sum operation. - This is simply an idempotent operation, where we pass the feedback to the predecessors variables. - - :param summation: The variable representing the sum. 
- :type summation: Variable - :param backward_engine: The backward engine used for backpropagation. - :type backward_engine: EngineLM - """ - children_variables = summation.predecessors - summation_gradients = summation.get_gradient_text() - for variable in children_variables: - if summation_gradients == "": - variable_gradient_value = "" - else: - variable_gradient_value = f"Here is the combined feedback we got for this specific {variable.get_role_description()} and other variables: {summation_gradients}." - - logger.info(f"Idempotent backward", extra={"v_gradient_value": variable_gradient_value, - "summation_role": summation.get_role_description()}) - - var_gradients = Variable(value=variable_gradient_value, - role_description=f"feedback to {variable.get_role_description()}") - variable.gradients.add(var_gradients) - - if summation._reduce_meta != []: - var_gradients._reduce_meta.extend(summation._reduce_meta) - variable._reduce_meta.extend(summation._reduce_meta) - - variable.gradients.add(Variable(value=variable_gradient_value, - role_description=f"feedback to {variable.get_role_description()}")) - - -class Aggregate(Function): - """This function is WIP - """ - def forward(self, variables: List[Variable]) -> Variable: - """ - Aggregates a list of variables. - In TextGrad, forward pass of aggregation is simply concatenation of the values of the variables. - The backward pass performs a reduction operation on the gradients of the variables. - This reduction is currently an LLM call to summarize the gradients. - - :param variables: The list of variables to be aggregated. - :type variables: List[Variable] - :return: The aggregated variable. - :rtype: Variable - """ - concat_values = "\n".join([v.get_value() for v in variables]) - role_descriptions = set([v.get_role_description() for v in variables]) - role_descriptions = ", ".join(role_descriptions) - - # We create a unique meta tag that identifies which variables are aggregated together. - # We also need to communicate to the variables that they are part of a mean operation. - reduce_meta = {"op": _reduce_gradients_mean, "id": id(variables)} - - aggregated_variable = Variable(value=concat_values, - role_description=f"a combination of the following variables: {role_descriptions}.", - predecessors=variables, - requires_grad=any([v.requires_grad for v in variables])) - - aggregated_variable.set_grad_fn(BackwardContext(backward_fn=self.backward, - aggregated_variable=aggregated_variable)) - - aggregated_variable._reduce_meta = [reduce_meta] - return aggregated_variable - - def backward(self, aggregated_variable: Variable, backward_engine: EngineLM): - children_variable = aggregated_variable.predecessors - for variable in children_variable: - aggregate_gradients = aggregated_variable.get_gradient_text() - if aggregate_gradients == "": - variable_gradient_value = "" - else: - variable_gradient_value = f"Here is the combined feedback we got for this specific {variable.get_role_description()} and other variables: {aggregate_gradients}." 
- - logger.info(f"aggregation backward", extra={"v_gradient_value": variable_gradient_value, - "aggregation_role": aggregated_variable.get_role_description()}) - - var_gradients = Variable(value=variable_gradient_value, - role_description=f"feedback to {variable.get_role_description()}") - variable.gradients.add(var_gradients) - - if aggregated_variable._reduce_meta != []: - var_gradients._reduce_meta.extend(aggregated_variable._reduce_meta) - variable._reduce_meta.extend(aggregated_variable._reduce_meta) diff --git a/textgrad/autograd/function.py b/textgrad/autograd/function.py deleted file mode 100644 index bd08c49..0000000 --- a/textgrad/autograd/function.py +++ /dev/null @@ -1,80 +0,0 @@ -from textgrad.variable import Variable -from textgrad.engine import EngineLM - -from abc import ABC, abstractmethod -from typing import List - - -class Function(ABC): - """ - The class to define a function that can be called and backpropagated through. - """ - - def __init__(self): - super().__init__() - - def __call__(self, *args, **kwargs): - return self.forward(*args, **kwargs) - - @abstractmethod - def forward(self, *args, **kwargs) -> Variable: - pass - - @abstractmethod - def backward(self, *args, **kwargs): - pass - - -class BackwardContext: - """ - Represents a context for backward computation. - - :param backward_fn: The backward function to be called during backward computation. - :type backward_fn: callable - :param args: Variable length argument list to be passed to the backward function. - :param kwargs: Arbitrary keyword arguments to be passed to the backward function. - - :ivar backward_fn: The backward function to be called during backward computation. - :vartype backward_fn: callable - :ivar fn_name: The fully qualified name of the backward function. - :vartype fn_name: str - :ivar args: Variable length argument list to be passed to the backward function. - :ivar kwargs: Arbitrary keyword arguments to be passed to the backward function. - - :method __call__(backward_engine: EngineLM) -> Any: - Calls the backward function with the given backward engine and returns the result. - :method __repr__() -> str: - Returns a string representation of the BackwardContext object. - """ - - def __init__(self, backward_fn, *args, **kwargs): - self.backward_fn = backward_fn - self.fn_name = f"{backward_fn.__module__}.{backward_fn.__qualname__}" - self.args = args - self.kwargs = kwargs - - def __call__(self, backward_engine: EngineLM): - return self.backward_fn(*self.args, **self.kwargs, backward_engine=backward_engine) - - def __repr__(self): - return f"{self.fn_name}" - - -class Module(ABC): - """Abstract module class with parameters akin to PyTorch's nn.Module. 
- """ - parameters: List[Variable] - def zero_grad(self): - for p in self.parameters(): - p.reset_gradients() - - def named_parameters(self): - for p in self.parameters(): - yield p.get_role_description(), p - - @abstractmethod - def forward(self, *args, **kwargs): - pass - - def __call__(self, *args, **kwargs): - return self.forward(*args, **kwargs) \ No newline at end of file diff --git a/textgrad/autograd/functional.py b/textgrad/autograd/functional.py deleted file mode 100644 index 6025d8f..0000000 --- a/textgrad/autograd/functional.py +++ /dev/null @@ -1,89 +0,0 @@ -from typing import List - -from textgrad import Variable -from textgrad.engine import EngineLM -from textgrad.autograd.algebra import Sum, Aggregate -from textgrad.autograd.llm_ops import LLMCall, FormattedLLMCall - -def sum(variables: List[Variable]) -> Variable: - """ - Represents a sum operation on a list of variables. - In TextGrad, sum is simply concatenation of the values of the variables. - - :param variables: The list of variables to be summed (concatenated). - :type variables: List[Variable] - :return: A new variable representing the sum of the input variables. - :rtype: Variable - """ - return Sum()(variables) - - -def aggregate(variables: List[Variable]) -> Variable: - """ - WIP - Aggregates a list of variables. - In TextGrad, forward pass of aggregation is simply concatenation of the values of the variables. - The backward pass performs a reduction operation on the gradients of the variables. - This reduction is currently an LLM call to summarize the gradients. - - :param variables: The list of variables to be aggregated. - :type variables: List[Variable] - :return: The aggregated variable. - :rtype: Variable - """ - return Aggregate()(variables) - - -def llm_call(input_variable: Variable, engine: EngineLM, - response_role_description: str = None, system_prompt: Variable = None): - """A functional version of the LLMCall. - The simple LLM call function. This function will call the LLM with the input and return the response, also register the grad_fn for backpropagation. - - :param input_variable: The input variable (aka prompt) to use for the LLM call. - :type input_variable: Variable - :param response_role_description: Role description for the LLM response, defaults to VARIABLE_OUTPUT_DEFAULT_ROLE - :type response_role_description: str, optional - :return: response sampled from the LLM - :rtype: Variable - :param engine: engine to use for the LLM call - :type engine: EngineLM - :param input_role_description: role description for the input variable, defaults to VARIABLE_INPUT_DEFAULT_ROLE - :type input_role_description: str, optional - :param system_prompt: system prompt to use for the LLM call, default depends on the engine. - :type system_prompt: Variable, optional - - >>> from textgrad import Variable, get_engine - >>> from textgrad.autograd.functional import llm_call - >>> engine = get_engine("gpt-3.5-turbo") - >>> prompt = Variable("What is the capital of France?", role_description="prompt to the LM") - >>> response = llm_call(prompt, engine=engine) - # This returns something like Variable(data=The capital of France is Paris., grads=) - - """ - return LLMCall(engine=engine, system_prompt=system_prompt)(input_variable, response_role_description) - - -def formatted_llm_call(inputs: List[Variable], response_role_description: str, - engine: EngineLM, format_string: str, - fields: dict[str, str], system_prompt: Variable = None): - """A functional version of the LLM call with formatted strings. 
- Just a wrapper around the FormattedLLMCall class. - - This function will call the LLM with the input and return the response, also register the grad_fn for backpropagation. - - :param inputs: Variables to use for the input. This should be a mapping of the fields to the variables. - :type inputs: dict[str, Variable] - :param response_role_description: Role description for the response variable, defaults to VARIABLE_OUTPUT_DEFAULT_ROLE - :type response_role_description: str, optional - :param engine: The engine to use for the LLM call. - :type engine: EngineLM - :param format_string: The format string to use for the input. For instance, "The capital of {country} is {capital}". For a format string like this, we'll expect to have the fields dictionary to have the keys "country" and "capital". Similarly, in the forward pass, we'll expect the input variables to have the keys "country" and "capital". - :type format_string: str - :param fields: The fields to use for the format string. For the above example, this would be {"country": {}, "capital": {}}. This is currently a dictionary in case we'd want to inject more information later on. - :type fields: dict[str, str] - :param system_prompt: The system prompt to use for the LLM call. Default value depends on the engine. - :type system_prompt: Variable, optional - :return: Sampled response from the LLM - :rtype: Variable - """ - call_object = FormattedLLMCall(engine=engine, format_string=format_string, fields=fields, system_prompt=system_prompt) - return call_object(inputs, response_role_description) \ No newline at end of file diff --git a/textgrad/autograd/llm_backward_prompts.py b/textgrad/autograd/llm_backward_prompts.py deleted file mode 100644 index 00e1f6e..0000000 --- a/textgrad/autograd/llm_backward_prompts.py +++ /dev/null @@ -1,79 +0,0 @@ -GLOSSARY_TEXT_BACKWARD = """ -### Glossary of tags that will be sent to you: -# - : The system prompt for the language model. -# - : The input to the language model. -# - : The output of the language model. -# - : The objective of the optimization task. -# - : Specifies the span of the variable. -# - : The role description of the variable.""" - -### Backward engine prompts - -# System prompt to the backward engine. -BACKWARD_SYSTEM_PROMPT = ( - "You are part of an optimization system that improves a given text (i.e. the variable). You are the gradient (feedback) engine. " - "Your only responsibility is to give intelligent and creative feedback and constructive criticism to variables, given an objective specified in tags. " - "The variables may be solutions to problems, prompts to language models, code, or any other text-based variable. " - "Pay attention to the role description of the variable, and the context in which it is used. You should assume that the variable will be used in a similar context in the future. " - "Only provide strategies, explanations, and methods to change in the variable. DO NOT propose a new version of the variable, that will be the job of the optimizer. Your only job is to send feedback and criticism (compute 'gradients'). " - "For instance, feedback can be in the form of 'Since language models have the X failure mode...', 'Adding X can fix this error because...', 'Removing X can improve the objective function because...', 'Changing X to Y would fix the mistake ...', that gets at the downstream objective.\n" - "If a variable is already working well (e.g. 
the objective function is perfect, an evaluation shows the response is accurate), you should not give feedback.\n" - f"{GLOSSARY_TEXT_BACKWARD}") - -# First part of the prompt for the llm backward function -CONVERSATION_TEMPLATE = ( - " {system_prompt} \n\n" - " {prompt} \n\n" - " {response_value} \n\n" -) - -# Has the gradient on the output. -CONVERSATION_START_INSTRUCTION_CHAIN = ( - "You will give feedback to a variable with the following role: {variable_desc} . " - "Here is a conversation with a language model (LM):\n\n" - "{conversation}" -) -OBJECTIVE_INSTRUCTION_CHAIN = ( - "This conversation is part of a larger system. The was later used as {response_desc}.\n\n" - "Your goal is to give feedback to the variable to address the following feedback on the LM_OUTPUT: {response_gradient} \n\n" -) - -# Does not have gradient on the output -CONVERSATION_START_INSTRUCTION_BASE = ( - "You will give feedback to a variable with the following role: {variable_desc} . " - "Here is an evaluation of the variable using a language model:\n\n" - "{conversation}" -) - -OBJECTIVE_INSTRUCTION_BASE = ( - "Your goal is to give feedback and criticism to the variable given the above evaluation output. " - "Our only goal is to improve the above metric, and nothing else. \n\n" -) - -# Third part of the prompt for the llm backward function. -# Asks the user to evaluate a variable in the conversation. -EVALUATE_VARIABLE_INSTRUCTION = ( - "We are interested in giving feedback to the {variable_desc} " - "for this conversation. Specifically, give feedback to the following span " - "of text:\n\n " - "{variable_short} \n\n" - "Given the above history, describe how the {variable_desc} " - "could be improved to improve the . Be very creative, critical, and intelligent.\n\n" -) - -SEARCH_QUERY_BACKWARD_INSTRUCTION = ( - "Here is a query and a response from searching with {engine_name}:\n" - " {query} \n" - " {results} \n\n" -) - - -GRADIENT_OF_RESULTS_INSTRUCTION = ( - "For the search results from {engine_name} we got the following feedback:\n\n" - "{results_gradient}\n\n" -) - -IN_CONTEXT_EXAMPLE_PROMPT_ADDITION = ( - "You must base on the following examples when give feedback and criticism to the variable:\n\n" - "{in_context_examples}\n\n" -) diff --git a/textgrad/autograd/reduce_prompts.py b/textgrad/autograd/reduce_prompts.py deleted file mode 100644 index 3d65cfe..0000000 --- a/textgrad/autograd/reduce_prompts.py +++ /dev/null @@ -1,18 +0,0 @@ -REDUCE_MEAN_SYSTEM_PROMPT = ( - "You are part of an optimization system that improves a given text (i.e. the variable). " - "Your only responsibility is to critically aggregate and summarize the feedback from sources. " - "The variables may be solutions to problems, prompts to language models, code, or any other text-based variable. " - "The multiple sources of feedback will be given to you in tags. " - "When giving a response, only provide the core summary of the feedback. Do not recommend a new version for the variable -- only summarize the feedback critically. " -) - -def construct_reduce_prompt(gradients): - """ - Construct a prompt that reduces the gradients. 
- """ - gradient_texts = [] - for i, gradient in enumerate(gradients): - gradient_texts.append(f"{gradient.get_value()}") - gradient_texts = "\n".join(gradient_texts) - - return gradient_texts \ No newline at end of file diff --git a/textgrad/autograd/string_based_ops.py b/textgrad/autograd/string_based_ops.py deleted file mode 100644 index 74a7b6d..0000000 --- a/textgrad/autograd/string_based_ops.py +++ /dev/null @@ -1,194 +0,0 @@ -from textgrad import logger -from textgrad.variable import Variable -from textgrad.engine import EngineLM -from .function import Function, BackwardContext -from typing import Callable, Dict, List - -CONVERSATION_TEMPLATE_STRING = ( - "Function purpose: {function_purpose}\n\n" - " {inputs_string} \n\n" - " {response_value} \n\n" -) - -# Has the gradient on the output. -CONVERSATION_START_INSTRUCTION_STRING_FN_CHAIN = ( - "You will give feedback to a variable with the following role: {variable_desc} . " - "Here is an evaluation of a string-based function with inputs and outputs :\n\n" - "{conversation}" -) - -# Does not have gradient on the output -CONVERSATION_START_INSTRUCTION_STRING_FN_BASE = ( - "You will give feedback to a variable with the following role: {variable_desc} . " - "Here is an evaluation of the variable using a string-based function:\n\n" - "{conversation}" -) - -OBJECTIVE_INSTRUCTION_CHAIN = ( - "This conversation is part of a larger system. The was later used as {response_desc}.\n\n" - "Your goal is to give feedback to the variable to address the following feedback on the OUTPUT_OF_FUNCTION: {response_gradient} \n\n" -) - -OBJECTIVE_INSTRUCTION_BASE = ( - "Your goal is to give feedback and criticism to the variable given the above evaluation output. " - "Our only goal is to improve the above metric, and nothing else. \n\n" -) - -# Some instructions for the backward pass are shared with LLMs -from .llm_backward_prompts import ( - EVALUATE_VARIABLE_INSTRUCTION, - BACKWARD_SYSTEM_PROMPT -) - -class StringBasedFunction(Function): - def __init__(self, fn: Callable, function_purpose: str): - """ - Autograd function for string-based functions. - - :param fn: The function to execute for the forward pass. - :type fn: Callable - :param function_purpose: The description of the purpose of the function. Analogous to role description for variables. - :type function_purpose: str - """ - super().__init__() - self.fn = fn - self.function_purpose = function_purpose - - def forward(self, - inputs: Dict[str, Variable], - response_role_description: str = None) -> Variable: - """ - The forward mode for string-based functions - - :param inputs: The arguments that will be passed to the string based function. The keys are the names of the arguments. - :type fn: Dict[str, Variable] - :param response_role_description: The role description of the output variable. 
- :type response_role_description: str - """ - if response_role_description is None: - response_role_description = f"Output of the string-based function with purpose: {self.function_purpose}" - response_string = self.fn(**inputs) - - # Create the response variable - response = Variable( - value=response_string, - predecessors=list(inputs.values()), - role_description=response_role_description - ) - - logger.info(f"StringBasedFunction", extra={"text": f"In: {inputs}, Out: {response_string}"}) - - # Populate the gradient function, using a container to store the backward function and the context - response.set_grad_fn(BackwardContext(backward_fn=self.backward, - response=response, - function_purpose=self.function_purpose, - inputs=inputs)) - - return response - - def backward(self, response: Variable, - function_purpose: str, - inputs: Dict[str, Variable], - backward_engine: EngineLM): - children_variables = response.predecessors - if response.get_gradient_text().strip() == "": - self._backward_through_string_fn_base(children_variables, response, inputs, function_purpose, backward_engine) - else: - self._backward_through_string_fn_chain(children_variables, response, inputs, function_purpose, backward_engine) - - @staticmethod - def _construct_string_fn_chain_backward_prompt(backward_info: dict[str, str]) -> str: - conversation = CONVERSATION_TEMPLATE_STRING.format(**backward_info) - backward_prompt = CONVERSATION_START_INSTRUCTION_STRING_FN_CHAIN.format(conversation=conversation, **backward_info) - backward_prompt += OBJECTIVE_INSTRUCTION_CHAIN.format(**backward_info) - backward_prompt += EVALUATE_VARIABLE_INSTRUCTION.format(**backward_info) - return backward_prompt - - @staticmethod - def _backward_through_string_fn_chain(variables: List[Variable], - response: Variable, - inputs: Dict[str, Variable], - function_purpose: str, - backward_engine: EngineLM): - inputs_string = "\n\n".join([f"**{k.replace('_', ' ').capitalize()}(role: {v.get_role_description()})**: {v.get_short_value()}" for k, v in inputs.items()]) - - for variable in variables: - if not variable.requires_grad: - continue - - backward_info = { - "response_desc": response.get_role_description(), - "response_value": response.get_value(), - "response_gradient": response.get_gradient_text(), - "function_purpose": function_purpose, - "inputs_string": inputs_string, - "variable_desc": variable.get_role_description(), - "variable_short": variable.get_short_value() - } - - backward_prompt = StringBasedFunction._construct_string_fn_chain_backward_prompt(backward_info) - - logger.info(f"_backward_through_string_fn", extra={"_backward_through_string_fn": backward_prompt}) - gradient_value = backward_engine(backward_prompt, system_prompt=BACKWARD_SYSTEM_PROMPT) - logger.info(f"_backward_through_string_fn gradient", extra={"_backward_through_string_fn": gradient_value}) - - var_gradients = Variable(value=gradient_value, role_description=f"feedback to {variable.get_role_description()}") - variable.gradients.add(var_gradients) - conversation = CONVERSATION_TEMPLATE_STRING.format(**backward_info) - variable.gradients_context[var_gradients] = { - "context": conversation, - "response_desc": response.get_role_description(), - "variable_desc": variable.get_role_description() - } - - if response._reduce_meta: - var_gradients._reduce_meta.extend(response._reduce_meta) - variable._reduce_meta.extend(response._reduce_meta) - - @staticmethod - def _construct_string_fn_base_backward_prompt(backward_info: dict[str, str]) -> str: - conversation = 
CONVERSATION_TEMPLATE_STRING.format(**backward_info) - backward_prompt = CONVERSATION_START_INSTRUCTION_STRING_FN_BASE.format(conversation=conversation, **backward_info) - backward_prompt += OBJECTIVE_INSTRUCTION_BASE.format(**backward_info) - backward_prompt += EVALUATE_VARIABLE_INSTRUCTION.format(**backward_info) - return backward_prompt - - @staticmethod - def _backward_through_string_fn_base(variables: List[Variable], - response: Variable, - inputs: Dict[str, Variable], - function_purpose: str, - backward_engine: EngineLM): - inputs_string = "\n\n".join([f"**{k.replace('_', ' ').capitalize()}(role: {v.get_role_description()})**: {v.get_short_value()}" for k, v in inputs.items()]) - - for variable in variables: - if not variable.requires_grad: - continue - - backward_info = { - "response_desc": response.get_role_description(), - "response_value": response.get_value(), - "response_gradient": response.get_gradient_text(), - "function_purpose": function_purpose, - "inputs_string": inputs_string, - "variable_desc": variable.get_role_description(), - "variable_short": variable.get_short_value() - } - backward_prompt = StringBasedFunction._construct_string_fn_base_backward_prompt(backward_info) - - logger.info(f"_backward_through_string_fn prompt", extra={"_backward_through_string_fn": backward_prompt}) - gradient_value = backward_engine(backward_prompt, system_prompt=BACKWARD_SYSTEM_PROMPT) - logger.info(f"_backward_through_string_fn gradient", extra={"_backward_through_string_fn": gradient_value}) - - conversation = CONVERSATION_TEMPLATE_STRING.format(**backward_info) - var_gradients = Variable(value=gradient_value, role_description=f"feedback to {variable.get_role_description()}") - variable.gradients.add(var_gradients) - variable.gradients_context[var_gradients] = { - "context": conversation, - "response_desc": response.get_role_description(), - "variable_desc": variable.get_role_description() - } - - if response._reduce_meta: - var_gradients._reduce_meta.extend(response._reduce_meta) - variable._reduce_meta.extend(response._reduce_meta) diff --git a/textgrad/defaults.py b/textgrad/defaults.py deleted file mode 100644 index 613c5b5..0000000 --- a/textgrad/defaults.py +++ /dev/null @@ -1,5 +0,0 @@ -SYSTEM_PROMPT_DEFAULT_ROLE = ( - "system prompt to the language model that specifies the behavior and strategies, which will be reused across queries" -) -VARIABLE_INPUT_DEFAULT_ROLE = "query to the language model" -VARIABLE_OUTPUT_DEFAULT_ROLE = "response from the language model" diff --git a/textgrad/engine/cohere.py b/textgrad/engine/cohere.py deleted file mode 100644 index af93319..0000000 --- a/textgrad/engine/cohere.py +++ /dev/null @@ -1,66 +0,0 @@ -try: - import cohere -except ImportError: - raise ImportError("If you'd like to use Cohere models, please install the openai package by running `pip install cohere`, and add 'COHERE_API_KEY' to your environment variables.") - -import os -import platformdirs -from tenacity import ( - retry, - stop_after_attempt, - wait_random_exponential, -) - -from .base import EngineLM, CachedEngine - -class ChatCohere(EngineLM, CachedEngine): - DEFAULT_SYSTEM_PROMPT = "You are a helpful, creative, and smart assistant." 
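# Editor's note: the three engine wrappers removed in this part of the diff (Cohere, Gemini, Together)
# all follow the same cache-then-call pattern inherited from CachedEngine: the system prompt
# concatenated with the user prompt is the cache key, kept in a per-model .db file under
# platformdirs.user_cache_dir("textgrad"). A condensed, hypothetical rendering of that pattern;
# _check_cache/_save_cache are the helper names the deleted classes use, while provider_call is an
# illustrative stand-in for the real API request.
def cached_generate(engine, prompt, system_prompt, provider_call):
    key = system_prompt + prompt          # same key construction as the deleted generate() methods
    hit = engine._check_cache(key)
    if hit is not None:
        return hit                        # identical (system prompt, prompt) pairs skip the API
    response = provider_call(prompt, system_prompt)
    engine._save_cache(key, response)
    return response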
- - def __init__( - self, - model_string="command-r-plus", - system_prompt=DEFAULT_SYSTEM_PROMPT): - """ - :param model_string: - :param system_prompt: - """ - root = platformdirs.user_cache_dir("textgrad") - cache_path = os.path.join(root, f"cache_cohere_{model_string}.db") - super().__init__(cache_path=cache_path) - - self.system_prompt = system_prompt - if os.getenv("COHERE_API_KEY") is None: - raise ValueError("Please set the COHERE_API_KEY environment variable if you'd like to use Cohere models.") - - self.client = cohere.Client( - api_key=os.getenv("COHERE_API_KEY"), - ) - self.model_string = model_string - - def generate( - self, prompt, system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99 - ): - - sys_prompt_arg = system_prompt if system_prompt else self.system_prompt - - cache_or_none = self._check_cache(sys_prompt_arg + prompt) - if cache_or_none is not None: - return cache_or_none - - response = self.client.chat( - model=self.model_string, - message=prompt, - preamble=sys_prompt_arg, - temperature=temperature, - max_tokens=max_tokens, - p=top_p, - ) - - response = response.text - self._save_cache(sys_prompt_arg + prompt, response) - return response - - @retry(wait=wait_random_exponential(min=1, max=5), stop=stop_after_attempt(5)) - def __call__(self, prompt, **kwargs): - return self.generate(prompt, **kwargs) - diff --git a/textgrad/engine/gemini.py b/textgrad/engine/gemini.py deleted file mode 100644 index 84fa749..0000000 --- a/textgrad/engine/gemini.py +++ /dev/null @@ -1,67 +0,0 @@ -try: - import google.generativeai as genai - -except ImportError: - raise ImportError("If you'd like to use Gemini models, please install the google-generativeai package by running `pip install google-generativeai`, and add 'GOOGLE_API_KEY' to your environment variables.") - -import os -import platformdirs -from tenacity import ( - retry, - stop_after_attempt, - wait_random_exponential, -) -from .base import EngineLM, CachedEngine - - -class ChatGemini(EngineLM, CachedEngine): - SYSTEM_PROMPT = "You are a helpful, creative, and smart assistant." 
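# Editor's note: the engines in this diff wrap their call paths with the same tenacity retry policy,
# copied verbatim below: random exponential backoff between 1 and 5 seconds, giving up after
# 5 attempts. A standalone sketch of that policy; call_provider_api is a hypothetical stand-in
# that fails transiently.
import random
from tenacity import retry, stop_after_attempt, wait_random_exponential

@retry(wait=wait_random_exponential(min=1, max=5), stop=stop_after_attempt(5))
def call_provider_api(prompt: str) -> str:
    # Any exception raised here triggers another (backed-off) attempt, up to the stop condition.
    if random.random() < 0.5:
        raise ConnectionError("transient failure")
    return f"response to: {prompt}"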
- - def __init__( - self, - model_string="gemini-pro", - system_prompt=SYSTEM_PROMPT, - ): - - root = platformdirs.user_cache_dir("textgrad") - cache_path = os.path.join(root, f"cache_gemini_{model_string}.db") - super().__init__(cache_path=cache_path) - if os.getenv("GOOGLE_API_KEY") is None: - raise ValueError("Please set the GOOGLE_API_KEY environment variable if you'd like to use Gemini models.") - - genai.configure(api_key=os.getenv("GOOGLE_API_KEY")) - - self.model_string = model_string - self.system_prompt = system_prompt - assert isinstance(self.system_prompt, str) - - @retry(wait=wait_random_exponential(min=1, max=5), stop=stop_after_attempt(5)) - def __call__(self, prompt, **kwargs): - return self.generate(prompt, **kwargs) - - @retry(wait=wait_random_exponential(min=1, max=5), stop=stop_after_attempt(5)) - def generate( - self, prompt, system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99 - ): - - sys_prompt_arg = system_prompt if system_prompt else self.system_prompt - cache_or_none = self._check_cache(sys_prompt_arg + prompt) - if cache_or_none is not None: - return cache_or_none - - client = genai.GenerativeModel(self.model_string, - system_instruction=sys_prompt_arg) - messages = [{'role':'user', 'parts': [prompt]}] - generation_config = genai.types.GenerationConfig(max_output_tokens=max_tokens, - temperature=temperature, - top_p=top_p, - candidate_count=1) - - - response = client.generate_content(messages, - generation_config=generation_config) - - - response = response.text - self._save_cache(sys_prompt_arg + prompt, response) - return response \ No newline at end of file diff --git a/textgrad/engine/together.py b/textgrad/engine/together.py deleted file mode 100644 index c4a20cb..0000000 --- a/textgrad/engine/together.py +++ /dev/null @@ -1,71 +0,0 @@ -try: - from together import Together -except ImportError: - raise ImportError("If you'd like to use OpenAI models, please install the openai package by running `pip install together`, and add 'TOGETHER_API_KEY' to your environment variables.") - -import os -import platformdirs -from tenacity import ( - retry, - stop_after_attempt, - wait_random_exponential, -) - -from .base import EngineLM, CachedEngine - -class ChatTogether(EngineLM, CachedEngine): - DEFAULT_SYSTEM_PROMPT = "You are a helpful, creative, and smart assistant." 
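# Editor's note: a hypothetical end-to-end call against the ChatTogether engine removed here.
# The default model string and the __call__ -> generate(prompt, system_prompt=...) path are taken
# from the deleted code; running this requires the together package and TOGETHER_API_KEY to be set.
from textgrad.engine.together import ChatTogether

engine = ChatTogether(model_string="meta-llama/Llama-3-70b-chat-hf")
answer = engine("What is 2 + 2?", system_prompt="Answer with a single number.")
print(answer)   # responses are cached on disk, so repeating the same call does not hit the API again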
- - def __init__( - self, - model_string="meta-llama/Llama-3-70b-chat-hf", - system_prompt=DEFAULT_SYSTEM_PROMPT): - """ - :param model_string: - :param system_prompt: - """ - root = platformdirs.user_cache_dir("textgrad") - cache_path = os.path.join(root, f"cache_together_{model_string}.db") - super().__init__(cache_path=cache_path) - - self.system_prompt = system_prompt - if os.getenv("TOGETHER_API_KEY") is None: - raise ValueError("Please set the TOGETHER_API_KEY environment variable if you'd like to use OpenAI models.") - - self.client = Together( - api_key=os.getenv("TOGETHER_API_KEY"), - ) - self.model_string = model_string - - def generate( - self, prompt, system_prompt=None, temperature=0, max_tokens=2000, top_p=0.99 - ): - - sys_prompt_arg = system_prompt if system_prompt else self.system_prompt - - cache_or_none = self._check_cache(sys_prompt_arg + prompt) - if cache_or_none is not None: - return cache_or_none - - response = self.client.chat.completions.create( - model=self.model_string, - messages=[ - {"role": "system", "content": sys_prompt_arg}, - {"role": "user", "content": prompt}, - ], - frequency_penalty=0, - presence_penalty=0, - stop=None, - temperature=temperature, - max_tokens=max_tokens, - top_p=top_p, - ) - - response = response.choices[0].message.content - self._save_cache(sys_prompt_arg + prompt, response) - return response - - @retry(wait=wait_random_exponential(min=1, max=5), stop=stop_after_attempt(5)) - def __call__(self, prompt, **kwargs): - return self.generate(prompt, **kwargs) - diff --git a/textgrad/optimizer/__init__.py b/textgrad/optimizer/__init__.py deleted file mode 100644 index fc5ba66..0000000 --- a/textgrad/optimizer/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .optimizer import TextualGradientDescent, Optimizer, TextualGradientDescentwithMomentum - -TGD = TextualGradientDescent diff --git a/textgrad/prompts.py b/textgrad/prompts.py deleted file mode 100644 index cb661a1..0000000 --- a/textgrad/prompts.py +++ /dev/null @@ -1,6 +0,0 @@ -# This is how we save gradients to the variable. -GRADIENT_TEMPLATE = ( - "Here is a conversation:\n\n{context}\n\n" - "This conversation is potentially part of a larger system. 
The output is used as {response_desc}\n\n" - "Here is the feedback we got for {variable_desc} in the conversation:\n\n{feedback}\n\n" -) diff --git a/textgrad/tasks/__init__.py b/textgrad/tasks/__init__.py deleted file mode 100644 index 879d912..0000000 --- a/textgrad/tasks/__init__.py +++ /dev/null @@ -1,105 +0,0 @@ -from .mmlu import MMLU, MMLUInstanceDataset -from .base import Dataset, DataLoader -from .leetcode import LeetCodeHardEval - -from typing import Tuple, Callable -from textgrad import Variable -from textgrad.engine import EngineLM - -AVAILABLE_DATASETS = [ - "BBH_object_counting", - "BBH_word_sorting", - "GSM8K_DSPy", -] - -AVAILABLE_INSTANCE_DATASETS = [ - "MMLU_machine_learning", - "MMLU_college_physics", - "GPQA_diamond" - "LeetCodeHardEval" -] - -def load_task(task_name: str, evaluation_api: EngineLM, *args, **kwargs) -> Tuple[Dataset, Dataset, Callable]: - """ - Args: - task_name: the name of the task to evaluate - evaluation_api: the engine to use for evaluation, if needed - """ - if "object_counting" in task_name: - from textgrad.loss import MultiFieldTokenParsedEvaluation - from .big_bench_hard import BigBenchHard, string_based_equality_fn - from textgrad.autograd.string_based_ops import StringBasedFunction - task_name = task_name[4:] - train_set = BigBenchHard(task_name, split="train", *args, **kwargs) - val_set = BigBenchHard(task_name, split="val", *args, **kwargs) - test_set = BigBenchHard(task_name, split="test", *args, **kwargs) - role_descriptions = [ - "Question for the task", - "Ground truth answer", - "Reasoning and prediction from the language model" - ] - fn_purpose = "The runtime of string-based function that checks if the prediction is correct." - eval_fn = StringBasedFunction(string_based_equality_fn, function_purpose=fn_purpose) - return train_set, val_set, test_set, eval_fn - - elif "BBH" in task_name: - from textgrad.loss import MultiFieldTokenParsedEvaluation - from .big_bench_hard import BigBenchHard - task_name = task_name[4:] - train_set = BigBenchHard(task_name, split="train", *args, **kwargs) - val_set = BigBenchHard(task_name, split="val", *args, **kwargs) - test_set = BigBenchHard(task_name, split="test", *args, **kwargs) - role_descriptions = [ - "Question for the task", - "Ground truth answer", - "Reasoning and prediction from the language model" - ] - - evaluation_instruction = "Below is a question from a question-answering task, the ground truth answer, and reasoning with the final prediction. Is the final prediction correct, i.e. the same as the ground truth answer? Say only 1 (yes) or 0 (no). Return your response within tags. e.g. 0 or 1 " - eval_instruction = Variable(evaluation_instruction, requires_grad=False, role_description="evaluation instruction for the task") - eval_fn = MultiFieldTokenParsedEvaluation( - eval_instruction, - engine=evaluation_api, - role_descriptions=role_descriptions, - parse_tags=["", ""] - ) - - return train_set, val_set, test_set, eval_fn - - elif task_name == "GSM8K_DSPy": - from textgrad.tasks.gsm8k import GSM8K_DSPy - from .big_bench_hard import string_based_equality_fn - from textgrad.autograd.string_based_ops import StringBasedFunction - evaluation_instruction = "Below is a prediction we got for a question answering task, and the correct final answer. Is the final answer correct? Say only 1 (yes) or 0 (no). Return 1 if and only if the final answer is correct. Return your response within tags. e.g. 
0 or 1 " - system_prompt = Variable("You are a language model that evaluates the accuracy of a prediction for a mathematical question answering task. Only call a prediction accurate if it is the same as the ground truth answer.", requires_grad=False, role_description="system prompt for the evaluation") - # Should we do train/test like this? - train_set = GSM8K_DSPy(split="train", *args, **kwargs) - val_set = GSM8K_DSPy(split="val", *args, **kwargs) - test_set = GSM8K_DSPy(split="test", *args, **kwargs) - role_descriptions = [ - "Question for the task", - "Ground truth answer", - "Prediction from the language model" - ] - fn_purpose = "The runtime of string-based function that checks if the prediction is correct." - eval_fn = StringBasedFunction(string_based_equality_fn, function_purpose=fn_purpose) - return train_set, val_set, test_set, eval_fn - - else: - raise ValueError(f"Task {task_name} not found.") - - -def load_instance_task(task_name: str, evaluation_api: EngineLM, *args, **kwargs): - if "MMLU_" in task_name: - subset = task_name[5:] - test_set = MMLUInstanceDataset(evaluation_api=evaluation_api, subset=subset, split="test", *args, **kwargs) - return test_set - elif "GPQA" in task_name: - from .gpqa import GPQAInstanceDataset - test_set = GPQAInstanceDataset(evaluation_api=evaluation_api, subset=task_name.lower(), *args, **kwargs) - return test_set - elif task_name in ["LeetCodeHardEval"]: - dataset = LeetCodeHardEval() - return dataset - else: - raise ValueError(f"Instance task {task_name} not found.") \ No newline at end of file diff --git a/textgrad/tasks/base.py b/textgrad/tasks/base.py deleted file mode 100644 index 059473e..0000000 --- a/textgrad/tasks/base.py +++ /dev/null @@ -1,49 +0,0 @@ -import numpy as np -from abc import ABC, abstractmethod - -class Dataset(ABC): - @abstractmethod - def __init__(self): - pass - - @abstractmethod - def __getitem__(self): - pass - - @abstractmethod - def __len__(self): - pass - - @abstractmethod - def get_default_task_instruction(self): - pass - -class DataLoader: - def __init__(self, data, batch_size=32, shuffle=True): - self.data = data - self.batch_size = batch_size - self.shuffle = shuffle - self.indices = np.arange(len(data)) - self.current_index = 0 - - def __iter__(self): - if self.shuffle: - np.random.shuffle(self.indices) - self.current_index = 0 - return self - - def __next__(self): - if self.current_index >= len(self.data): - raise StopIteration - - batch_indices = self.indices[self.current_index : self.current_index + self.batch_size] - batch_data = [self.data[int(i)] for i in batch_indices] - - if isinstance(batch_data[0], tuple): - batch_data = tuple(zip(*batch_data)) - else: - batch_data = np.array(batch_data) - - self.current_index += self.batch_size - - return batch_data \ No newline at end of file diff --git a/textgrad/tasks/big_bench_hard.py b/textgrad/tasks/big_bench_hard.py deleted file mode 100644 index e11c834..0000000 --- a/textgrad/tasks/big_bench_hard.py +++ /dev/null @@ -1,102 +0,0 @@ -import os -import json -import pandas as pd -import subprocess -import platformdirs -import textgrad as tg -from .base import Dataset - -# The below metric is taken from DSPy for consistenc -# and modified to work with TG-graphs - -def parse_integer_answer(answer: str, only_first_line: bool=False): - try: - if only_first_line: - answer = answer.strip().split('\n')[0] - answer = answer.strip() - # find the last token that has a number in it - answer = [token for token in answer.split() if any(c.isdigit() for c in token)][-1] - 
answer = answer.split('.')[0] - answer = ''.join([c for c in answer if c.isdigit()]) - answer = int(answer) - - except (ValueError, IndexError): - # print(answer) - answer = 0 - - return answer - -def string_based_equality_fn(prediction: tg.Variable, ground_truth_answer: tg.Variable): - return int(parse_integer_answer(str(prediction.value)) == int(parse_integer_answer(str(ground_truth_answer.value)))) - - -class BigBenchHard(Dataset): - def __init__(self, task_name: str, root: str=None, split: str="train", *args, **kwargs): - """ - Tasks from BIG-Bench Hard - - - The train, val, test splits were constructed from 50/100/100 examples. - - Args: - root (string): Root directory of the dataset - split (string, optional): The dataset split, supports ``"train"`` (default), ``"val"`` and ``"test"``. - """ - if root is None: - root = platformdirs.user_cache_dir("textgrad") - self.root = root - self.split = split - self.task_name = task_name - self._check_or_download_dataset() - assert split in ["train", "val", "test"] - data_path = os.path.join(self.root, self.task_name, f"{split}.csv") - self.data = pd.read_csv(data_path, index_col=0) - self._task_description = "You will answer a reasoning question. Think step by step. The last line of your response should be of the following format: 'Answer: $VALUE' where VALUE is a numerical value." - - def get_task_description(self): - return self._task_description - - def _check_or_download_dataset(self): - data_path = os.path.join(self.root, self.task_name, f"{self.split}.csv") - if os.path.exists(data_path): - return - - os.makedirs(os.path.join(self.root, self.task_name), exist_ok=True) - # Download the dataset - # Download from https://github.com/suzgunmirac/BIG-Bench-Hard/blob/main/bbh/[task_name].json - # and save it to self.root - subprocess.call( - [ - "wget", - f"https://raw.githubusercontent.com/suzgunmirac/BIG-Bench-Hard/main/bbh/{self.task_name}.json", - "-O", - os.path.join(self.root, f"{self.task_name}.json") - ] - ) - # Separate to train, val, test - data = json.load(open(os.path.join(self.root, f"{self.task_name}.json"))) - examples = data["examples"] - train_examples = [{"x": ex["input"], "y": ex["target"]} for ex in examples[:50]] - val_examples = [{"x": ex["input"], "y": ex["target"]} for ex in examples[50:150]] - test_examples = [{"x": ex["input"], "y": ex["target"]} for ex in examples[150:]] - train_path = os.path.join(self.root, self.task_name, "train.csv") - with open(train_path, "w") as f: - pd.DataFrame(train_examples).to_csv(f) - val_path = os.path.join(self.root, self.task_name, "val.csv") - with open(val_path, "w") as f: - pd.DataFrame(val_examples).to_csv(f) - test_path = os.path.join(self.root, self.task_name, "test.csv") - with open(test_path, "w") as f: - pd.DataFrame(test_examples).to_csv(f) - - - def __getitem__(self, index): - row = self.data.iloc[index] - return row["x"], row["y"] - - def __len__(self): - return len(self.data) - - def get_default_task_instruction(self): - return self._task_description - diff --git a/textgrad/tasks/gpqa.py b/textgrad/tasks/gpqa.py deleted file mode 100644 index 631a720..0000000 --- a/textgrad/tasks/gpqa.py +++ /dev/null @@ -1,225 +0,0 @@ -import re -import platformdirs -import random -from textgrad.variable import Variable -from textgrad.loss import MultiFieldTokenParsedEvaluation -from .base import Dataset -from textgrad.loss import MultiChoiceTestTime - -# Below template is from https://github.com/openai/simple-evals/blob/main/common.py#L12 -QUERY_TEMPLATE_MULTICHOICE = """ -Answer the 
following multiple choice question. The last line of your response should be of the following format: 'Answer: $LETTER' (without quotes) where LETTER is one of ABCD. Think step by step before answering. - -{Question} - -A) {A} -B) {B} -C) {C} -D) {D} -""".strip() - -def eval_string_based(response_text, correct_answer): - ANSWER_PATTERN_MULTICHOICE = r"(?i)Answer\s*:\s*([A-D])" - - match = re.search(ANSWER_PATTERN_MULTICHOICE, response_text) - extracted_answer = match.group(1) if match else None - score = 1.0 if extracted_answer == correct_answer else 0.0 - return score - -class GPQA(Dataset): - def __init__(self, subset:str, root: str=None, *args, **kwargs): - """ - GPQA dataset from HF.""" - from datasets import load_dataset - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - self.root = root - assert subset in ["gpqa_main", "gpqa_diamond", "gpqa_extended"] - self.subset = subset - self.data = load_dataset("Idavidrein/gpqa", subset, split="train", cache_dir=root) - self._task_description = 'GPQA task' # Need to update - - def __getitem__(self, index): - row = self.data[index] - - choices = [row['Incorrect Answer 1'], row['Incorrect Answer 2'], row['Incorrect Answer 3'], row['Correct Answer']] - choices = [choice.strip() for choice in choices] - random.seed(42) - random.shuffle(choices) - choices_dict = dict( - A=choices[0], B=choices[1], C=choices[2], D=choices[3], Question=row["Question"] - ) - correct_answer_idx = choices.index(row['Correct Answer'].strip()) - - # Choices will be a. Choice 1 b. Choice 2 ... etc - question_prompt = QUERY_TEMPLATE_MULTICHOICE.format(**choices_dict) - answer = chr(65+correct_answer_idx) - return question_prompt, answer - - def __len__(self): - return len(self.data) - - def get_default_task_instruction(self): - return "Given a multiple choice question, the goal is to select the correct answer from the choices." - - -class GPQAInstanceDataset(GPQA): - def __init__(self, evaluation_api, subset:str, root: str=None, split: str="train", max_samples=-1): - super().__init__(subset, root, split, max_samples) - self.evaluation_api = evaluation_api - - - def _get_instance_test_time_objective(self, question: str): - evaluation_instruction = "Below is a multi-choice question and a prediction. You are an expert scientist. Your job is to investigate the prediction. Critically go through reasoning steps, and see if there is a reason why the prediction could be incorrect." - evaluation_instruction += "\nUse the Janusian Process. Think about whether alternative answers could be true. Raise creative and critical objections to the solution, when needed." - eval_fn = MultiChoiceTestTime(evaluation_instruction, engine=self.evaluation_api) - def test_time_objective(instance: Variable): - return eval_fn(question, instance) - return test_time_objective - - - def _legacy_get_instance_eval_fn(self, question_prompt: str, answer: str): - role_descriptions = [ - "Question for the task", - "Correct answer", - "Solution and prediction from the language model" - ] - eval_system_prompt = Variable("You are a language model that evaluates the accuracy of a prediction for a mathematical question answering task. Only call a prediction accurate if it is the same as the ground truth answer.", requires_grad=False, role_description="system prompt for the evaluation") - - evaluation_instruction = "Below is a question from a question-answering task, the ground truth answer, and a prediction. Is the final prediction correct, i.e. the same as the ground truth answer? 
Say only 1 (yes) or 0 (no). Return your response within tags. e.g. 0 or 1 " - eval_instruction = Variable(evaluation_instruction, requires_grad=False, role_description="evaluation instruction for the task") - eval_fn = MultiFieldTokenParsedEvaluation( - eval_instruction, - engine=self.evaluation_api, - role_descriptions=role_descriptions, - parse_tags=["", ""], - system_prompt=eval_system_prompt - ) - - answer_var = Variable(answer, requires_grad=False, role_description="Correct answer") - question_var = Variable(question_prompt, requires_grad=False, role_description="Question for the task") - def instance_eval_fn(instance): - eval_output = eval_fn([question_var, answer_var, instance]) - return eval_fn.parse_output(eval_output) - return instance_eval_fn - - - def _get_instance_eval_fn(self, question_prompt: str, answer: str): - eval_string_based_fn = lambda response: eval_string_based(response.value, answer) - return eval_string_based_fn - - def __len__(self): - return len(self.data) - - def __getitem__(self, index): - row = self.data[index] - - choices = [row['Incorrect Answer 1'], row['Incorrect Answer 2'], row['Incorrect Answer 3'], row['Correct Answer']] - choices = [choice.strip() for choice in choices] - random.seed(42) - random.shuffle(choices) - choices_dict = dict( - A=choices[0], B=choices[1], C=choices[2], D=choices[3], Question=row["Question"] - ) - correct_answer_idx = choices.index(row['Correct Answer'].strip()) - - # Choices will be a. Choice 1 b. Choice 2 ... etc - question_prompt = QUERY_TEMPLATE_MULTICHOICE.format(**choices_dict) - answer = chr(65+correct_answer_idx) - - # TODO: Make the two-way comparison class abstract enough. - # TODO: How do we determine the role of the instances? We should be more consistent - return question_prompt, answer, self._get_instance_test_time_objective(question_prompt), self._get_instance_eval_fn(question_prompt, answer) - - def get_task_description(self): - return "Given a multiple choice question, the goal is to select the correct final answer from the choices." - - - -class GPQAInstanceDatasetOpenAI(Dataset): - def __init__(self, evaluation_api, subset:str, root: str=None, *args, **kwargs): - """ - GPQA dataset from OpenAI (from https://github.com/openai/simple-evals/)""" - import pandas as pd - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - self.root = root - assert subset in ["gpqa_main", "gpqa_diamond", "gpqa_extended"] - self.subset = subset - df = pd.read_csv(f"https://openaipublic.blob.core.windows.net/simple-evals/gpqa_{subset[5:]}.csv") - examples = [row.to_dict() for _, row in df.iterrows()] - rng = random.Random(0) - self.data = [example | {"permutation": rng.sample(range(4), 4)} for example in examples] - self._task_description = 'GPQA task' # Need to update - self.evaluation_api = evaluation_api - - - def _get_instance_test_time_objective(self, question: str): - evaluation_instruction = "Below is a multi-choice question and a prediction. You are a critical and creative scientist. Your job is to investigate the prediction. Critically go through reasoning steps, and see if there is a reason why the prediction could be incorrect." - evaluation_instruction = "\nUse the Janusian Process, think about whether alternative answers could be true." 
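# Editor's note: a small, self-contained illustration of the string-based scorer defined at the top
# of this file (and repeated in mmlu.py below). An "Answer: X" pattern in the model response is
# extracted with a regex and compared to the gold letter; the inputs here are made up.
import re

def eval_string_based(response_text, correct_answer):
    ANSWER_PATTERN_MULTICHOICE = r"(?i)Answer\s*:\s*([A-D])"
    match = re.search(ANSWER_PATTERN_MULTICHOICE, response_text)
    extracted_answer = match.group(1) if match else None
    return 1.0 if extracted_answer == correct_answer else 0.0

print(eval_string_based("Step-by-step reasoning...\nAnswer: C", "C"))  # 1.0
print(eval_string_based("I think it is B", "C"))                       # 0.0 (no 'Answer:' line)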
- eval_fn = MultiChoiceTestTime(evaluation_instruction, engine=self.evaluation_api) - def test_time_objective(instance: Variable): - return eval_fn(question, instance) - return test_time_objective - - - def _legacy_get_instance_eval_fn(self, question_prompt: str, answer: str): - role_descriptions = [ - "Question for the task", - "Correct answer", - "Solution and prediction from the language model" - ] - eval_system_prompt = Variable("You are a language model that evaluates the accuracy of a prediction for a mathematical question answering task. Only call a prediction accurate if it is the same as the ground truth answer.", requires_grad=False, role_description="system prompt for the evaluation") - - evaluation_instruction = "Below is a question from a question-answering task, the ground truth answer, and a prediction. Is the final prediction correct, i.e. the same as the ground truth answer? Say only 1 (yes) or 0 (no). Return your response within tags. e.g. 0 or 1 " - eval_instruction = Variable(evaluation_instruction, requires_grad=False, role_description="evaluation instruction for the task") - eval_fn = MultiFieldTokenParsedEvaluation( - eval_instruction, - engine=self.evaluation_api, - role_descriptions=role_descriptions, - parse_tags=["", ""], - system_prompt=eval_system_prompt - ) - - answer_var = Variable(answer, requires_grad=False, role_description="Correct answer") - question_var = Variable(question_prompt, requires_grad=False, role_description="Question for the task") - def instance_eval_fn(instance): - eval_output = eval_fn([question_var, answer_var, instance]) - return eval_fn.parse_output(eval_output) - return instance_eval_fn - - - def _get_instance_eval_fn(self, question_prompt: str, answer: str): - eval_string_based_fn = lambda response: eval_string_based(response.value, answer) - return eval_string_based_fn - - def __len__(self): - return len(self.data) - - def __getitem__(self, index): - row = self.data[index] - - choices = [ - row["Correct Answer"], - row["Incorrect Answer 1"], - row["Incorrect Answer 2"], - row["Incorrect Answer 3"], - ] - choices = [choices[i] for i in row["permutation"]] - correct_answer_idx = choices.index(row["Correct Answer"]) - answer = "ABCD"[correct_answer_idx] - choices_dict = dict( - A=choices[0], B=choices[1], C=choices[2], D=choices[3], Question=row["Question"] - ) - - # Choices will be a. Choice 1 b. Choice 2 ... etc - question_prompt = QUERY_TEMPLATE_MULTICHOICE.format(**choices_dict) - - # TODO: Make the two-way comparison class abstract enough. - # TODO: How do we determine the role of the instances? We should be more consistent - return question_prompt, answer, self._get_instance_test_time_objective(question_prompt), self._get_instance_eval_fn(question_prompt, answer) - - def get_default_task_instruction(self): - return "Given a multiple choice question, the goal is to select the correct final answer from the choices." 
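# Editor's note: a hypothetical test-time refinement loop over the instance datasets defined in this
# file. Each item is a (question, answer, test_time_objective, instance_eval_fn) tuple: the objective
# critiques a candidate solution and the eval function checks the final letter against the gold answer.
# The engine string, the TGD(parameters=...) signature and the loss.backward()/optimizer.step() flow
# are assumptions based on the PyTorch-style API described in the docs, not code taken from this file.
import textgrad as tg
from textgrad.tasks import load_instance_task

engine = tg.get_engine("gpt-4o")
tg.set_backward_engine(engine)

dataset = load_instance_task("GPQA_diamond", evaluation_api=engine)
question, answer, objective_fn, eval_fn = dataset[0]

zero_shot = engine(question)                  # initial solution to refine
response = tg.Variable(zero_shot, requires_grad=True,
                       role_description="solution to the multiple choice question")
optimizer = tg.TGD(parameters=[response])

loss = objective_fn(response)                 # textual critique of the current solution
loss.backward()
optimizer.step()

print(eval_fn(response))                      # 1.0 if the refined answer matches `answer`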
diff --git a/textgrad/tasks/gsm8k.py b/textgrad/tasks/gsm8k.py deleted file mode 100644 index 080f907..0000000 --- a/textgrad/tasks/gsm8k.py +++ /dev/null @@ -1,86 +0,0 @@ -import platformdirs - -from .base import Dataset - -class GSM8K(Dataset): - def __init__(self, subset:str, root: str=None, split: str="train", *args, **kwargs): - """ - GSM8K dataset from HF.""" - from datasets import load_dataset - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - self.root = root - self.subset = subset - assert split in ["train", "val", "test"] - if split == "test": - self.data = load_dataset("gsm8k", subset, cache_dir=root, split="test[:300]") - elif split == "val": - # Split the training set into half. Let the second half be the training set. - # Let the first 100 samples be the validation set. - self.data = load_dataset("gsm8k", subset, cache_dir=root, split="train[:100]") - elif split == "train": - self.data = load_dataset("gsm8k", subset, cache_dir=root, split="train[100:]") - self.split = split - - def __getitem__(self, index): - row = self.data[index] - question = row["question"] - answer = row["answer"] - question_prompt = f"Question: {question}" - return question_prompt, answer - - def __len__(self): - return len(self.data) - - def get_task_description(self): - return "You will answer a mathemetical reasoning question. Think step by step. The last line of your response should be of the following format: 'Answer: $VALUE' where VALUE is a numerical value." - - - - -class GSM8K_DSPy(GSM8K): - def __init__(self, root:str=None, split: str="train"): - """DSPy splits for the GSM8K dataset.""" - import tqdm - import random - from datasets import load_dataset - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - dataset = load_dataset("gsm8k", 'main', cache_dir=root) - hf_official_train = dataset['train'] - hf_official_test = dataset['test'] - official_train = [] - official_test = [] - for example in tqdm.tqdm(hf_official_train): - question = example['question'] - answer = example['answer'].strip().split() - assert answer[-2] == '####' - - gold_reasoning = ' '.join(answer[:-2]) - answer = str(int(answer[-1].replace(',', ''))) - official_train.append(dict(question=question, gold_reasoning=gold_reasoning, answer=answer)) - - for example in tqdm.tqdm(hf_official_test): - question = example['question'] - answer = example['answer'].strip().split() - assert answer[-2] == '####' - - gold_reasoning = ' '.join(answer[:-2]) - answer = str(int(answer[-1].replace(',', ''))) - official_test.append(dict(question=question, gold_reasoning=gold_reasoning, answer=answer)) - - rng = random.Random(0) - rng.shuffle(official_train) - rng = random.Random(0) - rng.shuffle(official_test) - trainset = official_train[:200] - devset = official_train[200:500] - testset = official_test[:] - if split == "train": - self.data = trainset - elif split == "val": - self.data = devset - elif split == "test": - self.data = testset diff --git a/textgrad/tasks/leetcode.py b/textgrad/tasks/leetcode.py deleted file mode 100644 index 2158a53..0000000 --- a/textgrad/tasks/leetcode.py +++ /dev/null @@ -1,44 +0,0 @@ -import platformdirs -from .base import Dataset -import os -import json - - -class LeetCodeHardEval(Dataset): - def __init__(self, root: str = None): - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - self.root = root - data_path = f"{self.root}/leetcode-hard.jsonl" - self._check_or_download_dataset() - - self.dataset = [json.loads(line) for line in open(data_path)] - - 
self._task_description = 'You will solve a hard coding problem from LeetCode. You will be given a prompt describing a problem. You need to write a function that passes all the tests.' - - def get_task_description(self): - return self._task_description - - def _check_or_download_dataset(self): - data_path = f"{self.root}/leetcode-hard.jsonl" - if os.path.exists(data_path): - return - - os.makedirs(f"{self.root}/", exist_ok=True) - import requests - url = "https://raw.githubusercontent.com/vinid/data/master/leetcode_with_tests.jsonl" - r = requests.get(url) - with open(data_path, 'wb') as f: - f.write(r.content) - - def __getitem__(self, index): - row = self.dataset[index] - task_id = row["task_id"] - prompt = row["prompt"] - tests = row["test"] - - return task_id, prompt, tests - - def __len__(self): - return len(self.dataset) diff --git a/textgrad/tasks/mmlu.py b/textgrad/tasks/mmlu.py deleted file mode 100644 index a604784..0000000 --- a/textgrad/tasks/mmlu.py +++ /dev/null @@ -1,123 +0,0 @@ -import platformdirs - -from textgrad.variable import Variable -from textgrad.loss import MultiChoiceTestTime, MultiFieldTokenParsedEvaluation -from .base import Dataset - -import re - -def eval_string_based(response_text, correct_answer): - ANSWER_PATTERN_MULTICHOICE = r"(?i)Answer\s*:\s*([A-D])" - - match = re.search(ANSWER_PATTERN_MULTICHOICE, response_text) - extracted_answer = match.group(1) if match else None - score = 1.0 if extracted_answer == correct_answer else 0.0 - return score - -# Below template is from https://github.com/openai/simple-evals/blob/main/common.py#L12 -QUERY_TEMPLATE_MULTICHOICE = """ -Answer the following multiple choice question. The last line of your response should be of the following format: 'Answer: $LETTER' (without quotes) where LETTER is one of ABCD. Think step by step before answering. - -{Question} - -A) {A} -B) {B} -C) {C} -D) {D} -""".strip() - -class MMLU(Dataset): - def __init__(self, subset:str, root: str=None, split: str="train", *args, **kwargs): - """ - MMLU dataset from HF.""" - from datasets import load_dataset - if root is None: - root = platformdirs.user_cache_dir("textgrad") - - self.root = root - self.subset = subset - assert split in ["train", "validation", "test"] - self.data = load_dataset("cais/mmlu", subset, cache_dir=root, split=split if split != "train" else "dev") - self.split = split - self._task_description = 'You will answer multiple-choice questions. Think step by step.' - - def __getitem__(self, index): - row = self.data[index] - question = row["question"] - choices = row["choices"] - # Choices will be a. Choice 1 b. Choice 2 ... etc - choices_str = "\n".join([f"{chr(65+i)}. {choice}" for i, choice in enumerate(choices)]) - answer = chr(65+row["answer"]) - question_prompt = f"Question: {question}\nChoices:\n{choices_str}" - return question_prompt, answer - - def __len__(self): - return len(self.data) - - def get_default_task_instruction(self): - return "Given a multiple choice question, the goal is to select the correct final answer from the choices." - - -class MMLUInstanceDataset(MMLU): - def __init__(self, evaluation_api, subset:str, root: str=None, split: str="train", max_samples=-1): - super().__init__(subset, root, split, max_samples) - self.evaluation_api = evaluation_api - - - def _get_instance_test_time_objective(self, question: str): - evaluation_instruction = "Below is a multi-choice question and an answer. You are an expert scientist. Your job is to investigate the answer. 
Critically go through reasoning steps, consider your knowledge, and see if the answer is correct or if there are any critical mistakes." - eval_fn = MultiChoiceTestTime(evaluation_instruction, engine=self.evaluation_api) - def test_time_objective(instance: Variable): - return eval_fn(question, instance) - return test_time_objective - - - def _legacy_get_instance_eval_fn(self, question_prompt: str, answer: str): - role_descriptions = [ - "Question for the task", - "Correct answer", - "Prediction from the language model" - ] - eval_system_prompt = Variable("You are a language model that evaluates the accuracy of a prediction for a mathematical question answering task. Only call a prediction accurate if it is the same as the ground truth answer.", requires_grad=False, role_description="system prompt for the evaluation") - - evaluation_instruction = "Below is a question from a question-answering task, the ground truth answer, and a prediction. Is the final prediction correct, i.e. the same as the ground truth answer? Say only 1 (yes) or 0 (no). Return your response within tags. e.g. 0 or 1 " - eval_instruction = Variable(evaluation_instruction, requires_grad=False, role_description="evaluation instruction for the task") - eval_fn = MultiFieldTokenParsedEvaluation( - eval_instruction, - engine=self.evaluation_api, - role_descriptions=role_descriptions, - parse_tags=["", ""], - system_prompt=eval_system_prompt - ) - - answer_var = Variable(answer, requires_grad=False, role_description="Correct answer") - question_var = Variable(question_prompt, requires_grad=False, role_description="Question for the task") - def instance_eval_fn(instance): - eval_output = eval_fn([question_var, answer_var, instance]) - return eval_fn.parse_output(eval_output) - return instance_eval_fn - - def _get_instance_eval_fn(self, question_prompt: str, answer: str): - eval_string_based_fn = lambda response: eval_string_based(response.value, answer) - return eval_string_based_fn - - def __len__(self): - return len(self.data) - - def __getitem__(self, index): - row = self.data[index] - question = row["question"] - choices = row["choices"] - choices_dict = dict( - A=choices[0], B=choices[1], C=choices[2], D=choices[3], Question=question - ) - question_prompt = QUERY_TEMPLATE_MULTICHOICE.format(**choices_dict) - - # Choices will be a. Choice 1 b. Choice 2 ... etc - answer = chr(65+row["answer"]) - # TODO: Make the two-way comparison class abstract enough. - # TODO: How do we determine the role of the instances? We should be more consistent - return question_prompt, answer, self._get_instance_test_time_objective(question_prompt), self._get_instance_eval_fn(question_prompt, answer) - - def get_default_task_instruction(self): - return "Given a multiple choice question, the goal is to select the correct final answer from the choices."
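# Editor's note: a short, hypothetical sketch of how the Dataset/DataLoader pair removed in
# textgrad/tasks/base.py is meant to be driven, using the BigBenchHard splits from
# big_bench_hard.py (50/100/100 train/val/test examples). The task name is just one valid example.
from textgrad.tasks.base import DataLoader
from textgrad.tasks.big_bench_hard import BigBenchHard

train_set = BigBenchHard("object_counting", split="train")   # downloads the BBH json on first use
train_loader = DataLoader(train_set, batch_size=4, shuffle=True)

for questions, answers in train_loader:
    # Because __getitem__ yields (x, y) pairs, each batch arrives as a tuple of questions
    # and a parallel tuple of gold answers.
    print(len(questions), questions[0], answers[0])
    break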