Skip to content

Commit

Permalink
Removed reference to `prompt_with_metadata` in comments
Browse files Browse the repository at this point in the history
  • Loading branch information
pitneitemeier committed Feb 21, 2024
1 parent d10d1fa commit 2680895
Show file tree
Hide file tree
Showing 3 changed files with 7 additions and 8 deletions.
6 changes: 3 additions & 3 deletions src/intelligence_layer/core/prompt_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,7 @@ class PromptTemplate:
... ''')
>>> placeholder = template.placeholder(tokens)
>>> names = ["World", "Rutger"]
>>> prompt = template.to_prompt(names=names, image=placeholder)
>>> prompt = template.to_rich_prompt(names=names, image=placeholder)
>>> request = CompletionRequest(prompt=prompt)
"""

Expand All @@ -212,7 +212,7 @@ def __init__(self, template_str: str) -> None:
... Input: {% promptrange input %}{{text}}{% endpromptrange %}
... Question: {% promptrange question %}{{question}}{% endpromptrange %}
... Answer:''')
>>> prompt_data = template.to_prompt_with_metadata(text="Some text...", question="A question ...")
>>> prompt_data = template.to_rich_prompt(text="Some text...", question="A question ...")
>>> input_range = prompt_data.ranges.get("input")
"""
env = Environment()
Expand Down Expand Up @@ -260,7 +260,7 @@ def embed_prompt(self, prompt: Prompt) -> str:
... Text.from_text("cool"),
... ])
>>> template = PromptTemplate("Question: {{user_prompt}}\\n Answer: ")
>>> prompt = template.to_prompt(user_prompt=template.embed_prompt(user_prompt))
>>> prompt = template.to_rich_prompt(user_prompt=template.embed_prompt(user_prompt))
"""
prompt_text = ""
last_item = None
Expand Down
8 changes: 4 additions & 4 deletions src/intelligence_layer/core/text_highlight.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,11 +29,11 @@ class TextHighlightInput(BaseModel):
"""The input for a text highlighting task.
Attributes:
prompt_with_metadata: From client's PromptTemplate. Includes both the actual 'Prompt' as well as text range information.
rich_prompt: From client's PromptTemplate. Includes both the actual 'Prompt' as well as text range information.
Supports liquid-template-language-style {% promptrange range_name %}/{% endpromptrange %} for range.
target: The target that should be explained. Expected to follow the prompt.
model: A valid Aleph Alpha model name.
focus_ranges: The ranges contained in `prompt_with_metadata` the returned highlights stem from. That means that each returned
focus_ranges: The ranges contained in `rich_prompt` the returned highlights stem from. That means that each returned
highlight overlaps with at least one character with one of the ranges listed here.
If this set is empty highlights of the entire prompt are returned.
"""
Expand Down Expand Up @@ -93,11 +93,11 @@ class TextHighlight(Task[TextHighlightInput, TextHighlightOutput]):
... "{% promptrange r1 %}Question: What is 2 + 2?{% endpromptrange %}\\nAnswer:"
... )
>>> template = PromptTemplate(prompt_template_str)
>>> prompt_with_metadata = template.to_prompt_with_metadata()
>>> rich_prompt = template.to_rich_prompt()
>>> completion = " 4."
>>> model = "luminous-base"
>>> input = TextHighlightInput(
... prompt_with_metadata=prompt_with_metadata, target=completion, model=model
... rich_prompt=rich_prompt, target=completion, model=model
... )
>>> output = text_highlight.run(input, InMemoryTracer())
"""
Expand Down
1 change: 0 additions & 1 deletion tests/core/test_prompt_template.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
from textwrap import dedent
from typing import List

from aleph_alpha_client import Prompt
from aleph_alpha_client.prompt import Image, PromptItem, Text, Tokens
from liquid.exceptions import LiquidSyntaxError, LiquidTypeError
from pytest import raises
Expand Down

0 comments on commit 2680895

Please sign in to comment.