Skip to content

Commit

Permalink
Implement ExpandChunks-task
Browse files Browse the repository at this point in the history
  • Loading branch information
NickyHavoc committed Apr 16, 2024
1 parent f12758d commit eb33426
Show file tree
Hide file tree
Showing 5 changed files with 267 additions and 8 deletions.
18 changes: 13 additions & 5 deletions src/intelligence_layer/core/chunk.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@


class ChunkInput(BaseModel):
"""The input for a `ChunkTask`.
"""The input for a `Chunk`-task.
Attributes:
text: A text of arbitrary length.
Expand Down Expand Up @@ -63,25 +63,33 @@ def do_run(self, input: ChunkInput, task_span: TaskSpan) -> ChunkOutput:


class ChunkWithStartIndex(BaseModel):
    """A `TextChunk` and its `start_index` relative to its parent document.

    Attributes:
        chunk: The actual text.
        start_index: The character start index of the chunk within the respective document.
    """

    chunk: TextChunk
    start_index: int


class ChunkWithIndicesOutput(BaseModel):
    """The output of a `ChunkWithIndices`-task.

    Attributes:
        chunks_with_indices: A list of smaller sections of the input text with the respective start_index.
    """

    chunks_with_indices: Sequence[ChunkWithStartIndex]


class ChunkWithIndices(Task[ChunkInput, ChunkWithIndicesOutput]):
"""Splits a longer text into smaller text chunks.
"""Splits a longer text into smaller text chunks and returns the chunks' start indices.
Provide a text of any length and chunk it into smaller pieces using a
tokenizer that is available within the Aleph Alpha client.
tokenizer that is available within the Aleph Alpha client. For each chunk, the respective
start index relative to the document is also returned.
Args:
model: A valid Aleph Alpha model.
Expand Down
3 changes: 3 additions & 0 deletions src/intelligence_layer/use_cases/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,6 +62,9 @@
from .qa.single_chunk_qa import SingleChunkQa as SingleChunkQa
from .qa.single_chunk_qa import SingleChunkQaInput as SingleChunkQaInput
from .qa.single_chunk_qa import SingleChunkQaOutput as SingleChunkQaOutput
from .search.expand_chunk import ExpandChunkInput as ExpandChunkInput
from .search.expand_chunk import ExpandChunkOutput as ExpandChunkOutput
from .search.expand_chunk import ExpandChunks as ExpandChunks
from .search.search import AggregatedSearchEvaluation as AggregatedSearchEvaluation
from .search.search import ChunkFound as ChunkFound
from .search.search import ExpectedSearchOutput as ExpectedSearchOutput
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -101,9 +101,9 @@ def _get_highlights_per_chunk(
if highlight.start < next_start and highlight.end > current_start:
highlights_with_indices_fixed = ScoredTextHighlight(
start=max(0, highlight.start - current_start),
end=min(highlight.end - current_start, next_start)
if isinstance(next_start, int)
else highlight.end,
end=highlight.end - current_start
if isinstance(next_start, float)
else min(next_start, highlight.end - current_start),
score=highlight.score,
)
current_overlaps.append(highlights_with_indices_fixed)
Expand Down
78 changes: 78 additions & 0 deletions src/intelligence_layer/use_cases/search/expand_chunk.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
from typing import Generic, Sequence

from pydantic import BaseModel

from intelligence_layer.connectors import BaseRetriever, DocumentChunk
from intelligence_layer.connectors.retrievers.base_retriever import ID
from intelligence_layer.core.chunk import ChunkInput, ChunkWithIndices, TextChunk
from intelligence_layer.core.model import AlephAlphaModel
from intelligence_layer.core.task import Task
from intelligence_layer.core.tracer.tracer import TaskSpan


class ExpandChunkInput(BaseModel, Generic[ID]):
    """The input for an `ExpandChunks`-task.

    Attributes:
        document_id: Id of the document the chunks were found in; used to fetch the full document.
        chunks_found: Chunks of that document, with `start`/`end` character indices
            relative to the full document text.
    """

    document_id: ID
    chunks_found: Sequence[DocumentChunk]


class ExpandChunkOutput(BaseModel):
    """The output of an `ExpandChunks`-task.

    Attributes:
        chunks: The re-chunked sections of the full document that overlap the chunks found in the input.
    """

    chunks: Sequence[TextChunk]


class ExpandChunks(Generic[ID], Task[ExpandChunkInput[ID], ExpandChunkOutput]):
    """Expands found chunks to the larger document sections they overlap with.

    The full document is fetched via the retriever and re-chunked (keeping each
    chunk's character start index); every re-chunk that overlaps one of the
    input's found chunks is returned.

    Args:
        retriever: Used to fetch the full document by its id.
        model: Model whose tokenizer drives the re-chunking.
        max_chunk_size: Maximum size of each expanded chunk, passed to `ChunkWithIndices`.
    """

    def __init__(
        self,
        retriever: BaseRetriever[ID],
        model: AlephAlphaModel,
        max_chunk_size: int = 512,
    ):
        super().__init__()
        self._retriever = retriever
        self._chunk_with_indices = ChunkWithIndices(model, max_chunk_size)

    def do_run(
        self, input: ExpandChunkInput[ID], task_span: TaskSpan
    ) -> ExpandChunkOutput:
        """Re-chunk the full document and return the chunks overlapping the found ones.

        Raises:
            RuntimeError: If the retriever has no document for the given id.
        """
        document = self._retriever.get_full_document(input.document_id)
        if not document:
            raise RuntimeError(f"No document for id '{input.document_id}' found")

        chunks_with_indices = self._chunk_with_indices.run(
            ChunkInput(text=document.text), task_span
        ).chunks_with_indices

        start_indices = [c.start_index for c in chunks_with_indices]
        found_ranges = [(found.start, found.end) for found in input.chunks_found]
        selected = self._overlapping_chunk_indices(start_indices, found_ranges)

        return ExpandChunkOutput(
            chunks=[chunks_with_indices[i].chunk for i in selected]
        )

    def _overlapping_chunk_indices(
        self,
        chunk_start_indices: Sequence[int],
        target_ranges: Sequence[tuple[int, int]],
    ) -> list[int]:
        """Return the indices of chunks whose span overlaps any target range.

        A chunk's span runs from its own start index up to the next chunk's
        start index; the last chunk extends to the end of the document.
        """
        overlapping: list[int] = []
        for index, chunk_start in enumerate(chunk_start_indices):
            if index + 1 < len(chunk_start_indices):
                chunk_end: float = chunk_start_indices[index + 1]
            else:
                chunk_end = float("inf")
            # NOTE(review): `<=` also keeps a chunk that starts exactly at a
            # target's end index — confirm whether `end` is meant exclusively.
            if any(
                chunk_start <= target_end and chunk_end > target_start
                for target_start, target_end in target_ranges
            ):
                overlapping.append(index)
        return overlapping
170 changes: 170 additions & 0 deletions tests/use_cases/search/test_expand_chunk.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
from typing import Sequence

from pytest import fixture

from intelligence_layer.connectors import (
Document,
DocumentChunk,
QdrantInMemoryRetriever,
)
from intelligence_layer.core import LuminousControlModel, NoOpTracer
from intelligence_layer.use_cases import ExpandChunkInput, ExpandChunks


@fixture
def in_memory_retriever_documents() -> Sequence[Document]:
    # A single long story used as the full document that chunks are expanded from.
    return [
        Document(
            text="""In the rolling verdant hills of a realm untouched by the passage of modern times, a kingdom thrived under the rule of a benevolent monarch. The king, known for his wisdom and justice, held the loyalty of his people and the respect of his peers. However, beneath the surface of peace, a shadow loomed that would test the mettle of the kingdom's most valiant defenders: the knights.
These knights, clad in gleaming armor and bearing the colors of their liege, were not mere soldiers but champions of the realm's ideals. They were sworn to protect the innocent, uphold justice, and maintain the peace, guided by a chivalric code that was as much a part of them as the swords they wielded. Among these noble warriors, Sir Aelwyn stood prominent, known across the land for his prowess in battle and his unyielding honor.
Sir Aelwyn, the youngest knight ever to be granted the title of Master of the Horse, was a figure of legend. His tales were told in every corner of the kingdom, often embellished with each retelling. From his duel with the Giant of Gormouth to his silent vigil in the Haunted Wood, Aelwyn's life was a tapestry of bravery and adventure. Yet, his greatest challenge lay ahead, whispered in fearful murmurs throughout the castle—the rise of the Dragon of Black Hollow.
The dragon had awoken from a centuries-long slumber, driven by hunger and wrath, laying waste to the villages on the kingdom's fringes. Smoke and despair rose from the once tranquil borders, drawing the attention of the king and his council. With the threat growing each day, the king summoned Sir Aelwyn and tasked him with a quest that could either save the kingdom or doom it forever—to defeat the dragon.
As Sir Aelwyn prepared for his journey, the castle buzzed with activity. Blacksmiths forged new armor and weapons, alchemists concocted potent draughts, and scholars poured over ancient texts seeking any knowledge that might aid him. The knight spent his nights in the chapel, praying for strength and wisdom, and his days in the training yard, honing his skills against opponents both real and imagined.
Accompanying Sir Aelwyn were his loyal companions: Sir Rowan, a strategist known for his cunning and intellect; Lady Elara, a knight whose skill with the bow was unmatched; and Dame Miriel, a warrior-poet whose songs could stir the soul as fiercely as her sword could cleave armor. Together, they represented the kingdom's finest, united under a single cause.
Their journey was fraught with peril. They crossed through the Whispering Forest, where shadows moved with minds of their own, and over the Mountains of Echoes, where the wind carried voices from the past. Each step brought them closer to their quarry, and the signs of the dragon's passage grew ever more ominous—the charred earth, the ruins of once happy homes, and the air heavy with the scent of sulfur.
As they approached Black Hollow, the landscape grew bleak, and the sky darkened. The dragon, coiled atop a pile of gold and bones, awaited them, its scales shimmering like molten rock. The air crackled with the heat of its breath, and its eyes, glowing like coals, fixed on Sir Aelwyn and his companions.
The battle was fierce. Sir Rowan directed their movements with precision, while Lady Elara loosed arrows that found chinks in the dragon's armor. Dame Miriel's voice rose above the clamor, her words bolstering their courage and blinding the beast with bursts of radiant light. Sir Aelwyn faced the dragon head-on, his shield absorbing the flames that poured from its maw, his sword striking with the weight of his oath behind each blow.
Hours seemed like days as the clash continued, the outcome uncertain. Finally, seeing an opening, Sir Aelwyn drove his sword deep into the dragon's heart. With a final roar that shook the heavens, the dragon fell, its reign of terror ended.
The return to the kingdom was triumphant. The people lined the streets, showering the knights with flowers and cheers. The king welcomed them back as heroes, their deeds to be recorded in the annals of history for generations to come. Sir Aelwyn and his companions had not only saved the kingdom but had also reaffirmed the values it stood for: courage, honor, and a steadfast commitment to the protection of the realm.
As the celebrations faded, Sir Aelwyn looked out over the kingdom from the castle's highest tower. The peace they had fought for lay stretched before him, a tapestry of green fields and bustling towns. Yet, he knew that this peace was not permanent but a precious moment to be cherished and protected. For as long as there were threats to the realm, there would be knights to face them, their swords ready and their hearts brave.
In this timeless land, the cycle of challenge and triumph continued, each generation of knights rising to meet the dangers of their times with the same valor and resolve as those who had come before them. And so, the legends grew, each knight adding their thread to the ever-unfolding story of the kingdom and its defenders."""
        )
    ]


def build_expand_chunk_input(
    document: Document, index_ranges: Sequence[tuple[int, int]]
) -> ExpandChunkInput[int]:
    """Wrap the given character ranges of `document` into an `ExpandChunkInput`."""
    found_chunks = [
        DocumentChunk(
            text=document.text[start:end],
            start=start,
            end=end,
        )
        for start, end in index_ranges
    ]
    return ExpandChunkInput(document_id=0, chunks_found=found_chunks)


@fixture
def wholly_included_expand_chunk_input(
    in_memory_retriever_documents: Sequence[Document],
) -> ExpandChunkInput[int]:
    """A single found chunk lying strictly inside the document (50%-55%)."""
    assert len(in_memory_retriever_documents) == 1
    document = in_memory_retriever_documents[0]
    text_length = len(document.text)
    index_range = (int(text_length * 0.5), int(text_length * 0.55))
    return build_expand_chunk_input(document, [index_range])


@fixture
def overlapping_expand_chunk_input(
    in_memory_retriever_documents: Sequence[Document],
) -> ExpandChunkInput[int]:
    """A single found chunk spanning most of the document (20%-80%)."""
    assert len(in_memory_retriever_documents) == 1
    document = in_memory_retriever_documents[0]
    text_length = len(document.text)
    index_range = (int(text_length * 0.2), int(text_length * 0.8))
    return build_expand_chunk_input(document, [index_range])


@fixture
def multiple_chunks_expand_chunk_input(
    in_memory_retriever_documents: Sequence[Document],
) -> ExpandChunkInput[int]:
    """Two separate found chunks (30%-40% and 45%-60% of the document)."""
    assert len(in_memory_retriever_documents) == 1
    document = in_memory_retriever_documents[0]
    text_length = len(document.text)
    first_range = (int(text_length * 0.3), int(text_length * 0.4))
    second_range = (int(text_length * 0.45), int(text_length * 0.6))
    return build_expand_chunk_input(document, [first_range, second_range])


def test_expand_chunk_works_for_wholly_included_chunk(
    asymmetric_in_memory_retriever: QdrantInMemoryRetriever,
    luminous_control_model: LuminousControlModel,
    wholly_included_expand_chunk_input: ExpandChunkInput[int],
    no_op_tracer: NoOpTracer,
) -> None:
    """A fully-contained found chunk expands to exactly one chunk containing it."""
    task = ExpandChunks(
        asymmetric_in_memory_retriever, luminous_control_model, 256
    )
    output = task.run(wholly_included_expand_chunk_input, no_op_tracer)

    number_found = len(wholly_included_expand_chunk_input.chunks_found)
    assert len(output.chunks) == 1 == number_found
    found_text = wholly_included_expand_chunk_input.chunks_found[0].text
    assert found_text in output.chunks[0]


def test_expand_chunk_works_for_overlapping_chunk(
    asymmetric_in_memory_retriever: QdrantInMemoryRetriever,
    luminous_control_model: LuminousControlModel,
    overlapping_expand_chunk_input: ExpandChunkInput[int],
    no_op_tracer: NoOpTracer,
) -> None:
    """A found chunk spanning 60% of the document expands to four chunks at size 256."""
    task = ExpandChunks(
        asymmetric_in_memory_retriever, luminous_control_model, 256
    )
    output = task.run(overlapping_expand_chunk_input, no_op_tracer)

    assert len(output.chunks) == 4


def test_expand_chunk_works_for_multiple_chunks(
    asymmetric_in_memory_retriever: QdrantInMemoryRetriever,
    luminous_control_model: LuminousControlModel,
    multiple_chunks_expand_chunk_input: ExpandChunkInput[int],
    no_op_tracer: NoOpTracer,
) -> None:
    """Every found chunk appears within the concatenation of the expanded chunks."""
    task = ExpandChunks(
        asymmetric_in_memory_retriever, luminous_control_model, 256
    )
    output = task.run(multiple_chunks_expand_chunk_input, no_op_tracer)

    assert len(output.chunks) == 3

    joined_chunks = "\n\n".join(output.chunks)
    for found in multiple_chunks_expand_chunk_input.chunks_found:
        assert found.text in joined_chunks

0 comments on commit eb33426

Please sign in to comment.