Skip to content

Commit

Permalink
Switched to raw str_labels and dropping dupes during __add__()
Browse files Browse the repository at this point in the history
  • Loading branch information
ojh31 committed Oct 12, 2023
1 parent 97a33e0 commit 72cd307
Show file tree
Hide file tree
Showing 2 changed files with 11 additions and 3 deletions.
10 changes: 9 additions & 1 deletion utils/prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -553,6 +553,14 @@ def __init__(
self.position = position
self.label = label

def drop_duplicates(self):
    """Return a copy of this dataset keeping only the first occurrence of
    each distinct token found at the readout position of every prompt.

    BUG FIX: ``torch.unique(..., return_inverse=True)`` returns the
    *inverse* mapping — for each input element, the index of its value in
    the sorted unique output — not the indices of the unique elements in
    the original tensor.  The previous code passed that inverse mapping
    straight to ``get_subset``, which neither shrank the dataset (the
    inverse has the same length as the input) nor selected the right rows.
    Here we recover the index of the *first occurrence* of each unique
    value and subset on those, preserving the original row order.
    """
    # Token at the readout position for every prompt; shape (N,).
    # NOTE(review): assumes self.position indexes dim 1 of clean_tokens
    # per-row — matches the original expression; confirm against callers.
    tokens = self.clean_tokens[
        torch.arange(len(self.clean_tokens)), self.position
    ]
    unique_vals, inverse = torch.unique(tokens, dim=0, return_inverse=True)
    # Reversed-scatter idiom: writing positions in reverse order means the
    # earliest occurrence is written last and therefore wins, yielding the
    # first original index for each unique value.
    order = torch.arange(inverse.numel(), device=inverse.device)
    first_occurrence = torch.empty(
        len(unique_vals), dtype=torch.long, device=inverse.device
    )
    first_occurrence.scatter_(0, inverse.flip(0), order.flip(0))
    # Sort so the surviving rows keep their original dataset order.
    keep = first_occurrence.sort().values
    return self.get_subset(keep.tolist())

def __add__(self, other: "CleanCorruptedDataset"):
assert isinstance(other, CleanCorruptedDataset)
assert self.tokenizer is not None
Expand Down Expand Up @@ -581,7 +589,7 @@ def __add__(self, other: "CleanCorruptedDataset"):
self.tokenizer,
None,
self.label,
)
).drop_duplicates()

def get_subset(self, indices: List[int]):
return CleanCorruptedDataset(
Expand Down
4 changes: 2 additions & 2 deletions utils/residual_stream.py
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,8 @@ def __init__(
torch.arange(len(self.prompt_tokens)), self.position
].cpu().detach()
str_tokens = [
f"{pos}:{tok}"
for pos, tok in zip(position, model.to_str_tokens(label_tensor))
f"{tok}"
for tok in model.to_str_tokens(label_tensor)
]
to_str_check = (
len(str_tokens) == len(self.prompt_tokens) and
Expand Down

0 comments on commit 72cd307

Please sign in to comment.