Commit

fix

ydshieh committed Dec 7, 2023
1 parent 642e9f6 commit 3c0ed10
Showing 2 changed files with 22 additions and 16 deletions.
6 changes: 3 additions & 3 deletions tests/models/align/test_modeling_align.py
@@ -478,9 +478,9 @@ def test_inputs_embeds(self):
def test_retain_grad_hidden_states_attentions(self):
pass

@unittest.skip(reason="AlignModel does not have input/output embeddings")

def test_model_common_attributes(self):
pass
assert 1 == 2

# override as the `temperature` parameter initialization is different for ALIGN
def test_initialization(self):
@@ -598,8 +598,8 @@ def test_load_vision_text_config(self):
text_config = AlignTextConfig.from_pretrained(tmp_dir_name)
self.assertDictEqual(config.text_config.to_dict(), text_config.to_dict())

@slow
def test_model_from_pretrained(self):
assert 1 == 3
for model_name in ALIGN_PRETRAINED_MODEL_ARCHIVE_LIST[:1]:
model = AlignModel.from_pretrained(model_name)
self.assertIsNotNone(model)
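The edits to tests/models/align/test_modeling_align.py replace the body of test_model_common_attributes with `assert 1 == 2` and put `assert 1 == 3` at the top of test_model_from_pretrained. Both asserts are guaranteed to fail, which reads as temporary scaffolding: the same tests then fail on both the single-GPU and the multi-GPU CI jobs, producing identical failure lines that exercise the notification change in the next file. A minimal standalone sketch of what the two tests reduce to (the class name and plain unittest runner here are illustrative, not the repository's real test class):

import unittest


class DummyAlignTests(unittest.TestCase):
    # Mirrors the two asserts introduced above; both tests always fail,
    # so any CI job running them reports the same failure line.
    def test_model_common_attributes(self):
        assert 1 == 2

    def test_model_from_pretrained(self):
        assert 1 == 3


if __name__ == "__main__":
    unittest.main()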
32 changes: 19 additions & 13 deletions utils/notification_service.py
@@ -677,8 +677,7 @@ def post_reply(self):
if "prev_ci_results" in self.prev_ci_artifacts and "model_results.json" in self.prev_ci_artifacts["prev_ci_results"]:
prev_model_results = json.loads(self.prev_ci_artifacts["prev_ci_results"]["model_results.json"])

MAX_ERROR_TEXT = 3000 - len("[Truncated]")
failure_text = ""
all_failure_lines = dict()
for job, job_result in sorted_dict:
if len(job_result["failures"]):
for device, failures in job_result["failures"].items():
@@ -697,19 +696,26 @@
continue

if url is not None:
new_text = failure_text + f'device: {device} gpu\n<{url}|{error["line"]}>\n\n'
new_text = f'<{url}|{error["line"]}>\n\n'
else:
new_text = failure_text + f'device: {device} gpu\n{error["line"]}\n\n'
if len(new_text) > MAX_ERROR_TEXT:
# `failure_text` here has length <= 3000
failure_text = failure_text + "[Truncated]"
break
# `failure_text` here has length <= MAX_ERROR_TEXT
failure_text = new_text
if failure_text.endswith("[Truncated]"):
break
if failure_text.endswith("[Truncated]"):
break
new_text = f'{error["line"]}\n\n'

if new_text not in all_failure_lines:
all_failure_lines[new_text] = []
all_failure_lines[new_text].append(device)

MAX_ERROR_TEXT = 3000 - len("[Truncated]")
failure_text = ""
for line, devices in all_failure_lines.items():
if len(devices) == 2:
devices = ["single", "multi"]
new_text = failure_text + f"{'|'.join(devices)} gpu\n{line}"
if len(new_text) > MAX_ERROR_TEXT:
# `failure_text` here has length <= 3000
failure_text = failure_text + "[Truncated]"
break
# `failure_text` here has length <= MAX_ERROR_TEXT
failure_text = new_text

if failure_text:
blocks = [
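The utils/notification_service.py change is the substantive part. Previously the Slack failure text was built inline per device, so an error hitting both the single-GPU and the multi-GPU job was printed twice with a `device: {device} gpu` prefix. The new code first collects every distinct failure line in `all_failure_lines` together with the devices it occurred on, then renders each line once with a `single|multi gpu` prefix when both jobs hit it, still capping the message near Slack's 3000-character block limit via the `[Truncated]` marker. A minimal standalone sketch of that idea (the function name, the input dict of device → error lines, and the sample pytest-style line are illustrative assumptions, not the repository's real data structures):

# Illustrative sketch only: `build_failure_text` and its input shape are assumptions,
# but the grouping and "[Truncated]" capping mirror the logic added in the diff above.
MAX_ERROR_TEXT = 3000 - len("[Truncated]")


def build_failure_text(failures_per_device):
    # failures_per_device: dict mapping "single" / "multi" to a list of error lines.
    all_failure_lines = {}
    for device, lines in failures_per_device.items():
        for line in lines:
            # Group devices under each distinct failure line instead of repeating the line per device.
            all_failure_lines.setdefault(f"{line}\n\n", []).append(device)

    failure_text = ""
    for line, devices in all_failure_lines.items():
        # When both CI jobs hit the same line this renders as "single|multi gpu".
        new_text = failure_text + f"{'|'.join(devices)} gpu\n{line}"
        if len(new_text) > MAX_ERROR_TEXT:
            # Keep the message under the Slack limit and flag that it was cut short.
            return failure_text + "[Truncated]"
        failure_text = new_text
    return failure_text


if __name__ == "__main__":
    same_line = "tests/models/align/test_modeling_align.py::test_model_common_attributes - assert 1 == 2"
    print(build_failure_text({"single": [same_line], "multi": [same_line]}))

With the deliberately failing ALIGN tests above, both device jobs should produce the same line, so it shows up once as `single|multi gpu` instead of twice.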
