
Commit

change deta_modeling_test of configuration 'two_stage' default to TRUE and minor change of dist checking
SangbumChoi committed Dec 13, 2023
1 parent b2ea2b3 commit 92280f5
Showing 2 changed files with 3 additions and 2 deletions.
3 changes: 2 additions & 1 deletion src/transformers/models/deta/modeling_deta.py
@@ -2220,7 +2220,7 @@ def forward(self, outputs, targets):
num_boxes = sum(len(t["class_labels"]) for t in targets)
num_boxes = torch.as_tensor([num_boxes], dtype=torch.float, device=next(iter(outputs.values())).device)
# (Niels): comment out function below, distributed training to be added
- if dist.is_available() or dist.is_initialized():
+ if dist.is_available() and dist.is_initialized():
torch.distributed.all_reduce(num_boxes)
world_size = dist.get_world_size()
else:
@@ -2665,6 +2665,7 @@ def forward(self, outputs, targets, return_cost_matrix=False):
bs = len(targets)
indices = []
ious = []
print(outputs["init_reference"][0].shape)
for b in range(bs):
iou, _ = box_iou(
center_to_corners_format(targets[b]["boxes"]),
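For context, a minimal sketch (not part of the commit) of the guard pattern the corrected condition implements: torch.distributed.all_reduce should only run when the distributed backend is both compiled in (is_available) and a process group has actually been started (is_initialized); with the previous "or", a single-process run where is_available() is True would attempt a collective call and fail. Everything besides the two dist checks follows the surrounding loss code loosely and is an assumption, including the final clamp-style normalization.

import torch
import torch.distributed as dist

def reduce_num_boxes(num_boxes: torch.Tensor) -> float:
    # Guard matching the corrected condition: is_available() is True on most
    # builds even without any process group, so is_initialized() must also hold.
    if dist.is_available() and dist.is_initialized():
        dist.all_reduce(num_boxes)          # sum the box count across ranks
        world_size = dist.get_world_size()  # divide back to a per-rank average
    else:
        world_size = 1
    # Assumed normalization, mirroring the DETR-style loss code around this hunk.
    return torch.clamp(num_boxes / world_size, min=1).item()
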
2 changes: 1 addition & 1 deletion tests/models/deta/test_modeling_deta.py
@@ -64,7 +64,7 @@ def __init__(
num_feature_levels=4,
encoder_n_points=2,
decoder_n_points=6,
- two_stage=False,
+ two_stage=True,
):
self.parent = parent
self.batch_size = batch_size
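With the tester default flipped above, the DETA tests now exercise the two-stage path by default, where encoder-generated proposals seed the decoder queries. As a rough illustration only (not part of the commit, and the exact config surface is an assumption), the equivalent setting on the public config looks like this:

from transformers import DetaConfig

# Assumption: DetaConfig exposes a two_stage flag analogous to the tester
# attribute changed above; when True, encoder proposals seed the decoder queries.
config = DetaConfig(two_stage=True)
print(config.two_stage)  # True
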
