Commit 8ac5a23

fix tests
fxmarty committed Jul 9, 2024
1 parent 350aed7 commit 8ac5a23
Showing 2 changed files with 44 additions and 11 deletions.
src/transformers/utils/fx.py (13 additions & 4 deletions)
@@ -997,11 +997,20 @@ def _generate_dummy_input(
             )
         elif "inputs_embeds" in input_name:
             batch_size = shape[0]
-            sequence_length = shape[-1]
 
-            inputs_dict[input_name] = torch.zeros(
-                batch_size, sequence_length, model.config.hidden_size, dtype=torch.float, device=device
-            )
+            if getattr(model.config, "embedding_size", None) is not None:
+                embedding_size = model.config.embedding_size
+            else:
+                embedding_size = model.config.hidden_size
+
+            if len(shape) == 3:
+                # (batch_size, num_choices, sequence_length, embedding_size)
+                embedding_shape = (batch_size, shape[1], shape[2], embedding_size)
+            else:
+                # (batch_size, sequence_length, embedding_size)
+                embedding_shape = (batch_size, shape[1], embedding_size)
+
+            inputs_dict[input_name] = torch.zeros(embedding_shape, dtype=torch.float, device=device)
         elif "visual_feats" in input_name:
             inputs_dict[input_name] = torch.zeros(
                 shape
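What the new branch fixes: the old code read sequence_length from shape[-1] and always used model.config.hidden_size, so models with a factorized embedding (ALBERT-style, where embedding_size != hidden_size) got dummy inputs_embeds of the wrong width, and multiple-choice inputs of shape (batch_size, num_choices, sequence_length) lost their num_choices dimension. Below is a standalone sketch of the new shape logic, not part of the commit; DummyConfig and the example shapes are made up for illustration.

import torch

class DummyConfig:
    # Hypothetical stand-in for a transformers config; only the two
    # attributes the new branch reads are modeled here.
    hidden_size = 768
    embedding_size = 128  # defined on ALBERT-like configs, absent on most others

def dummy_inputs_embeds(shape, config, device="cpu"):
    # Mirrors the commit's logic: prefer embedding_size when the config defines one.
    batch_size = shape[0]
    if getattr(config, "embedding_size", None) is not None:
        embedding_size = config.embedding_size
    else:
        embedding_size = config.hidden_size

    if len(shape) == 3:
        # multiple-choice ids (batch_size, num_choices, sequence_length)
        embedding_shape = (batch_size, shape[1], shape[2], embedding_size)
    else:
        # standard ids (batch_size, sequence_length)
        embedding_shape = (batch_size, shape[1], embedding_size)
    return torch.zeros(embedding_shape, dtype=torch.float, device=device)

print(dummy_inputs_embeds((2, 8), DummyConfig()).shape)     # torch.Size([2, 8, 128])
print(dummy_inputs_embeds((2, 4, 8), DummyConfig()).shape)  # torch.Size([2, 4, 8, 128])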
tests/test_modeling_common.py (31 additions & 7 deletions)
@@ -1215,15 +1215,39 @@ def _create_and_check_torch_fx_tracing(self, config, inputs_dict, output_loss=False):
                         (past_mask, inputs_to_test[1]["attention_mask"]), dim=1
                     )
 
-                if "inputs_embeds" in inspect.signature(model.forward).parameters and not model.config.is_encoder_decoder:
-                    inputs_to_test.append(
-                        {
-                            "inputs_embeds": torch.rand(
-                                2, 2, model.config.hidden_size, dtype=torch.float, device=torch_device
-                            )
-                        }
+                if (
+                    "input_ids" in inspect.signature(model.forward).parameters
+                    and "inputs_embeds" in inspect.signature(model.forward).parameters
+                    and not model.config.is_encoder_decoder
+                ):
+                    inps = copy.deepcopy(inputs_to_test[0])
+
+                    embedding_size = (
+                        model.config.embedding_size
+                        if getattr(model.config, "embedding_size", None) is not None
+                        and model.config.model_type != "megatron-bert"
+                        else model.config.hidden_size
                     )
 
+                    if (
+                        model.config.model_type in MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES
+                        and model.__class__.__name__ == MODEL_FOR_MULTIPLE_CHOICE_MAPPING_NAMES[model.config.model_type]
+                    ):
+                        batch_size = inputs[next(iter(inputs))].shape[0]
+                        num_choices = inputs[next(iter(inputs))].shape[1]
+                        sequence_length = inputs[next(iter(inputs))].shape[2]
+                        shape = (batch_size, num_choices, sequence_length, embedding_size)
+                    elif inps["input_ids"].ndim == 2:
+                        batch_size = inputs[next(iter(inputs))].shape[0]
+                        sequence_length = inputs[next(iter(inputs))].shape[1]
+                        shape = (batch_size, sequence_length, embedding_size)
+                    else:
+                        self.skipTest("Unknown case")
+
+                    del inps["input_ids"]
+                    inps["inputs_embeds"] = torch.rand(shape, dtype=torch.float, device=torch_device)
+                    inputs_to_test.append(inps)
+
                 for inps in inputs_to_test:
                     filtered_inputs = {k: v for (k, v) in inps.items() if k in input_names}
                     input_names_to_trace = list(filtered_inputs.keys())
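Instead of hard-coding a (2, 2, hidden_size) tensor, the test now clones an existing input dict, derives the embedding shape from the model's real inputs (handling the multiple-choice 4D case and skipping unknown layouts), and swaps input_ids for inputs_embeds. A minimal sketch of that swap pattern follows, with placeholder dimensions rather than the test's actual fixtures.

import copy
import torch

# Placeholder sizes; the real test reads them from the model's input tensors.
batch_size, sequence_length, embedding_size = 2, 8, 32

inputs = {
    "input_ids": torch.randint(0, 100, (batch_size, sequence_length)),
    "attention_mask": torch.ones(batch_size, sequence_length, dtype=torch.long),
}

# Same pattern as the updated test: deep-copy the dict, drop the token ids,
# and provide random embeddings of the matching (batch, seq, embed) shape.
inps = copy.deepcopy(inputs)
del inps["input_ids"]
inps["inputs_embeds"] = torch.rand(batch_size, sequence_length, embedding_size)

assert "input_ids" not in inps
assert inps["inputs_embeds"].shape == (batch_size, sequence_length, embedding_size)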
