From 0f8aab71e8967926767891c03b3badeacd17ae71 Mon Sep 17 00:00:00 2001
From: yardeny-sony
Date: Wed, 25 Sep 2024 09:34:35 +0300
Subject: [PATCH] remove scale parameter

---
 tests/common_tests/base_test.py                             | 4 ++--
 .../feature_models/scaled_dot_product_attention_test.py     | 9 ++++-----
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/tests/common_tests/base_test.py b/tests/common_tests/base_test.py
index b5e479f0d..74231587c 100644
--- a/tests/common_tests/base_test.py
+++ b/tests/common_tests/base_test.py
@@ -16,8 +16,8 @@ def __init__(self, unit_test, use_fuzzy_validation=False):
         """
-        :param use_fuzzy_validation: Allow similar (instead of exact) float output when comparing to original float
-        model against the no_quantization output model
+        :param use_fuzzy_validation: Allow similar (instead of exact) outputs when comparing the original float
+            model output against the no_quantization model output.
         """
         self.unit_test = unit_test
diff --git a/tests/pytorch_tests/model_tests/feature_models/scaled_dot_product_attention_test.py b/tests/pytorch_tests/model_tests/feature_models/scaled_dot_product_attention_test.py
index 144ec1076..0c045a352 100644
--- a/tests/pytorch_tests/model_tests/feature_models/scaled_dot_product_attention_test.py
+++ b/tests/pytorch_tests/model_tests/feature_models/scaled_dot_product_attention_test.py
@@ -15,14 +15,14 @@ def forward(self, q, k, v):
                 attn_mask=self.attn_mask,
                 dropout_p=self.dropout_p,
                 is_causal=self.is_causal,
-                scale=self.scale
+                # scale=self.scale
             )
         return x

 class ScaledDotProductAttentionTest(BasePytorchTest):
     """
-    This test checks the MultiHeadAttention as a single layer with add_bias_kv feature.
+    This test checks the scaled_dot_product_attention (SDPA) substitution using a single SDPA layer.
     """
     def __init__(self, unit_test, dropout_p=0.0, scale=None, attn_mask=None, is_causal=False):
         super().__init__(unit_test)
@@ -47,9 +47,8 @@ def create_inputs_shape(self):

     def _test_substitution_structure_output(self, post_substitution_nodes):
         """
-        :param orig_graph: The original float model graph before substitution
-        :param new_graph: The post substitutions graph
-        :return: True if the new graph after scaled_dot_product_attention substitution is in the correct structure.
+        :param post_substitution_nodes: The graph nodes after the SDPA substitution.
+        :raise Exception: if post_substitution_nodes doesn't match expected_nodes_counter.
         """
         expected_nodes_counter = {
             'DummyPlaceHolder': 3,
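
For context, a minimal sketch (outside the patch) of why omitting the `scale` argument is behavior-preserving whenever `self.scale` is None: PyTorch's scaled_dot_product_attention falls back to the default 1/sqrt(head_dim) scaling, the standard attention normalization. The tensor shapes, seed, and tolerance below are illustrative assumptions, not values taken from the test.

# Illustrative check (not part of the patch): calling SDPA without `scale`
# uses the default 1/sqrt(head_dim), so dropping `scale=self.scale` changes
# nothing when self.scale is None. Shapes here are arbitrary.
import math
import torch
import torch.nn.functional as F

torch.manual_seed(0)
q = torch.randn(1, 8, 16, 64)  # (batch, heads, seq_len, head_dim)
k = torch.randn(1, 8, 16, 64)
v = torch.randn(1, 8, 16, 64)

# SDPA with the `scale` argument omitted (the post-patch call shape).
out_default = F.scaled_dot_product_attention(q, k, v)

# Manual reference computation using the documented default scale.
scale = 1.0 / math.sqrt(q.size(-1))
attn = torch.softmax((q @ k.transpose(-2, -1)) * scale, dim=-1)
out_manual = attn @ v

assert torch.allclose(out_default, out_manual, atol=1e-5)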