From 71a8f78f41938d646553cd13a1419294028c985a Mon Sep 17 00:00:00 2001
From: Amir Klein <203507526+amirkl94@users.noreply.github.com>
Date: Wed, 9 Jul 2025 12:15:17 +0300
Subject: [PATCH 1/2] tests: Fix lora perf test

Signed-off-by: Amir Klein <203507526+amirkl94@users.noreply.github.com>
---
 .../defs/perf/pytorch_model_config.py | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/tests/integration/defs/perf/pytorch_model_config.py b/tests/integration/defs/perf/pytorch_model_config.py
index 476e23079c7..74b450e5243 100644
--- a/tests/integration/defs/perf/pytorch_model_config.py
+++ b/tests/integration/defs/perf/pytorch_model_config.py
@@ -144,13 +144,22 @@ def get_model_yaml_config(model_label: str,
     if 'pytorch' in model_label and 'loras' in model_label:
         lora_config = {
             'lora_config': {
-                'lora_dir': lora_dirs if lora_dirs is not None else [],
-                'max_lora_rank': 64,
-                'lora_target_modules': ['attn_q', 'attn_k', 'attn_v'],
+                'lora_dir':
+                lora_dirs if lora_dirs is not None else [],
+                'max_lora_rank':
+                64,
+                'lora_target_modules': [
+                    'attn_q', 'attn_k', 'attn_v', 'attn_dense', 'mlp_h_to_4h',
+                    'mlp_4h_to_h', 'mlp_gate'
+                ],
                 'trtllm_modules_to_hf_modules': {
                     "attn_q": "q_proj",
                     "attn_k": "k_proj",
-                    "attn_v": "v_proj"
+                    "attn_v": "v_proj",
+                    "attn_dense": "o_proj",
+                    "mlp_h_to_4h": "gate_proj",
+                    "mlp_4h_to_h": "down_proj",
+                    "mlp_gate": "up_proj",
                 }
             }
         }

From dfa78007d78e35b7b7e82fda6e1cfa9f141da554 Mon Sep 17 00:00:00 2001
From: Amir Klein <203507526+amirkl94@users.noreply.github.com>
Date: Wed, 9 Jul 2025 14:37:36 +0300
Subject: [PATCH 2/2] Remove redundant modules

Signed-off-by: Amir Klein <203507526+amirkl94@users.noreply.github.com>
---
 tests/integration/defs/perf/pytorch_model_config.py | 11 +++--------
 1 file changed, 3 insertions(+), 8 deletions(-)

diff --git a/tests/integration/defs/perf/pytorch_model_config.py b/tests/integration/defs/perf/pytorch_model_config.py
index 74b450e5243..ba01027d465 100644
--- a/tests/integration/defs/perf/pytorch_model_config.py
+++ b/tests/integration/defs/perf/pytorch_model_config.py
@@ -144,14 +144,9 @@ def get_model_yaml_config(model_label: str,
     if 'pytorch' in model_label and 'loras' in model_label:
         lora_config = {
             'lora_config': {
-                'lora_dir':
-                lora_dirs if lora_dirs is not None else [],
-                'max_lora_rank':
-                64,
-                'lora_target_modules': [
-                    'attn_q', 'attn_k', 'attn_v', 'attn_dense', 'mlp_h_to_4h',
-                    'mlp_4h_to_h', 'mlp_gate'
-                ],
+                'lora_dir': lora_dirs if lora_dirs is not None else [],
+                'max_lora_rank': 64,
+                'lora_target_modules': ['attn_q', 'attn_k', 'attn_v'],
                 'trtllm_modules_to_hf_modules': {
                     "attn_q": "q_proj",
                     "attn_k": "k_proj",
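
Net effect of the series, for reference. The following Python is reconstructed from the hunks above, not copied from the repository: after PATCH 2/2 drops the redundant entries from 'lora_target_modules', the LoRA adapter targets only the attention q/k/v projections, while the full TRT-LLM-to-HF module-name mapping introduced in PATCH 1/2 is kept. The resulting block inside get_model_yaml_config() should read:

    # LoRA perf-test config after both patches are applied
    if 'pytorch' in model_label and 'loras' in model_label:
        lora_config = {
            'lora_config': {
                # lora_dirs is a parameter of get_model_yaml_config()
                'lora_dir': lora_dirs if lora_dirs is not None else [],
                'max_lora_rank': 64,
                # adapters are applied to attention q/k/v only
                'lora_target_modules': ['attn_q', 'attn_k', 'attn_v'],
                # full TRT-LLM -> HF module-name mapping is retained
                'trtllm_modules_to_hf_modules': {
                    "attn_q": "q_proj",
                    "attn_k": "k_proj",
                    "attn_v": "v_proj",
                    "attn_dense": "o_proj",
                    "mlp_h_to_4h": "gate_proj",
                    "mlp_4h_to_h": "down_proj",
                    "mlp_gate": "up_proj",
                }
            }
        }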