Skip to content
This repository was archived by the owner on Oct 25, 2024. It is now read-only.

Commit 5caf330

Browse files
authored
Fixed T5 base model quantization issue with IPEX smoothquant (#946)
1 parent 1967445 commit 5caf330

File tree

5 files changed

+10
-10
lines changed

5 files changed

+10
-10
lines changed

examples/.config/pytorch_optimize.json

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2068,7 +2068,6 @@
20682068
"cmd": "bash run_tuning.sh",
20692069
"params": {
20702070
"topology": "flan-t5-large",
2071-
"task": "generation",
20722071
"approach": "static",
20732072
"backend": "ipex",
20742073
"output_model": "saved_results"
@@ -2078,7 +2077,7 @@
20782077
"cmd": "bash run_benchmark.sh",
20792078
"params": {
20802079
"topology": "flan-t5-large",
2081-
"task": "generation",
2080+
"lm_eval_tasks": "cnn_dailymail",
20822081
"approach": "static",
20832082
"backend": "ipex",
20842083
"mode": "benchmark",
@@ -2095,7 +2094,6 @@
20952094
"cmd": "bash run_tuning.sh",
20962095
"params": {
20972096
"topology": "t5-base-tag",
2098-
"task": "generation",
20992097
"approach": "static",
21002098
"backend": "ipex",
21012099
"output_model": "saved_results"
@@ -2105,7 +2103,7 @@
21052103
"cmd": "bash run_benchmark.sh",
21062104
"params": {
21072105
"topology": "t5-base-tag",
2108-
"task": "generation",
2106+
"lm_eval_tasks": "cnn_dailymail",
21092107
"approach": "static",
21102108
"backend": "ipex",
21112109
"mode": "benchmark",

examples/huggingface/pytorch/text2text-generation/requirements.txt

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,5 +7,7 @@ onnx
77
rouge-score
88
nltk
99
neural-compressor
10-
optimum-intel
11-
git+https://github.com/intel/intel-extension-for-pytorch.git
10+
optimum-intel > 1.12.0
11+
onnxruntime
12+
intel-extension-for-pytorch
13+
git+https://github.com/EleutherAI/lm-evaluation-harness.git@cc9778fbe4fa1a709be2abed9deb6180fd40e7e2

examples/huggingface/pytorch/text2text-generation/run_benchmark.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -98,7 +98,7 @@ function run_benchmark {
9898
elif [[ ${int8} == "true" ]]; then
9999
model_name_or_path=${tuned_checkpoint}
100100
else
101-
model_name_or_path="fabiochiu/t5-base-tag-generation"
101+
model_name_or_path="t5-base"
102102
fi
103103
if [ "${backend}" = "ipex" ]; then
104104
extra_cmd=$extra_cmd" --ipex"

examples/huggingface/pytorch/text2text-generation/run_seq2seq_generation.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -72,10 +72,10 @@ def set_seed(args):
7272

7373
def get_example_inputs(model):
7474
onnx_config_class = TasksManager.get_exporter_config_constructor(model_type=model.config.model_type, exporter="onnx", task="text2text-generation")
75-
onnx_config = onnx_config_class(model.config, use_past=model.config.use_cache)
75+
onnx_config = onnx_config_class(model.config, use_past=model.config.use_cache, use_past_in_inputs=model.config.use_cache)
7676
encoder_onnx_config = onnx_config.with_behavior("encoder")
7777
decoder_onnx_config = onnx_config.with_behavior("decoder", use_past=False)
78-
decoder_with_past_onnx_config = onnx_config.with_behavior("decoder", use_past=True)
78+
decoder_with_past_onnx_config = onnx_config.with_behavior("decoder", use_past=True, use_past_in_inputs=model.config.use_cache)
7979
encoder_dummy_inputs = encoder_onnx_config.generate_dummy_inputs(framework="pt")
8080
decoder_dummy_inputs = decoder_onnx_config.generate_dummy_inputs(framework="pt")
8181
decoder_dummy_inputs["encoder_outputs"] = tuple(decoder_dummy_inputs["encoder_outputs"][0:1])

examples/huggingface/pytorch/text2text-generation/run_tuning.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -73,7 +73,7 @@ function run_tuning {
7373
if [ $input_model ];then
7474
model_name_or_path=${input_model}
7575
else
76-
model_name_or_path="fabiochiu/t5-base-tag-generation"
76+
model_name_or_path="t5-base"
7777
fi
7878
if [ "${backend}" = "ipex" ]; then
7979
extra_cmd=$extra_cmd" --ipex"

0 commit comments

Comments (0)