
Commit 571e845

More testing (#233)
1 parent ba69358 commit 571e845


155 files changed · +1385 −1782 lines changed


.flake8

Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+[flake8]
+max-line-length = 120
+ignore = W504, W503, E722, E731
+exclude = demos/privacy_preservation
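With this file committed at the repository root, flake8 discovers the settings automatically, which is why the lint command in the workflow below drops its inline flags and becomes plain `python3 -m flake8`. The `.flake8` file uses INI syntax, so it can also be read with the standard library if you want to sanity-check the values; the test class below is only an illustration and is not part of this commit:

```python
# Hypothetical sanity check of the committed .flake8 config.
# Uses only the standard library; the test module itself is an assumption.
import configparser
import unittest


class TestFlake8Config(unittest.TestCase):
    def test_config_values(self):
        config = configparser.ConfigParser()
        config.read(".flake8")  # flake8 configs are INI files, so configparser can parse them
        self.assertEqual(config["flake8"]["max-line-length"], "120")
        ignored = {code.strip() for code in config["flake8"]["ignore"].split(",")}
        self.assertEqual(ignored, {"W504", "W503", "E722", "E731"})


if __name__ == "__main__":
    unittest.main()
```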

.github/workflows/test.yml

Lines changed: 97 additions & 23 deletions
@@ -13,20 +13,31 @@ jobs:
   linter:
     runs-on: ubuntu-latest
     container: ubuntu:22.04
-    name: Lint - Flake8
+    name: Flake8, intellectual property compliance
     steps:
-    - name: Install git & flake8
+    - name: Install deps
       run:
-        apt-get update && apt-get install -y git python3-pip && pip3 install flake8
+        apt-get update && apt-get install -y git python3-pip && pip3 install flake8 urlextract

-    - name: Git checkout
+    - name: Git checkout w/o submodules
       uses: actions/checkout@v4
       with:
         submodules: false

     - name: Lint with flake8
       run:
-        python3 -m flake8 --max-complexity=10 --max-line-length=120 --statistics --exit-zero
+        python3 -m flake8
+
+    - name: Git checkout w/ submodules
+      uses: actions/checkout@v4
+      with:
+        fetch-depth: 0
+        submodules: True
+
+    - name: Check for intellectual property compliance
+      run: |
+        git config --global --add safe.directory $(pwd)
+        python3 -m unittest tests.test_ip

   test_x86:
     runs-on: ubuntu-latest
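The compliance step introduced above installs `urlextract` and runs `python3 -m unittest tests.test_ip`. The test module itself is not part of the excerpt shown here, so the sketch below is only an illustration of how such a check could work — scanning tracked files for URLs and comparing them against an allowlist. The allowlist, the file selection, and the test name are assumptions, not the actual implementation:

```python
# Hypothetical sketch of a URL/intellectual-property compliance check.
# The real tests/test_ip.py is not shown in this diff; the allowlist and
# file filtering below are illustrative assumptions.
import subprocess
import unittest

from urlextract import URLExtract  # installed by the lint job via pip3

ALLOWED_DOMAINS = (
    "github.com",
    "zenodo.org",
    "ampereaimodelzoo.s3.eu-central-1.amazonaws.com",
)


class TestIP(unittest.TestCase):
    def test_urls_are_allowlisted(self):
        # git ls-files works inside the container because the job first adds
        # the checkout as a safe.directory
        files = subprocess.check_output(
            ["git", "ls-files", "*.py", "*.md"], text=True
        ).splitlines()
        extractor = URLExtract()
        for path in files:
            with open(path, encoding="utf-8", errors="ignore") as f:
                for url in extractor.find_urls(f.read()):
                    self.assertTrue(
                        any(domain in url for domain in ALLOWED_DOMAINS),
                        msg=f"{path}: unexpected URL {url}",
                    )


if __name__ == "__main__":
    unittest.main()
```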
@@ -37,6 +48,15 @@ jobs:
       COCO_IMG_PATH: aio_objdet_dataset
       COCO_ANNO_PATH: aio_objdet_dataset/annotations.json
       OMP_NUM_THREADS: 4
+      S3_URL_CRITEO_DATASET: ${{ secrets.S3_URL_CRITEO_DATASET }}
+      S3_URL_RESNET_50_V15_TF_FP32: ${{ secrets.S3_URL_RESNET_50_V15_TF_FP32 }}
+      S3_URL_SSD_INCEPTION_V2_TF_FP32: ${{ secrets.S3_URL_SSD_INCEPTION_V2_TF_FP32 }}
+      S3_URL_ALPACA_PYTORCH_FP32: ${{ secrets.S3_URL_ALPACA_PYTORCH_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
+      HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Install git
       run:
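Model and dataset locations now reach the jobs as repository secrets exposed through the `S3_URL_*` environment variables above, so the smoke-test steps can simply `wget $S3_URL_...` instead of hard-coding bucket URLs. A small, hypothetical helper showing how a local script could consume the same variables — the function name and error handling are illustrative, not repo code:

```python
# Illustrative helper for the S3_URL_* variables the workflow exports.
# Not part of this commit; variable names come from the workflow above.
import os
import urllib.request


def fetch_from_env(var_name: str, destination: str) -> str:
    """Download the artifact pointed to by an S3_URL_* environment variable."""
    url = os.environ.get(var_name)
    if not url:
        raise RuntimeError(f"{var_name} is not set - configure the repository secret")
    urllib.request.urlretrieve(url, destination)
    return destination


if __name__ == "__main__":
    fetch_from_env("S3_URL_RESNET_50_V15_TF_FP32", "resnet_50_v15_tf_fp32.pb")
```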
@@ -50,17 +70,17 @@ jobs:
     - name: Set up AML
       run:
         FORCE_INSTALL=1 bash setup_deb.sh
+
+    - name: Unittest
+      run: |
+        python3 -m unittest tests.test_pytorch_models

-    - name: AML imports test
-      run:
-        python3 utils/tests/setup_test_utils/attempt_imports.py
-
-    - name: AML smoke test
+    - name: End-user smoke test
       run: |
         wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/aio_objdet_dataset.tar.gz > /dev/null 2>&1
         tar -xf aio_objdet_dataset.tar.gz > /dev/null

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/resnet_50_v15_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_RESNET_50_V15_TF_FP32 > /dev/null 2>&1
         python3 computer_vision/classification/resnet_50_v15/run.py -m resnet_50_v15_tf_fp32.pb -p fp32 -f tf --timeout=60

         python3 computer_vision/classification/mobilenet_v2/run.py -p fp32 -f pytorch --timeout=60

@@ -70,7 +90,7 @@ jobs:

         python3 speech_recognition/whisper/run.py -m small.en

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/ssd_inception_v2_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_SSD_INCEPTION_V2_TF_FP32 > /dev/null 2>&1
         python3 computer_vision/object_detection/ssd_inception_v2/run.py -m ssd_inception_v2_tf_fp32.pb -p fp32 --timeout=60

         wget https://zenodo.org/records/4735647/files/resnet50_v1.onnx > /dev/null 2>&1

@@ -91,6 +111,15 @@ jobs:
       COCO_IMG_PATH: aio_objdet_dataset
       COCO_ANNO_PATH: aio_objdet_dataset/annotations.json
       OMP_NUM_THREADS: 32
+      S3_URL_CRITEO_DATASET: ${{ secrets.S3_URL_CRITEO_DATASET }}
+      S3_URL_RESNET_50_V15_TF_FP32: ${{ secrets.S3_URL_RESNET_50_V15_TF_FP32 }}
+      S3_URL_SSD_INCEPTION_V2_TF_FP32: ${{ secrets.S3_URL_SSD_INCEPTION_V2_TF_FP32 }}
+      S3_URL_ALPACA_PYTORCH_FP32: ${{ secrets.S3_URL_ALPACA_PYTORCH_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
+      HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Install git
       run:

@@ -105,12 +134,16 @@ jobs:
       run:
         bash setup_deb.sh

-    - name: AML smoke test
+    - name: Unittest
+      run: |
+        python3 -m unittest tests.test_pytorch_models
+
+    - name: End-user smoke test
       run: |
         wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/aio_objdet_dataset.tar.gz > /dev/null 2>&1
         tar -xf aio_objdet_dataset.tar.gz > /dev/null

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/resnet_50_v15_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_RESNET_50_V15_TF_FP32 > /dev/null 2>&1
         python3 computer_vision/classification/resnet_50_v15/run.py -m resnet_50_v15_tf_fp32.pb -p fp32 -f tf --timeout=60

         python3 computer_vision/classification/mobilenet_v2/run.py -p fp32 -f pytorch --timeout=60

@@ -120,7 +153,7 @@ jobs:

         python3 speech_recognition/whisper/run.py -m small.en

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/ssd_inception_v2_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_SSD_INCEPTION_V2_TF_FP32 > /dev/null 2>&1
         python3 computer_vision/object_detection/ssd_inception_v2/run.py -m ssd_inception_v2_tf_fp32.pb -p fp32 --timeout=60

         wget https://zenodo.org/records/4735647/files/resnet50_v1.onnx > /dev/null 2>&1

@@ -136,6 +169,17 @@ jobs:
       image: ubuntu:22.04
       options: --memory=170g
     name: Ampere Altra - Ampere optimized PyTorch (shell installer)
+    env:
+      PYTHONPATH: ./
+      AIO_NUM_THREADS: 32
+      AIO_DEBUG_MODE: 0
+      S3_URL_CRITEO_DATASET: ${{ secrets.S3_URL_CRITEO_DATASET }}
+      S3_URL_ALPACA_PYTORCH_FP32: ${{ secrets.S3_URL_ALPACA_PYTORCH_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
+      HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Install Ampere optimized PyTorch
       run: |

@@ -151,9 +195,13 @@ jobs:
       run:
         bash setup_deb.sh

+    - name: Unittest
+      run: |
+        AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 -m unittest tests.test_pytorch_models
+
     - name: benchmark.py test
       run: |
-        PYTHONPATH=/__w/ampere_model_library/ampere_model_library python3 benchmark.py --no-interactive
+        PYTHONPATH=/__w/ampere_model_library/ampere_model_library python3 benchmark.py --no-interactive --model resnet_50_v1.5

   test_pytorch_arm64_docker:
     runs-on: self-hosted
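On the Ampere-optimized PyTorch runners the new Unittest step sets `AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*"` so implicit FP16 conversion applies to every tested model. A minimal sketch of reproducing that environment when driving the suite from Python rather than the shell — the wrapper script is illustrative and not part of this commit:

```python
# Illustrative wrapper reproducing the workflow's environment before running
# the PyTorch model tests; the script itself is an assumption, not repo code.
import os
import subprocess
import sys


def run_pytorch_tests() -> int:
    env = os.environ.copy()
    env["AIO_IMPLICIT_FP16_TRANSFORM_FILTER"] = ".*"  # apply implicit FP16 to all models
    env.setdefault("AIO_NUM_THREADS", "32")
    env.setdefault("AIO_DEBUG_MODE", "0")
    return subprocess.call(
        [sys.executable, "-m", "unittest", "tests.test_pytorch_models"], env=env
    )


if __name__ == "__main__":
    sys.exit(run_pytorch_tests())
```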
@@ -166,6 +214,13 @@ jobs:
       COCO_IMG_PATH: aio_objdet_dataset
       COCO_ANNO_PATH: aio_objdet_dataset/annotations.json
       AIO_NUM_THREADS: 32
+      AIO_DEBUG_MODE: 0
+      S3_URL_CRITEO_DATASET: ${{ secrets.S3_URL_CRITEO_DATASET }}
+      S3_URL_ALPACA_PYTORCH_FP32: ${{ secrets.S3_URL_ALPACA_PYTORCH_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
       HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Git checkout & pull submodules

@@ -178,18 +233,22 @@ jobs:
         bash setup_deb.sh
         echo $HF_HUB_TOKEN > ~/.cache/huggingface/token

+    - name: Unittest
+      run: |
+        AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 -m unittest tests.test_pytorch_models
+
     - name: benchmark.py test
       run: |
         { echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; echo "y"; } | PYTHONPATH=/__w/ampere_model_library/ampere_model_library python3 benchmark.py
         # testing second time to ensure that left-over files don't interrupt, etc. - this time no-interactive mode
         PYTHONPATH=/__w/ampere_model_library/ampere_model_library python3 benchmark.py --no-interactive --memory 30 --max-threads 24

-    - name: AML smoke test
+    - name: AML end-user smoke test
       run: |
         wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/aio_objdet_dataset.tar.gz > /dev/null 2>&1
         tar -xf aio_objdet_dataset.tar.gz > /dev/null

-        wget https://ampereaimodelzoo.s3.amazonaws.com/alpaca_data.json > /dev/null 2>&1
+        wget https://github.com/tloen/alpaca-lora/raw/main/alpaca_data.json > /dev/null 2>&1
         AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 natural_language_processing/text_generation/llama2/run.py -m meta-llama/Llama-2-7b-chat-hf --dataset_path=alpaca_data.json

         AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 recommendation/dlrm_torchbench/run.py -p fp32

@@ -203,7 +262,7 @@ jobs:
         wget https://github.com/ultralytics/assets/releases/download/v0.0.0/yolov8l.pt > /dev/null 2>&1
         AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 computer_vision/object_detection/yolo_v8/run.py -m yolov8l.pt -p fp32 -f pytorch

-        wget -O bert_large_mlperf.pt https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/bert_large_pytorch_fp32.pytorch > /dev/null 2>&1
+        wget -O bert_large_mlperf.pt https://zenodo.org/records/3733896/files/model.pytorch?download=1 > /dev/null 2>&1
         AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 natural_language_processing/extractive_question_answering/bert_large/run_mlperf.py -m bert_large_mlperf.pt -p fp32 -f pytorch

   test_tensorflow_arm64:

@@ -217,6 +276,13 @@ jobs:
       COCO_IMG_PATH: aio_objdet_dataset
       COCO_ANNO_PATH: aio_objdet_dataset/annotations.json
       AIO_NUM_THREADS: 32
+      AIO_DEBUG_MODE: 0
+      S3_URL_RESNET_50_V15_TF_FP32: ${{ secrets.S3_URL_RESNET_50_V15_TF_FP32 }}
+      S3_URL_SSD_INCEPTION_V2_TF_FP32: ${{ secrets.S3_URL_SSD_INCEPTION_V2_TF_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
       HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Git checkout & pull submodules

@@ -229,17 +295,17 @@ jobs:
         bash setup_deb.sh
         echo $HF_HUB_TOKEN > ~/.cache/huggingface/token

-    - name: AML smoke test
+    - name: End-user smoke test
       run: |
         wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/aio_objdet_dataset.tar.gz > /dev/null 2>&1
         tar -xf aio_objdet_dataset.tar.gz > /dev/null

         AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 natural_language_processing/extractive_question_answering/bert_large/run_huggingface.py -m bert-large-cased-whole-word-masking-finetuned-squad

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/resnet_50_v15_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_RESNET_50_V15_TF_FP32 > /dev/null 2>&1
         IGNORE_DATASET_LIMITS=1 AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" python3 computer_vision/classification/resnet_50_v15/run.py -m resnet_50_v15_tf_fp32.pb -b 32 -p fp32 -f tf --timeout=60

-        wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/ssd_inception_v2_tf_fp32.pb > /dev/null 2>&1
+        wget $S3_URL_SSD_INCEPTION_V2_TF_FP32 > /dev/null 2>&1
         IGNORE_DATASET_LIMITS=1 python3 computer_vision/object_detection/ssd_inception_v2/run.py -m ssd_inception_v2_tf_fp32.pb -b 8 -p fp32 --timeout=60

   test_onnxrt_arm64:

@@ -253,6 +319,14 @@ jobs:
       COCO_IMG_PATH: aio_objdet_dataset
       COCO_ANNO_PATH: aio_objdet_dataset/annotations.json
       AIO_NUM_THREADS: 32
+      AIO_DEBUG_MODE: 0
+      S3_URL_RESNET_50_V15_TF_FP32: ${{ secrets.S3_URL_RESNET_50_V15_TF_FP32 }}
+      S3_URL_SSD_INCEPTION_V2_TF_FP32: ${{ secrets.S3_URL_SSD_INCEPTION_V2_TF_FP32 }}
+      S3_URL_IMAGENET_DATASET: ${{ secrets.S3_URL_IMAGENET_DATASET }}
+      S3_URL_IMAGENET_DATASET_LABELS: ${{ secrets.S3_URL_IMAGENET_DATASET_LABELS }}
+      S3_URL_COCO_DATASET: ${{ secrets.S3_URL_COCO_DATASET }}
+      S3_URL_COCO_DATASET_ANNOTATIONS: ${{ secrets.S3_URL_COCO_DATASET_ANNOTATIONS }}
+      HF_HUB_TOKEN: ${{ secrets.HF_HUB_TOKEN }}
     steps:
     - name: Git checkout & pull submodules
       uses: actions/checkout@v4

@@ -263,7 +337,7 @@ jobs:
       run:
         bash setup_deb.sh

-    - name: AML smoke test
+    - name: End-user smoke test
       run: |
         wget https://ampereaimodelzoo.s3.eu-central-1.amazonaws.com/aio_objdet_dataset.tar.gz > /dev/null 2>&1
         tar -xvf aio_objdet_dataset.tar.gz > /dev/null

LICENSE

Lines changed: 9 additions & 1 deletion
@@ -187,12 +187,20 @@
       same "printed page" as the copyright notice for easier
       identification within third-party archives.

-   Copyright (c) 2022, Ampere Computing LLC
+   Copyright (c) 2024, Ampere Computing LLC
+   Copyright (c) 2022 Andrej Karpathy
+   Copyright (c) 2022 OpenAI
+   Copyright (c) 2022 Stability AI
    Copyright 2021 The MLPerf Authors. All Rights Reserved.
    Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
+   Copyright [2019] [Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany]
    Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
    Copyright (c) 2019, Myrtle Software Limited. All rights reserved.
    Copyright (c) 2017 Keith Ito
+   Copyright 2017-present Weichen Shen
+   Copyright (c) 2014-, Continuum Analytics, Inc.
+   Copyright (c) 2012, Anaconda, Inc.
+   Copyright (c) Facebook, Inc. and its affiliates.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.

README.md

Lines changed: 1 addition & 1 deletion
@@ -88,7 +88,7 @@ Note that the example uses PyTorch - we recommend using Ampere Optimized PyTorch
 **Before running this example you need to be granted access by Meta to LLaMA2 model. Go here: [Meta](https://ai.meta.com/resources/models-and-libraries/llama-downloads) and here: [HF](https://huggingface.co/meta-llama/Llama-2-7b-chat-hf) to learn more.**
 ```bash
 source set_env_variables.sh
-wget https://ampereaimodelzoo.s3.amazonaws.com/alpaca_data.json
+wget https://github.com/tloen/alpaca-lora/raw/main/alpaca_data.json
 AIO_IMPLICIT_FP16_TRANSFORM_FILTER=".*" AIO_NUM_THREADS=32 python3 natural_language_processing/text_generation/llama2/run.py -m meta-llama/Llama-2-7b-chat-hf --dataset_path=alpaca_data.json
 ```
 The command above will run the model utilizing 32 threads, implicit conversion to FP16 datatype will be applied - you can default to fp32 precision by not setting the **AIO_IMPLICIT_FP16_TRANSFORM_FILTER** variable.
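The dataset URL in the README now points at the upstream alpaca-lora repository instead of the Ampere S3 bucket. If you want to sanity-check the download before starting the LLaMA2 run, the file is plain JSON; the field names printed below follow the upstream Alpaca format and are an assumption here, not something this commit defines:

```python
# Quick inspection of the downloaded alpaca_data.json; assumes the upstream
# Alpaca format (a JSON array of instruction/input/output records).
import json

with open("alpaca_data.json", encoding="utf-8") as f:
    records = json.load(f)

print(f"{len(records)} prompts loaded")
print(json.dumps(records[0], indent=2))  # show one sample record
```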
