
Commit 5d397f5

improves side by side (#304)
* improves side by side * mypy * changes * mypy * fixes * fix ut * doc * sbs * doc * last * fix * fix * fix sbs * sbs * fix sbs * fix * fix * mypy" git push " * onx fix * fix mypy * fix missing domain * mypy
1 parent a682d15 commit 5d397f5

30 files changed (+1315, -225 lines)

.gitignore

Lines changed: 2 additions & 0 deletions

@@ -14,6 +14,8 @@
 *.weight
 *.nsys-rep
 *.pkl
+*.pt
+*.pt2
 *.xlsx
 *.sarif
 *.sqlitest

CHANGELOGS.rst

Lines changed: 5 additions & 0 deletions

@@ -1,6 +1,11 @@
 Change Logs
 ===========

+0.8.3
++++++
+
+* :pr:`304`: improves side-by-side comparison
+
 0.8.2
 +++++


_doc/index.rst

Lines changed: 1 addition & 1 deletion

@@ -239,8 +239,8 @@ The function replaces dynamic dimensions defined as strings by
 Older versions
 ==============

+* `0.8.3 <../v0.8.3/index.html>`_
 * `0.8.2 <../v0.8.2/index.html>`_
-* `0.8.1 <../v0.8.1/index.html>`_
 * `0.7.16 <../v0.7.16/index.html>`_
 * `0.6.3 <../v0.6.3/index.html>`_
 * `0.5.0 <../v0.5.0/index.html>`_

_unittests/ut_export/test_api.py

Lines changed: 2 additions & 0 deletions

@@ -110,6 +110,8 @@ def test_tiny_llm_to_onnx(self):
         diff = max_diff(expected, got)
         assert diff["abs"] <= 1e-5, f"diff={diff}"

+        self.clean_dump()
+

 if __name__ == "__main__":
     unittest.main(verbosity=2)

_unittests/ut_helpers/test_bench_run.py

Lines changed: 25 additions & 6 deletions

@@ -109,35 +109,51 @@ def test_make_configs_replace(self):
     def test_max_diff(self):
         self.assertEqual(
             max_diff(torch.Tensor([1, 2]), torch.Tensor([1, 2])),
-            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0, "argm": (0,)},
+            {
+                "abs": 0.0,
+                "rel": 0.0,
+                "sum": 0.0,
+                "n": 2.0,
+                "dnan": 0.0,
+                "argm": (0,),
+                "dev": 0,
+            },
         )
         self.assertEqual(
             max_diff(
                 (torch.Tensor([1, 2]),),
                 (torch.Tensor([1, 2])),
             ),
-            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0, "argm": (0,)},
+            {
+                "abs": 0.0,
+                "rel": 0.0,
+                "sum": 0.0,
+                "n": 2.0,
+                "dnan": 0.0,
+                "argm": (0,),
+                "dev": 0,
+            },
         )
         self.assertEqual(
             max_diff(
                 (torch.Tensor([1, 2]), (torch.Tensor([1, 2]),)),
                 (torch.Tensor([1, 2]), (torch.Tensor([1, 2]),)),
             ),
-            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 4.0, "dnan": 0.0},
+            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 4.0, "dnan": 0.0, "dev": 0},
         )
         self.assertEqual(
             max_diff(
                 {"a": torch.Tensor([1, 2])},
                 {"a": torch.Tensor([1, 2])},
             ),
-            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0},
+            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0, "dev": 0},
        )
         self.assertEqual(
             max_diff(
                 {"a": torch.Tensor([1, 2])},
                 [torch.Tensor([1, 2])],
             ),
-            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0},
+            {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0, "dev": 0},
         )
         self.assertEqual(
             max_diff(
@@ -150,6 +166,7 @@ def test_max_diff(self):
                 "n": 2.0,
                 "rel": 0.9999999997999001,
                 "sum": 9999999998.0,
+                "dev": 0,
             },
         )

@@ -164,7 +181,9 @@ def test_max_diff_dynamic_cache(self):
             flatten=True,
             verbose=10,
         )
-        self.assertEqual(md, {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 10.0, "dnan": 0})
+        self.assertEqual(
+            md, {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 10.0, "dnan": 0, "dev": 0}
+        )


 if __name__ == "__main__":
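
Note on the change above: the expected dictionaries returned by max_diff now carry a "dev" entry alongside "abs", "rel", "sum", "n", "dnan" (and "argm" for flat tensor pairs). A minimal usage sketch follows; the import path is an assumption, since this diff only shows the call sites.

# Minimal sketch based on the updated expectations; the import location is assumed.
import torch
from onnx_diagnostic.helpers import max_diff  # assumed import path

d = max_diff(torch.Tensor([1, 2]), torch.Tensor([1, 2]))
# Per the test above, d should equal:
# {"abs": 0.0, "rel": 0.0, "sum": 0.0, "n": 2.0, "dnan": 0.0, "argm": (0,), "dev": 0}
print(d["abs"], d["dev"])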

_unittests/ut_helpers/test_helper.py

Lines changed: 26 additions & 0 deletions

@@ -11,6 +11,7 @@
     hide_stdout,
     requires_onnx,
     requires_transformers,
+    requires_cuda,
 )
 from onnx_diagnostic.helpers.helper import (
     string_type,
@@ -199,6 +200,31 @@ def test_flatten(self):
         d = string_diff(diff)
         self.assertIsInstance(d, str)

+    @hide_stdout()
+    def test_maxdiff_device(self):
+        inputs = (torch.arange(2), torch.cos(torch.arange(3)))
+        diff = max_diff(inputs, inputs, flatten=True, verbose=10)
+        self.assertEqual(diff["abs"], 0)
+        self.assertEqual(diff["dev"], 0)
+
+    @hide_stdout()
+    @requires_cuda()
+    def test_maxdiff_device_cuda(self):
+        diff = max_diff(torch.ones((2,)).cuda(), torch.ones((2,)), verbose=10)
+        self.assertEqual(diff["dev"], 1)
+        inputs = (torch.arange(2), torch.cos(torch.arange(3)))
+        inputs2 = (inputs[0].cuda(), inputs[1].cuda())
+        diff = max_diff(inputs, inputs2, verbose=10)
+        self.assertEqual(diff["abs"], 0)
+        self.assertEqual(diff["dev"], 2)
+        inputs2 = (inputs[0], inputs[1].cuda())
+        diff = max_diff(inputs, inputs2, verbose=10)
+        self.assertEqual(diff["abs"], 0)
+        self.assertEqual(diff["dev"], 1)
+        diff = max_diff(inputs2, inputs2, verbose=10)
+        self.assertEqual(diff["abs"], 0)
+        self.assertEqual(diff["dev"], 0)
+
     def test_flatten_cache(self):
         cache = make_dynamic_cache([(torch.ones((5, 6, 5, 6)), torch.ones((5, 6, 5, 6)) + 2)])
         flat = flatten_object(cache, drop_keys=True)
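
These new tests are consistent with "dev" counting the leaf tensor pairs whose devices differ (1 for a CPU/CUDA mismatch on one tensor, 2 when both tensors of a pair moved, 0 when both sides live on the same device). The sketch below is conceptual only, not the library's implementation; the helper name is made up for illustration.

# Conceptual sketch, assuming "dev" counts leaf tensor pairs on different devices.
from typing import Any
import torch

def count_device_mismatches(expected: Any, got: Any) -> int:
    """Count leaf tensor pairs living on different devices (hypothetical helper)."""
    if isinstance(expected, torch.Tensor) and isinstance(got, torch.Tensor):
        return int(expected.device != got.device)
    if isinstance(expected, (list, tuple)) and isinstance(got, (list, tuple)):
        return sum(count_device_mismatches(e, g) for e, g in zip(expected, got))
    if isinstance(expected, dict) and isinstance(got, dict):
        return sum(count_device_mismatches(expected[k], got[k]) for k in expected)
    return 0

inputs = (torch.arange(2), torch.cos(torch.arange(3)))
assert count_device_mismatches(inputs, inputs) == 0  # mirrors diff["dev"] == 0 above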

_unittests/ut_helpers/test_log_helper.py

Lines changed: 5 additions & 0 deletions

@@ -189,6 +189,7 @@ def test_cube_logs_excel(self):
             verbose=1,
         )
         self.assertExists(output)
+        self.clean_dump()

     @hide_stdout()
     def test_enumerate_csv_files(self):
@@ -210,6 +211,7 @@ def test_enumerate_csv_files(self):
         cube.load(verbose=1)
         self.assertEqual((3, 11), cube.shape)
         self.assertIn("RAWFILENAME", cube.data.columns)
+        self.clean_dump()

     def test_cube_logs_performance1(self):
         output = self.get_dump_file("test_cube_logs_performance1.xlsx")
@@ -235,6 +237,7 @@ def test_cube_logs_performance1(self):
             ],
         )
         self.assertExists(output)
+        self.clean_dump()

     def test_cube_logs_performance2(self):
         output = self.get_dump_file("test_cube_logs_performance2.xlsx")
@@ -470,6 +473,7 @@ def test_historical_cube_time_mask(self):
         )
         cube = CubeLogs(df, keys=["^m_*", "exporter"], time="date").load()
         cube.to_excel(output, views=["time_p"], time_mask=True, verbose=1)
+        self.clean_dump()

     def test_cube_sbs_no_time(self):
         df = pandas.DataFrame(
@@ -532,6 +536,7 @@ def test_cube_sbs_no_time(self):
             verbose=0,
             sbs=dict(CFA=dict(exporter="E1", opt="O"), CFB=dict(exporter="E2", opt="O")),
         )
+        self.clean_dump()

     def test_cube_sbs_with_time(self):
         df = pandas.DataFrame(
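
For context, the sbs argument shown in test_cube_sbs_no_time selects two configurations (CFA and CFB) to lay out side by side in the Excel output. CubeLogs' internals are not part of this commit; the plain-pandas sketch below only illustrates the general idea of a side-by-side view (one metric pivoted into adjacent columns per exporter plus a ratio), with made-up column names and data.

# Illustration of the side-by-side idea only; not CubeLogs.to_excel(..., sbs=...).
import pandas as pd

df = pd.DataFrame(
    {
        "model": ["m1", "m1", "m2", "m2"],
        "exporter": ["E1", "E2", "E1", "E2"],
        "time_latency": [1.0, 0.8, 2.0, 2.4],
    }
)

# One column per exporter, one row per model, plus a speed ratio E2/E1.
sbs = df.pivot(index="model", columns="exporter", values="time_latency")
sbs["ratio_E2_over_E1"] = sbs["E2"] / sbs["E1"]
print(sbs)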

_unittests/ut_helpers/test_ort_session.py

Lines changed: 1 addition & 0 deletions

@@ -310,6 +310,7 @@ def test_profiling(self):
         got = wrap.run(None, feeds)
         self.assertIsInstance(got[0], torch.Tensor)
         self.assertEqualArray(expected[0], got[0])
+        self.clean_dump()


 if __name__ == "__main__":

_unittests/ut_helpers/test_torch_helper.py

Lines changed: 4 additions & 0 deletions

@@ -151,6 +151,7 @@ def forward(self, x, y):
         self.assertEqualAny(restored["main", 1, "I"], (inputs, {}))
         self.assertEqualAny(restored["main", 0, "O"], res1)
         self.assertEqualAny(restored["main", 0, "O"], res2)
+        self.clean_dump()

     @hide_stdout()
     def test_steal_forward_dump_file_steal_append(self):
@@ -181,6 +182,7 @@ def forward(self, x, y):
             {("", 1, "I"), ("", 1, "O"), "sx", ("", 0, "O"), "sx_1", ("", 0, "I")},
             set(restored),
         )
+        self.clean_dump()

     @hide_stdout()
     def test_steal_forward_dump_file_steal_append_drop(self):
@@ -214,6 +216,7 @@ def forward(self, x, y):
         first = restored[("", 0, "I")]
         _a, kws = first
         self.assertNotIn("x", kws)
+        self.clean_dump()

     @hide_stdout()
     def test_steal_forward_submodules(self):
@@ -257,6 +260,7 @@ def forward(self, x, y):
             ),
             len(sorted(restored)),
         )
+        self.clean_dump()

     def test_replace_string_by_dynamic(self):
         example = {

_unittests/ut_reference/test_onnxruntime_evaluator.py

Lines changed: 34 additions & 1 deletion

@@ -1,10 +1,11 @@
 import unittest
+from typing import Optional
 import numpy as np
 import onnx
 import onnx.helper as oh
 import torch
 import onnxruntime
-from onnx_diagnostic.ext_test_case import ExtTestCase, hide_stdout
+from onnx_diagnostic.ext_test_case import ExtTestCase, hide_stdout, ignore_warnings
 from onnx_diagnostic.helpers.onnx_helper import from_array_extended
 from onnx_diagnostic.reference import (
     OnnxruntimeEvaluator,
@@ -22,6 +23,14 @@


 class TestOnnxruntimeEvaluator(ExtTestCase):
+    def _range(self, *shape, bias: Optional[float] = None):
+        n = np.prod(shape)
+        x = np.arange(n).astype(np.float32) / n
+        if bias:
+            x = x + bias
+        return x.reshape(tuple(shape)).astype(np.float32)
+
+    @ignore_warnings(FutureWarning)
     def test_ort_eval_scan_cdist_add(self):

         def dist(unused: torch.Tensor, x: torch.Tensor, samex: torch.Tensor):
@@ -69,6 +78,7 @@ def forward(self, x):
         got = orte.run(None, {name: x.numpy()})[0]
         self.assertEqualArray(expected, got)

+    @ignore_warnings((UserWarning, FutureWarning))
     def test_ort_eval_cond(self):
         import torch

@@ -180,6 +190,7 @@ def test_constant_bool_input(self):
         self.assertEqual(got.dtype, torch.bool)
         self.assertEqual(got[0], True)

+    @hide_stdout()
     def test_ort_eval_loop(self):
         model = torch.nn.EmbeddingBag(num_embeddings=49157, embedding_dim=32, mode="sum")
         a = torch.tensor([[39906, 39906]]).long()
@@ -226,6 +237,28 @@ def test_report_results_comparison_ort(self):
         self.assertLess(d[(0, "nx"), "r_cos"], 1e-6)
         self.assertLess(d[(2, "u"), "r_exp"], 1e-6)

+    @hide_stdout()
+    def test_skip_layer_normalization(self):
+        node = oh.make_node(
+            "SkipLayerNormalization",
+            ["x", "skip", "beta", "gamma", "bias"],
+            ["Z"],
+            epsilon=1.0e-5,
+            domain="com.microsoft",
+        )
+        feeds = dict(
+            x=self._range(2, 3, 8),
+            skip=self._range(2, 3, 8, bias=3),
+            beta=self._range(8, bias=1),
+            gamma=self._range(8, bias=2),
+            bias=self._range(8, bias=0.1),
+        )
+        ref = ExtendedReferenceEvaluator(node)
+        expected = ref.run(None, feeds)
+        rt = OnnxruntimeEvaluator(node, verbose=10, opsets={"": 22})
+        got = rt.run(None, feeds)
+        self.assertEqualAny(expected, got, atol=1e-4)
+

 if __name__ == "__main__":
     unittest.main(verbosity=2)
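
The new test checks that OnnxruntimeEvaluator and ExtendedReferenceEvaluator agree on the com.microsoft SkipLayerNormalization contrib operator. As a rough reference, the sketch below shows the usual definition of that operator's main output, layer normalization of x + skip + bias over the last axis; this is an illustrative NumPy version under that assumption, not the evaluator's code, and it ignores the operator's optional outputs.

# NumPy sketch of SkipLayerNormalization's main output, assuming the usual
# definition: LayerNorm(x + skip + bias) over the last axis, scaled by gamma
# and shifted by beta.
import numpy as np

def skip_layer_norm(x, skip, gamma, beta, bias=None, epsilon=1e-5):
    h = x + skip + (bias if bias is not None else 0.0)
    mean = h.mean(axis=-1, keepdims=True)
    var = h.var(axis=-1, keepdims=True)
    return (h - mean) / np.sqrt(var + epsilon) * gamma + beta

x = np.random.rand(2, 3, 8).astype(np.float32)
skip = np.random.rand(2, 3, 8).astype(np.float32)
gamma = np.ones(8, dtype=np.float32)
beta = np.zeros(8, dtype=np.float32)
print(skip_layer_norm(x, skip, gamma, beta).shape)  # (2, 3, 8)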
