|
12 | 12 | from pytensor.compile.mode import get_default_mode, get_mode |
13 | 13 | from pytensor.compile.ops import DeepCopyOp, deep_copy_op |
14 | 14 | from pytensor.configdefaults import config |
15 | | -from pytensor.graph.basic import equal_computations |
| 15 | +from pytensor.graph import Op |
| 16 | +from pytensor.graph.basic import Constant, equal_computations |
16 | 17 | from pytensor.graph.fg import FunctionGraph |
17 | 18 | from pytensor.graph.rewriting.basic import check_stack_trace, out2in |
18 | 19 | from pytensor.graph.rewriting.db import RewriteDatabaseQuery |
|
29 | 30 | TensorFromScalar, |
30 | 31 | as_tensor, |
31 | 32 | cast, |
| 33 | + constant, |
32 | 34 | join, |
33 | 35 | tile, |
34 | 36 | ) |
|
65 | 67 | local_merge_alloc, |
66 | 68 | local_useless_alloc, |
67 | 69 | local_useless_elemwise, |
| 70 | + topo_constant_folding, |
| 71 | + topo_unconditional_constant_folding, |
68 | 72 | topological_fill_sink, |
69 | 73 | ) |
70 | 74 | from pytensor.tensor.rewriting.math import local_lift_transpose_through_dot |
@@ -742,56 +746,92 @@ def test_upcast(self): |
742 | 746 | ) or (len(topo) > 1) |
743 | 747 |
|
744 | 748 |
|
class TestConstantFolding:
    def test_constant_folding(self):
        # Constant folding must stay registered in the FAST_COMPILE mode;
        # a past change accidentally dropped that registration.
        vec = dvector()
        compile_mode = get_mode("FAST_COMPILE").excluding("fusion")
        fn = function([vec], [vec * 2, vec + vec], mode=compile_mode)
        nodes = fn.maker.fgraph.toposort()
        assert len(nodes) == 2

        # Folding elemwise graphs over a 0-d constant must not crash,
        # as scalar elemwise should not generate c code.
        scalar_const = pt.constant(3)
        assert scalar_const.ndim == 0
        compile_mode = get_mode("FAST_COMPILE").excluding("fusion")
        fn = function(
            [], [scalar_const * 2, scalar_const + scalar_const], mode=compile_mode
        )
        nodes = fn.maker.fgraph.toposort()
        assert len(nodes) == 2
        assert all(isinstance(node.op, DeepCopyOp) for node in nodes)

    @pytest.mark.xfail(
        reason="PyTensor rewrites constants before stabilization. "
        "This breaks stabilization rewrites in some cases. See #504.",
        raises=AssertionError,
    )
    def test_constant_get_stabilized(self):
        # The `constant_folding` rewrite currently runs before the
        # stabilization rewrites, which can keep some of them from firing
        # and let inf values appear where they should not.
        #
        # Simply moving `constant_folding` to specialize would break other
        # rewrites; fixing this needs partial duplication of some
        # canonicalize rewrites.
        inp = scalar()
        out = log(1 + exp(inp))
        default_mode = get_default_mode()
        default_mode.check_isfinite = False
        compiled = function([inp], out, mode=default_mode)

        assert len(compiled.maker.fgraph.toposort()) == 1
        assert compiled.maker.fgraph.toposort()[0].op == softplus
        assert compiled(800) == 800

        const_in = pt.as_tensor_variable(800)
        const_out = log(1 + exp(const_in))
        const_fn = function([], const_out, mode=default_mode)
        # When this error is fixed, the following line should be ok.
        assert const_fn() == 800, const_fn()

    def test_unconditional(self):
        # An Alloc feeding a graph output directly.
        alloc_out = pt.alloc(np.e, *(3, 5))
        fg = FunctionGraph(outputs=[alloc_out], clone=False)

        # Default constant folding doesn't apply to Alloc used as outputs
        topo_constant_folding.apply(fg)
        assert not isinstance(fg.outputs[0], Constant)

        # Unconditional constant folding does apply
        topo_unconditional_constant_folding.apply(fg)
        assert isinstance(fg.outputs[0], Constant)
        np.testing.assert_allclose(fg.outputs[0].data, np.full((3, 5), np.e))

    def test_unconditional_no_perform_method(self):
        """Test that errors are caught when the Op does not have a perform method."""

        class OpNoPerform(Op):
            itypes = [scalar(dtype="float64").type]
            otypes = [scalar(dtype="float64").type]

            def perform(self, *args, **kwargs):
                raise NotImplementedError("This Op cannot be evaluated")

        folded_in = constant(np.array(5.0))
        unfoldable = OpNoPerform()(folded_in)

        fg = FunctionGraph(outputs=[unfoldable], clone=False)
        # Default constant_folding will raise
        with pytest.raises(NotImplementedError):
            topo_constant_folding.apply(fg)

        # Unconditional constant folding will be silent
        topo_unconditional_constant_folding.apply(fg)
        assert not isinstance(fg.outputs[0], Constant)
        assert isinstance(fg.outputs[0].owner.op, OpNoPerform)
795 | 835 |
|
796 | 836 |
|
797 | 837 | class TestLocalSwitchSink: |
|
0 commit comments