@@ -57,7 +57,7 @@ function extremeTopK(testCase)
     %% This should work, and it does on some computers. On others, Ollama
     %% receives the parameter, but either Ollama or llama.cpp fails to
     %% honor it correctly.
-    testCase.assumeTrue(false,"disabled due to Ollama/llama.cpp not honoring parameter reliably");
+    testCase.assumeFail("disabled due to Ollama/llama.cpp not honoring parameter reliably");

     % setting top-k to k=1 leaves no random choice,
     % so we expect to get a fixed response.
@@ -72,7 +72,7 @@ function extremeMinP(testCase)
     %% This should work, and it does on some computers. On others, Ollama
     %% receives the parameter, but either Ollama or llama.cpp fails to
     %% honor it correctly.
-    testCase.assumeTrue(false,"disabled due to Ollama/llama.cpp not honoring parameter reliably");
+    testCase.assumeFail("disabled due to Ollama/llama.cpp not honoring parameter reliably");

     % setting min-p to p=1 means only tokens with the same logit as
     % the most likely one can be chosen, which will almost certainly
@@ -88,7 +88,7 @@ function extremeTfsZ(testCase)
     %% This should work, and it does on some computers. On others, Ollama
     %% receives the parameter, but either Ollama or llama.cpp fails to
     %% honor it correctly.
-    testCase.assumeTrue(false,"disabled due to Ollama/llama.cpp not honoring parameter reliably");
+    testCase.assumeFail("disabled due to Ollama/llama.cpp not honoring parameter reliably");

     % setting tfs_z to z=0 leaves no random choice, but degrades to
     % greedy sampling, so we expect to get a fixed response.
@@ -113,7 +113,7 @@ function seedFixesResult(testCase)
     %% This should work, and it does on some computers. On others, Ollama
     %% receives the parameter, but either Ollama or llama.cpp fails to
     %% honor it correctly.
-    testCase.assumeTrue(false,"disabled due to Ollama/llama.cpp not honoring parameter reliably");
+    testCase.assumeFail("disabled due to Ollama/llama.cpp not honoring parameter reliably");

     chat = ollamaChat("mistral");
     response1 = generate(chat,"hi",Seed=1234);
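For reference, the determinism pattern these disabled tests rely on looks roughly like the sketch below. It is an illustrative reconstruction, not the repository's actual test body: the model name "mistral" and the Seed name-value argument appear in the diff above, while the prompt text, the second generation, and the verification call are assumptions.

    function seedFixesResultSketch(testCase)
        % Illustrative sketch (assumed names): with a fixed Seed, two
        % generations from the same prompt should produce identical text,
        % so the test can compare them directly.
        chat = ollamaChat("mistral");
        response1 = generate(chat,"hi",Seed=1234);
        response2 = generate(chat,"hi",Seed=1234);
        testCase.verifyEqual(response1,response2);
    end

The same comparison idea underlies the top-k, min-p, and tfs_z tests: each parameter value is chosen so that sampling degenerates to a single possible token, which is why a repeated response can be expected when the backend honors the parameter.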