
Commit cb56739

Add parameter tab ui contents
1 parent 0d8efbc commit cb56739

1 file changed: 71 additions and 2 deletions

@@ -1,8 +1,77 @@
-<FluentLayout Id="@Id">
+@using System.Linq
 
+<FluentLayout Id="@Id">
+    <LabelWithTooltip Id="slider-past-messages"
+                      LabelText="Past messages included"
+                      TooltipText="Select the number of past messages to include in each new API request. This helps give the model context for new user queries. Setting this number to 10 will include 5 user queries and 5 system responses." />
+    <SliderWithTextfield Min="1" Max="20" Step="1" @bind-Value=@pastMessagesValue />
+
+    <LabelWithTooltip Id="slider-max-response"
+                      LabelText="Max response"
+                      TooltipText="Set a limit on the number of tokens per model response. The API supports a maximum of MaxTokensPlaceholderDoNotTranslate tokens shared between the prompt (including system message, examples, message history, and user query) and the model's response. One token is roughly 4 characters for typical English text." />
+    <SliderWithTextfield Min="1" Max="16000" Step="1" @bind-Value=@maxResponseValue />
+
+    <LabelWithTooltip Id="slider-temperature"
+                      LabelText="Temperature"
+                      TooltipText="Controls randomness. Lowering the temperature means that the model will produce more repetitive and deterministic responses. Increasing the temperature will result in more unexpected or creative responses. Try adjusting temperature or Top P but not both." />
+    <SliderWithTextfield Min="0" Max="1" Step="0.01" @bind-Value=@temperatureValue />
+
+    <LabelWithTooltip Id="slider-top-p"
+                      LabelText="Top P"
+                      TooltipText="Similar to temperature, this controls randomness but uses a different method. Lowering Top P will narrow the model’s token selection to likelier tokens. Increasing Top P will let the model choose from tokens with both high and low likelihood. Try adjusting temperature or Top P but not both." />
+    <SliderWithTextfield Min="0" Max="1" Step="0.01" @bind-Value=@topPValue />
+
+    <LabelWithTooltip Id="complete-stop-sequence"
+                      LabelText="Stop sequence"
+                      TooltipText="Make the model end its response at a desired point. The model response will end before the specified sequence, so it won't contain the stop sequence text. For ChatGPT, using <|im_end|> ensures that the model response doesn't generate a follow-up user query. You can include as many as four stop sequences." />
+
+    <FluentAutocomplete TOption="string" Multiple="true" AutoComplete="false"
+                        ShowOverlayOnEmptyResults="false"
+                        SelectValueOnTab="true"
+                        MaximumOptionsSearch="1"
+                        @bind-SelectedOptions=stopSequenceValue
+                        OnOptionsSearch=OnSearchAsync
+                        Style="width:95%;padding:5px 0px;margin: 0 auto">
+        <OptionTemplate>
+            <FluentLabel>Create "@(context)"</FluentLabel>
+        </OptionTemplate>
+    </FluentAutocomplete>
+
+    <LabelWithTooltip Id="slider-frequency-penalty"
+                      LabelText="Frequency penalty"
+                      TooltipText="Reduce the chance of repeating a token proportionally based on how often it has appeared in the text so far. This decreases the likelihood of repeating the exact same text in a response." />
+    <SliderWithTextfield Min="0" Max="2" Step="0.01" @bind-Value=@frequencyPenaltyValue />
+
+    <LabelWithTooltip Id="slider-presence-penalty"
+                      LabelText="Presence penalty"
+                      TooltipText="Reduce the chance of repeating any token that has appeared in the text at all so far. This increases the likelihood of introducing new topics in a response." />
+    <SliderWithTextfield Min="0" Max="2" Step="0.01" @bind-Value=@presencePenaltyValue />
 </FluentLayout>
 
 @code {
     [Parameter]
     public string? Id { get; set; }
-}
+
+    private int pastMessagesValue = 10;
+    private int maxResponseValue = 800;
+    private double temperatureValue = 0.7;
+    private double topPValue = 0.95;
+
+    private double frequencyPenaltyValue = 0;
+    private double presencePenaltyValue = 0;
+
+    private IEnumerable<string> stopSequenceValue = new List<string>();
+    private List<string> searchTextItems = new();
+
+    private Task OnSearchAsync(OptionsSearchEventArgs<string> e)
+    {
+        searchTextItems.Clear();
+        if (string.IsNullOrEmpty(e.Text) || stopSequenceValue.Contains(e.Text))
+            return Task.CompletedTask;
+
+        searchTextItems.Add(e.Text);
+        e.Items = searchTextItems;
+
+        return Task.CompletedTask;
+    }
 }
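Note that the sliders and the stop-sequence autocomplete all bind to private fields (pastMessagesValue, temperatureValue, stopSequenceValue, and so on), so this commit wires up the UI but does not show how the chosen values leave the component or reach an actual chat-completions request. A minimal sketch of one way a follow-up could surface them, using a hypothetical ChatParameterSnapshot record and GetCurrentParameters helper that are not part of this commit, is below; both could live in the same @code block:

    // Hypothetical additions (not in this commit): snapshot the bound values
    // so a parent page can read them when it assembles an API request.
    private record ChatParameterSnapshot(
        int PastMessages,
        int MaxResponseTokens,
        double Temperature,
        double TopP,
        double FrequencyPenalty,
        double PresencePenalty,
        IReadOnlyList<string> StopSequences);

    private ChatParameterSnapshot GetCurrentParameters() => new(
        pastMessagesValue,
        maxResponseValue,
        temperatureValue,
        topPValue,
        frequencyPenaltyValue,
        presencePenaltyValue,
        stopSequenceValue.ToList());   // ToList() uses the @using System.Linq added above

A parent page could call the helper (for example via an EventCallback parameter), use PastMessages to trim the chat history it sends, and map the remaining fields onto the request options of whichever OpenAI client the host application uses.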
