@@ -11,6 +11,8 @@ import (
 	"strings"
 	"time"
 
+	"github.com/google/uuid"
+
 	"github.com/ollama/ollama/envconfig"
 	"github.com/ollama/ollama/types/model"
 )
@@ -36,6 +38,19 @@ func (e StatusError) Error() string {
 	}
 }
 
+type AuthorizationError struct {
+	StatusCode int
+	Status     string
+	PublicKey  string `json:"public_key"`
+}
+
+func (e AuthorizationError) Error() string {
+	if e.Status != "" {
+		return e.Status
+	}
+	return "something went wrong, please see the ollama server logs for details"
+}
+
 // ImageData represents the raw binary data of an image file.
 type ImageData []byte
 
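To illustrate the new error type, here is a minimal caller-side sketch. It assumes the client surfaces `AuthorizationError` as a value in the returned error chain; the `Push` call and the key-display flow are only stand-ins for any authenticated request, not part of this diff.

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	client, err := api.ClientFromEnvironment()
	if err != nil {
		log.Fatal(err)
	}

	// Push is used here only to produce an error; any authenticated call works.
	err = client.Push(context.Background(), &api.PushRequest{Model: "my-model"},
		func(api.ProgressResponse) error { return nil })

	var authErr api.AuthorizationError
	if errors.As(err, &authErr) {
		// PublicKey lets the caller show which local key must be registered upstream.
		fmt.Println("unauthorized; register this key:", authErr.PublicKey)
	}
}
```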
@@ -313,14 +328,29 @@ func (t *ToolFunction) String() string {
 // ChatResponse is the response returned by [Client.Chat]. Its fields are
 // similar to [GenerateResponse].
 type ChatResponse struct {
-	Model      string     `json:"model"`
-	CreatedAt  time.Time  `json:"created_at"`
-	Message    Message    `json:"message"`
-	DoneReason string     `json:"done_reason,omitempty"`
-	DebugInfo  *DebugInfo `json:"_debug_info,omitempty"`
+	// Model is the model name that generated the response.
+	Model string `json:"model"`
+
+	// RemoteModel is the name of the upstream model that generated the response.
+	RemoteModel string `json:"remote_model,omitempty"`
+
+	// RemoteHost is the URL of the upstream Ollama host that generated the response.
+	RemoteHost string `json:"remote_host,omitempty"`
 
+	// CreatedAt is the timestamp of the response.
+	CreatedAt time.Time `json:"created_at"`
+
+	// Message contains the message or part of a message from the model.
+	Message Message `json:"message"`
+
+	// Done specifies if the response is complete.
 	Done bool `json:"done"`
 
+	// DoneReason is the reason the model stopped generating text.
+	DoneReason string `json:"done_reason,omitempty"`
+
+	DebugInfo *DebugInfo `json:"_debug_info,omitempty"`
+
 	Metrics
 }
 
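A short sketch of how a caller might consume the new `RemoteModel`/`RemoteHost` fields while streaming a chat, assuming the usual `github.com/ollama/ollama/api` client; the model name is a placeholder, and the remote fields are assumed to be empty for purely local models.

```go
// chatOnce streams one chat turn and, on the final chunk, reports whether the
// answer came from an upstream host.
func chatOnce(ctx context.Context, client *api.Client, prompt string) error {
	req := &api.ChatRequest{
		Model:    "my-model", // placeholder model name
		Messages: []api.Message{{Role: "user", Content: prompt}},
	}
	return client.Chat(ctx, req, func(resp api.ChatResponse) error {
		fmt.Print(resp.Message.Content)
		if resp.Done && resp.RemoteHost != "" {
			// New in this change: identify the upstream model/host that answered.
			fmt.Printf("\n[%s via %s]\n", resp.RemoteModel, resp.RemoteHost)
		}
		return nil
	})
}
```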
@@ -425,20 +455,47 @@ type EmbeddingResponse struct {
 
 // CreateRequest is the request passed to [Client.Create].
 type CreateRequest struct {
-	Model    string `json:"model"`
-	Stream   *bool  `json:"stream,omitempty"`
+	// Model is the model name to create.
+	Model string `json:"model"`
+
+	// Stream specifies whether the response is streaming; it is true by default.
+	Stream *bool `json:"stream,omitempty"`
+
+	// Quantize is the quantization format for the model; leave blank to not change the quantization level.
 	Quantize string `json:"quantize,omitempty"`
 
-	From       string            `json:"from,omitempty"`
-	Files      map[string]string `json:"files,omitempty"`
-	Adapters   map[string]string `json:"adapters,omitempty"`
-	Template   string            `json:"template,omitempty"`
-	License    any               `json:"license,omitempty"`
-	System     string            `json:"system,omitempty"`
-	Parameters map[string]any    `json:"parameters,omitempty"`
-	Messages   []Message         `json:"messages,omitempty"`
-	Renderer   string            `json:"renderer,omitempty"`
-	Parser     string            `json:"parser,omitempty"`
+	// From is the name of the model or file to use as the source.
+	From string `json:"from,omitempty"`
+
+	// RemoteHost is the URL of the upstream Ollama API for the model (if any).
+	RemoteHost string `json:"remote_host,omitempty"`
+
+	// Files is a map of files to include when creating the model.
+	Files map[string]string `json:"files,omitempty"`
+
+	// Adapters is a map of LoRA adapters to include when creating the model.
+	Adapters map[string]string `json:"adapters,omitempty"`
+
+	// Template is the template used when constructing a request to the model.
+	Template string `json:"template,omitempty"`
+
+	// License is a string or list of strings for licenses.
+	License any `json:"license,omitempty"`
+
+	// System is the system prompt for the model.
+	System string `json:"system,omitempty"`
+
+	// Parameters is a map of hyper-parameters which are applied to the model.
+	Parameters map[string]any `json:"parameters,omitempty"`
+
+	// Messages is a list of messages added to the model before chat and generation requests.
+	Messages []Message `json:"messages,omitempty"`
+
+	Renderer string `json:"renderer,omitempty"`
+	Parser   string `json:"parser,omitempty"`
+
+	// Info is a map of additional information for the model.
+	Info map[string]any `json:"info,omitempty"`
 
 	// Deprecated: set the model name with Model instead
 	Name string `json:"name"`
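As a sketch of the new `RemoteHost` and `Info` fields on `CreateRequest`: the URL, model names, and whether the server accepts a remote-backed entry without local `Files` are all assumptions here, since the server-side handling is outside this hunk.

```go
// createRemote registers a model entry that points at an upstream Ollama host.
// The field values below are illustrative placeholders.
func createRemote(ctx context.Context, client *api.Client) error {
	req := &api.CreateRequest{
		Model:      "my-remote-model",
		From:       "upstream-model",      // hypothetical upstream model name
		RemoteHost: "https://example.com", // hypothetical upstream Ollama API URL
		Info:       map[string]any{"note": "proxied model"},
	}
	return client.Create(ctx, req, func(resp api.ProgressResponse) error {
		fmt.Println(resp.Status)
		return nil
	})
}
```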
@@ -480,6 +537,8 @@ type ShowResponse struct {
 	Parser        string         `json:"parser,omitempty"`
 	Details       ModelDetails   `json:"details,omitempty"`
 	Messages      []Message      `json:"messages,omitempty"`
+	RemoteModel   string         `json:"remote_model,omitempty"`
+	RemoteHost    string         `json:"remote_host,omitempty"`
 	ModelInfo     map[string]any `json:"model_info,omitempty"`
 	ProjectorInfo map[string]any `json:"projector_info,omitempty"`
 	Tensors       []Tensor       `json:"tensors,omitempty"`
@@ -538,12 +597,14 @@ type ProcessResponse struct {
 
 // ListModelResponse is a single model description in [ListResponse].
 type ListModelResponse struct {
-	Name       string       `json:"name"`
-	Model      string       `json:"model"`
-	ModifiedAt time.Time    `json:"modified_at"`
-	Size       int64        `json:"size"`
-	Digest     string       `json:"digest"`
-	Details    ModelDetails `json:"details,omitempty"`
+	Name        string       `json:"name"`
+	Model       string       `json:"model"`
+	RemoteModel string       `json:"remote_model,omitempty"`
+	RemoteHost  string       `json:"remote_host,omitempty"`
+	ModifiedAt  time.Time    `json:"modified_at"`
+	Size        int64        `json:"size"`
+	Digest      string       `json:"digest"`
+	Details     ModelDetails `json:"details,omitempty"`
 }
 
 // ProcessModelResponse is a single model description in [ProcessResponse].
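The extra fields on `ListModelResponse` let callers distinguish remote-backed entries when listing models; a minimal sketch (assuming `RemoteModel`/`RemoteHost` stay empty for local models):

```go
// printModels lists installed models and flags the ones that proxy to an
// upstream host.
func printModels(ctx context.Context, client *api.Client) error {
	list, err := client.List(ctx)
	if err != nil {
		return err
	}
	for _, m := range list.Models {
		if m.RemoteHost != "" {
			fmt.Printf("%s -> %s (%s)\n", m.Name, m.RemoteModel, m.RemoteHost)
		} else {
			fmt.Println(m.Name)
		}
	}
	return nil
}
```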
@@ -567,6 +628,12 @@ type GenerateResponse struct {
 	// Model is the model name that generated the response.
 	Model string `json:"model"`
 
+	// RemoteModel is the name of the upstream model that generated the response.
+	RemoteModel string `json:"remote_model,omitempty"`
+
+	// RemoteHost is the URL of the upstream Ollama host that generated the response.
+	RemoteHost string `json:"remote_host,omitempty"`
+
 	// CreatedAt is the timestamp of the response.
 	CreatedAt time.Time `json:"created_at"`
 
@@ -604,6 +671,18 @@ type ModelDetails struct {
 	QuantizationLevel string `json:"quantization_level"`
 }
 
+// UserResponse provides information about a user.
+type UserResponse struct {
+	ID        uuid.UUID `json:"id"`
+	Email     string    `json:"email"`
+	Name      string    `json:"name"`
+	Bio       string    `json:"bio,omitempty"`
+	AvatarURL string    `json:"avatarurl,omitempty"`
+	FirstName string    `json:"firstname,omitempty"`
+	LastName  string    `json:"lastname,omitempty"`
+	Plan      string    `json:"plan,omitempty"`
+}
+
 // Tensor describes the metadata for a given tensor.
 type Tensor struct {
 	Name string `json:"name"`
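`UserResponse` is new and no endpoint for it appears in this hunk, so the sketch below only demonstrates the wire shape by decoding a hand-written payload; the values are made up.

```go
package main

import (
	"encoding/json"
	"fmt"
	"log"

	"github.com/ollama/ollama/api"
)

func main() {
	// uuid.UUID unmarshals from its canonical string form, so a plain JSON
	// string works for the "id" field.
	payload := []byte(`{"id":"7d9f3a2e-0c4b-4c61-9d2e-1a2b3c4d5e6f","email":"jane@example.com","name":"jane","plan":"free"}`)

	var user api.UserResponse
	if err := json.Unmarshal(payload, &user); err != nil {
		log.Fatal(err)
	}
	fmt.Println(user.ID, user.Email, user.Plan)
}
```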