@@ -47,10 +47,9 @@ export class Prompts extends APIResource {
   }
 
   /**
-   * Fetches the configured model parameters and messages rendered with the provided
-   * variables mapped to the set LLM provider. This endpoint abstracts the need to
-   * handle mapping between different providers, while still allowing direct calls to
-   * the providers.
+   * Fetches the model configuration parameters for a specified prompt, including
+   * penalty settings, response format, and the model messages rendered with the
+   * given variables mapped to the set LLM provider.
   */
   getParameters(
     id: string,
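For context on the reworded doc comment, here is a minimal call sketch. Only the `id: string` parameter of `getParameters` is visible in this hunk, so the client wiring and the variables argument below are assumptions, not the SDK's confirmed signature.

```ts
// Sketch only: `client` and the second argument are hypothetical; the hunk
// shows just the `id: string` parameter of getParameters.
declare const client: {
  prompts: {
    getParameters(id: string, opts?: { variables?: Record<string, string> }): Promise<unknown>;
  };
};

async function example(): Promise<void> {
  // Per the updated doc comment, this returns provider-mapped model
  // configuration parameters plus the rendered messages.
  const params = await client.prompts.getParameters('prompt_123', {
    variables: { userName: 'Ada' }, // hypothetical variable bag
  });
  console.log(params);
}
```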
@@ -539,9 +538,14 @@ export namespace PromptConfiguration {
     maxTokens: number | null;
 
     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;
+
+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
 
     parallelToolCalls: boolean;
 
@@ -550,11 +554,6 @@ export namespace PromptConfiguration {
      */
     presencePenalty: number;
 
-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
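The rename reads most clearly side by side. A minimal sketch of the affected model settings in PromptConfiguration follows; the interface names are made up for illustration, and only fields visible in this diff are included.

```ts
// Before/after sketch of the renamed fields (illustrative interface names;
// fields not shown in the diff are omitted).
interface ModelParametersBefore {
  maxTokens: number | null;
  name: string;                         // model name for the provider
  parallelToolCalls: boolean;
  presencePenalty: number;
  provider: 'ANTHROPIC' | 'OPENAI';     // LLM model provider
}

interface ModelParametersAfter {
  maxTokens: number | null;
  modelName: string;                    // e.g. "gpt-3.5-turbo"
  modelProvider: 'ANTHROPIC' | 'OPENAI';
  parallelToolCalls: boolean;
  presencePenalty: number;
}
```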
@@ -717,9 +716,14 @@ export namespace PromptCreateParams {
     maxTokens: number | null;
 
     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;
+
+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
 
     parallelToolCalls: boolean;
 
@@ -728,11 +732,6 @@ export namespace PromptCreateParams {
      */
     presencePenalty: number;
 
-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
@@ -867,9 +866,14 @@ export namespace PromptUpdateParams {
     maxTokens: number | null;
 
     /**
-     * The name of the model for the provider.
+     * Example: "gpt-3.5-turbo"
      */
-    name: string;
+    modelName: string;
+
+    /**
+     * The provider of the provided model.
+     */
+    modelProvider: 'ANTHROPIC' | 'OPENAI';
 
     parallelToolCalls: boolean;
 
@@ -878,11 +882,6 @@ export namespace PromptUpdateParams {
      */
     presencePenalty: number;
 
-    /**
-     * The LLM model provider.
-     */
-    provider: 'ANTHROPIC' | 'OPENAI';
-
     /**
      * Example: PromptResponseFormat.TEXT
      */
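Because the same rename lands in PromptCreateParams and PromptUpdateParams, callers that build these params need the new keys as well. A hedged migration sketch, assuming all other fields in the params object stay as they were:

```ts
// Caller-side migration sketch; only the renamed keys change, every other
// field shown here keeps its old name and meaning.
const modelParameters = {
  maxTokens: null,
  modelName: 'gpt-3.5-turbo',       // was: name
  modelProvider: 'OPENAI' as const, // was: provider
  parallelToolCalls: false,
  presencePenalty: 0,
};
```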