6 files changed: +93 −1 lines.
Changed directories: openai-core/src/main/scala/io/cequence/openaiscala and openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai.

build.sbt
@@ -7,7 +7,7 @@ val scala3 = "3.2.2"
 
 ThisBuild / organization := "io.cequence"
 ThisBuild / scalaVersion := scala212
-ThisBuild / version := "1.1.1.RC.11"
+ThisBuild / version := "1.1.1.RC.17"
 ThisBuild / isSnapshot := false
 
 lazy val commonSettings = Seq(
@@ -202,4 +202,9 @@ object NonOpenAIModelId {
 
   // context 131072
   val grok_beta = "grok-beta"
+
+  // Deepseek
+  // context 64K, 4K (8K Beta)
+  val deepseek_chat = "deepseek-chat"
+  val deepseek_coder = "deepseek-coder"
 }
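These ids are plain model-name strings that are passed through as the `model` field of a request. A minimal sketch of how the new id is referenced when building request settings (field names taken from the example files further down; the remaining settings fields are assumed to have defaults):

import io.cequence.openaiscala.domain.NonOpenAIModelId
import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings

// sketch: request settings targeting the new Deepseek chat model ("deepseek-chat")
val settings = CreateChatCompletionSettings(
  model = NonOpenAIModelId.deepseek_chat,
  temperature = Some(0.1),
  max_tokens = Some(1024)
)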
@@ -13,4 +13,6 @@ object ChatProviderSettings {
   val togetherAI =
     ProviderSettings("https://api.together.xyz/v1/", "TOGETHERAI_API_KEY")
   val grok = ProviderSettings("https://api.x.ai/v1/", "GROK_API_KEY")
+  val deepseek = ProviderSettings("https://api.deepseek.com/", "DEEPSEEK_API_KEY")
+  val deepseekBeta = ProviderSettings("https://api.deepseek.com/beta/", "DEEPSEEK_API_KEY")
 }
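A ProviderSettings entry, as used above, simply pairs an OpenAI-compatible base URL with the name of the environment variable that holds the API key. A sketch of what an additional, hypothetical entry would look like (the URL and variable name below are placeholders, not part of this change):

// hypothetical provider entry, following the same pattern as deepseek/deepseekBeta above
val myProvider = ProviderSettings("https://api.example.com/v1/", "MY_PROVIDER_API_KEY")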
@@ -90,6 +90,16 @@ object ChatCompletionProvider {
   ): OpenAIChatCompletionStreamedService =
     AnthropicServiceFactory.asOpenAI(withCache = withCache)
 
+  def deepseek(
+    implicit ec: ExecutionContext,
+    m: Materializer
+  ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseek)
+
+  def deepseekBeta(
+    implicit ec: ExecutionContext,
+    m: Materializer
+  ): OpenAIChatCompletionStreamedService = provide(ChatProviderSettings.deepseekBeta)
+
   private def provide(
     settings: ProviderSettings
   )(
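Callers only need an implicit ExecutionContext and Materializer in scope to obtain the new services; the private provide helper presumably reads the configured environment variable and builds an OpenAI-compatible client against the base URL. A minimal usage sketch outside of ExampleBase (the ActorSystem/Materializer setup below is an assumption, not part of this diff):

import akka.actor.ActorSystem
import akka.stream.Materializer
import scala.concurrent.ExecutionContext

implicit val system: ActorSystem = ActorSystem("deepseek-example") // hypothetical system name
implicit val ec: ExecutionContext = system.dispatcher
implicit val materializer: Materializer = Materializer(system)

// assumes ChatCompletionProvider is visible, as it is from the example files below
val deepseekService = ChatCompletionProvider.deepseek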
New file (object DeepseekCreateChatCompletion) in openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai:
+package io.cequence.openaiscala.examples.nonopenai
+
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionService
+
+import scala.concurrent.Future
+
+/**
+ * Requires `DEEPSEEK_API_KEY` environment variable to be set.
+ */
+object DeepseekCreateChatCompletion extends ExampleBase[OpenAIChatCompletionService] {
+
+  override val service: OpenAIChatCompletionService = ChatCompletionProvider.deepseek
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.deepseek_chat
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletion(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.1),
+          max_tokens = Some(1024)
+        )
+      )
+      .map(printMessageContent)
+}
New file (object DeepseekCreateChatCompletionStreamed) in openai-examples/src/main/scala/io/cequence/openaiscala/examples/nonopenai:
+package io.cequence.openaiscala.examples.nonopenai
+
+import akka.stream.scaladsl.Sink
+import io.cequence.openaiscala.domain._
+import io.cequence.openaiscala.domain.settings.CreateChatCompletionSettings
+import io.cequence.openaiscala.examples.ExampleBase
+import io.cequence.openaiscala.service.OpenAIChatCompletionStreamedServiceExtra
+
+import scala.concurrent.Future
+
+// requires `openai-scala-client-stream` as a dependency and `DEEPSEEK_API_KEY` environment variable to be set
+object DeepseekCreateChatCompletionStreamed
+    extends ExampleBase[OpenAIChatCompletionStreamedServiceExtra] {
+
+  override val service: OpenAIChatCompletionStreamedServiceExtra = ChatCompletionProvider.deepseekBeta
+
+  private val messages = Seq(
+    SystemMessage("You are a helpful assistant."),
+    UserMessage("What is the weather like in Norway?")
+  )
+
+  private val modelId = NonOpenAIModelId.deepseek_chat
+
+  override protected def run: Future[_] =
+    service
+      .createChatCompletionStreamed(
+        messages = messages,
+        settings = CreateChatCompletionSettings(
+          model = modelId,
+          temperature = Some(0.01),
+          max_tokens = Some(512)
+        )
+      )
+      .runWith(
+        Sink.foreach { completion =>
+          val content = completion.choices.headOption.flatMap(_.delta.content)
+          print(content.getOrElse(""))
+        }
+      )
+}