|
37 | 37 | "source": [ |
38 | 38 | "## Getting Started\n", |
39 | 39 | "\n", |
40 | | - "CrateDB supports storing vectors since version 5.5. You can install CrateDB on your own,\n", |
41 | | - "or leverage the fully managed service of [CrateDB Cloud].\n", |
| 40 | + "CrateDB has supported storing vectors since version 5.5. You can leverage the fully managed service of\n", |
| 41 | + "[CrateDB Cloud], or install CrateDB on your own, for example using Docker.\n", |
42 | 42 | "\n", |
43 | | - "If you want to test with the very latest developments, you can run CrateDB instance based\n", |
44 | | - "on Docker:\n", |
45 | | - "\n", |
46 | | - "```\n", |
47 | | - "docker run --publish 4200:4200 --publish 5432:5432 --pull=always crate/crate:5.5 -Cdiscovery.type=single-node\n", |
| 43 | + "```shell\n", |
| 44 | + "docker run --publish 4200:4200 --publish 5432:5432 --pull=always crate:latest -Cdiscovery.type=single-node\n", |
48 | 45 | "```\n", |
49 | 46 | "\n", |
50 | 47 | "[CrateDB Cloud]: https://console.cratedb.cloud/" |
|
370 | 367 | " for record in results:\n", |
371 | 368 | " documents.append(record[0])\n", |
372 | 369 | " \n", |
373 | | - "print(len(documents))" |
| 370 | + "print(documents)" |
374 | 371 | ] |
375 | 372 | }, |
376 | 373 | { |
|
405 | 402 | } |
406 | 403 | ], |
407 | 404 | "source": [ |
| 405 | + "from openai import OpenAI\n", |
| 406 | + "\n", |
| 407 | + "# Concatenate the found documents into the context that will be provided in the system prompt\n", |
408 | 408 | "context = '---\\n'.join(doc for doc in documents)\n", |
409 | 409 | "\n", |
| 410 | + "# Give instructions and context in the system prompt\n", |
410 | 411 | "system_prompt = f\"\"\"\n", |
411 | 412 | "You are a time series expert and get questions from the user covering the area of time series databases and time series use cases. \n", |
412 | 413 | "Please answer the user's question in the language it was asked in. \n", |
|
415 | 416 | "Context: \n", |
416 | 417 | "{context}\"\"\"\n", |
417 | 418 | "\n", |
418 | | - "#openai.api_key = os.environ['OPENAI_API_KEY']\n", |
419 | | - "\n", |
420 | | - "\n", |
421 | | - "client = OpenAI(\n", |
422 | | - " api_key=os.environ['OPENAI_API_KEY']\n", |
423 | | - ")\n", |
| 419 | + "client = OpenAI(api_key=os.environ['OPENAI_API_KEY'])\n", |
424 | 420 | "\n", |
425 | 421 | "chat_completion = client.chat.completions.create(\n", |
426 | 422 | " model=\"gpt-3.5-turbo\", \n", |
|
430 | 426 | " ]\n", |
431 | 427 | ")\n", |
432 | 428 | "\n", |
433 | | - "#chat_completion = openai.ChatCompletion.create(model=\"gpt-3.5-turbo\", \n", |
434 | | - "# messages=[{\"role\": \"system\", \"content\": system_prompt},\n", |
435 | | - "# {\"role\": \"user\", \"content\": my_question}])\n", |
436 | 429 | "chat_completion.choices[0].message.content" |
437 | 430 | ] |
438 | 431 | } |
|
0 commit comments