
Commit 986cb22

gustavocidornelas authored and whoseoyster committed
Completes OPEN-5123 Create llm_monitors module
1 parent 4ac7687 commit 986cb22

File tree

5 files changed: +576 -3 lines changed


examples/monitoring/quickstart/monitoring-quickstart.ipynb renamed to examples/monitoring/quickstart/discriminative-ml/monitoring-quickstart.ipynb

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@
    "id": "ef55abc9",
    "metadata": {},
    "source": [
-    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/examples-gallery/blob/main/monitoring/quickstart/monitoring-quickstart.ipynb)\n",
+    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/examples-gallery/blob/main/monitoring/quickstart/discriminative-ml/monitoring-quickstart.ipynb)\n",
     "\n",
     "\n",
     "# <a id=\"top\">Monitoring quickstart</a>\n",
Lines changed: 279 additions & 0 deletions
@@ -0,0 +1,279 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "id": "2722b419",
+   "metadata": {},
+   "source": [
+    "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/openlayer-ai/examples-gallery/blob/main/monitoring/quickstart/llms/openai-llm-monitor.ipynb)\n",
+    "\n",
+    "\n",
+    "# <a id=\"top\">LLM monitoring quickstart</a>\n",
+    "\n",
+    "This notebook illustrates how to get started monitoring OpenAI LLMs with Openlayer."
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "75c2a473",
+   "metadata": {},
+   "source": [
+    "## 1. Set the environment variables"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "id": "f3f4fa13",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import os\n",
+    "\n",
+    "os.environ[\"OPENLAYER_API_KEY\"] = \"YOUR_OPENLAYER_API_KEY_HERE\"\n",
+    "os.environ[\"OPENLAYER_PROJECT_NAME\"] = \"Your Openlayer Project Name Here\" "
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "9758533f",
+   "metadata": {},
+   "source": [
+    "## 2. Instantiate the monitor"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "id": "e60584fa",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from openlayer import llm_monitors\n",
+    "\n",
+    "openai_monitor = llm_monitors.OpenAIMonitor(publish=True)  # with publish=True, every row gets published to Openlayer automatically"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "72a6b954",
+   "metadata": {},
+   "source": [
+    "## 3. Turn it on!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 8,
+   "id": "2cb5ce9b",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Monitoring is already on!\n",
+      "To stop it, call `stop_monitoring`.\n"
+     ]
+    }
+   ],
+   "source": [
+    "openai_monitor.start_monitoring()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "76a350b4",
+   "metadata": {},
+   "source": [
+    "That's it! Now you can continue using OpenAI LLMs normally. The data is automatically published to Openlayer and you can start creating goals around it!"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "id": "fb648049-00bd-447c-8feb-ecf794d45ba7",
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import openai\n",
+    "\n",
+    "openai.api_key = \"YOUR_OPENAI_API_KEY_HERE\""
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 5,
+   "id": "7d1e8c74-c667-44a7-b8ea-8f5b83049ea7",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Data published!\n"
+     ]
+    }
+   ],
+   "source": [
+    "completion = openai.ChatCompletion.create(\n",
+    "    model=\"gpt-3.5-turbo\",\n",
+    "    messages=[\n",
+    "        {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n",
+    "        {\"role\": \"user\", \"content\": \"How are you doing today?\"}\n",
+    "    ]\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "id": "58dd9603",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Data published!\n"
+     ]
+    }
+   ],
+   "source": [
+    "completion = openai.ChatCompletion.create(\n",
+    "    model=\"gpt-3.5-turbo\",\n",
+    "    messages=[\n",
+    "        {\"role\": \"system\", \"content\": \"You are a helpful assistant.\"},\n",
+    "        {\"role\": \"user\", \"content\": \"Is Python strongly typed?\"}\n",
+    "    ]\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "id": "f7c3dfbc",
+   "metadata": {},
+   "source": [
+    "You can also access all the data accumulated (and in this case, published to Openlayer) with the `data` attribute:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "id": "27bb2bdc",
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/html": [
+       "<div>\n",
+       "<style scoped>\n",
+       "    .dataframe tbody tr th:only-of-type {\n",
+       "        vertical-align: middle;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe tbody tr th {\n",
+       "        vertical-align: top;\n",
+       "    }\n",
+       "\n",
+       "    .dataframe thead th {\n",
+       "        text-align: right;\n",
+       "    }\n",
+       "</style>\n",
+       "<table border=\"1\" class=\"dataframe\">\n",
+       "  <thead>\n",
+       "    <tr style=\"text-align: right;\">\n",
+       "      <th></th>\n",
+       "      <th>input</th>\n",
+       "      <th>output</th>\n",
+       "      <th>tokens</th>\n",
+       "      <th>latency</th>\n",
+       "    </tr>\n",
+       "  </thead>\n",
+       "  <tbody>\n",
+       "    <tr>\n",
+       "      <th>0</th>\n",
+       "      <td>How are you doing today?</td>\n",
+       "      <td>Thank you for asking! As an AI, I don't have f...</td>\n",
+       "      <td>62</td>\n",
+       "      <td>6400.212049</td>\n",
+       "    </tr>\n",
+       "    <tr>\n",
+       "      <th>1</th>\n",
+       "      <td>Is Python strongly typed?</td>\n",
+       "      <td>Yes, Python is considered a strongly typed lan...</td>\n",
+       "      <td>68</td>\n",
+       "      <td>7226.889133</td>\n",
+       "    </tr>\n",
+       "  </tbody>\n",
+       "</table>\n",
+       "</div>"
+      ],
+      "text/plain": [
+       "                       input  \\\n",
+       "0   How are you doing today?   \n",
+       "1  Is Python strongly typed?   \n",
+       "\n",
+       "                                              output  tokens      latency  \n",
+       "0  Thank you for asking! As an AI, I don't have f...      62  6400.212049  \n",
+       "1  Yes, Python is considered a strongly typed lan...      68  7226.889133  "
+      ]
+     },
+     "execution_count": 7,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "openai_monitor.data"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 9,
+   "id": "f9139f2b",
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Monitoring stopped.\n"
+     ]
+    }
+   ],
+   "source": [
+    "openai_monitor.stop_monitoring()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "id": "654bb896",
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.8.13"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
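For reference, the notebook added above boils down to the short flow below. This is a minimal sketch assembled only from the cells shown in the diff, assuming `llm_monitors.OpenAIMonitor` behaves exactly as the notebook demonstrates; the API keys and project name are placeholders to fill in.

import os

import openai
from openlayer import llm_monitors

# Placeholders: set your own Openlayer and OpenAI credentials
os.environ["OPENLAYER_API_KEY"] = "YOUR_OPENLAYER_API_KEY_HERE"
os.environ["OPENLAYER_PROJECT_NAME"] = "Your Openlayer Project Name Here"
openai.api_key = "YOUR_OPENAI_API_KEY_HERE"

# publish=True sends every recorded row to Openlayer automatically
openai_monitor = llm_monitors.OpenAIMonitor(publish=True)
openai_monitor.start_monitoring()

# Use OpenAI as usual; the monitor records input, output, tokens, and latency
completion = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Is Python strongly typed?"},
    ],
)

print(openai_monitor.data)  # accumulated rows (input, output, tokens, latency)
openai_monitor.stop_monitoring()

The notebook outputs ("Data published!" after each call) suggest every completion is published as soon as it is created, which is why no explicit flush step appears in this flow.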

openlayer/__init__.py

Lines changed: 1 addition & 1 deletion
@@ -1159,7 +1159,7 @@ def publish_batch_data(
             presigned_url_query_params=presigned_url_query_params,
         )
 
-        print("Batch of data published!")
+        print("Data published!")
 
     def _add_default_column(
         self, config: Dict[str, any], df: pd.DataFrame, column_name: str

0 commit comments
