
Commit 1c9eb53

Update version (#1648)
* Add support for JSON and SSE responses to invoke endpoint
* update tensorlake version, indexify server and executor version
* update poetry lock
* fix import
* fix compilation errors
* fix test
1 parent 5c2b9e2 commit 1c9eb53
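
For the first commit bullet, here is a hedged sketch of what calling the updated invoke endpoint could look like from a client. Everything in it is illustrative: the URL, namespace, graph name, and payload are placeholders, and it assumes reqwest (with its "stream" feature), tokio, and futures-util; only the block_until_finish query parameter comes from the diff further down.

use futures_util::StreamExt;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let client = reqwest::Client::new();

    // Placeholder URL and payload; the real path and body shape are defined by
    // the server's invoke routes, not by this sketch.
    let resp = client
        .post("http://localhost:8900/namespaces/default/compute_graphs/example_graph/invoke_object")
        .query(&[("block_until_finish", "true")])
        .header("Content-Type", "application/json")
        .body(r#"{"input": 42}"#)
        .send()
        .await?;

    // With block_until_finish=true the diff below answers with an SSE stream of
    // progress events; per the commit message, a plain JSON response is the
    // other supported shape (assumed here to be the non-blocking path).
    let mut body = resp.bytes_stream();
    while let Some(chunk) = body.next().await {
        print!("{}", String::from_utf8_lossy(&chunk?));
    }
    Ok(())
}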

File tree

7 files changed: +36, -38 lines

Cargo.lock

Lines changed: 1 addition & 1 deletion
Generated file; diff not rendered by default.

indexify/poetry.lock

Lines changed: 23 additions & 23 deletions
Generated file; diff not rendered by default.

indexify/pyproject.toml

Lines changed: 2 additions & 2 deletions
@@ -1,7 +1,7 @@
 [tool.poetry]
 name = "indexify"
 # Incremented if any of the components provided in this packages are updated.
-version = "0.4.20"
+version = "0.4.21"
 description = "Open Source Indexify components and helper tools"
 authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
 license = "Apache 2.0"
@@ -29,7 +29,7 @@ psutil = "^7.0.0"
 boto3 = "^1.39.14"
 # Adds function-executor binary, utils lib, sdk used in indexify-cli commands.
 # We need to specify the tensorlake version exactly because pip install doesn't respect poetry.lock files.
-tensorlake = "0.2.32"
+tensorlake = "0.2.33"
 # Uncomment the next line to use local tensorlake package (only for development!)
 # tensorlake = { path = "../tensorlake", develop = true }
 # grpcio is provided by tensorlake

indexify/tests/features/test_cacheable.py

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ def setUp(self):
 
     def _stream(self) -> tuple[dict[str, int], str]:
         counts = defaultdict(int)
-        events = self.graph.stream(block_until_done=True, count=2)
+        events = self.graph.stream(count=2)
         try:
             while True:
                 event = next(events)

server/Cargo.toml

Lines changed: 1 addition & 1 deletion
@@ -1,6 +1,6 @@
 [package]
 name = "indexify-server"
-version = "0.3.7"
+version = "0.3.8"
 edition = "2021"
 authors = ["Tensorlake Inc. <support@tensorlake.ai>"]
 license = "Apache-2.0"

server/src/routes/invoke.rs

Lines changed: 7 additions & 9 deletions
@@ -262,14 +262,13 @@ pub async fn invoke_with_object_v1(
 }
 
 #[derive(Debug, Deserialize)]
-pub struct RequestQueryParam {
+pub struct RequestQueryParams {
     pub block_until_finish: Option<bool>,
 }
 
-#[axum::debug_handler]
 pub async fn invoke_with_object(
     Path((namespace, compute_graph)): Path<(String, String)>,
-    Query(params): Query<RequestQueryParam>,
+    Query(params): Query<RequestQueryParams>,
     State(state): State<RouteState>,
     headers: HeaderMap,
     body: Body,
@@ -318,7 +317,6 @@ pub async fn invoke_with_object(
     if should_block {
         rx.replace(state.indexify_state.task_event_stream());
     }
-
     let compute_graph = state
         .indexify_state
         .reader()
@@ -342,7 +340,7 @@
         namespace: namespace.clone(),
         compute_graph_name: compute_graph.name.clone(),
         invocation_payload,
-        ctx: graph_invocation_ctx.clone(),
+        ctx: graph_invocation_ctx,
     });
     state
         .indexify_state
@@ -357,13 +355,13 @@
 
     let invocation_event_stream =
         create_invocation_progress_stream(id, rx, state, namespace, compute_graph.name).await;
-    Ok(axum::response::Sse::new(invocation_event_stream)
-        .keep_alive(
+    Ok(
+        axum::response::Sse::new(invocation_event_stream).keep_alive(
             axum::response::sse::KeepAlive::new()
                 .interval(Duration::from_secs(1))
                 .text("keep-alive-text"),
-        )
-        .into_response())
+        ),
+    )
 }
 
 /// Stream progress of a request until it is completed
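
The last hunk is standard axum SSE construction: wrap the event stream in Sse and attach a KeepAlive. Below is a minimal, self-contained sketch of that pattern, assuming axum 0.7 with the futures and tokio crates; the route path, handler name, and event payloads are made up for illustration and are not the project's code.

use std::{convert::Infallible, time::Duration};

use axum::{
    response::sse::{Event, KeepAlive, Sse},
    routing::get,
    Router,
};
use futures::stream::{self, Stream};

// Illustrative handler: wrap a finite event stream in `Sse` and attach the same
// keep-alive settings the diff uses (1-second interval, "keep-alive-text").
async fn progress_handler() -> Sse<impl Stream<Item = Result<Event, Infallible>>> {
    let events = stream::iter(
        (0..3).map(|i| Ok::<_, Infallible>(Event::default().data(format!("progress {i}")))),
    );
    Sse::new(events).keep_alive(
        KeepAlive::new()
            .interval(Duration::from_secs(1))
            .text("keep-alive-text"),
    )
}

#[tokio::main]
async fn main() {
    // Hypothetical route; the real endpoint lives under the server's invoke routes.
    let app = Router::new().route("/progress", get(progress_handler));
    let listener = tokio::net::TcpListener::bind("127.0.0.1:3000").await.unwrap();
    axum::serve(listener, app).await.unwrap();
}

When no events are flowing, axum emits the configured keep-alive text as an SSE comment at the given interval, which stops proxies and load balancers from timing out a connection whose request is still running.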

0 commit comments
