Commit 87bab3c

Update tests and CLI to new Tensorlake SDK
1 parent ae055be commit 87bab3c

20 files changed: +259 −272 lines changed
Lines changed: 3 additions & 3 deletions

@@ -1,18 +1,18 @@
 import click
 
-from . import build_image, deploy, executor
+from . import build_images, deploy, executor
 
 
 @click.group()
 @click.version_option(package_name="indexify", prog_name="indexify-cli")
 @click.pass_context
 def cli(ctx: click.Context):
     """
-    Indexify CLI to manage and deploy workflows to Indexify Server and run Indexify Executors.
+    Indexify CLI to manage and deploy applications to Indexify Server and run Indexify Executors.
     """
     pass
 
 
-cli.add_command(build_image.build_image)
+cli.add_command(build_images.build_images)
 cli.add_command(deploy.deploy)
 cli.add_command(executor.executor)
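
The renamed command group can be smoke-tested without a server or Docker daemon using Click's built-in test runner. A minimal sketch, not part of this commit: it assumes the group is importable as indexify.cli.cli and that Click's default naming maps the build_images function to a build-images subcommand.

# Hypothetical smoke test for the renamed subcommand; not part of this commit.
from click.testing import CliRunner

from indexify.cli import cli  # assumes the group defined in the diff above


def smoke_test_cli() -> None:
    runner = CliRunner()
    result = runner.invoke(cli, ["--help"])
    assert result.exit_code == 0
    # Click (>= 7) derives the subcommand name "build-images" from build_images.
    assert "build-images" in result.output


if __name__ == "__main__":
    smoke_test_cli()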

indexify/src/indexify/cli/build_image.py renamed to indexify/src/indexify/cli/build_images.py

Lines changed: 33 additions & 47 deletions

@@ -1,46 +1,43 @@
 import importlib
-from typing import Any, Generator, Tuple
+import os
+from typing import Any, Generator, Set
 
 import click
 import docker
 import docker.api.build
 from docker.errors import BuildError
-from tensorlake.functions_sdk.image import Image
-from tensorlake.functions_sdk.workflow_module import (
-    WorkflowModuleInfo,
-    load_workflow_module_info,
+from docker.models.images import Image as DockerImage
+from tensorlake.applications import Image
+from tensorlake.applications.image import (
+    ImageInformation,
+    dockerfile_content,
+    image_infos,
 )
+from tensorlake.applications.remote.application.ignored_code_paths import (
+    ignored_code_paths,
+)
+from tensorlake.applications.remote.application.loader import load_application
 
 
 @click.command(
-    short_help="Build images for graphs/workflows defined in the workflow file"
+    short_help="Builds images for application defined in <application-source-path> directory or file"
 )
 @click.argument(
-    "workflow-file-path",
-    type=click.Path(exists=True, file_okay=True, dir_okay=False),
-)
-@click.option(
-    "-i",
-    "--image-names",
-    multiple=True,
-    help="Names of images to build. Can be specified multiple times. If not provided, all images will be built.",
+    "application-path",
+    type=click.Path(exists=True, file_okay=True, dir_okay=True),
 )
-def build_image(
-    workflow_file_path: str,
-    image_names: tuple[str, ...] = None,
-):
-    """
-    Build the images associated to an Indexify workflow
-
-    A workflow is defined in a Python file, and the images are built using the local Docker daemon.
-    """
+def build_images(application_path: str):
     try:
-        workflow_module_info: WorkflowModuleInfo = load_workflow_module_info(
-            workflow_file_path
+        application_source_dir_or_file_path: str = os.path.abspath(application_path)
+
+        ignored_absolute_paths: Set[str] = ignored_code_paths(
+            os.path.dirname(application_source_dir_or_file_path)
        )
+
+        load_application(application_source_dir_or_file_path, ignored_absolute_paths)
     except Exception as e:
         click.secho(
-            f"Failed loading workflow file, please check the error message: {e}",
+            f"Failed to load the application modules, please check the error message: {e}",
             fg="red",
         )
         raise click.Abort
@@ -49,39 +46,28 @@ def build_image(
     docker_client.ping()
 
     indexify_version: str = importlib.metadata.version("indexify")
-    for image in workflow_module_info.images.keys():
+    for image, image_info in image_infos().items():
         image: Image
-        if len(image_names) > 0 and image.image_name not in image_names:
-            click.echo(
-                f"Skipping image `{image.image_name}` as it is not in the provided image names."
-            )
-            continue
-
-        click.echo(f"Building image `{image.image_name}`")
-
+        image_info: ImageInformation
+        click.echo(f"Building image `{image.name}:{image.tag}`")
         image.run(f"pip install 'indexify=={indexify_version}'")
-        built_image, logs_generator = _build(image=image, docker_client=docker_client)
+
         try:
             built_image, logs_generator = _build(
                 image=image, docker_client=docker_client
             )
+            built_image: DockerImage
            _print_build_log(logs_generator)
-            click.secho(f"built image: {built_image.tags[0]}", fg="green")
+            click.secho(f"Built image: {built_image.tags[0]}", fg="green")
         except BuildError as e:
             raise click.Abort() from e
 
-    click.secho(f"built image: {built_image.tags[0]}", fg="green")
-
 
 def _build(
     image: Image, docker_client: docker.DockerClient
-) -> Tuple[docker.models.images.Image, Generator[str, Any, None]]:
-    docker_file = image.dockerfile()
-    image_name = (
-        image.image_name
-        if ":" in image.image_name
-        else f"{image.image_name}:{image.image_tag}"
-    )
+) -> tuple[DockerImage, Generator[str, Any, None]]:
+    docker_file_content: str = dockerfile_content(image)
+    image_name = image.name if ":" in image.name else f"{image.name}:{image.tag}"
 
     docker.api.build.process_dockerfile = lambda dockerfile, path: (
         "Dockerfile",
@@ -91,7 +77,7 @@ def _build(
     try:
         built_image, logs_generator = docker_client.images.build(
             path=".",
-            dockerfile=docker_file,
+            dockerfile=docker_file_content,
             tag=image_name,
             rm=True,
             # pull=True, # optional: ensures fresh base images
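
_build() keeps the pre-existing trick of handing docker-py a Dockerfile as a string rather than a path: overriding docker.api.build.process_dockerfile makes the client inject the generated content into the build context as an in-memory "Dockerfile". A standalone sketch of that pattern, with illustrative names rather than this module's code:

# Illustrative sketch of the string-Dockerfile pattern used by _build() above.
from typing import Any, Generator

import docker
import docker.api.build
from docker.models.images import Image as DockerImage


def build_from_string(
    docker_client: docker.DockerClient, dockerfile_text: str, tag: str
) -> tuple[DockerImage, Generator[str, Any, None]]:
    # docker-py normally resolves `dockerfile` as a path inside the build
    # context; returning (name, content) makes it ship the string itself.
    docker.api.build.process_dockerfile = lambda dockerfile, path: (
        "Dockerfile",
        dockerfile,
    )
    return docker_client.images.build(
        path=".",
        dockerfile=dockerfile_text,
        tag=tag,
        rm=True,
    )
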
Lines changed: 19 additions & 35 deletions

@@ -1,57 +1,41 @@
+import traceback
+
 import click
-from tensorlake import Graph
-from tensorlake.functions_sdk.graph_serialization import graph_code_dir_path
-from tensorlake.functions_sdk.remote_graph import RemoteGraph
-from tensorlake.functions_sdk.workflow_module import (
-    WorkflowModuleInfo,
-    load_workflow_module_info,
-)
+from tensorlake.applications.remote.deploy import deploy as tl_deploy
 
 
 @click.command(
-    short_help="Deploy all graphs/workflows defined in the workflow file to Indexify"
+    short_help="Deploys application defined in <application-path> directory or file to Indexify"
 )
-# Path to the file where the graphs/workflows are defined as global variables
 @click.argument(
-    "workflow-file-path",
-    type=click.Path(exists=True, file_okay=True, dir_okay=False),
+    "application-path",
+    type=click.Path(exists=True, file_okay=True, dir_okay=True),
 )
 @click.option(
     "-u",
-    "--upgrade-queued-invocations",
+    "--upgrade-running-requests",
     is_flag=True,
     default=False,
-    help="Upgrade invocations that are already queued or running to use the deployed version of the graphs/workflows",
+    help="Upgrade requests that are already queued or running to use the new deployed version of the application",
 )
 def deploy(
-    workflow_file_path: str,
-    upgrade_queued_invocations: bool,
+    application_path: str,
+    upgrade_running_requests: bool,
 ):
-    click.echo(f"Preparing deployment for {workflow_file_path}")
+    click.echo(f"Preparing deployment for application from {application_path}")
+
     try:
-        workflow_module_info: WorkflowModuleInfo = load_workflow_module_info(
-            workflow_file_path
+        tl_deploy(
+            application_source_dir_or_file_path=application_path,
+            upgrade_running_requests=upgrade_running_requests,
+            load_application_modules=True,
        )
     except Exception as e:
         click.secho(
-            f"Failed loading workflow file, please check the error message: {e}",
+            f"Application could not be deployed, please check the error message:",
             fg="red",
         )
+        traceback.print_exception(e)
         raise click.Abort
 
-    for graph in workflow_module_info.graphs:
-        graph: Graph
-        try:
-            RemoteGraph.deploy(
-                graph,
-                code_dir_path=graph_code_dir_path(workflow_file_path),
-                upgrade_tasks_to_latest_version=upgrade_queued_invocations,
-            )
-        except Exception as e:
-            click.secho(
-                f"Graph {graph.name} could not be deployed, please check the error message: {e}",
-                fg="red",
-            )
-            raise click.Abort
-
-        click.secho(f"Deployed {graph.name}", fg="green")
+    click.secho(f"Successfully deployed the application", fg="green")

indexify/src/indexify/cli/executor.py

Lines changed: 2 additions & 2 deletions

@@ -10,7 +10,7 @@
 from importlib.metadata import version
 from pathlib import Path
 from socket import gethostname
-from typing import Dict, List, Optional
+from typing import Dict, List
 
 import click
 import nanoid
@@ -110,7 +110,7 @@ def executor(
     verbose: bool,
     very_verbose: bool,
     function_uris: List[str],
-    config_path: Optional[str],
+    config_path: str | None,
     executor_cache_path: str,
     monitoring_server_host: str,
     monitoring_server_port: int,
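
Note that the str | None form (PEP 604) is evaluated when the function is defined, so it requires Python 3.10+ unless annotation evaluation is deferred. A quick illustration of the equivalent spellings, not taken from this file:

# Equivalent annotations; the union syntax needs Python 3.10+ at definition time
# unless `from __future__ import annotations` defers evaluation.
from typing import Optional

def old_style(config_path: Optional[str] = None) -> None: ...

def new_style(config_path: "str | None" = None) -> None: ...  # string form also works on older Pythons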

indexify/src/indexify/proto/executor_api.proto

Lines changed: 1 addition & 0 deletions

@@ -293,6 +293,7 @@ message ReduceOp {
   // The DataPayload of the last function call in the reducer will have the function call id set
   // as the id of the ReduceOp.
   optional string id = 1;
+  // Contains at least two items.
   repeated FunctionArg collection = 2;
   optional FunctionRef reducer = 3;
   // This required metadata allows SDK to restore original function call from the arguments.
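
As the comments describe, a ReduceOp folds a collection of at least two arguments through the reducer, and the result of the last pairwise call stands in for the op as a whole. A rough Python analogy, illustrative only and not the server implementation:

# Illustrative analogy only: pairwise folding of a collection with >= 2 items.
from functools import reduce

def reducer(accumulator: int, item: int) -> int:
    return accumulator + item

collection = [1, 2, 3]  # "Contains at least two items."
assert reduce(reducer, collection) == 6  # the last pairwise call yields the final result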

indexify/tests/cli/test_environment_variables.py

Lines changed: 5 additions & 5 deletions

@@ -4,8 +4,8 @@
 from typing import Dict
 
 import pydantic
-import tensorlake.workflows.interface as tensorlake
-from tensorlake.workflows.remote.deploy import deploy
+from tensorlake.applications import Request, api, call_remote_api, function
+from tensorlake.applications.remote.deploy import deploy
 from testing import (
     ExecutorProcessContextManager,
     executor_pid,
@@ -18,8 +18,8 @@ class Response(pydantic.BaseModel):
     environment: Dict[str, str]
 
 
-@tensorlake.api()
-@tensorlake.function()
+@api()
+@function()
 def function_a(_a: int) -> Response:
     return Response(executor_pid=executor_pid(), environment=os.environ.copy())
 
@@ -45,7 +45,7 @@ def test_executor_env_variables_are_passed_to_functions(self):
         wait_executor_startup(7001)
 
         for _ in range(10):
-            request: tensorlake.Request = tensorlake.call_remote_api(
+            request: Request = call_remote_api(
                 function_a,
                 1,
             )

indexify/tests/cli/test_executor_exit.py

Lines changed: 6 additions & 6 deletions

@@ -3,17 +3,17 @@
 import unittest
 from typing import List
 
-import tensorlake.workflows.interface as tensorlake
-from tensorlake.workflows.remote.deploy import deploy
+from tensorlake.applications import Request, api, call_remote_api, function
+from tensorlake.applications.remote.deploy import deploy
 from testing import (
     ExecutorProcessContextManager,
     function_uri,
     wait_executor_startup,
 )
 
 
-@tensorlake.api()
-@tensorlake.function()
+@api()
+@function()
 def success_func(sleep_secs: float) -> str:
     time.sleep(sleep_secs)
     return "success"
@@ -39,10 +39,10 @@ def test_all_tasks_succeed_when_executor_exits(self):
         print(f"Started Executor A with PID: {executor_a.pid}")
         wait_executor_startup(7001)
 
-        requests: List[tensorlake.Request] = []
+        requests: List[Request] = []
         for i in range(10):
             print(f"Running request {i}")
-            request: tensorlake.Request = tensorlake.call_remote_api(
+            request: Request = call_remote_api(
                 success_func,
                 0.1,
             )

indexify/tests/cli/test_metrics.py

Lines changed: 5 additions & 5 deletions

@@ -5,13 +5,13 @@
 from typing import Dict
 
 import httpx
-import tensorlake.workflows.interface as tensorlake
 
 # We're using internal APIs here, this might break when we update prometheus_client.
 from prometheus_client.metrics_core import Metric
 from prometheus_client.parser import text_string_to_metric_families
 from prometheus_client.samples import Sample
-from tensorlake.workflows.remote.deploy import deploy
+from tensorlake.applications import Request, api, call_remote_api, function
+from tensorlake.applications.remote.deploy import deploy
 from testing import (
     ExecutorProcessContextManager,
     wait_executor_startup,
@@ -29,8 +29,8 @@ def fetch_metrics(
     return metrics
 
 
-@tensorlake.api()
-@tensorlake.function()
+@api()
+@function()
 def successful_function(arg: str) -> str:
     return "success"
 
@@ -68,7 +68,7 @@ def test_executor_info(self):
         self.assertIn("id", info_sample.labels)
 
     def test_expected_function_executor_infos(self):
-        request: tensorlake.Request = tensorlake.call_remote_api(
+        request: Request = call_remote_api(
             successful_function,
             "ignored",
         )
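
Across the test files, the migration follows one pattern: the old tensorlake.workflows.interface module import is replaced by named imports from tensorlake.applications, and functions are registered with the @api() / @function() decorator pair. A condensed sketch of that pattern; the deploy() call site is not shown in these hunks, so its argument here is an assumption modeled on the CLI usage above.

# Condensed sketch of the updated test pattern; deploy() arguments are assumed.
from tensorlake.applications import Request, api, call_remote_api, function
from tensorlake.applications.remote.deploy import deploy


@api()
@function()
def successful_function(arg: str) -> str:
    return "success"


def run_once() -> None:
    # Assumption: deploying the application defined in this file, mirroring the
    # keyword used by the CLI's tl_deploy() call.
    deploy(application_source_dir_or_file_path=__file__)
    request: Request = call_remote_api(successful_function, "ignored")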
