Skip to content

Commit 050146c

Browse files
authored
Fix container naming issue (#503)
* Add note on test M1 incompatibility to docs
* Add tests to reproduce KubeCluster start failure with custom container name
* Fix container name when getting logs
* Rename `KUBECLUSTER_WORKER_CONTAINER_NAME` to `KUBECLUSTER_CONTAINER_NAME`

Done as the new name should be more accurate.
1 parent 2e24857 commit 050146c

File tree

7 files changed

+43
-27
lines changed

7 files changed

+43
-27
lines changed

dask_kubernetes/classic/kubecluster.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,6 @@
1616
import kubernetes_asyncio as kubernetes
1717
from kubernetes_asyncio.client.rest import ApiException
1818

19-
from ..constants import KUBECLUSTER_WORKER_CONTAINER_NAME
2019
from ..common.objects import (
2120
make_pod_from_dict,
2221
make_service_from_dict,
@@ -112,7 +111,7 @@ async def logs(self):
112111
log = await self.core_api.read_namespaced_pod_log(
113112
self._pod.metadata.name,
114113
self.namespace,
115-
container=KUBECLUSTER_WORKER_CONTAINER_NAME,
114+
container=self.pod_template.spec.containers[0].name,
116115
)
117116
except ApiException as e:
118117
if "waiting to start" in str(e):

dask_kubernetes/classic/tests/test_async.py

Lines changed: 18 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
from dask.utils import tmpfile
2525
from distributed.utils_test import captured_logger
2626

27-
from dask_kubernetes.constants import KUBECLUSTER_WORKER_CONTAINER_NAME
27+
from dask_kubernetes.constants import KUBECLUSTER_CONTAINER_NAME
2828

2929
TEST_DIR = os.path.abspath(os.path.join(__file__, ".."))
3030
CONFIG_DEMO = os.path.join(TEST_DIR, "config-demo.yaml")
@@ -217,7 +217,7 @@ async def test_pod_from_yaml(k8s_cluster, docker_image):
217217
],
218218
"image": docker_image,
219219
"imagePullPolicy": "IfNotPresent",
220-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
220+
"name": KUBECLUSTER_CONTAINER_NAME,
221221
}
222222
]
223223
},
@@ -262,7 +262,7 @@ async def test_pod_expand_env_vars(k8s_cluster, docker_image):
262262
],
263263
"image": "${FOO_IMAGE}",
264264
"imagePullPolicy": "IfNotPresent",
265-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
265+
"name": KUBECLUSTER_CONTAINER_NAME,
266266
}
267267
]
268268
},
@@ -296,7 +296,7 @@ async def test_pod_template_dict(docker_image):
296296
"command": None,
297297
"image": docker_image,
298298
"imagePullPolicy": "IfNotPresent",
299-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
299+
"name": KUBECLUSTER_CONTAINER_NAME,
300300
}
301301
]
302302
},
@@ -336,7 +336,7 @@ async def test_pod_template_minimal_dict(k8s_cluster, docker_image):
336336
"command": None,
337337
"image": docker_image,
338338
"imagePullPolicy": "IfNotPresent",
339-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
339+
"name": KUBECLUSTER_CONTAINER_NAME,
340340
}
341341
]
342342
}
@@ -354,20 +354,28 @@ async def test_pod_template_minimal_dict(k8s_cluster, docker_image):
354354
async def test_pod_template_from_conf(docker_image):
355355
spec = {
356356
"spec": {
357-
"containers": [
358-
{"name": KUBECLUSTER_WORKER_CONTAINER_NAME, "image": docker_image}
359-
]
357+
"containers": [{"name": KUBECLUSTER_CONTAINER_NAME, "image": docker_image}]
360358
}
361359
}
362360

363361
with dask.config.set({"kubernetes.worker-template": spec}):
364362
async with KubeCluster(**cluster_kwargs) as cluster:
365363
assert (
366364
cluster.pod_template.spec.containers[0].name
367-
== KUBECLUSTER_WORKER_CONTAINER_NAME
365+
== KUBECLUSTER_CONTAINER_NAME
368366
)
369367

370368

369+
@pytest.mark.asyncio
370+
async def test_pod_template_with_custom_container_name(docker_image):
371+
container_name = "my-custom-container"
372+
spec = {"spec": {"containers": [{"name": container_name, "image": docker_image}]}}
373+
374+
with dask.config.set({"kubernetes.worker-template": spec}):
375+
async with KubeCluster(**cluster_kwargs) as cluster:
376+
assert cluster.pod_template.spec.containers[0].name == container_name
377+
378+
371379
@pytest.mark.asyncio
372380
async def test_constructor_parameters(k8s_cluster, pod_spec):
373381
env = {"FOO": "BAR", "A": 1}
@@ -567,7 +575,7 @@ async def test_automatic_startup(k8s_cluster, docker_image):
567575
"1",
568576
],
569577
"image": docker_image,
570-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
578+
"name": KUBECLUSTER_CONTAINER_NAME,
571579
}
572580
]
573581
},

dask_kubernetes/classic/tests/test_sync.py

Lines changed: 17 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99
from dask.utils import tmpfile
1010

1111
from dask_kubernetes import KubeCluster, make_pod_spec
12-
from dask_kubernetes.constants import KUBECLUSTER_WORKER_CONTAINER_NAME
12+
from dask_kubernetes.constants import KUBECLUSTER_CONTAINER_NAME
1313

1414
TEST_DIR = os.path.abspath(os.path.join(__file__, ".."))
1515
CONFIG_DEMO = os.path.join(TEST_DIR, "config-demo.yaml")
@@ -100,7 +100,7 @@ def dont_test_pod_template_yaml(docker_image, loop):
100100
],
101101
"image": docker_image,
102102
"imagePullPolicy": "IfNotPresent",
103-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
103+
"name": KUBECLUSTER_CONTAINER_NAME,
104104
}
105105
]
106106
},
@@ -146,7 +146,7 @@ def test_pod_template_yaml_expand_env_vars(docker_image, loop):
146146
],
147147
"image": "${FOO_IMAGE}",
148148
"imagePullPolicy": "IfNotPresent",
149-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
149+
"name": KUBECLUSTER_CONTAINER_NAME,
150150
}
151151
]
152152
},
@@ -179,7 +179,7 @@ def test_pod_template_dict(docker_image, loop):
179179
"command": None,
180180
"image": docker_image,
181181
"imagePullPolicy": "IfNotPresent",
182-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
182+
"name": KUBECLUSTER_CONTAINER_NAME,
183183
}
184184
]
185185
},
@@ -218,7 +218,7 @@ def test_pod_template_minimal_dict(docker_image, loop):
218218
"command": None,
219219
"image": docker_image,
220220
"imagePullPolicy": "IfNotPresent",
221-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
221+
"name": KUBECLUSTER_CONTAINER_NAME,
222222
}
223223
]
224224
}
@@ -235,20 +235,27 @@ def test_pod_template_minimal_dict(docker_image, loop):
235235
def test_pod_template_from_conf(docker_image):
236236
spec = {
237237
"spec": {
238-
"containers": [
239-
{"name": KUBECLUSTER_WORKER_CONTAINER_NAME, "image": docker_image}
240-
]
238+
"containers": [{"name": KUBECLUSTER_CONTAINER_NAME, "image": docker_image}]
241239
}
242240
}
243241

244242
with dask.config.set({"kubernetes.worker-template": spec}):
245243
with KubeCluster() as cluster:
246244
assert (
247245
cluster.pod_template.spec.containers[0].name
248-
== KUBECLUSTER_WORKER_CONTAINER_NAME
246+
== KUBECLUSTER_CONTAINER_NAME
249247
)
250248

251249

250+
def test_pod_template_with_custom_container_name(docker_image):
251+
container_name = "my-custom-container"
252+
spec = {"spec": {"containers": [{"name": container_name, "image": docker_image}]}}
253+
254+
with dask.config.set({"kubernetes.worker-template": spec}):
255+
with KubeCluster() as cluster:
256+
assert cluster.pod_template.spec.containers[0].name == container_name
257+
258+
252259
def test_bad_args():
253260
with pytest.raises(FileNotFoundError) as info:
254261
KubeCluster("myfile.yaml")
@@ -318,7 +325,7 @@ def test_automatic_startup(docker_image):
318325
"1",
319326
],
320327
"image": docker_image,
321-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
328+
"name": KUBECLUSTER_CONTAINER_NAME,
322329
}
323330
]
324331
},

dask_kubernetes/common/objects.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@
88

99
from kubernetes.client.configuration import Configuration
1010

11-
from dask_kubernetes.constants import KUBECLUSTER_WORKER_CONTAINER_NAME
11+
from dask_kubernetes.constants import KUBECLUSTER_CONTAINER_NAME
1212

1313
_FakeResponse = namedtuple("_FakeResponse", ["data"])
1414

@@ -193,7 +193,7 @@ def make_pod_spec(
193193
restart_policy="Never",
194194
containers=[
195195
client.V1Container(
196-
name=KUBECLUSTER_WORKER_CONTAINER_NAME,
196+
name=KUBECLUSTER_CONTAINER_NAME,
197197
image=image,
198198
args=args,
199199
env=[client.V1EnvVar(name=k, value=v) for k, v in env.items()],

dask_kubernetes/common/tests/test_objects.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
from dask_kubernetes.constants import KUBECLUSTER_WORKER_CONTAINER_NAME
1+
from dask_kubernetes.constants import KUBECLUSTER_CONTAINER_NAME
22
from dask_kubernetes.common.objects import make_pod_from_dict
33

44

@@ -16,7 +16,7 @@ def test_make_pod_from_dict():
1616
"1",
1717
],
1818
"image": "image-name",
19-
"name": KUBECLUSTER_WORKER_CONTAINER_NAME,
19+
"name": KUBECLUSTER_CONTAINER_NAME,
2020
"securityContext": {
2121
"capabilities": {"add": ["SYS_ADMIN"]},
2222
"privileged": True,

dask_kubernetes/constants.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
KUBECLUSTER_WORKER_CONTAINER_NAME = "dask-worker"
1+
KUBECLUSTER_CONTAINER_NAME = "dask-container"

doc/source/testing.rst

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
Testing
22
=======
33

4+
.. warning:: Tests are not working on aarch64 (Apple M1) at the moment due to an architecture incompatibility between ``pytest-kind-control-plane`` and the docker image built from ``ci/Dockerfile``, similar to `this GitHub issue <https://github.com/kubernetes-sigs/kind/issues/2402>`_.
5+
46
Running the test suite for ``dask-kubernetes`` doesn't require an existing Kubernetes cluster but does require
57
`Docker <https://docs.docker.com/get-docker/>`_, `kubectl <https://kubernetes.io/docs/tasks/tools/#kubectl>`_ and `helm <https://helm.sh/docs/intro/install/>`_.
68

0 commit comments

Comments (0)