
Commit 01da39d

CLOUDP-342858: Running search test under OM 8.2 (#467)
# Summary

This PR adds testing for running search with Ops Manager 8.0.14 and with mongod 8.2. Changes to search/search_enterprise_tls.py:

* Added the ability to run the test against either Ops Manager or cloud-qa.
* Added mongod migration steps from 8.0.14 to 8.2.0.
* Together these give automated verification that search keeps running through an upgrade from mdb 8.0 (with the polyfilled searchCoordinator role) to mdb 8.2 (where the role is built in); a sketch of this flow follows below.

## Proof of Work

EVG passing for search_enterprise_tls.py in both the cloud-qa and om80 variants: [evg link](https://spruce.mongodb.com/version/68da6bb997a26c000793eff0/tasks?page=0&sorts=STATUS%3AASC%3BBASE_STATUS%3ADESC&taskName=e2e_search_enterprise_tls)

Note that the test runs only in the om80 variant, not in earlier OM variants.

## Checklist

- [ ] Have you linked a jira ticket and/or is the ticket in the title?
- [ ] Have you checked whether your jira ticket required DOCSP changes?
- [ ] Have you added a changelog file?
  - use the `skip-changelog` label if not needed
  - refer to the [Changelog files and Release Notes](https://github.com/mongodb/mongodb-kubernetes/blob/master/CONTRIBUTING.md#changelog-files-and-release-notes) section in CONTRIBUTING.md for more details
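A minimal sketch of that upgrade verification, assuming the usual kubetester helpers (`MongoDB`, `Phase`, `assert_reaches_phase`); the test and fixture names here are illustrative, not the actual search_enterprise_tls.py code:

```python
from kubetester.mongodb import MongoDB
from kubetester.phase import Phase


def test_upgrade_mdb_80_to_82(mdb: MongoDB):
    # Start on 8.0.14, where the searchCoordinator role is polyfilled.
    mdb["spec"]["version"] = "8.0.14"
    mdb.update()
    mdb.assert_reaches_phase(Phase.Running, timeout=900)

    # Upgrade to 8.2.0, where searchCoordinator is a built-in role;
    # search must keep serving queries across the migration.
    mdb["spec"]["version"] = "8.2.0"
    mdb.update()
    mdb.assert_reaches_phase(Phase.Running, timeout=900)
```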
1 parent 6062a2b commit 01da39d

File tree

12 files changed: +299 −89 lines changed

.evergreen.yml

Lines changed: 10 additions & 0 deletions

@@ -1120,6 +1120,15 @@ task_groups:
       - e2e_om_ops_manager_prometheus
     <<: *teardown_group

+  # Tests features only supported on OM80
+  - name: e2e_ops_manager_kind_8_0_only_task_group
+    max_hosts: -1
+    <<: *setup_group
+    <<: *setup_and_teardown_task
+    tasks:
+      - e2e_search_enterprise_tls
+    <<: *teardown_group
+
   # Tests features only supported on OM70 and OM80, its only upgrade test as we test upgrading from 6 to 7 or 7 to 8
   - name: e2e_ops_manager_upgrade_only_task_group
     max_hosts: -1

@@ -1330,6 +1339,7 @@ buildvariants:
       - name: e2e_ops_manager_kind_5_0_only_task_group_without_queryable_backup
       - name: e2e_ops_manager_kind_6_0_only_task_group
       - name: e2e_ops_manager_upgrade_only_task_group
+      - name: e2e_ops_manager_kind_8_0_only_task_group

   - name: e2e_static_om80_kind_ubi
     display_name: e2e_static_om80_kind_ubi

docker/mongodb-kubernetes-tests/kubetester/__init__.py

Lines changed: 0 additions & 1 deletion

@@ -131,7 +131,6 @@ def create_or_update_configmap(
     data: Dict[str, str],
     api_client: Optional[kubernetes.client.ApiClient] = None,
 ) -> str:
-    print("Logging inside create_or_update configmap")
     try:
         create_configmap(namespace, name, data, api_client)
     except kubernetes.client.ApiException as e:

docker/mongodb-kubernetes-tests/kubetester/kubetester.py

Lines changed: 0 additions & 10 deletions

@@ -978,16 +978,6 @@ def get_automation_status(group_id=None, group_name=None):

         return response.json()

-    @staticmethod
-    def get_automation_status(group_id=None, group_name=None):
-        if group_id is None:
-            group_id = KubernetesTester.get_om_group_id(group_name=group_name)
-
-        url = build_automation_status_endpoint(KubernetesTester.get_om_base_url(), group_id)
-        response = KubernetesTester.om_request("get", url)
-
-        return response.json()
-
     @staticmethod
     def get_monitoring_config(group_id=None):
         if group_id is None:

docker/mongodb-kubernetes-tests/kubetester/mongodb.py

Lines changed: 38 additions & 1 deletion

@@ -235,7 +235,18 @@ def __repr__(self):

     def configure(
         self,
-        om: MongoDBOpsManager,
+        om: Optional[MongoDBOpsManager],
+        project_name: str,
+        api_client: Optional[client.ApiClient] = None,
+    ) -> MongoDB:
+        if om is not None:
+            return self.configure_ops_manager(om, project_name, api_client=api_client)
+        else:
+            return self.configure_cloud_qa(project_name, api_client=api_client)
+
+    def configure_ops_manager(
+        self,
+        om: Optional[MongoDBOpsManager],
         project_name: str,
         api_client: Optional[client.ApiClient] = None,
     ) -> MongoDB:

@@ -252,6 +263,29 @@ def configure(
         self["spec"]["credentials"] = om.api_key_secret(self.namespace, api_client=api_client)
         return self

+    def configure_cloud_qa(
+        self,
+        project_name,
+        api_client: Optional[client.ApiClient] = None,
+    ) -> MongoDB:
+        if "opsManager" in self["spec"]:
+            del self["spec"]["opsManager"]
+
+        src_project_config_map_name = "my-project"
+        if "cloudManager" in self["spec"]:
+            src_project_config_map_name = self["spec"]["cloudManager"]["configMapRef"]["name"]
+
+        src_cm = read_configmap(self.namespace, src_project_config_map_name, api_client=api_client)
+
+        new_project_config_map_name = f"{self.name}-project-config"
+        ensure_nested_objects(self, ["spec", "cloudManager", "configMapRef"])
+        self["spec"]["cloudManager"]["configMapRef"]["name"] = new_project_config_map_name
+
+        src_cm.update({"projectName": f"{self.namespace}-{project_name}"})
+        create_or_update_configmap(self.namespace, new_project_config_map_name, src_cm, api_client=api_client)
+
+        return self
+
     def configure_backup(self, mode: str = "enabled") -> MongoDB:
         ensure_nested_objects(self, ["spec", "backup"])
         self["spec"]["backup"]["mode"] = mode

@@ -454,6 +488,9 @@ def get_external_domain(self):
     def config_map_name(self) -> str:
         if "opsManager" in self["spec"]:
             return self["spec"]["opsManager"]["configMapRef"]["name"]
+        elif "cloudManager" in self["spec"]:
+            return self["spec"]["cloudManager"]["configMapRef"]["name"]
+
         return self["spec"]["project"]

     def shard_replicaset_names(self) -> List[str]:
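With `om` now optional, the same test body can target an Ops Manager deployment or a cloud-qa project. A minimal sketch of a call site, assuming the `get_ops_manager` helper added later in this commit; the project name is illustrative:

```python
from kubetester.kubetester import fixture as yaml_fixture
from kubetester.mongodb import MongoDB

# None when the run targets cloud-qa (see get_ops_manager below).
ops_manager = get_ops_manager(namespace)

mdb = MongoDB.from_yaml(
    yaml_fixture("enterprise-replicaset-sample-mflix.yaml"), namespace=namespace
)
# Dispatches to configure_ops_manager() or configure_cloud_qa() based on om.
mdb.configure(ops_manager, project_name="search-migration")
mdb.update()
```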

docker/mongodb-kubernetes-tests/kubetester/omtester.py

Lines changed: 47 additions & 10 deletions

@@ -15,21 +15,22 @@
 import requests
 import semver
 from kubetester.automation_config_tester import AutomationConfigTester
-from kubetester.kubetester import build_agent_auth, build_auth, run_periodically
+from kubetester.kubetester import (
+    KubernetesTester,
+    build_agent_auth,
+    build_auth,
+    run_periodically,
+)
 from kubetester.mongotester import BackgroundHealthChecker
 from kubetester.om_queryable_backups import OMQueryableBackup
 from opentelemetry import trace
 from requests.adapters import HTTPAdapter, Retry
+from tests import test_logger
+from tests.common.ops_manager.cloud_manager import is_cloud_qa

-from .kubetester import get_env_var_or_fail
+skip_if_cloud_manager = pytest.mark.skipif(is_cloud_qa(), reason="Do not run in Cloud Manager")

-
-def running_cloud_manager():
-    "Determines if the current test is running against Cloud Manager"
-    return get_env_var_or_fail("OM_HOST") == "https://cloud-qa.mongodb.com"
-
-
-skip_if_cloud_manager = pytest.mark.skipif(running_cloud_manager(), reason="Do not run in Cloud Manager")
+logger = test_logger.get_test_logger(__name__)


 class BackupStatus(str, Enum):

@@ -421,7 +422,7 @@ def om_request():
                     span.set_attribute(key=f"mck.om.request.retries", value=retries - retry_count)
                     return resp
                 except Exception as e:
-                    print(f"Encountered exception: {e} on retry number {retries-retry_count}")
+                    print(f"Encountered exception: {e} on retry number {retries - retry_count}")
                     span.set_attribute(key=f"mck.om.request.exception", value=str(e))
                     last_exception = e
                     time.sleep(1)

@@ -685,6 +686,42 @@ def api_update_version_manifest(self, major_version: str = "8.0"):
         body = requests.get(url=f"https://opsmanager.mongodb.com/static/version_manifest/{major_version}.json").json()
         self.om_request("put", "/versionManifest", json_object=body)

+    def api_get_automation_status(self) -> dict[str, str]:
+        return self.om_request("get", f"/groups/{self.context.project_id}/automationStatus").json()
+
+    def wait_agents_ready(self, timeout: Optional[int] = 600):
+        """Waits until all the agents reached the goal automation config version."""
+        log_prefix = f"[{self.context.group_name}/{self.context.project_id}] "
+
+        def agents_are_ready():
+            auto_status = self.api_get_automation_status()
+            goal_version = auto_status.get("goalVersion")
+
+            logger.info(f"{log_prefix}Checking if all agent processes have reached goal version: {goal_version}")
+            processes_not_ready = []
+            for process in auto_status.get("processes", []):
+                process_name = process.get("name", "unknown")
+                process_version = process.get("lastGoalVersionAchieved")
+                if process_version != goal_version:
+                    logger.info(
+                        f"{log_prefix}Process {process_name} at version {process_version}, expected {goal_version}"
+                    )
+                    processes_not_ready.append(process_name)
+
+            all_processes_ready = len(processes_not_ready) == 0
+            if all_processes_ready:
+                logger.info(f"{log_prefix}All agent processes have reached the goal version")
+            else:
+                logger.info(f"{log_prefix}{len(processes_not_ready)} processes have not yet reached the goal version")
+
+            return all_processes_ready
+
+        KubernetesTester.wait_until(
+            agents_are_ready,
+            timeout=timeout,
+            sleep_time=3,
+        )
+

 class OMBackgroundTester(BackgroundHealthChecker):
     """
docker/mongodb-kubernetes-tests/tests/common/ops_manager/cloud_manager.py (new file; path inferred from the import in omtester.py above)

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+import os
+
+
+def is_cloud_qa() -> bool:
+    return os.getenv("ops_manager_version", "cloud_qa") == "cloud_qa"
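`is_cloud_qa()` keys off the `ops_manager_version` environment variable and treats the unset case as cloud-qa. A small sketch of the toggle; the `om80` value is only illustrative of what an OM-backed EVG variant might set:

```python
import os

from tests.common.ops_manager.cloud_manager import is_cloud_qa

os.environ["ops_manager_version"] = "om80"  # illustrative OM-backed variant value
assert not is_cloud_qa()

os.environ.pop("ops_manager_version", None)  # unset -> defaults to cloud-qa
assert is_cloud_qa()
```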

docker/mongodb-kubernetes-tests/tests/opsmanager/fixtures/om_ops_manager_basic.yaml

Lines changed: 13 additions & 0 deletions

@@ -15,3 +15,16 @@ spec:

   backup:
     enabled: false
+
+  # adding this just to avoid wizard when opening OM UI
+  configuration:
+    automation.versions.source: mongodb
+    mms.adminEmailAddr: cloud-manager-support@mongodb.com
+    mms.fromEmailAddr: cloud-manager-support@mongodb.com
+    mms.ignoreInitialUiSetup: "true"
+    mms.mail.hostname: email-smtp.us-east-1.amazonaws.com
+    mms.mail.port: "465"
+    mms.mail.ssl: "true"
+    mms.mail.transport: smtp
+    mms.minimumTLSVersion: TLSv1.2
+    mms.replyToEmailAddr: cloud-manager-support@mongodb.com

docker/mongodb-kubernetes-tests/tests/search/fixtures/enterprise-replicaset-sample-mflix.yaml

Lines changed: 12 additions & 13 deletions

@@ -19,16 +19,15 @@ spec:
   - SCRAM
   agent:
     logLevel: DEBUG
-  statefulSet:
-    spec:
-      template:
-        spec:
-          containers:
-            - name: mongodb-enterprise-database
-              resources:
-                limits:
-                  cpu: "2"
-                  memory: 2Gi
-                requests:
-                  cpu: "1"
-                  memory: 1Gi
+  podSpec:
+    podTemplate:
+      spec:
+        containers:
+          - name: mongodb-enterprise-database
+            resources:
+              limits:
+                cpu: "2"
+                memory: 2Gi
+              requests:
+                cpu: "1"
+                memory: 1Gi
Lines changed: 30 additions & 0 deletions

@@ -0,0 +1,30 @@
+from typing import Optional
+
+from kubetester import try_load
+from kubetester.kubetester import fixture as yaml_fixture
+from kubetester.kubetester import is_multi_cluster
+from kubetester.opsmanager import MongoDBOpsManager
+from pytest import fixture
+from tests.common.ops_manager.cloud_manager import is_cloud_qa
+from tests.conftest import get_custom_appdb_version, get_custom_om_version
+from tests.opsmanager.withMonitoredAppDB.conftest import enable_multi_cluster_deployment
+
+
+def get_ops_manager(namespace: str) -> Optional[MongoDBOpsManager]:
+    if is_cloud_qa():
+        return None
+
+    resource: MongoDBOpsManager = MongoDBOpsManager.from_yaml(
+        yaml_fixture("om_ops_manager_basic.yaml"), namespace=namespace
+    )
+
+    if try_load(resource):
+        return resource
+
+    resource.set_version(get_custom_om_version())
+    resource.set_appdb_version(get_custom_appdb_version())
+
+    if is_multi_cluster():
+        enable_multi_cluster_deployment(resource)
+
+    return resource
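A sketch of how a test module might wrap this helper in a pytest fixture, assuming `om_status()` and `Phase` behave as elsewhere in this test suite; the fixture name is illustrative rather than taken from search_enterprise_tls.py:

```python
from typing import Optional

from kubetester.opsmanager import MongoDBOpsManager
from kubetester.phase import Phase
from pytest import fixture


@fixture(scope="module")
def ops_manager(namespace: str) -> Optional[MongoDBOpsManager]:
    om = get_ops_manager(namespace)
    if om is None:
        return None  # cloud-qa run: no Ops Manager deployment needed

    om.update()
    om.om_status().assert_reaches_phase(Phase.Running)
    return om
```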
