 import logging
 import os
 import sys
+import tempfile

 import boto3
+import git
 import paramiko
 import pysftp
+import redis
+from redistimeseries.client import Client
 from git import Repo
 from jsonpath_ng import parse
 from python_terraform import Terraform
@@ -241,3 +245,134 @@ def upload_artifacts_to_s3(artifacts, s3_bucket_name, s3_bucket_path, acl="publi |
         response = object_acl.put(ACL=acl)
         progress.update()
     progress.close()
+
+
+def checkAndFixPemStr(EC2_PRIVATE_PEM):
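+    """Re-normalize an RSA PEM key that lost its newlines, e.g. after being
+    passed through an environment variable where line breaks become spaces."""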
+    pem_str = EC2_PRIVATE_PEM.replace("-----BEGIN RSA PRIVATE KEY-----", "")
+    pem_str = pem_str.replace("-----END RSA PRIVATE KEY-----", "")
+    pem_str = pem_str.replace(" ", "\n")
+    pem_str = "-----BEGIN RSA PRIVATE KEY-----\n" + pem_str
+    pem_str = pem_str + "-----END RSA PRIVATE KEY-----\n"
+    # remove any dangling whitespace
+    pem_str = os.linesep.join([s for s in pem_str.splitlines() if s])
+    return pem_str
+
+
+def get_run_full_filename(
+    start_time_str,
+    deployment_type,
+    github_org,
+    github_repo,
+    github_branch,
+    test_name,
+    github_sha,
+):
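+    """Build the JSON output filename for a benchmark run from its metadata."""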
+    benchmark_output_filename = "{start_time_str}-{github_org}-{github_repo}-{github_branch}-{test_name}-{deployment_type}-{github_sha}.json".format(
+        start_time_str=start_time_str,
+        github_org=github_org,
+        github_repo=github_repo,
+        github_branch=github_branch,
+        test_name=test_name,
+        deployment_type=deployment_type,
+        github_sha=github_sha,
+    )
+    return benchmark_output_filename
+
+
+def fetchRemoteSetupFromConfig(remote_setup_config):
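+    """Shallow-clone the git repo referenced by the remote setup config and
+    return the local directory containing the terraform definitions."""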
+    branch = "master"
+    repo = None
+    path = None
+    for remote_setup_property in remote_setup_config:
+        if "repo" in remote_setup_property:
+            repo = remote_setup_property["repo"]
+        if "branch" in remote_setup_property:
+            branch = remote_setup_property["branch"]
+        if "path" in remote_setup_property:
+            path = remote_setup_property["path"]
+    # fetch the terraform folder via a shallow clone into a temporary dir
+    temporary_dir = tempfile.mkdtemp()
+    logging.info(
+        "Fetching infrastructure definition from git repo {}/{} (branch={})".format(
+            repo, path, branch
+        )
+    )
+    git.Repo.clone_from(repo, temporary_dir, branch=branch, depth=1)
+    terraform_working_dir = temporary_dir
+    if path is not None:
+        # path is appended verbatim, so it is expected to start with a separator
+        terraform_working_dir += path
+    return terraform_working_dir
+
+
+def pushDataToRedisTimeSeries(rts: Client, branch_time_series_dict: dict):
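+    """Create each timeseries (if missing) and push its datapoints to
+    RedisTimeSeries, returning the (errors, inserts) datapoint counters."""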
+    datapoint_errors = 0
+    datapoint_inserts = 0
+    for timeseries_name, time_series in branch_time_series_dict.items():
+        try:
+            logging.info(
+                "Creating timeseries named {} with labels {}".format(
+                    timeseries_name, time_series["labels"]
+                )
+            )
+            rts.create(timeseries_name, labels=time_series["labels"])
+        except redis.exceptions.ResponseError:
+            logging.warning(
+                "Timeseries named {} already exists".format(timeseries_name)
+            )
+        for timestamp, value in time_series["data"].items():
+            try:
+                rts.add(
+                    timeseries_name,
+                    timestamp,
+                    value,
+                    duplicate_policy="last",
+                )
+                datapoint_inserts += 1
+            except redis.exceptions.ResponseError:
+                logging.warning(
+                    "Error while inserting datapoint ({} : {}) in timeseries named {}.".format(
+                        timestamp, value, timeseries_name
+                    )
+                )
+                datapoint_errors += 1
+    return datapoint_errors, datapoint_inserts
+
+
+def extractPerBranchTimeSeriesFromResults(
+    datapoints_timestamp: int,
+    metrics: list,
+    results_dict: dict,
+    tf_github_branch: str,
+    tf_github_org: str,
+    tf_github_repo: str,
+    deployment_type: str,
+    test_name: str,
+    tf_triggering_env: str,
+):
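+    """For each JSONPath metric expression, extract its value from the results
+    dict and build the per-branch timeseries name, labels, and datapoint."""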
+    branch_time_series_dict = {}
+    for jsonpath in metrics:
+        jsonpath_expr = parse(jsonpath)
+        # strip the leading "$." to get the metric name
+        metric_name = jsonpath[2:]
+        metric_value = float(jsonpath_expr.find(results_dict)[0].value)
+        # prepare the branch-level tags
+        branch_tags = {
+            "branch": str(tf_github_branch),
+            "github_org": tf_github_org,
+            "github_repo": tf_github_repo,
+            "deployment_type": deployment_type,
+            "test_name": test_name,
+            "triggering_env": tf_triggering_env,
+            "metric": metric_name,
+        }
+        ts_name = "ci.benchmarks.redislabs/{triggering_env}/{github_org}/{github_repo}/{test_name}/{deployment_type}/{branch}/{metric}".format(
+            branch=str(tf_github_branch),
+            github_org=tf_github_org,
+            github_repo=tf_github_repo,
+            deployment_type=deployment_type,
+            test_name=test_name,
+            triggering_env=tf_triggering_env,
+            metric=metric_name,
+        )
+        branch_time_series_dict[ts_name] = {
+            "labels": branch_tags.copy(),
+            "data": {datapoints_timestamp: metric_value},
+        }
+    return True, branch_time_series_dict
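
Taken together, the two timeseries helpers form a small pipeline: extract per-branch datapoints from a benchmark results dict, then push them to RedisTimeSeries. A minimal usage sketch, assuming a RedisTimeSeries instance on localhost:6379; the results dict, metric path, org/repo, and test name below are purely illustrative placeholders:

    rts = Client(host="localhost", port=6379)
    # placeholder results; normally loaded from the benchmark output JSON
    results_dict = {"Tests": {"Overall": {"rps": 12345.0}}}
    ok, branch_series = extractPerBranchTimeSeriesFromResults(
        datapoints_timestamp=1600000000,
        metrics=["$.Tests.Overall.rps"],
        results_dict=results_dict,
        tf_github_branch="master",
        tf_github_org="example-org",
        tf_github_repo="example-repo",
        deployment_type="oss-standalone",
        test_name="example-test",
        tf_triggering_env="ci",
    )
    errors, inserts = pushDataToRedisTimeSeries(rts, branch_series)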