import os
import boto3
import curator
from requests_aws4auth import AWS4Auth
from elasticsearch import Elasticsearch, RequestsHttpConnection

# Configuration comes from environment variables, with defaults for local use.
ES_HOST = os.environ.get('ES_HOST', 'localhost')
ES_REGION = os.environ.get('ES_REGION', 'us-west-1')
ES_INDICES_PREFIX = os.environ.get('ES_INDICES_PREFIX', 'logs')
ES_INDICES_DATA_RETENTION_THRESHOLD_IN_DAYS = os.environ.get('ES_INDICES_DATA_RETENTION_THRESHOLD_IN_DAYS', '90')
ES_INDICES_DATE_FORMAT = os.environ.get('ES_INDICES_DATE_FORMAT', '%Y.%m.%d')
AWS_SERVICE = "es"
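# "es" is the SigV4 service name used when signing requests to an
# Amazon Elasticsearch Service domain.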


def lambda_handler(event, context):
    """
    Delete Elasticsearch indices that match ES_INDICES_PREFIX and are older
    than ES_INDICES_DATA_RETENTION_THRESHOLD_IN_DAYS days.
    """

    # Sign each request with the Lambda execution role's credentials (SigV4).
    credentials = boto3.Session().get_credentials()
    awsauth = AWS4Auth(credentials.access_key, credentials.secret_key,
                       ES_REGION, AWS_SERVICE, session_token=credentials.token)

    elasticsearch_client_obj = Elasticsearch(hosts=[{'host': ES_HOST, 'port': 443}],
                                             http_auth=awsauth,
                                             use_ssl=True,
                                             verify_certs=True,
                                             connection_class=RequestsHttpConnection)
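    # The client talks HTTPS on port 443, which AWS-managed domains expose;
    # RequestsHttpConnection is what lets AWS4Auth sign each request.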

    index_list = curator.IndexList(elasticsearch_client_obj)

    # Keep only the indices whose names start with ES_INDICES_PREFIX
    # (kind='prefix' anchors the regex at the start of the name).
    index_list.filter_by_regex(kind='prefix', value=ES_INDICES_PREFIX)

    if index_list.indices:
        # Keep only the indices older than the retention threshold, judging
        # age by the date embedded in each index name.
        index_list.filter_by_age(source='name',
                                 direction='older',
                                 timestring=ES_INDICES_DATE_FORMAT,
                                 unit='days',
                                 unit_count=int(ES_INDICES_DATA_RETENTION_THRESHOLD_IN_DAYS))
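        # With source='name', Curator parses each index's date out of its name
        # using ES_INDICES_DATE_FORMAT, so names must carry a matching
        # timestamp (e.g. logs-2023.01.31 for the default '%Y.%m.%d').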

        # Only delete if any indices survived both filters; DeleteIndices
        # raises NoIndices when handed an empty list.
        if index_list.indices:
            curator.DeleteIndices(index_list).do_action()
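

# A minimal local smoke test -- an assumed convenience, not part of the
# Lambda runtime path. It presumes AWS credentials and the ES_* variables
# are already set, and invokes the handler with an empty event, much as a
# scheduled trigger would.
if __name__ == '__main__':
    lambda_handler({}, None)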