ElasticSearch Snapshot

Python script that creates a date-stamped ElasticSearch snapshot through the snapshot API and triggers a PagerDuty incident when the request fails. It runs standalone or as an AWS Lambda handler.

#!/usr/bin/env python3

import json
import time
import urllib.error
import urllib.request

import requests

# #############################################################################

# User and password to access the ElasticSearch API
PROXY_USER='user'
PROXY_PASSWORD='password'

# Information used to build the URL used to access the ElasticSearch API and
# create the new snapshot
PROXY_PROTO='https'
PROXY_HOST='proxy-elasticsearch.com'
PROXY_PATH='_snapshot/logstash-archive'
PROXY_SNAPSHOT='logstash_snapshot_%s' % time.strftime('%Y_%m_%d')

# PagerDuty Incident Information
PAGERDUTY_URL='https://events.pagerduty.com/generic/2010-04-15/create_event.json'
PAGERDUTY_SERVICE_KEY='w_8PcNuhHa-y3xYdmc1x'
PAGERDUTY_EVENT_TYPE='trigger'
PAGERDUTY_INCIDENT_KEY='ElasticSearchCreateSnapshot'
PAGERDUTY_CLIENT='ElasticSearchSnapshot'

# #############################################################################

# The actual URL we will use in order to create the snapshot
PROXY_URL='%s://%s/%s/%s' % (
    PROXY_PROTO,
    PROXY_HOST,
    PROXY_PATH,
    PROXY_SNAPSHOT
)
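# Example result (date is illustrative):
# https://proxy-elasticsearch.com/_snapshot/logstash-archive/logstash_snapshot_2020_01_31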

# #############################################################################

def lambda_handler(event, context):
    # Entry point when deployed as an AWS Lambda function, e.g. triggered
    # by a scheduled CloudWatch Events rule (the event payload carries an 'id')
    print('INFO: Lambda handler activated')
    print('INFO: Event ID is %s' % event['id'])

    main()

# #############################################################################

def pagerduty(description='FAILURE Create ElasticSearch Snapshot'):
    print('INFO: Alerting on-call support using PagerDuty')

    headers = {
        'Content-Type': 'application/json'
    }

    params = json.dumps({
        'incident_key': PAGERDUTY_INCIDENT_KEY,
        'service_key':  PAGERDUTY_SERVICE_KEY,
        'event_type':   PAGERDUTY_EVENT_TYPE,
        'description':  description,
        'client':       PAGERDUTY_CLIENT
    }).encode('utf8')

    req = urllib.request.Request(PAGERDUTY_URL, params, headers)

    print('INFO: Creating alert')

    try:
        rsp = urllib.request.urlopen(req)
        data = json.loads(rsp.read())
    except urllib.error.HTTPError as e:
        # HTTPError is a subclass of URLError, so it must be caught first
        print('ERROR: Not able to create alert')
        if hasattr(e, 'reason'):
            print('Reason: %s' % e.reason)
    except urllib.error.URLError as e:
        print('ERROR: Not able to reach PagerDuty')
        if hasattr(e, 'reason'):
            print('Reason: %s' % e.reason)
    else:
        if data['status'] == 'success':
            print('INFO: Alert was sent correctly')
        else:
            print('ERROR: Non-success response from PagerDuty')

        print('INFO: PagerDuty Response= %s' % data)


# #############################################################################

def create_snapshot():
    headers = {
        'Content-Type': 'application/json',
    }

    print('INFO: Creating snapshot')
    print('INFO: URL=%s' % PROXY_URL)

    try:
        # An explicit timeout is required for the Timeout handler below to
        # ever fire; requests otherwise waits indefinitely
        rsp = requests.put(
            PROXY_URL,
            headers=headers,
            auth=(PROXY_USER, PROXY_PASSWORD),
            timeout=300
        )
    except requests.exceptions.ConnectionError:
        print('ERROR: Not able to connect to URL')
        pagerduty('ElasticSearch Snapshot: Connection Error')
    except requests.exceptions.Timeout:
        print('ERROR: ElasticSearch Timeout')
        pagerduty('ElasticSearch Snapshot: Timeout')
    except requests.exceptions.HTTPError:
        print('ERROR: HTTP Error')
        pagerduty('ElasticSearch Snapshot: HTTP Error')
    else:
        print('INFO: ElasticSearch Response Code= %s' % rsp.status_code)
        print('INFO: ElasticSearch Response Data= %s' % rsp.text)

        if rsp.status_code != 200:
            pagerduty('ElasticSearch Snapshot: Non-success response from ElasticSearch')

# #############################################################################

def main():
    print('INFO: Starting task')
    create_snapshot()
    print('INFO: Completed')

# #############################################################################

if __name__ == "__main__":
    main()
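
The snapshot PUT above assumes the logstash-archive repository is already registered on the cluster, and it returns before the snapshot actually finishes (the snapshot API runs asynchronously unless wait_for_completion=true is passed). The companion sketch below registers the repository and checks a snapshot's state; the S3 repository type, bucket name, and example date are illustrative assumptions, not part of the original script.

#!/usr/bin/env python3

import requests

PROXY_USER = 'user'
PROXY_PASSWORD = 'password'
BASE_URL = 'https://proxy-elasticsearch.com'

# One-time setup: register the snapshot repository the script writes to.
# The 's3' type and bucket name are assumptions for illustration; use the
# repository backend your cluster actually supports.
rsp = requests.put(
    '%s/_snapshot/logstash-archive' % BASE_URL,
    json={
        'type': 's3',
        'settings': {
            'bucket': 'logstash-archive-bucket'  # hypothetical bucket
        }
    },
    auth=(PROXY_USER, PROXY_PASSWORD),
    timeout=30
)
print('Repository registration: %s %s' % (rsp.status_code, rsp.text))

# Verify a snapshot created by the script: creation is asynchronous, so
# poll until 'state' reaches SUCCESS (or FAILED/PARTIAL). The date-stamped
# name below matches the script's strftime format; adjust it to the run day.
rsp = requests.get(
    '%s/_snapshot/logstash-archive/logstash_snapshot_2020_01_31' % BASE_URL,
    auth=(PROXY_USER, PROXY_PASSWORD),
    timeout=30
)
for snapshot in rsp.json().get('snapshots', []):
    print('%s: %s' % (snapshot['snapshot'], snapshot['state']))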
