# Hyperflex-API/hx_metrics.py
# HyperFlex cluster metrics exporter (Prometheus text format over HTTP).
#!/usr/bin/env python2
# ver 1.1 CL2019

# --- standard library ---
import json
import logging
import logging.handlers
import time

# --- third-party ---
import requests
from flask import Flask
from flask import Response
from gevent.wsgi import WSGIServer

# --- local: credential store (format documented below) ---
import hx_creds
2018-01-25 01:12:00 -08:00
'''
the hx_creds.py looks like this:
hosts=[{'host':'10.1.1.1', 'username':'local/root', 'password':'*******'},{'host':'10.1.1.2', 'username':'local/root', 'password':'****'}]
'''

# Address and port this exporter listens on.
server_IP = '10.100.253.13'
server_port = '8082'

# Logging config: one rotating log file per listener, 4 backups of 1 MiB each.
logFile = "hx_stats_%s_%s.log"%(server_IP, server_port)
logCount = 4
logBytes = 1048576

# suppress the unverified request messages (when using self-signed certificates)
requests.packages.urllib3.disable_warnings()

# cache the credentials, if you keep requesting this you will hit the 256 open session/user limit
tokens = {}

app = Flask('HX Stats')
# gets called via the http://your_server_ip:port/metrics
# One entry per metric family:
#   (result-name prefix, success-log label, parse-error label, graphite render URI)
# The three families were previously three copy-pasted sections; the loop below
# emits exactly the same metric lines and log messages.
METRIC_QUERIES = [
    ('MBps', 'MBps', 'throughput',
     '/render?target=stats.counters.scvmclient.allhosts.nfsBytesRead.cluster.rate&target=stats.counters.scvmclient.allhosts.nfsBytesWritten.cluster.rate&format=json&from=-5min'),
    ('IOPS', 'IOPS', 'IOPS',
     '/render?target=stats.counters.scvmclient.allhosts.nfsReads.cluster.rate&target=stats.counters.scvmclient.allhosts.nfsWrites.cluster.rate&format=json&from=-5min'),
    ('Lat', 'Latency', 'latency',
     '/render?target=divideSeries(stats.timers.scvmclient.allhosts.nfsReadLatency.cluster.total%2Cstats.counters.scvmclient.allhosts.nfsReads.cluster.count)&target=divideSeries(stats.timers.scvmclient.allhosts.nfsWriteLatency.cluster.total%2Cstats.counters.scvmclient.allhosts.nfsWrites.cluster.count)&format=json&from=-5min'),
]

# gets called via the http://your_server_ip:port/metrics
@app.route('/metrics')
def get_stats():
    """Collect read/write throughput, IOPS and latency for every configured
    HyperFlex host and return them as Prometheus-style plain-text metrics.

    NOTE(review): this route handler's module-level name is later shadowed by
    the get_stats(authdata, url) API helper defined below. Flask keeps its own
    reference to this function, so the route keeps working, and the
    recursive-looking get_stats(...) calls in the loop resolve at call time to
    that helper. Renaming one of the two would be safer but changes the
    module's public names, so it is only flagged here.
    """
    results = ''
    for host in hx_creds.hosts:
        logger.info("----------- Processing Host: %s -----------"%host['host'])
        # get auth token (either cached or new one)
        authdata = get_auth(host['host'], host['username'], host['password'])
        if not authdata:
            logger.info("Missing token, skipping host: "+host['host'])
            continue
        url = "https://"+host['host']
        for prefix, info_label, err_label, uri in METRIC_QUERIES:
            # resolves to the module-level API helper get_stats(authdata, url)
            data = get_stats(authdata, url+uri)
            if not data:
                continue
            try:
                # Take the one-before-last record ([-2]), as sometimes the
                # last record is None. A None value here raises TypeError in
                # round() and the whole family is skipped for this host.
                read_val = round(data[0]['datapoints'][-2][0], 3)
                write_val = round(data[1]['datapoints'][-2][0], 3)
                logger.info("Got %s info"%info_label)
                # build the results
                results += '%s_Read{host="%s"} %s\n'%(prefix, host['host'], str(read_val))
                results += '%s_Write{host="%s"} %s\n'%(prefix, host['host'], str(write_val))
            except Exception as e:
                logger.error(e)
                logger.error("Couldn't parse returned %s data"%err_label)
    # return the results to the caller
    logger.info("----------- Finished -----------")
    return Response(results, mimetype='text/plain')
2018-01-26 04:38:52 -08:00
#
# Returns Authentication token
#
2018-01-25 01:12:00 -08:00
def get_auth(host, username, password):
    """Return an auth-token dict for *host*, or None on failure.

    If a token for this host is already cached in the module-level ``tokens``
    dict, it is validated against /aaa/v1/validate and re-used when still
    good. Otherwise (or when validation fails) a fresh token is requested via
    /aaa/v1/auth and cached. Caching matters: the controller enforces a limit
    of 256 open sessions per user.
    """
    logger.info("Trying to auth "+host)
    global tokens
    headers = {'content-type':'application/json'}
    # NOTE: the original code built the credentials payload here as well,
    # but it was unconditionally rebuilt before every use — removed as dead code.
    if tokens.get(host):
        # looks like we have token cached already — check that it's valid
        payload = {
            "access_token": tokens.get(host)['access_token'],
            "scope": "READ",
            "token_type": tokens.get(host)['token_type']
        }
        try:
            # validating token
            url = 'https://%s/aaa/v1/validate'%host
            response = requests.post(url,headers=headers,data=json.dumps(payload),verify=False,timeout=4)
            if response.status_code == 200:
                logger.info("Re-using existing auth token")
                return tokens.get(host)
            logger.error("Failed to validate existing token "+response.content)
        except Exception as e:
            logger.error("Post for token validate failed \n"+str(e))

    # this happens if no cached token found, or existing token was invalid
    url = 'https://%s/aaa/v1/auth?grant_type=password'%host
    payload = {
        "username": username,
        "password": password,
        "client_id": "HxGuiClient",
        "client_secret": "Sunnyvale",
        "redirect_uri": "http://"+host
    }
    try:
        response = requests.post(url,headers=headers,data=json.dumps(payload),verify=False,timeout=4)
        if response.status_code == 201:
            # parse the body once instead of calling response.json() three times
            body = response.json()
            if body.get('access_token'):
                tokens[host] = body
                logger.info("Got token ok")
                return body
        logger.error("Failed get a token "+response.content)
        return None
    except Exception as e:
        logger.error("Post for token get failed \n"+str(e))
        return None
2018-01-26 04:38:52 -08:00
#
# calls HX API
#
2018-01-25 01:12:00 -08:00
def get_stats(authdata, url):
    """Run one HX API (graphite render) query and return the parsed JSON.

    authdata -- token dict from get_auth() ('token_type' + 'access_token')
    url      -- full https URL including the render query string
    Returns the decoded JSON body on HTTP 200, otherwise None. Any request
    or JSON-decode error is logged and swallowed.
    """
    logger.info("call for get_stats")
    try:
        hdrs = {
            'Authorization': '%s %s'%(authdata['token_type'], authdata['access_token']),
            'Connection': 'close',
        }
        resp = requests.get(url, headers=hdrs, verify=False, timeout=4)
        if resp.status_code != 200:
            logger.error("Failed to get data "+resp.content)
            return None
        logger.info("Got data ok")
        return resp.json()
    except Exception as err:
        logger.error("Post for data failed \n"+str(err))
        return None
if __name__ == '__main__':
2018-01-26 04:38:52 -08:00
print "Service Started"
# Enable logging
logger = logging.getLogger("HX-Stats")
logger.setLevel(logging.DEBUG)
handler = logging.handlers.RotatingFileHandler(logFile, maxBytes=logBytes, backupCount=logCount)
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info("-"*25)
logger.info("HX Stats script started")
2018-01-27 05:51:28 -08:00
2018-01-26 06:13:10 -08:00
http_server = WSGIServer((server_IP, int(server_port)), app, log = logger)
2018-01-25 01:12:00 -08:00
http_server.serve_forever()