1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129
|
import requests import time import json import copy import os import sys
import random
'''Pull historical metric data from open-falcon (Xiaomi monitoring) and write it to Historydatafile.'''
'''Example output record: [{"endpoint":"hostname1","counter":"cpu_util","dstype":"GAUGE","step":60,"Values":[{"timestamp":1492417260,"value":103.670000},{"timestamp":1492417320,"value":106.060000}]}]'''
# Name of the monitored service; used as a prefix of the output file name.
upyun = 'smaug'

# JSON config file listing the endpoints and counters to query.
endcounter_file = './point-counter.json'

# Template payload for the open-falcon /graph/history query API;
# deep-copied and filled in for each (endpoint, time window) query.
example = {
    "start": "",
    "end": "",
    "cf": "AVERAGE",
    "endpoint_counters": [],
}

# Accumulates one finished query payload per endpoint/time-window pair.
queryload = []

# Loaded configuration; populated from endcounter_file at startup.
CONFIG = {}
def config_load(filename): if os.path.isfile(filename): with open(filename,'r') as f: return json.load(f) else: print("%s not exist" % filename) sys.exit(1)
def get_endpointlist_And_counters(conf_dict): return conf_dict.get("endpoints"), conf_dict.get("counters")
def append(d,endpoint, counter): endpoint_counter = { "endpoint": endpoint, "counter": counter, } d['endpoint_counters'].append(endpoint_counter) def mkd(start,end,ep,counterlist): d = copy.deepcopy(example) d['start']=start d['end'] = end for cv in counterlist[:]: append(d,ep, cv) queryload.append(d)
def start_end(endtime,hour): end = int(time.mktime(time.strptime(endtime,'%Y-%m-%d %H:%M:%S'))) ts = 3600 * hour start = end - ts return start,end
def time_is_dimension(CONF, hours, com):
    """Queue query payloads for the 'default-point' endpoints over two windows
    of *hours* hours, one ending at com['time']['before'] and one ending at
    com['time']['after'] (queued in that order).

    Exits with status 1 when the endpoint group or the counter list is missing.
    """
    endpoint_groups, counters = get_endpointlist_And_counters(CONF)
    defaults = endpoint_groups.get("default-point")
    if not defaults or not counters:
        sys.exit(1)
    compare_times = com.get("time")
    # 'before' window first, then 'after' — order matters for queryload.
    for label in ("before", "after"):
        start, end = start_end(compare_times.get(label), hours)
        for endpoint in list(defaults):
            mkd(start, end, endpoint, counters)
def filesize_is_dimension(CONF, hours, end_time="2017-05-25 08:01:30"):
    """Queue query payloads for both the 'default-point' and 'another-point'
    endpoint groups over one window of *hours* hours ending at *end_time*.

    Args:
        CONF: config dict with "endpoints" (group name -> endpoint list) and
            "counters" keys.
        hours: look-back window length in hours.
        end_time: window end as '%Y-%m-%d %H:%M:%S' local time. Defaults to
            the previously hard-coded sample date so existing callers are
            unaffected; new callers can pass any end time.

    Exits with status 1 when either endpoint group is missing.
    """
    endpoint_groups, counters = get_endpointlist_And_counters(CONF)
    default_points = endpoint_groups.get("default-point")
    another_points = endpoint_groups.get("another-point")
    if not default_points or not another_points:
        sys.exit(1)
    # Generalized: the end of the window used to be a hard-coded constant.
    end = int(time.mktime(time.strptime(end_time, '%Y-%m-%d %H:%M:%S')))
    start = end - 3600 * hours
    for endpoint in list(default_points):
        mkd(start, end, endpoint, counters)
    for endpoint in list(another_points):
        mkd(start, end, endpoint, counters)
# --- script entry: build query payloads, pull history data, write it out ---

CONFIG = config_load(endcounter_file)
dimension = CONFIG.get("dimension")
hour = CONFIG.get("hour")
compare = CONFIG.get("compare")

# Populate the global queryload according to the configured dimension.
if dimension == "time":
    time_is_dimension(CONFIG, hour, compare)
elif dimension == "filesize":
    filesize_is_dimension(CONFIG, hour)

# Derive the output name from the config file name (strip leading './' and
# trailing '.json').
endname = endcounter_file[2:-5]
Historydatafile = "./{0}_hist{1}h_{2}_{3}.data".format(upyun, hour, endname, dimension)

query_api = "http://queryip:9966/graph/history"

# Start from a clean output file so repeated runs do not accumulate old data.
if os.path.isfile(Historydatafile):
    os.remove(Historydatafile)

for vd in queryload[:]:
    # NOTE(review): no retry/error handling — a failed request or non-JSON
    # response raises and aborts the pull; presumably acceptable for this
    # ad-hoc script. A timeout is set so a dead endpoint cannot hang forever.
    r = requests.post(query_api, data=json.dumps(vd), timeout=30)
    STORE = r.json()
    # BUG FIX: the original opened the file with mode 'aw', which is invalid
    # (raises ValueError on Python 3); plain append mode 'a' is correct.
    with open(Historydatafile, 'a') as fout:
        json.dump(STORE, fout)
        fout.write('\n')

print("pull %s hours historydata to %s" % (hour, Historydatafile))
|