forked from LogentriesCommunity/Logentries-Log-Usage
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path query_all_logs.py
130 lines (111 loc) · 3.97 KB
/
query_all_logs.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
import urllib
import json
import sys
import requests
import time
import csv
# --- CLI contract: argv[1]=account key, argv[2]=API key, argv[3]=output csv path,
# --- argv[4]=start date ('dd.mm.yyyy'). Query window runs from that date to "now".
API_KEY = sys.argv[2]
now_millis = int(round(time.time() * 1000))
TO_TS = now_millis
date_time = sys.argv[4]  # expected format: 'dd.mm.yyyy'
time_patt = '%d.%m.%Y'
# Midnight (local time) of the given date, converted to epoch milliseconds.
epoch = int(time.mktime(time.strptime(date_time, time_patt)))
FROM_TS = epoch * 1000
SEARCH_QUERY = "where(/.*/) calculate(bytes)"
ACCOUNT_KEY = ''
HOST_NAMES_KEYS_DICT = {}
# 'wb' (was 'w'): the Python 2 csv module requires a binary-mode file,
# otherwise extra blank rows are written on Windows.
OUTFILE = open(sys.argv[3], 'wb')
OUTFILE_WRITER = csv.writer(OUTFILE)
OUTFILE_WRITER.writerow(['Log Set', 'Log Name', 'Query Result'])
def get_host_name():
    """Fetch every log set ("host") for the account and query each one.

    Populates the module-level HOST_NAMES_KEYS_DICT with key -> name,
    then runs a per-log query for each host except 'Inactivity Alerts'.
    """
    listing = json.load(urllib.urlopen(
        "https://api.logentries.com/" + ACCOUNT_KEY + '/hosts/'))
    for host in listing['list']:
        HOST_NAMES_KEYS_DICT[host['key']] = host['name']
    for host_key, host_name in HOST_NAMES_KEYS_DICT.items():
        if host_name == r'Inactivity Alerts':
            continue
        get_log_name_and_key(host_key, host_name)
def get_le_url(url):
    """GET *url* from the Logentries REST API, authenticating via the
    'x-api-key' header (module-level API_KEY)."""
    return requests.get(url, headers={'x-api-key': API_KEY})
def get_continuity_final_response(response):
    """Follow Logentries continuation links until the query finishes.

    Starting from *response* (which must carry a 'links' entry), keep
    fetching the continuation href. A reply without 'links' is the final
    result and is returned; any non-200 reply yields None. Sleeps one
    second between polls.
    """
    current = response
    while True:
        current = get_le_url(current.json()['links'][0]['href'])
        if current.status_code != 200:
            return None
        # Parse the body once per iteration instead of twice.
        body = current.json()
        if 'links' not in body:
            return current
        time.sleep(1)
def post_query_to_le(hostkey):
    """Submit the module-level SEARCH_QUERY for one log key over the
    [FROM_TS, TO_TS] window; returns the raw requests response."""
    body = {
        "logs": [hostkey],
        "leql": {
            "during": {"from": FROM_TS, "to": TO_TS},
            "statement": SEARCH_QUERY,
        },
    }
    return requests.post("https://rest.logentries.com/query/logs/",
                         headers={'x-api-key': API_KEY},
                         json=body)
def handle_response(resp, log_key):
time.sleep(0.5)
if resp.status_code == 200:
return resp
elif resp.status_code == 202:
print "Polling after 202"
return get_continuity_final_response(resp)
elif resp.status_code == 503:
print "Retrying after 503 code"
retried_response = post_query_to_le(log_key)
return handle_response(retried_response, log_key)
elif resp.status_code > 202:
print 'Error status code ' + str(resp.status_code)
return
def humanize_bytes(bytesize, precision=2):
    """Render a byte count as a human-readable string.

    Picks the largest binary unit (PB down to bytes) not exceeding
    *bytesize* and formats with *precision* decimal places; plain byte
    counts are shown with no decimals, and 1 is special-cased as
    '1 byte'.
    """
    if bytesize == 1:
        return '1 byte'
    units = (
        (1 << 50, 'PB'),
        (1 << 40, 'TB'),
        (1 << 30, 'GB'),
        (1 << 20, 'MB'),
        (1 << 10, 'kB'),
        (1, 'bytes'),
    )
    # First unit whose factor fits; values below 1 fall back to 'bytes'.
    factor, suffix = next(
        ((f, s) for f, s in units if bytesize >= f), units[-1])
    if factor == 1:
        precision = 0
    return '%.*f %s' % (precision, bytesize / float(factor), suffix)
def get_log_name_and_key(host_key, host_name):
req = urllib.urlopen("http://api.logentries.com/" + ACCOUNT_KEY + '/hosts/' + host_key + '/')
response = json.load(req)
for everylogkey in response['list']:
if not everylogkey['key']:
continue
print everylogkey['name']
results1 = post_query_to_le(str(everylogkey['key']))
results = handle_response(results1, str(everylogkey['key']))
if not results:
break
# if query is calculate(count) then: results.json()['statistics']['stats']['global_timeseries']['count']
# if query is calculate(bytes) then: results.json()['statistics']['stats']['global_timeseries']['bytes']
try:
if len(results.json()['statistics']['stats']['global_timeseries']) > 0:
query_result = results.json()['statistics']['stats']['global_timeseries']['bytes']
query_result = humanize_bytes(query_result)
else:
query_result = humanize_bytes(0)
print query_result
OUTFILE_WRITER.writerow((host_name, everylogkey['name'], query_result))
except KeyError as exception:
print "Empty"
if __name__ == '__main__':
    # argv[1] is the account key used to build the read-only REST URLs.
    ACCOUNT_KEY = sys.argv[1]
    # Walks every log set / log and writes one CSV row per log.
    get_host_name()
    OUTFILE.close()
    print "csv result file generated. all done."