-
Notifications
You must be signed in to change notification settings - Fork 0
/
logcollector.py
120 lines (102 loc) · 3.47 KB
/
logcollector.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
import sys
import time
import requests
import re
import configparser

# Silence urllib3 InsecureRequestWarning — every request below uses verify=False.
requests.packages.urllib3.disable_warnings()

#! Future features
# Multiple config.ini files
# CLI menu instead of passing arguments
def read_ini(file_path):
    """Load credentials and the server list from an INI file.

    Returns a dict with:
      "creds"       -- (username, password) tuple from the [GENERAL] section
      "exp_servers" -- the raw [SERVERS] section proxy (key -> "host : role")
    """
    parser = configparser.ConfigParser()
    parser.read(file_path)
    return {
        "creds": (parser["GENERAL"]["username"], parser["GENERAL"]["password"]),
        "exp_servers": parser["SERVERS"],
    }
def log_put(url, exp_creds, headers, req_body):
    """PUT req_body to the diagnostic-logging endpoint of one server.

    Prints the HTTP status line and the JSON response body. TLS verification
    is disabled (self-signed certs are common on these appliances).
    """
    endpoint = url + "/api/provisioning/common/diagnosticlogging"
    resp = requests.put(url=endpoint, auth=exp_creds, json=req_body,
                        headers=headers, verify=False)
    print(f" Response Status: {resp.status_code} {resp.reason}")
    print(f" Response: {resp.json()}")
def get_filename_from_cd(cd):
    """Return the filename portion of a Content-Disposition header value.

    Everything after the first "filename=" is returned as-is (quotes
    included); None when cd is empty/None or carries no filename.
    """
    if not cd:
        return None
    match = re.search(r'filename=(.+)', cd)
    return match.group(1) if match else None
def log_download(url, exp_creds, headers, req_body):
    """Poll one server until its log bundle is ready, then download and save it.

    Loops GET on the diagnostic-logging endpoint every 2s until the response
    reports "Ready to download", then issues the PUT in req_body to fetch the
    archive and writes it to a file named from the Content-Disposition header.
    NOTE: polls forever if the server never becomes ready.
    """
    url = url + "/api/provisioning/common/diagnosticlogging"
    while True:
        response = requests.get(url=url, auth=exp_creds, headers=headers, verify=False)
        print(f" Response Status: {response.status_code} {response.reason}")
        print(f" Response: {response.json()}")
        rj = response.json()
        if rj["DownLoadStatus"] == "Ready to download":
            break
        time.sleep(2)
    response = requests.put(url=url, auth=exp_creds, json=req_body, headers=headers, verify=False)
    print(f" Response Status: {response.status_code} {response.reason}")
    filename = get_filename_from_cd(response.headers.get('content-disposition'))
    if filename is None:
        # BUG FIX: original crashed on .strip(None) when the header was absent;
        # fall back to a fixed name instead.
        filename = "diagnostic_log"
    filename = filename.strip('"')       # Remove quotes from ends of string
    filename = filename.replace(':', '_')  # Replace : for _ due to Windows filename limitation
    # BUG FIX: use a context manager so the file handle is closed.
    with open(filename, 'wb') as f:
        f.write(response.content)
    # BUG FIX: original printed the literal "(unknown)" instead of the filename.
    print(f"Log saved: {filename}")
# --- Script body (runs at import/execution time) ---
# Load credentials and server definitions from config.ini in the working dir.
cfg = read_ini("config.ini")
exp_creds = cfg['creds']
headers = {"content-type": "application/json"}

# Parse each [SERVERS] entry ("host : role") into a (host, role) tuple.
# NOTE(review): assumes every entry contains a ':' — an entry without one
# would raise IndexError here; confirm the config.ini format.
exp_srv_list = []
for k in cfg['exp_servers']:
    x = cfg['exp_servers'][k].split(':')
    exp_srv_list.append((x[0].strip(), x[1].strip()))

# Prefix https to servers
# print(exp_srv_list)
for i,v in enumerate(exp_srv_list):
    exp_srv_list[i] = ("https://" + v[0], v[1])
# print(exp_srv_list)

# Validate the single CLI argument: must be exactly start, stop, or download.
try:
    if not re.search("^(start|stop|download)$", sys.argv[1]):
        print(f"{sys.argv[1]} is not a valid argument!")
        sys.exit(1)
except IndexError:
    # sys.argv[1] missing entirely — no action was given.
    print("Missing arguments!")
    sys.exit(1)

if sys.argv[1] == "start":
    # Start logging on master servers
    # Only "master" role servers receive the start command; TCPDump is enabled too.
    for srv in exp_srv_list:
        if srv[1] == "master":
            print(f"Starting logging on {srv[0]}")
            req_body = {
                "Mode": "start",
                "TCPDump": "on"
            }
            log_put(url=srv[0], exp_creds=exp_creds, headers=headers, req_body=req_body)
elif sys.argv[1] == "stop":
    # Stop logging on master servers
    for srv in exp_srv_list:
        if srv[1] == "master":
            print(f"Stopping logging on {srv[0]}")
            req_body = {
                "Mode": "stop"
            }
            log_put(url=srv[0], exp_creds=exp_creds, headers=headers, req_body=req_body)
elif sys.argv[1] == "download":
    # Initiate log collection on all servers, then download
    # First pass: ask every server (all roles) to assemble its log bundle.
    for srv in exp_srv_list:
        print(f"Starting log collection on {srv[0]}")
        req_body = {
            "Mode": "collect"
        }
        log_put(url=srv[0], exp_creds=exp_creds, headers=headers, req_body=req_body)
    # Brief pause before polling; log_download then waits per-server as needed.
    time.sleep(2)
    # Second pass: poll each server until ready, then fetch and save its bundle.
    for srv in exp_srv_list:
        print(f"Starting log download on {srv[0]}")
        req_body = {
            "Mode": "download"
        }
        log_download(url=srv[0], exp_creds=exp_creds, headers=headers, req_body=req_body)