siemens_wimax_capture.py
#!/usr/bin/python3
"""
Capture Siemens WiMAX device configurations. Used by CA Spectrum NCM.
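
The script drives the device's HTTPS web UI to trigger an upload of the
BS-Val-Unique.xml configuration file to the FTP directory on this host
(/opt/ftp) and prints the captured file to stdout. A non-zero exit code is
returned on authentication failure or timeout.

Expected invocation (positional arguments):
    siemens_wimax_capture.py host username password enable timeout retries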
"""
import argparse
import os
import re
import sys
import time

import requests
from bs4 import BeautifulSoup


def newer(file_path, mtime):
    """Return True if file_path exists and was modified after mtime."""
    try:
        return os.path.getmtime(file_path) > mtime
    except FileNotFoundError:
        return False


def main():
    parser = argparse.ArgumentParser(description='Capture Siemens WiMAX configurations')
    parser.add_argument('host', type=str, help='Device IP address')
    parser.add_argument('username', type=str, help='Username for connecting')
    parser.add_argument('password', type=str, help='Password for connecting')
    parser.add_argument('enable', type=str, help='Enable password (not used)')
    parser.add_argument('timeout', type=int, default=30, help='Timeout in seconds to wait for the uploaded file')
    parser.add_argument('retries', type=int, default=3, help='Retries (not used)')
    args = parser.parse_args()

    # Disable SSL certificate warnings
    requests.packages.urllib3.disable_warnings(requests.packages.urllib3.exceptions.InsecureRequestWarning)

    s = requests.Session()
    s.auth = (args.username, args.password)
    base_url = 'https://{}/'.format(args.host)

    # First request obtains the session cookie
    s.get(base_url, verify=False)
    # Second request authenticates and gets the session number
    r = s.get(base_url, verify=False)
    if not r.ok:
        print("Invalid credentials supplied.", file=sys.stderr)
        return 4

    # Navigate the web UI frames to locate the file id
    m = re.search('URL=/(.*)(m.*?)"', r.text)
    if m is None:
        print("Unexpected response from the device.", file=sys.stderr)
        return 2
    base_url += m.group(1)
    link = m.group(2)
    r = s.get(base_url + link, verify=False)
    soup = BeautifulSoup(r.text, features='html.parser')
    # The menu frame links to the configuration banks
    link = soup.find('frame', {'name': 'menuframe'}).attrs['src']
    r = s.get(base_url + link, verify=False)
    soup = BeautifulSoup(r.text, features='html.parser')
    link = soup.find('td', text='Primary Bank').find('a').attrs['href']
    r = s.get(base_url + link, verify=False)
    soup = BeautifulSoup(r.text, features='html.parser')
    # The command frame holds the file table with the upload controls
    link = soup.find('frame', {'name': 'cmdframe'}).attrs['src']
    r = s.get(base_url + link, verify=False)
    soup = BeautifulSoup(r.text, features='html.parser')
    file_id = soup.find('td', text='BS-Val-Unique.xml').find_previous_sibling().find('input').attrs['value']

    # Record the time before uploading so the new file can be detected
    now = time.time()

    # Trigger the upload
    data = {
        'MCTable_action': '',
        'MCTable_S': file_id,
        'Submit': 'Upload File',
    }
    url = base_url + link
    r = s.post(url, data=data, verify=False)
    # Post a second time with dashes stripped from the path
    r = s.post(url.replace('-', ''), data=data, verify=False)

    # Wait for the file to appear on the FTP server
    file_path = '/opt/ftp/{}_BS-Val-Unique.xml'.format(args.host)
    attempts = 0
    while not newer(file_path, now) and attempts < args.timeout:
        time.sleep(1)
        attempts += 1
    if attempts >= args.timeout:
        print("Timeout waiting for the file.", file=sys.stderr)
        return 3

    # Print the captured configuration to stdout
    with open(file_path, encoding='latin1') as fh:
        print(fh.read())
    return 0


if __name__ == '__main__':
    sys.exit(main())