comparison adslstats.py @ 23:4b6c811e77df

Save & load the session cookie so we don't have to reauth every time. It seems even successful auth attempts are rate limited and get a 503.
author Daniel O'Connor <darius@dons.net.au>
date Sat, 17 Jun 2017 15:59:23 +0930
parents a53f90508a06
children 806b1ed7f1b5
comparison
equal deleted inserted replaced
22:a53f90508a06 23:4b6c811e77df
37 import json 37 import json
38 import mechanize 38 import mechanize
39 import mysrp as srp 39 import mysrp as srp
40 import optparse 40 import optparse
41 import os 41 import os
42 import os.path
42 import re 43 import re
43 import requests 44 import requests
44 import rrdtool 45 import rrdtool
45 import sys 46 import sys
46 import time 47 import time
49 conf = ConfigParser.ConfigParser() 50 conf = ConfigParser.ConfigParser()
50 conf.add_section('global') 51 conf.add_section('global')
51 conf.set('global', 'username', 'admin') 52 conf.set('global', 'username', 'admin')
52 conf.set('global', 'password', 'admin') 53 conf.set('global', 'password', 'admin')
53 conf.set('global', 'name', '10.0.2.14') 54 conf.set('global', 'name', '10.0.2.14')
55 conf.set('global', 'cookiejar', os.path.expanduser('~/.adslstats.cj'))
54 56
55 conflist = ['adslstats.ini'] 57 conflist = ['adslstats.ini']
56 if ('HOME' in os.environ): 58 if ('HOME' in os.environ):
57 conflist.append(os.path.expanduser('~/.adslstats.ini')) 59 conflist.append(os.path.expanduser('~/.adslstats.ini'))
58 conf.read(conflist) 60 conf.read(conflist)
98 100
99 def getstats(): 101 def getstats():
100 stats = DSLStats() 102 stats = DSLStats()
101 parser = ConfigParser.ConfigParser() 103 parser = ConfigParser.ConfigParser()
102 base = 'http://%s' % (conf.get('global', 'name')) 104 base = 'http://%s' % (conf.get('global', 'name'))
103 105 br = mechanize.Browser()
106 #br.set_debug_http(True)
107 #br.set_debug_responses(True)
108 #br.set_debug_redirects(True)
109 cj = mechanize.LWPCookieJar()
110 if os.path.exists(conf.get('global', 'cookiejar')):
111 cj.load(conf.get('global', 'cookiejar'), ignore_discard = True)
112 br.set_cookiejar(cj)
113 if not fillstats(br, base, stats):
114 if not authenticate(br, base, conf.get('global', 'username'), conf.get('global', 'password')):
115 print('login failed')
116 return None
117 print('login succeeded, getting stats')
118 fillstats(br, base, stats)
119
120 cj.save(conf.get('global', 'cookiejar'), ignore_discard = True)
121 return stats
122
123 def authenticate(br, base, username, password):
104 # Connect and authenticate 124 # Connect and authenticate
105 br = mechanize.Browser()
106 r = br.open(base) 125 r = br.open(base)
107 bs = bs4.BeautifulSoup(r) 126 bs = bs4.BeautifulSoup(r)
108 token = bs.head.find(lambda tag: tag.has_attr('name') and tag['name'] == 'CSRFtoken')['content'] 127 token = bs.head.find(lambda tag: tag.has_attr('name') and tag['name'] == 'CSRFtoken')['content']
109 #print('Got CSRF token ' + token) 128 #print('Got CSRF token ' + token)
110 129
111 usr = srp.User(conf.get('global', 'username'), conf.get('global', 'password'), hash_alg = srp.SHA256, ng_type = srp.NG_2048) 130 usr = srp.User(username, password, hash_alg = srp.SHA256, ng_type = srp.NG_2048)
112 uname, A = usr.start_authentication() 131 uname, A = usr.start_authentication()
113 132
114 req = mechanize.Request(base + '/authenticate', data = urllib.urlencode({'CSRFtoken' : token, 'I' : uname, 'A' : binascii.hexlify(A)})) 133 req = mechanize.Request(base + '/authenticate', data = urllib.urlencode({'CSRFtoken' : token, 'I' : uname, 'A' : binascii.hexlify(A)}))
115 r = br.open(req) 134 r = br.open(req)
116 j = json.decoder.JSONDecoder().decode(r.read()) 135 j = json.decoder.JSONDecoder().decode(r.read())
123 #print('Got response ' + str(j)) 142 #print('Got response ' + str(j))
124 143
125 usr.verify_session(binascii.unhexlify(j['M'])) 144 usr.verify_session(binascii.unhexlify(j['M']))
126 if not usr.authenticated(): 145 if not usr.authenticated():
127 print('Failed to authenticate') 146 print('Failed to authenticate')
128 return None 147 return False
129 148 return True
149
150 def fillstats(br, base, stats):
130 # Fetch stats and parse 151 # Fetch stats and parse
131 r = br.open(base + '/modals/broadband-bridge-modal.lp') 152 r = br.open(base + '/modals/broadband-bridge-modal.lp')
132 bs = bs4.BeautifulSoup(r) 153 bs = bs4.BeautifulSoup(r)
154 if bs.find('div', 'login') != None:
155 return False
133 156
134 # Helper function to extract data 157 # Helper function to extract data
135 def getvals(bs, text): 158 def getvals(bs, text):
136 subs = bs.findAll('label', text = text)[0].fetchNextSiblings()[0].strings 159 subs = bs.findAll('label', text = text)[0].fetchNextSiblings()[0].strings
137 return map(lambda s: float(s.split()[0]), subs) 160 return map(lambda s: float(s.split()[0]), subs)
159 print('Too many uptime elements to work out') 182 print('Too many uptime elements to work out')
160 stats.uptime = None 183 stats.uptime = None
161 else: 184 else:
162 stats.uptime = reduce(lambda a, b: a + b, map(lambda a: int(a[0]) * a[1], zip(uptime, mults))) 185 stats.uptime = reduce(lambda a, b: a + b, map(lambda a: int(a[0]) * a[1], zip(uptime, mults)))
163 186
164 return stats 187 return True
165 188
166 # Setup RRD 189 # Setup RRD
167 # We expect data to be logged every 5 minutes 190 # We expect data to be logged every 5 minutes
168 # Average 12 5 minute points -> hourly stats (keep 168 - a weeks worth) 191 # Average 12 5 minute points -> hourly stats (keep 168 - a weeks worth)
169 # Average 288 5 minute points -> daily stats (keep 1825 - 5 years worth) 192 # Average 288 5 minute points -> daily stats (keep 1825 - 5 years worth)