Revision 77e53059
Merge both SB6183 plugins
Add more error handling for bad status from modem
Signed-off-by: Nathaniel Clark <Nathaniel.Clark@misrule.us>
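
For context on the error-handling change: every page fetch now goes through one helper that returns an empty result when the modem is unreachable or answers with a bad status, and the callers then report the affected fields as unknown ("U"). A minimal sketch of that pattern (the helper name here is hypothetical, not the plugin's):

import sys
from urllib import request


def fetch_page(url):
    # Return the page body, or "" when the modem cannot be reached or
    # replies with a non-200 status; callers treat "" as "report U values".
    try:
        resp = request.urlopen(url)
    except OSError:
        print("failed to contact router", file=sys.stderr)
        return ""
    if resp.status != 200:
        print("bad status %d: %s" % (resp.status, resp.reason), file=sys.stderr)
        return ""
    return resp.read().decode("utf-8")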
--- a/plugins/router/arris-sb6183
+++ b/plugins/router/arris-sb6183
@@ -27,6 +27,7 @@
  * upstream and downstream power levels
  * downstream signal to noise ratio
  * downstream error counts
+* uptime

 The values are retrieved from the cable modem's status web pages at
 192.168.100.1. So, this plugin must be installed on a munin node
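
As the plugin documentation above says, everything it reports is scraped from the modem's status pages. A minimal sketch of pulling one of those "simpleTable" tables apart with lxml (the same library the plugin imports); the table index and row layout follow the diff below, but should be treated as assumptions for any particular firmware:

from urllib import request

from lxml import html

STATUS_URL = "http://192.168.100.1/RgConnect.asp"

page = request.urlopen(STATUS_URL).read().decode("utf-8")
tables = html.fromstring(page).xpath('//table[contains(@class, "simpleTable")]')
downstream = tables[1]  # second simpleTable lists the downstream channels
for row in downstream.findall("tr")[2:]:  # skip the title and heading rows
    cells = ["".join(td.itertext()).strip() for td in row.findall("td")]
    print(cells)  # Channel, Lock Status, ..., Power, SNR, Corrected, Uncorrectables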
@@ -74,7 +75,8 @@
 from urllib import request

 HOSTNAME = os.getenv("hostname", None)
-URL = "http://192.168.100.1/RgConnect.asp"
+STATUS_URL = "http://192.168.100.1/RgConnect.asp"
+INFO_URL = "http://192.168.100.1/RgSwInfo.asp"
 UPCOUNT = 4
 DOWNCOUNT = 16

@@ -83,6 +85,22 @@
 if HOSTNAME:
     print("host_name {0}\n".format(HOSTNAME))

+# UPTIME
+print(
+    """multigraph arris_uptime
+graph_title Modem Uptime
+graph_category system
+graph_args --base 1000 -l 0
+graph_vlabel uptime in days
+graph_scale no
+graph_category system
+graph_info This graph shows the number of days that the the host is up and running so far.
+uptime.label uptime
+uptime.info The system uptime itself in days.
+uptime.draw AREA
+"""
+)
+
 # POWER
 print("multigraph arris_power")
 print("graph_title Arris Power (dBmV)")
@@ -170,7 +188,7 @@
 try:
     from lxml import html

-    resp = request.urlopen(URL)
+    resp = request.urlopen(STATUS_URL)
 except ImportError:
     print("no (missing lxml module)")
 except OSError:
@@ -188,26 +206,70 @@
 rxcomment = re.compile(r"<!--.*?-->")
 rxscript = re.compile(r"<script.*?</script>", re.MULTILINE)

-resp = request.urlopen(URL)
-data = rxscript.sub(
-    "",
-    rxcomment.sub(
+
+def process_url(url):
+    """
+    Extract simpleTables from page at URL
+    """
+    try:
+        resp = request.urlopen(url)
+    except OSError:
+        print("failed to contact router", file=sys.stderr)
+        return []
+    if resp.status != 200:
+        print(
+            "failed to get status page %d: %s" % (resp.status, resp.reason),
+            file=sys.stderr,
+        )
+        return []
+    data = rxscript.sub(
         "",
-        rxblank.sub(" ", "".join(map(lambda x: x.decode("utf-8"), resp.readlines()))),
-    ),
-)
-dom = html.fromstring(data)
+        rxcomment.sub(
+            "",
+            rxblank.sub(
+                " ", "".join(map(lambda x: x.decode("utf-8"), resp.readlines()))
+            ),
+        ),
+    )
+    dom = html.fromstring(data)
+
+    return dom.xpath('//table[contains(@class, "simpleTable")]')
+
+
+print("multi_graph arris_uptime")
+arr = process_url(INFO_URL)
+if arr:
+    trs = arr[1].findall("tr")
+    # drop title
+    trs.pop(0)

-arr = dom.xpath('//table[contains(@class, "simpleTable")]')
-downstream = arr[1]
-upstream = arr[2]
+    date = "".join(trs[0].findall("td")[1].itertext()).strip()

-trs = downstream.findall("tr")
-# drop title
-trs.pop(0)
+    arr = date.split(" ")
+    rx = re.compile(r"[hms]")
+    days = int(arr[0])
+    hms = rx.sub("", arr[2]).split(":")

-headings = ["".join(x.itertext()).strip() for x in trs.pop(0).findall("td")]
-# ['Channel', 'Lock Status', 'Modulation', 'Channel ID', 'Frequency', 'Power', 'SNR', 'Corrected', 'Uncorrectables']
+    seconds = ((days * 24 + int(hms[0])) * 60 + int(hms[1])) * 60 + int(hms[2])
+    print("uptime.value {0}".format(seconds / 86400.0))
+else:
+    print("uptime.value U")
+
+
+arr = process_url(STATUS_URL)
+if arr:
+    downstream = arr[1]
+    upstream = arr[2]
+
+    trs = downstream.findall("tr")
+    # drop title
+    trs.pop(0)
+
+    headings = ["".join(x.itertext()).strip() for x in trs.pop(0).findall("td")]
+    # ['Channel', 'Lock Status', 'Modulation', 'Channel ID', 'Frequency', 'Power', 'SNR', 'Corrected', 'Uncorrectables']
+else:
+    trs = []
+    headings = []

 # Summation Graphs
 correct = 0
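
The hunk above also adds the fetch logic for the new uptime graph: the human-readable uptime scraped from RgSwInfo.asp is converted to days. A worked example of that arithmetic (the exact uptime string is an assumption about the firmware's format, e.g. "7 days 03h:25m:10s"):

import re

date = "7 days 03h:25m:10s"  # hypothetical value scraped from the info page
arr = date.split(" ")  # ['7', 'days', '03h:25m:10s']
days = int(arr[0])  # 7
hms = re.sub(r"[hms]", "", arr[2]).split(":")  # ['03', '25', '10']
seconds = ((days * 24 + int(hms[0])) * 60 + int(hms[1])) * 60 + int(hms[2])  # 617110
print("uptime.value {0}".format(seconds / 86400.0))  # about 7.14 days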
@@ -223,7 +285,7 @@

     channel = int(data["Channel"])

-    print("multigraph arris_power.down_{0}".format(channel))
+    print("\nmultigraph arris_power.down_{0}".format(channel))
     value = data["Power"].split(" ")[0]
     print("power.value {0}".format(value))
     power["down"][channel - 1] = value
@@ -239,7 +301,7 @@

 # Fill missing
 for i in range(len(trs), DOWNCOUNT):
-    print("multigraph arris_power.down_{0}".format(i + 1))
+    print("\nmultigraph arris_power.down_{0}".format(i + 1))
     print("power.value U")

     print("multigraph arris_snr.down_{0}".format(i + 1))
@@ -250,19 +312,25 @@
     print("uncr.value U")

 print("multigraph arris_error")
-print("corr.value {0}".format(correct))
-print("uncr.value {0}".format(uncorr))
+if arr:
+    print("corr.value {0}".format(correct))
+    print("uncr.value {0}".format(uncorr))
+else:
+    print("corr.value U")
+    print("uncr.value U")

 print("multigraph arris_snr")
 for i in range(0, DOWNCOUNT):
     print("down_{0}.value {1}".format(i + 1, snr[i]))

-trs = upstream.findall("tr")
-# drop title
-trs.pop(0)
+if arr:
+    trs = upstream.findall("tr")
+    # drop title
+    trs.pop(0)
+
+    headings = ["".join(x.itertext()).strip() for x in trs.pop(0).findall("td")]
+    # ['Channel', 'Lock Status', 'US Channel Type', 'Channel ID', 'Symbol Rate', 'Frequency', 'Power']

-headings = ["".join(x.itertext()).strip() for x in trs.pop(0).findall("td")]
-# ['Channel', 'Lock Status', 'US Channel Type', 'Channel ID', 'Symbol Rate', 'Frequency', 'Power']
 for row in trs:
     data = dict(
         zip(headings, ["".join(x.itertext()).strip() for x in row.findall("td")])