Skip to content

Commit 246721e

Browse files
committed
Add timeouts to urllib2 calls; otherwise we run into situations where gmond is hosed
1 parent 4828cb4 commit 246721e

9 files changed

Lines changed: 17 additions & 11 deletions

File tree

apache_status/python_modules/apache_status.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ def get_metrics():
6868
req = urllib2.Request(SERVER_STATUS_URL + "?auto")
6969

7070
# Download the status file
71-
res = urllib2.urlopen(req)
71+
res = urllib2.urlopen(req, None, 2)
7272

7373
for line in res:
7474
split_line = line.rstrip().split(": ")
@@ -93,7 +93,7 @@ def get_metrics():
9393
req2 = urllib2.Request(SERVER_STATUS_URL)
9494

9595
# Download the status file
96-
res = urllib2.urlopen(req2)
96+
res = urllib2.urlopen(req2, None, 2)
9797

9898
for line in res:
9999
regMatch = SSL_REGEX.match(line)

couchdb/python_modules/couchdb.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -54,7 +54,8 @@ def _get_couchdb_stats(url, refresh_rate):
5454
logging.warning('The specified refresh_rate of %d is invalid and has been substituted with 60!' % refresh_rate)
5555
url += '?range=60'
5656

57-
c = urllib2.urlopen(url)
57+
# Set time out for urlopen to 2 seconds otherwise we run into the possibility of hosing gmond
58+
c = urllib2.urlopen(url, None, 2)
5859
json_data = c.read()
5960
c.close()
6061

elasticsearch/python_modules/elasticsearch.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -127,7 +127,7 @@ def update_result(result, url):
127127
diff = now - last_update
128128
if diff > 20:
129129
print '[elasticsearch] ' + str(diff) + ' seconds passed - Fetching ' + url
130-
result = json.load(urllib.urlopen(url))
130+
result = json.load(urllib.urlopen(url, None, 2))
131131
last_update = now
132132

133133
return result
@@ -199,7 +199,7 @@ def metric_init(params):
199199

200200
# First iteration - Grab statistics
201201
print('[elasticsearch] Fetching ' + url_cluster)
202-
result = json.load(urllib.urlopen(url_cluster))
202+
result = json.load(urllib.urlopen(url_cluster, None, 2))
203203

204204
metric_group = params.get('metric_group', 'elasticsearch')
205205

@@ -220,7 +220,7 @@ def metric_init(params):
220220
url_indices = '{0}{1}/_stats'.format(host, index)
221221
print('[elasticsearch] Fetching ' + url_indices)
222222

223-
r_indices = json.load(urllib.urlopen(url_indices))
223+
r_indices = json.load(urllib.urlopen(url_indices, None, 2))
224224
descriptors += get_indices_descriptors(index,
225225
Desc_Skel,
226226
r_indices,

httpd/python_modules/httpd.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ def update_stats():
7474
try:
7575
httpd_stats = {}
7676
logging.debug(' opening URL: ' + str(STATUS_URL))
77-
f = urllib.urlopen(STATUS_URL)
77+
f = urllib.urlopen(STATUS_URL, None, 2)
7878

7979
for line in f.readlines():
8080
diff = False

jenkins/python_modules/jenkins.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ def _get_jenkins_statistics(url):
5252
url += '/api/json'
5353
url += '?tree=jobs[color],overallLoad[busyExecutors[min[latest]],queueLength[min[latest]],totalExecutors[min[latest]]]'
5454

55-
c = urllib2.urlopen(url)
55+
c = urllib2.urlopen(url, None, 2)
5656
json_data = c.read()
5757
c.close()
5858

network/netstats/conf.d/netstats.pyconf

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,11 @@ collection_group {
2525
name_match = "ip_(.+)"
2626
value_threshold = 1.0
2727
}
28+
metric {
29+
name_match = "icmpmsg_(.+)"
30+
value_threshold = 1.0
31+
}
32+
2833
metric {
2934
name_match = "icmp_(.+)"
3035
value_threshold = 1.0

nginx_status/python_modules/nginx_status.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,7 +49,7 @@ def run(self):
4949

5050
@staticmethod
5151
def _get_nginx_status_stub_response(url):
52-
c = urllib2.urlopen(url)
52+
c = urllib2.urlopen(url, None, 2)
5353
data = c.read()
5454
c.close()
5555

rabbit/python_modules/rabbitmq.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -99,7 +99,7 @@ def refreshStats(stats = ('nodes', 'queues'), vhosts = ['/']):
9999
result_dict = {}
100100
urlstring = url_template.safe_substitute(stats = stat, vhost = vhost)
101101
print urlstring
102-
result = json.load(urllib.urlopen(urlstring))
102+
result = json.load(urllib.urlopen(urlstring, None, 2))
103103
# Rearrange results so entry is held in a dict keyed by name - queue name, host name, etc.
104104
if stat in ("queues", "nodes", "exchanges"):
105105
for entry in result:

riak/riak.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ def run(self):
6363
def update_metric(self):
6464
try:
6565
req = urllib2.Request(url = self.url)
66-
res = urllib2.urlopen(req)
66+
res = urllib2.urlopen(req, None, 2)
6767
stats = res.read()
6868
dprint("%s", stats)
6969
json_stats = json.loads(stats)

0 commit comments

Comments (0)