
Merge pull request #40 from pkarkazis/master
 Monitoring Probe update
pkarkazis authored Dec 6, 2017
2 parents eff9e60 + 353474e commit 6b75a9c
Showing 2 changed files with 20 additions and 9 deletions.
2 changes: 1 addition & 1 deletion vm_mon/node.conf
@@ -3,4 +3,4 @@ node_name: TEST_VM1
post_freq = 3

[Prometheus]
-server_url: http://sp.int3.sonata-nfv.eu:9091/metrics
+server_url: ["http://sp.int3.sonata-nfv.eu:9091/metrics"]
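Note on this change: server_url is now read as a JSON-encoded list, so the probe can push the same metrics to several Prometheus pushgateways. The PROM_SRV environment variable that overrides it (read in sonmonprobe.py below) works the same way, and a bare URL is still accepted for backward compatibility. A minimal sketch of the two accepted forms, assuming the init() logic shown below; the second endpoint is purely illustrative:

    # New list form: one entry per Prometheus pushgateway (second URL is hypothetical)
    PROM_SRV = '["http://sp.int3.sonata-nfv.eu:9091/metrics", "http://second-gw.example:9091/metrics"]'
    # Legacy form: a bare URL; init() wraps it in a one-element list
    PROM_SRV = 'http://sp.int3.sonata-nfv.eu:9091/metrics'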
27 changes: 19 additions & 8 deletions vm_mon/sonmonprobe.py
@@ -29,7 +29,7 @@
__author__="panos"
__date__ ="$Apr 20, 2016 1:11:43 PM$"

-import urllib2, time, logging
+import urllib2, time, logging
import json, os, subprocess
from threading import Thread
from VmData import vmdt
@@ -51,6 +51,10 @@ def init():
node_name = os.getenv('NODE_NAME', conf.ConfigSectionMap("vm_node")['node_name'])
prometh_server = os.getenv('PROM_SRV', conf.ConfigSectionMap("Prometheus")['server_url'])
interval = conf.ConfigSectionMap("vm_node")['post_freq']
+if is_json(prometh_server):
+    prometh_server = json.loads(prometh_server)
+else:
+    prometh_server = [prometh_server]
logger = logging.getLogger('dataCollector')
#hdlr = logging.FileHandler('dataCollector.log', mode='w')
hdlr = RotatingFileHandler('dataCollector.log', maxBytes=10000, backupCount=1)
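To make the effect of this normalization concrete, a small hedged sketch (values taken from node.conf above; behavior follows the new is_json() helper added further down in this diff). Both accepted forms of the setting end up as a Python list, so the posting loop can always iterate over it:

    is_json('["http://sp.int3.sonata-nfv.eu:9091/metrics"]')   # True  -> json.loads() returns the list as-is
    is_json('http://sp.int3.sonata-nfv.eu:9091/metrics')       # False -> value is wrapped in a one-element list
    # Either way: prometh_server == ['http://sp.int3.sonata-nfv.eu:9091/metrics']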
@@ -66,21 +70,19 @@ def init():
node_name +=":"+vm_id
print vm_id
logger.info('SP Data Collector')
-logger.info('Promth P/W Server '+prometh_server)
+logger.info('Promth P/W Server '+json.dumps(prometh_server))
logger.info('Monitoring Node '+node_name)
logger.info('Monitoring time interval '+interval)

-def postNode(node_,type_, data_):
-    #print data
-    url = prometh_server+"/job/"+type_+"/instance/"+node_
-    #print url
+def postNode(node_,type_, data_,server_):
+    url = server_+"/job/"+type_+"/instance/"+node_
logger.info('Post on: \n'+url)
#logger.info('Post ports metrics: \n'+data_)
try:
req = urllib2.Request(url)
req.add_header('Content-Type','text/html')
req.get_method = lambda: 'PUT'
-response=urllib2.urlopen(req,data_)
+response=urllib2.urlopen(req,data_, timeout = 10)
code = response.code
logger.info('Response Code: '+str(code))
except urllib2.HTTPError, e:
@@ -140,6 +142,13 @@ def collectVM(id_):
vm_dt = dt_collector.prom_parser()
time.sleep(1)

+def is_json(myjson):
+    try:
+        json_object = json.loads(myjson)
+    except ValueError, e:
+        return False
+    return True


if __name__ == "__main__":
init()
@@ -151,4 +160,6 @@ def collectVM(id_):
while 1:
time.sleep(float(interval))
#print vm_dt
-postNode(node_name,"vnf",vm_dt)
+for url in prometh_server:
+    postNode(node_name,"vnf",vm_dt,url)
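Taken together, a minimal sketch of what one iteration of the updated main loop now does (illustrative values, not the probe's exact code): the same metrics payload is PUT to every configured pushgateway, and urlopen() uses the new 10-second timeout so a slow or unreachable gateway cannot block the loop indefinitely:

    import urllib2

    node_name = "TEST_VM1:0a1b2c3d"                       # node name + VM id (illustrative)
    prometh_server = ["http://sp.int3.sonata-nfv.eu:9091/metrics"]
    vm_dt = 'cpu_util 12.5\n'                             # illustrative metrics payload

    for server in prometh_server:
        url = server + "/job/vnf/instance/" + node_name   # same path postNode() builds
        req = urllib2.Request(url)
        req.add_header('Content-Type', 'text/html')
        req.get_method = lambda: 'PUT'                     # the probe pushes with HTTP PUT
        try:
            response = urllib2.urlopen(req, vm_dt, timeout=10)
            print response.code                            # status returned by the gateway
        except urllib2.HTTPError, e:
            print e.code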
