Commit bf672e4: Merge branch 'master' into fix_exporter_server

kumarashit authored Dec 24, 2020
2 parents: c733ed4 + 0c967f2
Showing 5 changed files with 119 additions and 6 deletions.
51 changes: 50 additions & 1 deletion delfin/drivers/fake_storage/__init__.py
@@ -280,7 +280,56 @@ def clear_alert(self, context, alert):
         pass
 
     def list_alerts(self, context, query_para=None):
-        pass
+        alert_list = [{
+            "storage_id": self.storage_id,
+            'alert_id': str(random.randint(1111111, 9999999)),
+            'sequence_number': 100,
+            'alert_name': 'SNMP connect failed',
+            'category': 'Fault',
+            'severity': 'Major',
+            'type': 'OperationalViolation',
+            'location': 'NetworkEntity=entity1',
+            'description': "SNMP connection to the storage failed.",
+            'recovery_advice': "Check snmp configurations.",
+            'occur_time': int(time.time())
+        }, {
+            "storage_id": self.storage_id,
+            'alert_id': str(random.randint(1111111, 9999999)),
+            'sequence_number': 101,
+            'alert_name': 'Link state down',
+            'category': 'Fault',
+            'severity': 'Critical',
+            'type': 'CommunicationsAlarm',
+            'location': 'NetworkEntity=entity2',
+            'description': "Backend link has gone down",
+            'recovery_advice': "Recheck the network configuration setting.",
+            'occur_time': int(time.time())
+        }, {
+            "storage_id": self.storage_id,
+            'alert_id': str(random.randint(1111111, 9999999)),
+            'sequence_number': 102,
+            'alert_name': 'Power failure',
+            'category': 'Fault',
+            'severity': 'Fatal',
+            'type': 'OperationalViolation',
+            'location': 'NetworkEntity=entity3',
+            'description': "Power failure occurred. ",
+            'recovery_advice': "Investigate power connection.",
+            'occur_time': int(time.time())
+        }, {
+            "storage_id": self.storage_id,
+            'alert_id': str(random.randint(1111111, 9999999)),
+            'sequence_number': 103,
+            'alert_name': 'Communication failure',
+            'category': 'Fault',
+            'severity': 'Critical',
+            'type': 'CommunicationsAlarm',
+            'location': 'NetworkEntity=network1',
+            'description': "Communication link gone down",
+            'recovery_advice': "Consult network administrator",
+            'occur_time': int(time.time())
+        }]
+        return alert_list
 
     @wait_random(MIN_WAIT, MAX_WAIT)
     def _get_volume_range(self, start, end):
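
The fake driver's list_alerts now returns four canned alerts instead of pass, giving the alert exporter pipeline real data to push end to end. A minimal sketch of exercising it (assuming FakeStorageDriver is the driver class in fake_storage and that no-argument construction works here; the real constructor may require access_info kwargs):

    # Sketch only: no-argument construction is an assumption.
    from delfin.drivers.fake_storage import FakeStorageDriver

    driver = FakeStorageDriver()
    for alert in driver.list_alerts(context=None):
        print(alert['severity'], '-', alert['alert_name'])
    # e.g. "Major - SNMP connect failed", "Critical - Link state down", ...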
62 changes: 62 additions & 0 deletions delfin/exporter/prometheus/alert_manager.py
@@ -0,0 +1,62 @@
+# Copyright 2020 The SODA Authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import requests
+from oslo_config import cfg
+from oslo_log import log
+
+LOG = log.getLogger(__name__)
+CONF = cfg.CONF
+alert_mngr_opts = [
+
+    cfg.StrOpt('alert_manager_host', default='localhost',
+               help='The prometheus alert manager host'),
+    cfg.StrOpt('alert_manager_port', default='9093',
+               help='The prometheus alert manager port'),
+]
+
+CONF.register_opts(alert_mngr_opts, "PROMETHEUS_ALERT_MANAGER_EXPORTER")
+alert_cfg = CONF.PROMETHEUS_ALERT_MANAGER_EXPORTER
+
+
+class PrometheusAlertExporter(object):
+    alerts = []
+    model_key = ['alert_id', 'alert_name', 'sequence_number', 'category',
+                 'severity', 'type', 'location', 'recovery_advice',
+                 'storage_id', 'storage_name', 'vendor',
+                 'model', 'serial_number', 'occur_time']
+
+    def push_prometheus_alert(self, alerts):
+
+        host = alert_cfg.alert_manager_host
+        port = alert_cfg.alert_manager_port
+        for alert in alerts:
+            dict = {}
+            dict["labels"] = {}
+            dict["annotations"] = {}
+            for key in self.model_key:
+                dict["labels"][key] = str(alert.get(key))
+
+            dict["annotations"]["summary"] = alert.get("description")
+
+            self.alerts.append(dict)
+            try:
+                response = requests.post('http://' + host + ":" + port +
+                                         '/api/v1/alerts',
+                                         json=self.alerts)
+                if response.status_code != 200:
+                    LOG.error("POST request failed for alert %s ",
+                              alert.get('alert_id'))
+            except Exception:
+                LOG.error("Exporting alert to alert manager has been failed "
+                          "for alert %s ", alert.get('alert_id'))
5 changes: 3 additions & 2 deletions delfin/exporter/prometheus/exporter.py
@@ -14,14 +14,15 @@
 
 from oslo_log import log
 from delfin.exporter import base_exporter
-from delfin.exporter.prometheus import prometheus
+from delfin.exporter.prometheus import prometheus, alert_manager
 
 LOG = log.getLogger(__name__)
 
 
 class AlertExporterPrometheus(base_exporter.BaseExporter):
     def dispatch(self, ctxt, data):
-        pass
+        alert_manager_obj = alert_manager.PrometheusAlertExporter()
+        alert_manager_obj.push_prometheus_alert(data)
 
 
 class PerformanceExporterPrometheus(base_exporter.BaseExporter):
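
dispatch is the hook the exporter framework invokes with a batch of alert dicts; it now hands them to the Alertmanager pusher instead of dropping them. A sketch of calling it directly (the alert dict is illustrative):

    from delfin.exporter.prometheus import exporter

    prom = exporter.AlertExporterPrometheus()
    prom.dispatch(ctxt=None, data=[{
        'alert_id': '7654321',                  # illustrative alert
        'alert_name': 'Link state down',
        'severity': 'Critical',
        'description': 'Backend link has gone down',
    }])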
3 changes: 1 addition & 2 deletions etc/delfin/delfin.conf
@@ -24,6 +24,5 @@ metric_server_port = 8195
 metrics_cache_file = /var/lib/delfin/delfin_exporter.txt
 
 [PROMETHEUS_ALERT_MANAGER_EXPORTER]
-enable = False
 alert_manager_host = 'localhost'
-alert_manager_port = '9093'
+alert_manager_port = '9093'
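
The unused enable flag is dropped; the two remaining options are exactly the ones registered by alert_manager.py above. A sketch of how oslo.config resolves them (the config-file path is assumed, and the opts must be registered first, which normally happens when alert_manager is imported):

    from oslo_config import cfg
    from delfin.exporter.prometheus import alert_manager  # registers the opts

    cfg.CONF(['--config-file', '/etc/delfin/delfin.conf'])
    print(cfg.CONF.PROMETHEUS_ALERT_MANAGER_EXPORTER.alert_manager_host)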
4 changes: 3 additions & 1 deletion setup.py
@@ -26,7 +26,9 @@
     python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*",
     entry_points={
         'delfin.alert.exporters': [
-            'example = delfin.exporter.example:AlertExporterExample'
+            'example = delfin.exporter.example:AlertExporterExample',
+            'prometheus = delfin.exporter.prometheus.exporter'
+            ':AlertExporterPrometheus',
         ],
         'delfin.performance.exporters': [
             'example = delfin.exporter.example:PerformanceExporterExample',
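
Registering the class under the delfin.alert.exporters namespace lets the framework discover it by entry point at runtime. A sketch of that lookup using pkg_resources (delfin's own loader may differ):

    import pkg_resources

    for ep in pkg_resources.iter_entry_points('delfin.alert.exporters'):
        print(ep.name, '->', ep.load())
    # After this change, 'prometheus' resolves to AlertExporterPrometheus
    # alongside the existing 'example' exporter.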