diff --git a/CHANGES.rst b/CHANGES.rst
index 2cd9dbf..5eb29d7 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -5,7 +5,12 @@ Changelog of serverscripts
2.11 (unreleased)
-----------------
-- Nothing changed yet.
+- Removed pbis and rabbitmq: both are unused.
+
+- Sped up geoserver log extraction and made sure it no longer
+ consumes so much memory.
+
+- Added JNDI info for geoserver.
2.10 (2023-04-05)
diff --git a/Dockerfile b/Dockerfile
index af06816..215a987 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM fkrull/multi-python:focal
+FROM ghcr.io/fkrull/docker-multi-python:bionic
ENV LC_ALL=C.UTF-8
ENV LANG=C.UTF-8
diff --git a/README.rst b/README.rst
index d51ae14..5751be1 100644
--- a/README.rst
+++ b/README.rst
@@ -77,37 +77,6 @@ to run as root)::
*/5 * * * * /usr/local/bin/checkout-info > /dev/null 2>&1
-Rabbitmq-checker
-----------------
-
-``bin/rabbitmq-checker`` The script checks the length of messages per queue and
-amount of the queues per vhost. When the limit of queues or messages is reached it
-saves warnings in ``/var/local/serverscripts/nens.rabbitmq.message`` and a number of
-warnings to ``/var/local/serverscripts/nens.num_rabbitmq_too_big.warnings``.
-The configuration file is optionally in ``/etc/serverscripts/rabbitmq_zabbix.json``,
-for example see ``tests/example_rabbitmq_zabbix.json``. If configuration is not
-specified the scritp uses defaults values, queues=100 and messages=200.
-
-configuration::
-
- {
- 'lizard-nxt': { // vhost in rabbitmq
- 'queues_limit': 10,
- 'messages_limit': 300
- },
- ...
- }
-
-Retrieve vhosts on rabbitmq-server::
-
- $ sudo rabbitmqctl list_vhosts
-
-
-Before the taking it in production run the file manually in debug mode like::
-
- $ sudo bin/rabbitmq-checker -v
-
-
Docker-info
------------
diff --git a/serverscripts/geoserver.py b/serverscripts/geoserver.py
index 89a4ee1..88a2b12 100644
--- a/serverscripts/geoserver.py
+++ b/serverscripts/geoserver.py
@@ -3,12 +3,12 @@
"""
from collections import Counter
from serverscripts.clfparser import CLFParser
-from serverscripts.utils import get_output
from urllib.parse import parse_qs
from urllib.parse import urlparse
import argparse
import glob
+import gzip
import json
import logging
import os
@@ -122,26 +122,23 @@ def extract_from_line(line):
def extract_from_logfiles(logfile):
if not os.path.exists(logfile):
- return []
-
- logfile_pattern = logfile + "*"
- cmd = "zcat --force %s" % logfile_pattern
- logger.debug("Grabbing logfile output with: %s", cmd)
- output, _ = get_output(cmd)
- lines = output.split("\n")
- logger.debug("Grabbed %s lines", len(lines))
-
- results = []
- for line in lines:
- if "/geoserver/" not in line:
- continue
- if "GetMap" not in line:
- continue
- result = extract_from_line(line)
- if result:
- results.append(result)
- logger.debug("After filtering, we have %s lines", len(results))
- return results
+ return
+
+ logfiles = glob.glob(logfile + "*")
+ for logfile in logfiles:
+ if logfile.endswith(".gz"):
+ f = gzip.open(logfile, "rt")
+ else:
+ f = open(logfile, "rt")
+ for line in f:
+ if "/geoserver/" not in line:
+ continue
+ if "GetMap" not in line:
+ continue
+ result = extract_from_line(line)
+ if result:
+ yield result
+ f.close()
def get_text_or_none(element, tag):
@@ -168,9 +165,11 @@ def extract_datastore_info(datastore_file):
connection, "./entry[@key='database']"
)
result["database_user"] = get_text_or_none(connection, "./entry[@key='user']")
- # result["database_namespace"] = get_text_or_none(
- # connection, "./entry[@key='namespace']"
- # )
+ jndi_connection = get_text_or_none(
+ connection, "./entry[@key='jndiReferenceName']"
+ )
+ if jndi_connection:
+ result["database_name"] = jndi_connection
return result
@@ -213,32 +212,39 @@ def extract_from_dirs(data_dir):
def extract_workspaces_info(geoserver_configuration):
"""Return list of workspaces with all info"""
- log_lines = extract_from_logfiles(geoserver_configuration["logfile"])
workspaces = {}
datastores_info = extract_from_dirs(geoserver_configuration["data_dir"])
- workspace_names = Counter(
- [log_line["workspace"] for log_line in log_lines]
+ workspace_names_and_referers = Counter(
+ (
+ (log_line["workspace"], log_line["referer"])
+ for log_line in extract_from_logfiles(geoserver_configuration["logfile"])
+ )
).most_common()
+
+ workspace_names_counter = Counter()
+ for (workspace_name, referer), workspace_count in workspace_names_and_referers:
+ workspace_names_counter.update({workspace_name: workspace_count})
+ workspace_names = workspace_names_counter.most_common()
+
for workspace_name, workspace_count in workspace_names:
if workspace_name not in datastores_info:
logger.warn(
"Workspace %s from nginx logfile is missing in workspaces dir.",
- workspace_name
+ workspace_name,
)
continue
- workspaces[workspace_name] = {}
- workspace_lines = [
- log_line
- for log_line in log_lines
- if log_line["workspace"] == workspace_name
+
+ referers = Counter()
+ for (found_workspace_name, referer), count in workspace_names_and_referers:
+ if found_workspace_name != workspace_name:
+ continue
+ referers.update({referer: count})
+ common_referers = [
+ "%s (%d)" % (referer, count) for (referer, count) in referers.most_common(5)
]
- referers = Counter(
- [log_line["referer"] for log_line in workspace_lines if log_line["referer"]]
- )
- common_referers = [referer for (referer, count) in referers.most_common(5)]
workspaces[workspace_name] = {
- "usage": len(workspace_lines),
+ "usage": workspace_count,
"referers": " + ".join(common_referers),
}
diff --git a/serverscripts/pbis.py b/serverscripts/pbis.py
deleted file mode 100644
index c482976..0000000
--- a/serverscripts/pbis.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""Determine whether pbis runs correctly
-
-"""
-import argparse
-import json
-import logging
-import os
-import serverscripts
-import subprocess
-import sys
-
-
-VAR_DIR = "/var/local/serverscripts"
-OUTPUT_DIR = "/var/local/serverinfo-facts"
-OUTPUT_FILE = os.path.join(OUTPUT_DIR, "pbis.fact")
-PBIS_EXECUTABLE = "/usr/bin/pbis"
-OK = 0
-ERROR = 1
-
-logger = logging.getLogger(__name__)
-
-
-def check_pbis():
- command = "%s status" % PBIS_EXECUTABLE
- logger.debug("Running '%s'...", command)
- sub = subprocess.Popen(
- command,
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- universal_newlines=True,
- )
- output, error = sub.communicate()
- if error:
- logger.warning("Error output from pbis command: %s", error)
- lines = [line.strip().lower() for line in output.split("\n")]
- online = [line for line in lines if "online" in line]
- nens_local = [line for line in lines if "nens.local" in line]
- if online and nens_local:
- logger.info("Both 'online' and 'nens.local' found")
- return OK
- else:
- logger.error("Not both of 'online' and 'nens.local' found")
- return ERROR
-
-
-def main():
- """Installed as bin/rabbitmq-info"""
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "-v",
- "--verbose",
- action="store_true",
- dest="verbose",
- default=False,
- help="Verbose output",
- )
- parser.add_argument(
- "-V",
- "--version",
- action="store_true",
- dest="print_version",
- default=False,
- help="Print version",
- )
-
- options = parser.parse_args()
- if options.print_version:
- print(serverscripts.__version__)
- sys.exit()
- if options.verbose:
- loglevel = logging.DEBUG
- else:
- loglevel = logging.WARN
- logging.basicConfig(level=loglevel, format="%(levelname)s: %(message)s")
-
- status = OK
- pbis_exists = os.path.exists(PBIS_EXECUTABLE)
- if pbis_exists:
- status = check_pbis()
- else:
- logger.info("No %s found, skipping the pbis check", PBIS_EXECUTABLE)
-
- # Write facts for serverinfo.
- if not os.path.exists(OUTPUT_DIR):
- os.mkdir(OUTPUT_DIR)
- logger.info("Created %s", OUTPUT_DIR)
- open(OUTPUT_FILE, "w").write(
- json.dumps({"exists": pbis_exists}, sort_keys=True, indent=4)
- )
-
- zabbix_errors_file = os.path.join(VAR_DIR, "nens.pbis.errors")
- open(zabbix_errors_file, "w").write(str(status))
diff --git a/serverscripts/rabbitmq.py b/serverscripts/rabbitmq.py
deleted file mode 100644
index 9fd3a3c..0000000
--- a/serverscripts/rabbitmq.py
+++ /dev/null
@@ -1,249 +0,0 @@
-"""Extract information from rabbitmq.
-
-"""
-import argparse
-import json
-import logging
-import operator
-import os
-import serverscripts
-import subprocess
-import sys
-
-
-VAR_DIR = "/var/local/serverscripts"
-
-CONFIG_DIR = "/etc/serverscripts"
-CONFIG_FILE = os.path.join(CONFIG_DIR, "rabbitmq_zabbix.json")
-
-ALLOWED_NUM_QUEUES = 100
-ALLOWED_NUM_MESSAGES = 200
-
-QUEUES_LIMIT = "queues_limit"
-MESSAGES_LIMIT = "messages_limit"
-
-logger = logging.getLogger(__name__)
-
-
-def parse_queues_stdout(queues_stdout):
- """
- Retrieve the amount of messages per queues.
- queues_stdout attribute contains the shell output of
- 'rabbitmqctl list_queues' command like:
-
- Listing queues ...
- queuename1 0
- queuename1 4
- ...done.
-
- The second column contains amount of massages in the queue.
- """
- queues = {}
- for line in queues_stdout.split("\n"):
- line_attrs = line.split()
- if len(line_attrs) == 2:
- queues[line_attrs[0]] = int(line_attrs[1])
- return queues
-
-
-def parse_vhosts_stdout(vhosts_stdout):
- """
- Retrieve vhosts from stdout,
- vhosts_stdout attribute contains the shell output of
- 'rabbitmqctl list_vhosts' command like:
-
- Listing vhosts ...
- /
- efcis
- nrr
- lizard-nxt
- ...done.
-
- """
- vhosts = []
- for line in vhosts_stdout.split("\n"):
- if line.find("done") > 0:
- # end of stdout is reached
- break
- line_attrs = line.split()
- if len(line_attrs) == 1:
- vhosts.append(line_attrs[0])
- return vhosts
-
-
-def retrieve_vhosts():
- """Run shell command 'rabbitmqctl list_vhosts', parse stdout, return vhosts."""
- stdout = ""
- try:
- stdout = subprocess.check_output(["/usr/sbin/rabbitmqctl", "list_vhosts"])
- except OSError:
- logger.info("/usr/sbin/rabbitmqctl is not available.")
- return
- except subprocess.CalledProcessError:
- logger.info("'/usr/sbin/rabbitmqctl list_vhosts' returns non-zero exit status.")
- return
-
- return parse_vhosts_stdout(stdout)
-
-
-def retrieve_queues(vhost):
- """Run shell command, parse stdout, returtn queues."""
- stdout = ""
- try:
- stdout = subprocess.check_output(
- ["/usr/sbin/rabbitmqctl", "list_queues", "-p", str(vhost)]
- )
- except OSError:
- logger.warning("/usr/sbin/rabbitmqctl is not available.")
- return
- except subprocess.CalledProcessError:
- logger.warning(
- "'/usr/sbin/rabbitmqctl list_queues -p %s' returns non-zero exit status."
- % vhost
- )
- return
-
- return parse_queues_stdout(stdout)
-
-
-def get_max_queue(queues):
- """Retrieve a queue with max messages as tuple."""
- queue, value = max(queues.items(), key=operator.itemgetter(1))
- return (queue, value)
-
-
-def validate_configuration(configuration):
- """Validate loaded content of rabbitmq-zabbix.json."""
- error_type = "Rabbitmq-Zabbix configuration error:"
- if not configuration:
- logger.error("%s: no vhost.", error_type)
- return False
-
- for vhost in configuration:
- queues_limit_value = configuration[vhost].get(QUEUES_LIMIT)
- messages_limit_value = configuration[vhost].get(MESSAGES_LIMIT)
- queues_limit_key = QUEUES_LIMIT in configuration[vhost]
- messages_limit_key = MESSAGES_LIMIT in configuration[vhost]
-
- if not queues_limit_key:
- logger.error(
- "%s: vhost '%s' has not '%s' item.", error_type, vhost, QUEUES_LIMIT
- )
- return False
- if not messages_limit_key:
- logger.error(
- "%s: vhost '%s' has not '%s' item.", error_type, vhost, MESSAGES_LIMIT
- )
- return False
- try:
- int(queues_limit_value)
- int(messages_limit_value)
- except:
- logger.error(
- "%s: '%s' one of the values is not an integer.", error_type, vhost
- )
- return False
- return True
-
-
-def load_config(config_file_path):
- """Retrieve conriguration, return a {} when
- the content is invalid"""
- content = {}
- if not os.path.exists(config_file_path):
- return {}
- with open(config_file_path, "r") as config_file:
- try:
- content = json.loads(config_file.read())
- except:
- logger.error("Can not load a rabbitmq-zabbix configuration.")
- # end with
- if validate_configuration(content):
- return content
- return {}
-
-
-def main():
- """Installed as bin/rabbitmq-info"""
- parser = argparse.ArgumentParser()
- parser.add_argument(
- "-v",
- "--verbose",
- action="store_true",
- dest="verbose",
- default=False,
- help="Verbose output",
- )
- parser.add_argument(
- "-V",
- "--version",
- action="store_true",
- dest="print_version",
- default=False,
- help="Print version",
- )
-
- options = parser.parse_args()
- if options.print_version:
- print(serverscripts.__version__)
- sys.exit()
- if options.verbose:
- loglevel = logging.DEBUG
- else:
- loglevel = logging.WARN
- logging.basicConfig(level=loglevel, format="%(levelname)s: %(message)s")
-
- vhosts = retrieve_vhosts()
- if vhosts is None:
- vhosts = []
- if not os.path.exists(CONFIG_DIR):
- os.mkdir(CONFIG_DIR)
- logger.info("Created %s", CONFIG_DIR)
-
- configuration = load_config(CONFIG_FILE)
- num_too_big = 0
- wrong_vhosts = []
-
- for vhost in vhosts:
- logger.info("Checking vhost '%s'." % vhost)
- vhost_num_queues = ALLOWED_NUM_QUEUES
- queue_num_messages = ALLOWED_NUM_MESSAGES
- queues = retrieve_queues(vhost)
- # check or the vhost has a queue
- if not queues:
- logger.info("vhost '%s' has no queues." % vhost)
- continue
- # check the allowed amount of queues per vhost
- vhost_configuration = configuration.get(vhost)
- if vhost_configuration:
- vhost_num_queues = vhost_configuration.get(QUEUES_LIMIT)
- queue_num_messages = vhost_configuration.get(MESSAGES_LIMIT)
- logger.info("Using custom limits for vhost '%s'.", vhost)
- if len(queues) >= vhost_num_queues:
- wrong_vhosts.append(vhost)
- num_too_big = num_too_big + 1
- logger.error(
- "Number of queues is greater than %d: %d", vhost_num_queues, len(queues)
- )
- continue
- # check the allowed amount of messages in the largest queue
- queue_name, queue_value = get_max_queue(queues)
- if queue_value >= queue_num_messages:
- wrong_vhosts.append(vhost)
- num_too_big = num_too_big + 1
- logger.error(
- "Number of messages in queue '%s' is greater than %d: %d",
- queue_name,
- queue_num_messages,
- queue_value,
- )
-
- logger.info("Write check results to files: %d." % num_too_big)
- zabbix_message_file = os.path.join(VAR_DIR, "nens.rabbitmq.message")
- open(zabbix_message_file, "w").write(", ".join(wrong_vhosts))
- if num_too_big:
- logger.warning(
- "Number queues/messages too big for: %s" % ", ".join(wrong_vhosts)
- )
- zabbix_rmq_count_file = os.path.join(VAR_DIR, "nens.num_rabbitmq_too_big.warnings")
- open(zabbix_rmq_count_file, "w").write("%d" % num_too_big)
diff --git a/serverscripts/script.py b/serverscripts/script.py
index 345cc91..46bba1c 100644
--- a/serverscripts/script.py
+++ b/serverscripts/script.py
@@ -9,8 +9,6 @@
import serverscripts.geoserver
import serverscripts.haproxy
import serverscripts.nginx
-import serverscripts.pbis
-import serverscripts.rabbitmq
def main():
@@ -33,8 +31,6 @@ def main():
serverscripts.geoserver,
serverscripts.haproxy,
serverscripts.nginx,
- serverscripts.pbis,
- serverscripts.rabbitmq,
]:
try:
module.main()
diff --git a/serverscripts/tests/example_geoserver_data/workspaces/u0106_3Dzeeland/u0106/datastore.xml b/serverscripts/tests/example_geoserver_data/workspaces/u0106_3Dzeeland/u0106/datastore.xml
index f8b147d..f30c7df 100755
--- a/serverscripts/tests/example_geoserver_data/workspaces/u0106_3Dzeeland/u0106/datastore.xml
+++ b/serverscripts/tests/example_geoserver_data/workspaces/u0106_3Dzeeland/u0106/datastore.xml
@@ -2,38 +2,24 @@
DataStoreInfoImpl-52b81a0d:16feab49b5d:-7afd
u0106
Kloosterzande assets
- PostGIS
+ PostGIS (JNDI)
true
WorkspaceInfoImpl-52b81a0d:16feab49b5d:-7b01
- public
- 300
- 50
- false
+ true
1
false
- staging-project-geoserver
- s-project-db-d1.nens
- true
- true
- 1000
+ FAST
+ 5000
+ true
+ java:comp/env/jdbc/klimaatatlas_geoserver
false
- true
true
- 20
- false
- 5432
- crypt2:VKqOzz9nVJencJVUIrUQfXje7SdCdTryWkFfP2ilzN0=
- 1
postgis
- u0106_3Dzeeland
- 10
- 3
- true
- staging-project-geoserver
- 300
+ hhnk
+ true
<__default>false
-
\ No newline at end of file
+
diff --git a/serverscripts/tests/example_rabbitmq_zabbix.json b/serverscripts/tests/example_rabbitmq_zabbix.json
deleted file mode 100644
index d3d9285..0000000
--- a/serverscripts/tests/example_rabbitmq_zabbix.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "lizard-nxt": {
- "queues_limit": 100,
- "messages_limit": 150
- },
- "flooding": {
- "queues_limit": 100,
- "messages_limit": 2
- }
-}
diff --git a/serverscripts/tests/example_rabbitmq_zabbix_broken.json b/serverscripts/tests/example_rabbitmq_zabbix_broken.json
deleted file mode 100644
index cdaaae1..0000000
--- a/serverscripts/tests/example_rabbitmq_zabbix_broken.json
+++ /dev/null
@@ -1,9 +0,0 @@
- "lizard-nxt": {
- "queues": 100,
- "messages": 150
- },
- "flooding": {
- "queues": 100,
- "messages": 2
- }
-}
diff --git a/serverscripts/tests/test_geoserver.py b/serverscripts/tests/test_geoserver.py
index eba95a6..388a5ab 100644
--- a/serverscripts/tests/test_geoserver.py
+++ b/serverscripts/tests/test_geoserver.py
@@ -6,11 +6,6 @@
OUR_DIR = os.path.dirname(__file__)
-def test_broken_config_file():
- broken_config_example = os.path.join(OUR_DIR, "example_rabbitmq_zabbix_broken.json")
- assert geoserver.load_config(broken_config_example) is None
-
-
def test_config_file():
config_example = os.path.join(OUR_DIR, "example_geoserver.json")
configuration = geoserver.load_config(config_example)
@@ -19,8 +14,10 @@ def test_config_file():
def test_extract_from_logfiles():
- lines = geoserver.extract_from_logfiles(
- os.path.join(OUR_DIR, "example_geoserver_logs/access.log")
+ lines = list(
+ geoserver.extract_from_logfiles(
+ os.path.join(OUR_DIR, "example_geoserver_logs/access.log")
+ )
)
assert len(lines) == 439
# ^^^ this used to be 210, but now I'm also looking at LAYERS= instead of
diff --git a/serverscripts/tests/test_rabbitmq.py b/serverscripts/tests/test_rabbitmq.py
deleted file mode 100644
index e1523f7..0000000
--- a/serverscripts/tests/test_rabbitmq.py
+++ /dev/null
@@ -1,116 +0,0 @@
-from pprint import pprint
-from serverscripts import rabbitmq
-from unittest import TestCase
-
-import os
-
-
-class GitAndEggInfoTestCase(TestCase):
- def setUp(self):
- our_dir = os.path.dirname(__file__)
- self.config_example = os.path.join(our_dir, "example_rabbitmq_zabbix.json")
- self.broken_config_example = os.path.join(
- our_dir, "example_rabbitmq_zabbix_broken.json"
- )
- self.queues_stdout_example = os.path.join(
- our_dir, "example_queues_stdout_example.txt"
- )
- self.vhosts_stdout_example = os.path.join(
- our_dir, "example_vhosts_stdout_example.txt"
- )
-
- def test_parse_vhosts_stdout(self):
- vhosts_stdout = ""
- with open(self.vhosts_stdout_example, "r") as vhosts_file:
- vhosts_stdout = vhosts_file.read()
- pprint(vhosts_stdout)
-
- vhosts = rabbitmq.parse_vhosts_stdout(vhosts_stdout)
- pprint(vhosts)
- self.assertGreater(len(vhosts), 1)
-
- def test_parse_vhosts_stdout_empty(self):
- vhosts_stdout = ""
- vhosts = rabbitmq.parse_vhosts_stdout(vhosts_stdout)
- pprint(vhosts)
- self.assertEqual(len(vhosts), 0)
-
- def test_parse_queues_stdout(self):
- queues_stdout = ""
- with open(self.queues_stdout_example, "r") as queues_file:
- queues_stdout = queues_file.read()
- pprint(queues_stdout)
-
- queues = rabbitmq.parse_queues_stdout(queues_stdout)
- pprint(queues)
- self.assertGreater(len(queues), 1)
-
- def test_get_max_queue(self):
- queues = {"q1": 10, "q2": 3, "z": -1}
- result = rabbitmq.get_max_queue(queues)
- expected = ("q1", 10)
- assert result == expected
-
- def test_load_config_file(self):
- configuration = rabbitmq.load_config(self.config_example)
- self.assertGreater(len(configuration), 0)
-
- def test_load_config_file_broken_content(self):
- configuration = rabbitmq.load_config(self.broken_config_example)
- self.assertEqual(len(configuration), 0)
-
- def test_rabbitmqctl_not_exists(self):
- vhost = "asdasda"
- queues = rabbitmq.retrieve_queues(vhost)
- pprint("'%s' contains '%s' queues." % (vhost, queues))
- self.assertEqual(queues, None)
-
- def test_rabbitmqctl_vhosts(self):
- vhosts = rabbitmq.retrieve_vhosts()
- pprint("broker contains '%s' vhosts." % vhosts)
- self.assertEqual(vhosts, None)
-
- def test_validate_configuration_empty(self):
- configuration = {}
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertFalse(is_valid)
-
- def test_validate_configuration_no_messages_limit(self):
- configuration = {
- "flooding": {"queues_limit": 23},
- "lizard-nxt": {"queues_limit": 1, "messages_limit": 200},
- }
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertFalse(is_valid)
-
- def test_validate_configuration_no_queues_limit(self):
- configuration = {
- "flooding": {"queues_limit": 23, "messages_limit": 200},
- "lizard-nxt": {"messages_limit": 200},
- }
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertFalse(is_valid)
-
- def test_validate_configuration_queues_not_int(self):
- configuration = {
- "flooding": {"queues_limit": "dfdf", "messages_limit": 200},
- "lizard-nxt": {"messages_limit": "200", "queues_limit": 1},
- }
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertFalse(is_valid)
-
- def test_validate_configuration_messages_not_int(self):
- configuration = {
- "flooding": {"queues_limit": 22, "messages_limit": ""},
- "lizard-nxt": {"messages_limit": 200, "queues_limit": 1},
- }
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertFalse(is_valid)
-
- def test_validate_configuration_valid(self):
- configuration = {
- "flooding": {"queues_limit": 22, "messages_limit": 22},
- "lizard-nxt": {"messages_limit": "20", "queues_limit": 1},
- }
- is_valid = rabbitmq.validate_configuration(configuration)
- self.assertTrue(is_valid)
diff --git a/setup.py b/setup.py
index 0b9a158..b589efb 100644
--- a/setup.py
+++ b/setup.py
@@ -55,8 +55,6 @@
"geoserver-info = serverscripts.geoserver:main",
"haproxy-info = serverscripts.haproxy:main",
"nginx-info = serverscripts.nginx:main",
- "pbis-info = serverscripts.pbis:main",
- "rabbitmq-checker = serverscripts.rabbitmq:main",
# The one below runs all above ones, except for cifsfixer
"gather-all-info = serverscripts.script:main",
]
diff --git a/tox.ini b/tox.ini
index da574cd..6315898 100644
--- a/tox.ini
+++ b/tox.ini
@@ -2,10 +2,11 @@
# 16.04: 3.5
# 18.04: 3.7
# 20.04: 3.8
+# 22.04: 3.10, not available in the dockerfile
envlist = py35,py36,py38
[testenv]
deps = .[test]
commands =
- pytest
\ No newline at end of file
+ pytest