#!/usr/bin/python3
# This script only needs the Python standard library:
# the former nums-from-string dependency is replaced by the local
# nums_from_string() helper below, and datetime ships with Python.
#
# To run this script type:
# python main.py <Log File Name>
#
# The default <Log File Name> is ./docker_snowflake.log
#
# Example:
# python main.py snow.log
#
# Written by Allstreamer_
# Licensed under MIT
#
# Enhanced by MariusHerget
# Further enhanced and modified by mrdrache333
# Further enhanced by francisco-core

import argparse
import sys
import re
from datetime import datetime, timedelta
from http.server import HTTPServer, BaseHTTPRequestHandler
# Format of your timestamps in the beginning of the log
# e.g. "2022/01/01 16:50:30 <LOG ENTRY>" => "%Y/%m/%d %H:%M:%S"
TIMESTAMP_FORMAT = "%Y/%m/%d %H:%M:%S"
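
# A full traffic line, as reconstructed from the parsing logic below
# (exact wording may differ between snowflake proxy versions):
# "2022/01/01 16:50:30 In the last 1h0m0s, there were 10 connections. Relayed ↑ 1082 KB, ↓ 7714 KB."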

def nums_from_string(string):
    # Extract every run of digits from a string as a list of ints
    return [int(num) for num in re.findall(r"\d+", string)]
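
# For example, nums_from_string("there were 3 connections") returns [3];
# note that a decimal like "1.5" would come back as two separate ints.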

class TextHandler(BaseHTTPRequestHandler):
    logfile_path = None

    def do_GET(self):
        if self.path != "/metrics":
            # If the request path is not /metrics, return a 404 Not Found error
            self.send_error(404)
            return
        # Set the response status code to 200 OK
        self.send_response(200)
        # Set the content type to text/plain
        self.send_header("Content-type", "text/plain")
        # End the headers
        self.end_headers()
        # Return the metrics
        print_stats(
            self.logfile_path,
            lambda x: self.wfile.write(x.encode())  # encode response
        )
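
# With the server running (the default --serve mode below), the metrics
# can be scraped with e.g.:
#   curl http://localhost:8080/metrics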

def print_stats(logfile_path: str, printer_func):
    # Read file
    lines_all = readFile(logfile_path)
    # Get the statistics for various time windows
    # e.g. all time  => getDataFromLines(lines_all)
    # e.g. last 24h  => getDataFromLines(filterLinesBasedOnTimeDelta(lines_all, 24))
    # e.g. last Week => getDataFromLines(filterLinesBasedOnTimeDelta(lines_all, 24 * 7))
    stats = {
        'All time': getDataFromLines(lines_all),
        'Last 24h': getDataFromLines(filterLinesBasedOnTimeDelta(lines_all, 24)),
        'Last Week': getDataFromLines(filterLinesBasedOnTimeDelta(lines_all, 24 * 7)),
    }
    # Print all the results in the Prometheus metric format
    for time in stats:
        stat = stats[time]
        printer_func(
            f"served_people{{time=\"{time}\"}} {stat['connections']}\n" +
            f"upload_gb{{time=\"{time}\"}} {round(stat['upload_gb'], 4)}\n" +
            f"download_gb{{time=\"{time}\"}} {round(stat['download_gb'], 4)}\n"
        )
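
# The emitted metrics look like this (numbers illustrative):
#   served_people{time="All time"} 42
#   upload_gb{time="All time"} 1.5023
#   download_gb{time="All time"} 3.0142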

def readFile(logfile_path: str):
    # Read in the log file as a list of lines
    with open(logfile_path, "r") as file:
        lines_all = file.readlines()
    return lines_all

# Fallback for lines that do not start with a timestamp:
# map them to the Unix epoch so they fall outside every recent time window
def catchTimestampException(rowSubString, timestampFormat):
    try:
        return datetime.strptime(rowSubString, timestampFormat)
    except Exception:
        return datetime.strptime("1970/01/01 00:00:00", "%Y/%m/%d %H:%M:%S")

# Filter the log lines based on a time delta in hours
def filterLinesBasedOnTimeDelta(log_lines, hours):
    now = datetime.now()
    length_timestamp_format = len(datetime.strftime(now, TIMESTAMP_FORMAT))
    return filter(
        lambda row: now - timedelta(hours=hours)
        <= catchTimestampException(row[0:length_timestamp_format], TIMESTAMP_FORMAT)
        <= now,
        log_lines
    )

# Convert traffic figures (in B, KB, MB, or GB) to bytes and add them up
def get_byte_count(log_lines):
    # Map units to their byte conversion factors
    units = {
        "B": 1,
        "KB": 1024,
        "MB": 1024 * 1024,
        "GB": 1024 * 1024 * 1024
    }
    byte_count = 0
    for row in log_lines:
        # Each entry looks like "<direction arrow> <amount> <unit>"
        symbols = row.split(" ")
        byte_count += int(symbols[1]) * units[symbols[2]]
    return byte_count
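
# Worked example: get_byte_count(["↑ 2 KB", "↓ 3 MB"])
# == 2 * 1024 + 3 * 1024 * 1024 == 3147776 bytes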

# Filter the important lines from the log and extract the number of
# connections as well as the uploaded and downloaded traffic in GB
def getDataFromLines(lines):
    # Keep only the lines carrying traffic information
    lines = [row.strip() for row in lines if "In the" in row]
    lines = [row.split(",", 1)[1] for row in lines]
    # Drop all traffic log lines that did not have any connections
    lines = [row for row in lines if nums_from_string(row)[0] != 0]
    # Extract the number of connections as a sum
    connections = sum([nums_from_string(row)[0] for row in lines])
    # Extract upload and download data
    lines = [row.split("Relayed")[1] for row in lines]
    upload = [row.split(",")[0].strip() for row in lines]
    download = [row.split(",")[1].strip()[:-1] for row in lines]  # [:-1] drops the trailing "."
    # Convert upload/download data to GB
    upload_gb = get_byte_count(upload) / 1024 / 1024 / 1024
    download_gb = get_byte_count(download) / 1024 / 1024 / 1024
    # Return the information as a dictionary for better structure
    return {'connections': connections, 'upload_gb': upload_gb, 'download_gb': download_gb}
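
# A typical return value (numbers illustrative):
#   {'connections': 42, 'upload_gb': 1.5023, 'download_gb': 3.0142}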

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--serve",
        dest="serve",
        action="store_true",
        help="Start http server directly on port 8080"
    )
    parser.add_argument(
        "--no-serve",
        dest="serve",
        action="store_false",
        help="Simply parse the input file"
    )
    parser.set_defaults(serve=True)
    # Log file path from arguments; nargs="?" makes the positional optional
    # so the documented default of ./docker_snowflake.log can actually apply
    parser.add_argument("logfile_path", nargs="?", default="./docker_snowflake.log")
    args = parser.parse_args()

    if args.serve:
        # Start the HTTP server on port 8080
        TextHandler.logfile_path = args.logfile_path
        httpd = HTTPServer(("", 8080), TextHandler)
        httpd.serve_forever()
    else:
        # Simply parse the file and print the resulting metrics
        print_stats(args.logfile_path, sys.stdout.write)
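
# Example invocations (log file name illustrative):
#   python main.py snow.log              # serve metrics on port 8080 (default)
#   python main.py --no-serve snow.log   # print the metrics once to stdout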