aboutsummaryrefslogtreecommitdiffstats
path: root/main.py
blob: e6c39aa0f9aef8faeacdec214f197cbddec58633 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
def do_get(url, method) -> str:
    """HTTP-GET the endpoint at `url` + `method` and return the body text.

    Raises ValueError for any response whose status code is not 200.
    """
    import requests

    response = requests.get(f"{url}{method}")
    if response.status_code == 200:
        return response.text
    raise ValueError(f"unexpected http error {response.status_code}")


def format_manager_directory(mgr_name) -> str:
    """Return the per-manager report directory: <config reports_dir>/<mgr_name>."""
    import os

    return os.path.join(config["reports_dir"], mgr_name)


def make_manager_directory(mgr_name):
    """Create the report directory for `mgr_name` if it does not already exist."""
    import os

    target = format_manager_directory(mgr_name)
    os.makedirs(target, exist_ok=True)


def get_arrays_diff(old, cur) -> dict:
    """Diff two sequences of items (typically lines of a state file).

    Returns a dict with three keys:
      - "not_changed": items of `cur` that were also in `old` (in `cur` order)
      - "new":         items of `cur` that were absent from `old` (in `cur` order)
      - "removed":     items of `old` never matched by `cur` (unordered)

    Fixed: the return annotation previously claimed `-> str` although the
    function has always returned a dict.

    NOTE: duplicates in `old` collapse into the membership set, so a value
    repeated in `old` can only be matched once.
    """
    remaining = set(old)

    not_changed = []
    new = []

    for item in cur:
        if item in remaining:
            # Consume the match so further duplicates in `cur` count as "new".
            remaining.remove(item)
            not_changed.append(item)
        else:
            new.append(item)

    return {
        "not_changed": not_changed,
        "new": new,
        "removed": list(remaining),
    }


def manager_write_state(mgr, method, new_state):
    """Persist `new_state` for (mgr, method) and append a diff against the
    previously stored state.

    Touches three files in the manager's report directory:
      - <method>.state       : overwritten with the current state lines
      - <method>.diff        : appended summary counts per run
      - <method>.diff.detail : appended added (+) / removed (-) lines per run
    """
    import os
    from datetime import datetime

    dir = format_manager_directory(mgr)

    file = os.path.join(dir, f"{method}.state")
    if not os.path.exists(file):
        # Create an empty state file on first run so the read below succeeds.
        os.close(os.open(file, os.O_CREAT))

    old_state = []
    with open(file, "r") as f:
        old_state = [line.strip() for line in f.readlines()]

    diff = get_arrays_diff(old_state, new_state)

    # Overwrite the state file with the fresh snapshot.
    with open(file, "w") as f:
        f.write("\n".join(new_state))

    file = os.path.join(dir, f"{method}.diff")
    time = datetime.now().isoformat()
    with open(file, "a") as f:
        f.write("Time: {}\n\n".format(time))
        # Only non-empty categories are reported in the summary.
        if len(diff["not_changed"]) != 0:
            f.write("Not changed: {}\n".format(len(diff["not_changed"])))
        if len(diff["new"]) != 0:
            f.write("New: {}\n".format(len(diff["new"])))
        if len(diff["removed"]) != 0:
            f.write("Removed: {}\n".format(len(diff["removed"])))

        f.write("===\n")

    # Detail file path is "<method>.diff.detail" (built from the .diff path).
    file = f"{file}.detail"
    with open(file, "a") as f:
        f.write("Time: {}\n\n".format(time))
        for line in diff["new"]:
            f.write(f"+ {line}\n")

        for line in diff["removed"]:
            f.write(f"- {line}\n")
        f.write("===\n")


def parse_metrics(mgr, method, raw_metrics) -> str:
    """Turn whitespace-separated "name value" metric lines into CSV text.

    A "time" column is always prepended. The header is read back from the
    existing <method>.state file when present (so prior content is carried
    forward), otherwise built fresh from the parsed metric names. Returns
    "<header>\n<data_row>".
    """
    import os
    from datetime import datetime

    metrics = {"time": datetime.now().isoformat()}

    for raw_line in raw_metrics.strip().split("\n"):
        # Lines containing '#' anywhere are treated as comments and skipped.
        if "#" in raw_line:
            continue
        key, val = raw_line.split()
        metrics[key] = val

    state_path = os.path.join(format_manager_directory(mgr), f"{method}.state")

    if os.path.exists(state_path):
        with open(state_path, "r") as fh:
            header = fh.read()
    else:
        header = ",".join(metrics.keys())

    data_row = ",".join(metrics.values())
    return "{}\n{}".format(header, data_row)


def parse_crashes(mgr, raw_main_page) -> str:
    """Extract the crash table from a manager's HTML main page as CSV text.

    Only the second table with class "list_table" is inspected; an empty
    string is returned when it is absent. Rows without <td> cells (e.g.
    header rows) are skipped.
    """
    from bs4 import BeautifulSoup

    page = BeautifulSoup(raw_main_page, "html.parser")
    body = page.find("body")

    tables = body.find_all("table", class_=["list_table"])[1:2]
    if not tables:
        return ""

    lines = ["title,stat,time,repro"]
    for row in tables[0].find_all("tr"):
        cells = [cell.text.strip() for cell in row.find_all("td")]
        if cells:
            lines.append(",".join(cells))

    return "\n".join(lines)


def main():
    """Poll every configured manager endpoint forever, recording state diffs.

    For each (manager, endpoint) pair: fetch → optionally parse (metrics or
    crashes) → persist via manager_write_state. A failure in any stage is
    logged and that pair is skipped, so the loop keeps running. Sleeps
    config["timeout"] seconds between sweeps.
    """
    import time

    for manager in config["managers"]:
        make_manager_directory(manager["name"])

    while True:
        for manager in config["managers"]:
            for endpoint in config["endpoints"]:
                try:
                    data = do_get(manager["http_url"], endpoint["http_uri"])
                except Exception as e:
                    print(f"Failed to get information from manager {manager['name']} using endpoint {endpoint['name']}")
                    print(e)
                    continue

                try:
                    if endpoint.get("metrics", False):
                        data = parse_metrics(manager["name"], endpoint["name"], data)
                    elif endpoint.get("crashes", False):
                        data = parse_crashes(manager["name"], data)
                    state = data.split("\n")
                # Narrowed from a bare `except:` so KeyboardInterrupt /
                # SystemExit still stop the loop; print the error for
                # consistency with the fetch handler above.
                except Exception as e:
                    print(f"Failed to parse information from manager {manager['name']}, endpoint: {endpoint['name']}")
                    print(e)
                    continue

                try:
                    manager_write_state(manager["name"], endpoint["name"], state)
                # Narrowed from a bare `except:` for the same reason.
                except Exception as e:
                    print(f"Failed to log manager {manager['name']} state, endpoint: {endpoint['name']}")
                    print(e)
                    continue

        time.sleep(config["timeout"])


if __name__ == "__main__":
    import json

    with open("./config.json", "r") as f:
        config = json.load(f)

    main()