Compare revisions: perun/perun-proxyidp/perun-proxy-utils
Commits on Source (3), showing 320 additions and 222 deletions
# [1.8.0](https://gitlab.ics.muni.cz/perun-proxy-aai/python/perun-proxy-utils/compare/v1.7.1...v1.8.0) (2023-07-12)
### Features
* run_probes: support command-line flags and printing stderr; support entry_points ([55edad8](https://gitlab.ics.muni.cz/perun-proxy-aai/python/perun-proxy-utils/commit/55edad8e344164302aadc920ac9957987218d054))
## [1.7.1](https://gitlab.ics.muni.cz/perun-proxy-aai/python/perun-proxy-utils/compare/v1.7.0...v1.7.1) (2023-06-28)
......
@@ -39,3 +39,17 @@ check_rpc_status:
      p: "password"
      d: "domain"
      i: 1
check_syncrepl:
  module: perun.proxy.utils.nagios.check_ldap_syncrepl
  runs:
    check_ldap_syncrepl:
      p: "ldaps://ldapmaster.foo:636"
      c: "ldaps://ldapslave.foo:636"
      b: "o=example"
      D: "uid=nagios,ou=sysaccounts,o=example"
      P: "bind_password"
      n:
      only-check-contextCSN:
      W: 900
      C: 3600
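For context, run_probes turns each entry under `runs` into a module invocation. A minimal sketch of that mapping, assuming (as the run_probes.py hunk further below shows) that one-letter names get a single dash, longer names a double dash, and flags with empty values are passed bare:

```python
# Sketch only; mirrors the flag handling shown in the run_probes.py hunk below.
args = {
    "p": "ldaps://ldapmaster.foo:636",
    "n": None,  # valueless flag from the YAML above
    "only-check-contextCSN": None,
    "W": 900,
}
command = ["python3", "-m", "perun.proxy.utils.nagios.check_ldap_syncrepl"]
for arg_name, arg_val in args.items():
    command.append(("-" if len(arg_name) == 1 else "--") + arg_name)
    if arg_val is not None:
        command.append(str(arg_val))
# command -> python3 -m perun.proxy.utils.nagios.check_ldap_syncrepl
#            -p ldaps://ldapmaster.foo:636 -n --only-check-contextCSN -W 900
```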
@@ -2,13 +2,22 @@ import sys
from urllib.request import urlopen
from bs4 import BeautifulSoup
url = sys.argv[1]
html = urlopen(url).read()
closest_expiration = BeautifulSoup(html, "html.parser")
if float(closest_expiration.text) >= 24:
    print("0 metadata_expiration - OK (" + closest_expiration.text + ")")
elif float(closest_expiration.text) >= 12:
    print("1 metadata_expiration - WARNING (" + closest_expiration.text + ")")
else:
    print("2 metadata_expiration - CRITICAL (" + closest_expiration.text + ")")


def main():
    url = sys.argv[1]
    html = urlopen(url).read()
    closest_expiration = BeautifulSoup(html, "html.parser")
    if float(closest_expiration.text) >= 24:
        print("0 metadata_expiration - OK (" + closest_expiration.text + ")")
        return 0
    elif float(closest_expiration.text) >= 12:
        print("1 metadata_expiration - WARNING (" + closest_expiration.text + ")")
        return 1
    else:
        print("2 metadata_expiration - CRITICAL (" + closest_expiration.text + ")")
        return 2


if __name__ == "__main__":
    sys.exit(main())
#!/usr/bin/env python3
import argparse
import re
import subprocess
import sys
"""
general script to run non-python checks by a custom-defined command
"""
@@ -27,8 +27,14 @@ def get_args():
def main():
    args = get_args()
    result = subprocess.run(args.command, shell=True, text=True, capture_output=True)
    print(result.stdout, end="")
    result = subprocess.run(
        args.command,
        shell=True,
        text=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    print(re.sub("[ \t\n]+", " ", result.stdout))
    return result.returncode
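A standalone sketch of the pattern used above: merging stderr into stdout preserves a failing command's diagnostics, and collapsing whitespace keeps everything on the single status line Nagios parses (the echo command is purely illustrative):

```python
import re
import subprocess

# stderr is folded into stdout, so diagnostics survive in result.stdout.
result = subprocess.run(
    "echo out; echo err 1>&2",  # illustrative command
    shell=True,
    text=True,
    stdout=subprocess.PIPE,
    stderr=subprocess.STDOUT,
)
# Collapse newlines/tabs so multi-line output becomes one status line.
print(re.sub("[ \t\n]+", " ", result.stdout))  # -> "out err "
```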
......
#!/usr/bin/env python3
import argparse
import sys
import docker
from docker.errors import NotFound, APIError
@@ -53,4 +54,4 @@ def main():
if __name__ == "__main__":
    exit(main())
    sys.exit(main())
#!/usr/bin/env python3
import sys
from subprocess import run
@@ -28,4 +28,4 @@ def main():
if __name__ == "__main__":
    exit(main())
    sys.exit(main())
@@ -6,7 +6,6 @@ import time
from ldap3 import Server, Connection, SUBTREE
"""
check LDAP is available
"""
......
@@ -129,7 +129,7 @@ def split_host_port(string):
    return (host, port)


def main(argv):
def main():
    p = optparse.OptionParser(
        conflict_handler="resolve",
        description="This Nagios plugin checks the health of mongodb.",
@@ -2218,4 +2218,4 @@ def replication_get_time_diff(con):
# main app
#
if __name__ == "__main__":
    sys.exit(main(sys.argv[1:]))
    sys.exit(main())
#!/usr/bin/env python3
import sys
from check_nginx_status.check_nginx_status import main
@@ -6,4 +7,4 @@ from check_nginx_status.check_nginx_status import main
# https://gitlab.ics.muni.cz/perun-proxy-aai/python/check_nginx_status/
if __name__ == "__main__":
    main()
    sys.exit(main())
@@ -6,7 +6,6 @@ import sys
import time
import requests
"""
check RPC API is available
"""
......
@@ -388,7 +388,7 @@ class SAMLChecker:
        )
        if self.args.cache_timeout > 0:
            try:
                file_path = tempfile.gettempdir() + "/" + args.cache_file
                file_path = tempfile.gettempdir() + "/" + self.args.cache_file
                f = open(file_path, "w")
                f.write("{}_{}_{}".format(cache_time, status, message))
                f.close()
@@ -402,7 +402,7 @@ class SAMLChecker:
    def check_cache(self):
        try:
            tempdir = tempfile.gettempdir()
            file_path = tempdir + "/" + args.cache_file
            file_path = tempdir + "/" + self.args.cache_file
            if os.path.isfile(file_path):
                with open(file_path, "r") as f:
                    res_b = f.read()
@@ -414,7 +414,7 @@ class SAMLChecker:
                message = res[2]
                actual_time = time.time()
                time_diff = actual_time - float(cached_time)
                if time_diff < args.cache_timeout:
                if time_diff < self.args.cache_timeout:
                    self.finish(
                        message=message,
                        status=status,
@@ -551,7 +551,10 @@ class SAMLChecker:
        )


if __name__ == "__main__":
    args = get_args()
    checker = SAMLChecker(args)


def main():
    checker = SAMLChecker(get_args())
    checker.main()


if __name__ == "__main__":
    main()
@@ -110,8 +110,8 @@ def main():
    path, regex, datetime_format, logins, seconds = command_line_validate(argv)
    user_dict = parse_log_data(path, regex, datetime_format)
    check_log_data(user_dict, logins, seconds)
    print("OK", logins, seconds)
    return 0
    print("OK - ", logins, seconds)
    sys.exit(0)
if __name__ == "__main__":
......
#!/usr/bin/env python3
import argparse
import sys
import requests
@@ -43,4 +44,4 @@ def main():
if __name__ == "__main__":
    exit(main())
    sys.exit(main())
@@ -4,65 +4,76 @@ import multiprocessing
import os
import re
import json
import docker
import argparse
import platform
output = {
    "cpu_count": "",
    "memory": "",
    "os_version": "",
    "kernel_version": "",
    "docker_version": "",
    "containerd_version": "",
    "containers": {},
}

parser = argparse.ArgumentParser()
parser.add_argument(
    "-e", "--exclude", type=str, help="Space delimited list of containers to exclude"
)
args = parser.parse_args()
exc_containers = args.exclude.split(" ") if args.exclude is not None else []

output["cpu_count"] = str(multiprocessing.cpu_count())

mem_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
if mem_bytes > 1:
    output["memory"] = str(round(mem_bytes / (1024.0**3), 2)) + "GiB"

name = ""
maj_version = ""
with open("/etc/os-release") as file:
    contents = file.read()
    match = re.search(r"NAME=\"(.*)\"", contents)
    if match is not None:
        name = match.group(1)
    match = re.search(r"VERSION_ID=\"(.*)\"", contents)
    if match is not None:
        maj_version = match.group(1).split(".")[0]
if name.startswith("Debian"):
    name = name.split(" ")[0]
output["os_version"] = name + " " + maj_version

output["kernel_version"] = platform.release()

client = docker.from_env()
if client is not None:
    version_info = client.version()
    docker_ver_filter = list(
        filter(lambda x: x["Name"] == "Engine", version_info["Components"])
    )
    output["docker_version"] = (
        docker_ver_filter[0]["Version"] if len(docker_ver_filter) > 0 else ""
    )
    containerd_ver_filter = list(
        filter(lambda x: x["Name"] == "containerd", version_info["Components"])
    )
    containerd_version = (
        containerd_ver_filter[0]["Version"] if len(containerd_ver_filter) > 0 else ""
    )
    if len(containerd_version) > 0 and containerd_version[0] == "v":
        containerd_version = containerd_version[1:]
    output["containerd_version"] = containerd_version

    containers = client.containers.list()
    containers = list(filter(lambda x: x.name not in exc_containers, containers))
    for container in containers:
        container_image = container.image.tags[0] if container.image.tags else ""
        output["containers"][container.name] = container_image.split(":")[-1]

print(json.dumps(output))


def main():
    output = {
        "cpu_count": "",
        "memory": "",
        "os_version": "",
        "kernel_version": "",
        "docker_version": "",
        "containerd_version": "",
        "containers": {},
    }

    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-e",
        "--exclude",
        type=str,
        help="Space delimited list of containers to exclude",
    )
    args = parser.parse_args()
    exc_containers = args.exclude.split(" ") if args.exclude is not None else []

    output["cpu_count"] = str(multiprocessing.cpu_count())

    mem_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
    if mem_bytes > 1:
        output["memory"] = str(round(mem_bytes / (1024.0**3), 2)) + "GiB"

    name = ""
    maj_version = ""
    with open("/etc/os-release") as file:
        contents = file.read()
        match = re.search(r"NAME=\"(.*)\"", contents)
        if match is not None:
            name = match.group(1)
        match = re.search(r"VERSION_ID=\"(.*)\"", contents)
        if match is not None:
            maj_version = match.group(1).split(".")[0]
    if name.startswith("Debian"):
        name = name.split(" ")[0]
    output["os_version"] = name + " " + maj_version

    output["kernel_version"] = platform.release()

    client = docker.from_env()
    if client is not None:
        version_info = client.version()
        docker_ver_filter = list(
            filter(lambda x: x["Name"] == "Engine", version_info["Components"])
        )
        output["docker_version"] = (
            docker_ver_filter[0]["Version"] if len(docker_ver_filter) > 0 else ""
        )
        containerd_ver_filter = list(
            filter(lambda x: x["Name"] == "containerd", version_info["Components"])
        )
        containerd_version = (
            containerd_ver_filter[0]["Version"]
            if len(containerd_ver_filter) > 0
            else ""
        )
        if len(containerd_version) > 0 and containerd_version[0] == "v":
            containerd_version = containerd_version[1:]
        output["containerd_version"] = containerd_version

        containers = client.containers.list()
        containers = list(filter(lambda x: x.name not in exc_containers, containers))
        for container in containers:
            container_image = container.image.tags[0] if container.image.tags else ""
            output["containers"][container.name] = container_image.split(":")[-1]

    print(json.dumps(output))


if __name__ == "__main__":
    main()
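For illustration, the shape of the JSON this script prints, with made-up values (the keys come from the `output` dict above):

```python
# Hypothetical example of the script's output; every value here is invented.
{
    "cpu_count": "8",
    "memory": "15.61GiB",
    "os_version": "Debian 11",
    "kernel_version": "5.10.0-23-amd64",
    "docker_version": "24.0.2",
    "containerd_version": "1.6.21",
    "containers": {"proxy_nginx": "1.25"},
}
```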
@@ -16,20 +16,24 @@ def open_file(filepath):
f"Cannot open config with path: {filepath}, error: {e.strerror}",
file=sys.stderr,
)
exit(2)
sys.exit(2)
def run_probe(probe_name, command):
result = subprocess.run(command, text=True, capture_output=True)
search = re.search(r" - .*", result.stdout)
result = subprocess.run(
command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
)
output = re.sub("[ \t\n]+", " ", result.stdout)
search = re.search(r" - .*", output)
if search:
print(f"{result.returncode} {probe_name} {search.group()}")
else:
print(f"{result.returncode} {probe_name} - {result.stdout}")
print(f"{result.returncode} {probe_name} - {output}")
return result.returncode
def main(config_filepath):
def main():
config_filepath = "/etc/run_probes_cfg.yaml"
config = yaml.safe_load(open_file(config_filepath))
if not config:
return
@@ -37,7 +41,7 @@ def main(config_filepath):
    for _, options in config.items():
        module = options["module"]
        for name, args in options.get("runs").items():
            command = ["python", "-m", module]
            command = ["python3", "-m", module]
            for arg_name, arg_val in args.items():
                if len(arg_name) == 1:
                    arg_name = "-" + arg_name
@@ -48,10 +52,10 @@ def main(config_filepath):
                elif arg_val is False:
                    arg_val = "false"
                command.append(arg_name)
                command.append(str(arg_val))
                if arg_val is not None:
                    command.append(str(arg_val))
            Thread(target=run_probe, args=[name, command]).start()


if __name__ == "__main__":
    config_filepath = "/etc/run_probes_cfg.yaml"
    main(config_filepath)
    main()
@@ -50,7 +50,7 @@ def dict_to_md_table(dictionary):
    )


async def run_script(user, host):
async def run_script(user, host, exc_containers):
    try:
        async with asyncssh.connect(host, username=user) as conn:
            await asyncssh.scp("print_docker_versions.py", (conn, "/tmp/"))
@@ -64,8 +64,8 @@ async def run_script(user, host):
        return e, host


async def collect_info(hosts):
    tasks = (run_script(host[0], host[1]) for host in hosts)
async def collect_info(hosts, exc_containers):
    tasks = (run_script(host[0], host[1], exc_containers) for host in hosts)
    results = await asyncio.gather(*tasks, return_exceptions=True)
    stdouts = []
    hosts = []
@@ -85,13 +85,21 @@ async def collect_info(hosts):
    dict_to_md_table(jsons_to_dictionary(stdouts, hosts))


parser = argparse.ArgumentParser()
parser.add_argument(
    "-e", "--exclude", type=str, help="Space delimited list of containers to exclude"
)
parser.add_argument("machines", nargs="+", help="Machines to collect the info from")

args = parser.parse_args()
exc_containers = args.exclude if args.exclude is not None else ""
machines = list(map(lambda x: x.split("@"), args.machines))
asyncio.run(collect_info(machines))


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-e",
        "--exclude",
        type=str,
        help="Space delimited list of containers to exclude",
    )
    parser.add_argument("machines", nargs="+", help="Machines to collect the info from")
    args = parser.parse_args()
    exc_containers = args.exclude if args.exclude is not None else ""
    machines = list(map(lambda x: x.split("@"), args.machines))
    asyncio.run(collect_info(machines, exc_containers))


if __name__ == "__main__":
    main()
@@ -6,66 +6,72 @@ import re
import sys
from os import mkdir, path, rename, remove, system
if len(sys.argv) <= 1:
    print("One argument is expected!")
    exit(-1)

absolute_file_name = sys.argv[1]

if not path.exists(absolute_file_name):
    print("File with name " + absolute_file_name + " doesn't exists!")
    exit(-1)

file_name = path.basename(absolute_file_name)
dir_name = path.dirname(absolute_file_name)

if len(dir_name) != 0:
    dir_name += "/"

full_log_dir_name = dir_name + "full_logs/"
full_log_file_name = "full_" + file_name
full_log_absolute_name = full_log_dir_name + full_log_file_name

if not path.exists(full_log_dir_name):
    mkdir(full_log_dir_name)

rename(absolute_file_name, full_log_absolute_name)

session_ids = set()
regex_session_id = r"(?<=\s\[)\w+(?=\]\s+\S+\s+:)"

file = open(full_log_absolute_name, "r")
for line in file:
    res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
    if res is not None:
        session_id = re.search(regex_session_id, line)
        if session_id is not None:
            session_ids.add(session_id.group(0))
file.close()

file = open(full_log_absolute_name, "r")
final_log_file = open(absolute_file_name, "w")
last_session_id = ""
for line in file:
    session_id = re.search(regex_session_id, line)
    if session_id is not None:
        last_session_id = session_id.group(0)
    if session_id is None or session_id.group(0) not in session_ids:
        if last_session_id not in session_ids:
            final_log_file.write(line)
file.close()
final_log_file.close()

# Zip old log file
with open(full_log_absolute_name, "rb") as f_in, gzip.open(
    full_log_absolute_name + ".gz", "wb"
) as f_out:
    shutil.copyfileobj(f_in, f_out)

# Remove unzip file
remove(full_log_absolute_name)

# Remove old files
system("find " + full_log_dir_name + " -mtime +7 -delete")


def main():
    if len(sys.argv) <= 1:
        print("One argument is expected!")
        sys.exit(-1)

    absolute_file_name = sys.argv[1]

    if not path.exists(absolute_file_name):
        print("File with name " + absolute_file_name + " doesn't exists!")
        sys.exit(-1)

    file_name = path.basename(absolute_file_name)
    dir_name = path.dirname(absolute_file_name)

    if len(dir_name) != 0:
        dir_name += "/"

    full_log_dir_name = dir_name + "full_logs/"
    full_log_file_name = "full_" + file_name
    full_log_absolute_name = full_log_dir_name + full_log_file_name

    if not path.exists(full_log_dir_name):
        mkdir(full_log_dir_name)

    rename(absolute_file_name, full_log_absolute_name)

    session_ids = set()
    regex_session_id = r"(?<=\s\[)\w+(?=\]\s+\S+\s+:)"

    file = open(full_log_absolute_name, "r")
    for line in file:
        res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
        if res is not None:
            session_id = re.search(regex_session_id, line)
            if session_id is not None:
                session_ids.add(session_id.group(0))
    file.close()

    file = open(full_log_absolute_name, "r")
    final_log_file = open(absolute_file_name, "w")
    last_session_id = ""
    for line in file:
        session_id = re.search(regex_session_id, line)
        if session_id is not None:
            last_session_id = session_id.group(0)
        if session_id is None or session_id.group(0) not in session_ids:
            if last_session_id not in session_ids:
                final_log_file.write(line)
    file.close()
    final_log_file.close()

    # Zip old log file
    with open(full_log_absolute_name, "rb") as f_in, gzip.open(
        full_log_absolute_name + ".gz", "wb"
    ) as f_out:
        shutil.copyfileobj(f_in, f_out)

    # Remove unzip file
    remove(full_log_absolute_name)

    # Remove old files
    system("find " + full_log_dir_name + " -mtime +7 -delete")


if __name__ == "__main__":
    main()
@@ -6,76 +6,82 @@ import re
import sys
from os import mkdir, path, rename, remove, system
if len(sys.argv) <= 1:
    print("One argument is expected!")
    exit(-1)

absolute_file_name = sys.argv[1]

if not path.exists(absolute_file_name):
    print("File with name " + absolute_file_name + " doesn't exists!")
    exit(-1)

file_name = path.basename(absolute_file_name)
dir_name = path.dirname(absolute_file_name)

if len(dir_name) != 0:
    dir_name += "/"

full_log_dir_name = dir_name + "full_logs/"
full_log_file_name = "full_" + file_name
full_log_absolute_name = full_log_dir_name + full_log_file_name

if not path.exists(full_log_dir_name):
    mkdir(full_log_dir_name)

rename(absolute_file_name, full_log_absolute_name)

session_ids = set()
thread_ids = set()
regex_session_id = r"^.*]:\s\d\s\[(.*?)\].*$"
regex_thread_id = r"^.*\[(.*?)\]:.*$"

file = open(full_log_absolute_name, "r")
for line in file:
    res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
    if res is not None:
        session_id = re.search(regex_session_id, line)
        if session_id is not None:
            session_ids.add(session_id.group(1))
        thread_id = re.search(regex_thread_id, line)
        if thread_id is not None:
            thread_ids.add(thread_id.group(1))
file.close()

file = open(full_log_absolute_name, "r")
final_log_file = open(absolute_file_name, "w")
last_session_id = ""
for line in file:
    session_id = re.search(regex_session_id, line)
    if session_id is not None:
        last_session_id = session_id.group(1)
    if session_id is None or session_id.group(1) not in session_ids:
        thread_id = re.search(regex_thread_id, line)
        if (
            thread_id is None
            or thread_id.group(1) not in thread_ids
            or last_session_id not in session_ids
        ):
            final_log_file.write(line)
file.close()
final_log_file.close()

# Zip old log file
with open(full_log_absolute_name, "rb") as f_in, gzip.open(
    full_log_absolute_name + ".gz", "wb"
) as f_out:
    shutil.copyfileobj(f_in, f_out)

# Remove unzip file
remove(full_log_absolute_name)

# Remove old files
system("find " + full_log_dir_name + " -mtime +7 -delete")


def main():
    if len(sys.argv) <= 1:
        print("One argument is expected!")
        sys.exit(-1)

    absolute_file_name = sys.argv[1]

    if not path.exists(absolute_file_name):
        print("File with name " + absolute_file_name + " doesn't exists!")
        sys.exit(-1)

    file_name = path.basename(absolute_file_name)
    dir_name = path.dirname(absolute_file_name)

    if len(dir_name) != 0:
        dir_name += "/"

    full_log_dir_name = dir_name + "full_logs/"
    full_log_file_name = "full_" + file_name
    full_log_absolute_name = full_log_dir_name + full_log_file_name

    if not path.exists(full_log_dir_name):
        mkdir(full_log_dir_name)

    rename(absolute_file_name, full_log_absolute_name)

    session_ids = set()
    thread_ids = set()
    regex_session_id = r"^.*]:\s\d\s\[(.*?)\].*$"
    regex_thread_id = r"^.*\[(.*?)\]:.*$"

    file = open(full_log_absolute_name, "r")
    for line in file:
        res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
        if res is not None:
            session_id = re.search(regex_session_id, line)
            if session_id is not None:
                session_ids.add(session_id.group(1))
            thread_id = re.search(regex_thread_id, line)
            if thread_id is not None:
                thread_ids.add(thread_id.group(1))
    file.close()

    file = open(full_log_absolute_name, "r")
    final_log_file = open(absolute_file_name, "w")
    last_session_id = ""
    for line in file:
        session_id = re.search(regex_session_id, line)
        if session_id is not None:
            last_session_id = session_id.group(1)
        if session_id is None or session_id.group(1) not in session_ids:
            thread_id = re.search(regex_thread_id, line)
            if (
                thread_id is None
                or thread_id.group(1) not in thread_ids
                or last_session_id not in session_ids
            ):
                final_log_file.write(line)
    file.close()
    final_log_file.close()

    # Zip old log file
    with open(full_log_absolute_name, "rb") as f_in, gzip.open(
        full_log_absolute_name + ".gz", "wb"
    ) as f_out:
        shutil.copyfileobj(f_in, f_out)

    # Remove unzip file
    remove(full_log_absolute_name)

    # Remove old files
    system("find " + full_log_dir_name + " -mtime +7 -delete")


if __name__ == "__main__":
    main()
[metadata]
version = 1.7.1
version = 1.8.0
license_files = LICENSE
long_description = file: README.md
long_description_content_type = text/markdown
......
@@ -19,4 +19,27 @@ setuptools.setup(
"check_syncrepl_extended~=2020.13",
"check_nginx_status~=1.0",
],
entry_points={
"console_scripts": [
"run_probes=perun.proxy.utils.run_probes:main",
"check_custom_command=perun.proxy.utils.nagios.check_custom_command:main",
"check_dockers=perun.proxy.utils.nagios.check_dockers:main",
"check_exabgp_propagation="
"perun.proxy.utils.nagios.check_exabgp_propagation:main",
"check_ldap=perun.proxy.utils.nagios.check_ldap:main",
"check_ldap_syncrepl=check_syncrepl_extended.check_syncrepl_extended:main",
"check_mongodb=perun.proxy.utils.nagios.check_mongodb:main",
"check_nginx=check_nginx_status.check_nginx_status:main",
"check_rpc_status=perun.proxy.utils.nagios.check_rpc_status:main",
"check_saml=perun.proxy.utils.nagios.check_saml:main",
"check_user_logins=perun.proxy.utils.nagios.check_user_logins:main",
"check_webserver_availability="
"perun.proxy.utils.nagios.webserver_availability:main",
"metadata_expiration=perun.proxy.utils.metadata_expiration:main",
"print_docker_versions=perun.proxy.utils.print_docker_versions:main",
"run_version_script=perun.proxy.utils.run_version_script:main",
"separate_oidc_logs=perun.proxy.utils.separate_oidc_logs:main",
"separate_ssp_logs=perun.proxy.utils.separate_ssp_logs:main",
]
},
)
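Each console_scripts entry installs a small wrapper on PATH; roughly, the generated `run_probes` command is equivalent to this sketch:

```python
# Approximate equivalent of the generated run_probes wrapper (a sketch,
# assuming the package is installed so the import below resolves).
import sys

from perun.proxy.utils.run_probes import main

if __name__ == "__main__":
    sys.exit(main())
```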