diff --git a/config_templates/run_probes_cfg.yaml b/config_templates/run_probes_cfg.yaml
index 8b5c81ee02ecfdceeb54d1cd54b6b2d6c0fd0b42..ca4b86ae7db812c00c41925282ae8540d65a36f7 100644
--- a/config_templates/run_probes_cfg.yaml
+++ b/config_templates/run_probes_cfg.yaml
@@ -39,3 +39,17 @@ check_rpc_status:
       p: "password"
       d: "domain"
       i: 1
+
+check_syncrepl:
+  module: perun.proxy.utils.nagios.check_ldap_syncrepl
+  runs:
+    check_ldap_syncrepl:
+      p: "ldaps://ldapmaster.foo:636"
+      c: "ldaps://ldapslave.foo:636"
+      b: "o=example"
+      D: "uid=nagios,ou=sysaccounts,o=example"
+      P: "bind_password"
+      n:
+      only-check-contextCSN:
+      W: 900
+      C: 3600
diff --git a/perun/proxy/utils/metadata_expiration.py b/perun/proxy/utils/metadata_expiration.py
index ea087815a1f563676016f666bd018e8d990b46a0..083379f904a51b2f18675811829a3f8602a7e251 100644
--- a/perun/proxy/utils/metadata_expiration.py
+++ b/perun/proxy/utils/metadata_expiration.py
@@ -2,13 +2,22 @@ import sys
 from urllib.request import urlopen
 from bs4 import BeautifulSoup
 
-url = sys.argv[1]
-html = urlopen(url).read()
-closest_expiration = BeautifulSoup(html, "html.parser")
-
-if float(closest_expiration.text) >= 24:
-    print("0 metadata_expiration - OK (" + closest_expiration.text + ")")
-elif float(closest_expiration.text) >= 12:
-    print("1 metadata_expiration - WARNING (" + closest_expiration.text + ")")
-else:
-    print("2 metadata_expiration - CRITICAL (" + closest_expiration.text + ")")
+
+def main():
+    url = sys.argv[1]
+    html = urlopen(url).read()
+    closest_expiration = BeautifulSoup(html, "html.parser")
+
+    if float(closest_expiration.text) >= 24:
+        print("0 metadata_expiration - OK (" + closest_expiration.text + ")")
+        return 0
+    elif float(closest_expiration.text) >= 12:
+        print("1 metadata_expiration - WARNING (" + closest_expiration.text + ")")
+        return 1
+    else:
+        print("2 metadata_expiration - CRITICAL (" + closest_expiration.text + ")")
+        return 2
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/perun/proxy/utils/nagios/check_custom_command.py b/perun/proxy/utils/nagios/check_custom_command.py
index 8628ecaea9300e08f1c3d70545e626ce80407420..8678cca38ed6c912730c92bb53cef19e0019be1c 100644
--- a/perun/proxy/utils/nagios/check_custom_command.py
+++ b/perun/proxy/utils/nagios/check_custom_command.py
@@ -1,10 +1,10 @@
 #!/usr/bin/env python3
 
 import argparse
+import re
 import subprocess
 import sys
 
-
 """
 general script to run non-python checks by a custom-defined command
 """
@@ -27,8 +27,14 @@ def get_args():
 
 def main():
     args = get_args()
-    result = subprocess.run(args.command, shell=True, text=True, capture_output=True)
-    print(result.stdout, end="")
+    result = subprocess.run(
+        args.command,
+        shell=True,
+        text=True,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+    )
+    print(re.sub("[ \t\n]+", " ", result.stdout))
     return result.returncode
 
 
diff --git a/perun/proxy/utils/nagios/check_dockers.py b/perun/proxy/utils/nagios/check_dockers.py
index e048e8f518dd8bd8c4f82495a10caf84b802110c..ba57795faac7eeedb3857638ec92b8f4a3c3e74a 100644
--- a/perun/proxy/utils/nagios/check_dockers.py
+++ b/perun/proxy/utils/nagios/check_dockers.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python3
 
 import argparse
+import sys
 
 import docker
 from docker.errors import NotFound, APIError
@@ -53,4 +54,4 @@ def main():
 
 
 if __name__ == "__main__":
-    exit(main())
+    sys.exit(main())
diff --git a/perun/proxy/utils/nagios/check_exabgp_propagation.py b/perun/proxy/utils/nagios/check_exabgp_propagation.py
index 36824e56430ae48bb2de5f73043bc7422ce814b4..e8e7d4a5d6fd395bf165c85615a389f1cc2d47bb 100644
--- a/perun/proxy/utils/nagios/check_exabgp_propagation.py
+++ b/perun/proxy/utils/nagios/check_exabgp_propagation.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python3
-
+import sys
 from subprocess import run
 
 
@@ -28,4 +28,4 @@ def main():
 
 
 if __name__ == "__main__":
-    exit(main())
+    sys.exit(main())
diff --git a/perun/proxy/utils/nagios/check_ldap.py b/perun/proxy/utils/nagios/check_ldap.py
index b90f929f68b15246cf91c97e04541db82d0298a3..c74c48e96e9a65d6a21104e372bb49c9c239dab3 100644
--- a/perun/proxy/utils/nagios/check_ldap.py
+++ b/perun/proxy/utils/nagios/check_ldap.py
@@ -6,7 +6,6 @@ import time
 
 from ldap3 import Server, Connection, SUBTREE
 
-
 """
 check LDAP is available
 """
diff --git a/perun/proxy/utils/nagios/check_mongodb.py b/perun/proxy/utils/nagios/check_mongodb.py
index 9fca2d48c8ba8cf1674dfef9f89e68721972b053..18f8dc3eb8e2c292d1f4f1aa139033e0354b2427 100644
--- a/perun/proxy/utils/nagios/check_mongodb.py
+++ b/perun/proxy/utils/nagios/check_mongodb.py
@@ -129,7 +129,7 @@ def split_host_port(string):
     return (host, port)
 
 
-def main(argv):
+def main():
     p = optparse.OptionParser(
         conflict_handler="resolve",
         description="This Nagios plugin checks the health of mongodb.",
@@ -2218,4 +2218,4 @@ def replication_get_time_diff(con):
 # main app
 #
 if __name__ == "__main__":
-    sys.exit(main(sys.argv[1:]))
+    sys.exit(main())
diff --git a/perun/proxy/utils/nagios/check_nginx.py b/perun/proxy/utils/nagios/check_nginx.py
index d7ac825c5842a647e53cf25506f18ce9f49701bd..d2634be63995193869c94b42fa8e43a488d15f69 100644
--- a/perun/proxy/utils/nagios/check_nginx.py
+++ b/perun/proxy/utils/nagios/check_nginx.py
@@ -1,4 +1,5 @@
 #!/usr/bin/env python3
+import sys
 
 from check_nginx_status.check_nginx_status import main
 
@@ -6,4 +7,4 @@ from check_nginx_status.check_nginx_status import main
 # https://gitlab.ics.muni.cz/perun-proxy-aai/python/check_nginx_status/
 
 if __name__ == "__main__":
-    main()
+    sys.exit(main())
diff --git a/perun/proxy/utils/nagios/check_rpc_status.py b/perun/proxy/utils/nagios/check_rpc_status.py
index e13a976c8a62a465fde9602ac0f4bd4f847f8619..8e4d31d4870586c7f5c8fd298f08ad77852f9566 100644
--- a/perun/proxy/utils/nagios/check_rpc_status.py
+++ b/perun/proxy/utils/nagios/check_rpc_status.py
@@ -6,7 +6,6 @@ import sys
 import time
 import requests
 
-
 """
 check RPC API is available
 """
diff --git a/perun/proxy/utils/nagios/check_saml.py b/perun/proxy/utils/nagios/check_saml.py
index f446205795561cdf4e9219eeff2d677938029736..269638afc39f19caa086c9bedf05a471c0e331af 100755
--- a/perun/proxy/utils/nagios/check_saml.py
+++ b/perun/proxy/utils/nagios/check_saml.py
@@ -388,7 +388,7 @@ class SAMLChecker:
             )
         if self.args.cache_timeout > 0:
             try:
-                file_path = tempfile.gettempdir() + "/" + args.cache_file
+                file_path = tempfile.gettempdir() + "/" + self.args.cache_file
                 f = open(file_path, "w")
                 f.write("{}_{}_{}".format(cache_time, status, message))
                 f.close()
@@ -402,7 +402,7 @@ class SAMLChecker:
     def check_cache(self):
         try:
             tempdir = tempfile.gettempdir()
-            file_path = tempdir + "/" + args.cache_file
+            file_path = tempdir + "/" + self.args.cache_file
             if os.path.isfile(file_path):
                 with open(file_path, "r") as f:
                     res_b = f.read()
@@ -414,7 +414,7 @@ class SAMLChecker:
                 message = res[2]
                 actual_time = time.time()
                 time_diff = actual_time - float(cached_time)
-                if time_diff < args.cache_timeout:
+                if time_diff < self.args.cache_timeout:
                     self.finish(
                         message=message,
                         status=status,
@@ -551,7 +551,10 @@ class SAMLChecker:
             )
 
 
-if __name__ == "__main__":
-    args = get_args()
-    checker = SAMLChecker(args)
+def main():
+    checker = SAMLChecker(get_args())
     checker.main()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/perun/proxy/utils/nagios/check_user_logins.py b/perun/proxy/utils/nagios/check_user_logins.py
index 6a2793b830f7021b212f868f74a47a677198e544..7d97c8b6fa2ad40e1b0e7b2d48009e415faf6157 100644
--- a/perun/proxy/utils/nagios/check_user_logins.py
+++ b/perun/proxy/utils/nagios/check_user_logins.py
@@ -110,8 +110,8 @@ def main():
     path, regex, datetime_format, logins, seconds = command_line_validate(argv)
     user_dict = parse_log_data(path, regex, datetime_format)
     check_log_data(user_dict, logins, seconds)
-    print("OK", logins, seconds)
-    return 0
+    print("OK -", logins, seconds)
+    sys.exit(0)
 
 
 if __name__ == "__main__":
diff --git a/perun/proxy/utils/nagios/webserver_availability.py b/perun/proxy/utils/nagios/webserver_availability.py
index cee20e3d2ed7c6c9236df62e3ad313e280d81cf3..e667fb714066ad677d3977494016caf6e3f0cf3c 100755
--- a/perun/proxy/utils/nagios/webserver_availability.py
+++ b/perun/proxy/utils/nagios/webserver_availability.py
@@ -1,5 +1,6 @@
 #!/usr/bin/env python3
 import argparse
+import sys
 
 import requests
 
@@ -43,4 +44,4 @@ def main():
 
 
 if __name__ == "__main__":
-    exit(main())
+    sys.exit(main())
diff --git a/perun/proxy/utils/print_docker_versions.py b/perun/proxy/utils/print_docker_versions.py
index 8096be432e71b177b79004b4acda9347a65b3a0b..7e2b6257f83b9635aea65e9671d8bb6a6c443d72 100755
--- a/perun/proxy/utils/print_docker_versions.py
+++ b/perun/proxy/utils/print_docker_versions.py
@@ -4,65 +4,76 @@ import multiprocessing
 import os
 import re
 import json
+
 import docker
 import argparse
 import platform
 
-output = {
-    "cpu_count": "",
-    "memory": "",
-    "os_version": "",
-    "kernel_version": "",
-    "docker_version": "",
-    "containerd_version": "",
-    "containers": {},
-}
 
-parser = argparse.ArgumentParser()
-parser.add_argument(
-    "-e", "--exclude", type=str, help="Space delimited list of containers to exclude"
-)
-args = parser.parse_args()
-exc_containers = args.exclude.split(" ") if args.exclude is not None else []
-output["cpu_count"] = str(multiprocessing.cpu_count())
-mem_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
-if mem_bytes > 1:
-    output["memory"] = str(round(mem_bytes / (1024.0**3), 2)) + "GiB"
-name = ""
-maj_version = ""
-with open("/etc/os-release") as file:
-    contents = file.read()
-    match = re.search(r"NAME=\"(.*)\"", contents)
-    if match is not None:
-        name = match.group(1)
-    match = re.search(r"VERSION_ID=\"(.*)\"", contents)
-    if match is not None:
-        maj_version = match.group(1).split(".")[0]
-if name.startswith("Debian"):
-    name = name.split(" ")[0]
-output["os_version"] = name + " " + maj_version
-output["kernel_version"] = platform.release()
-client = docker.from_env()
-if client is not None:
-    version_info = client.version()
-    docker_ver_filter = list(
-        filter(lambda x: x["Name"] == "Engine", version_info["Components"])
-    )
-    output["docker_version"] = (
-        docker_ver_filter[0]["Version"] if len(docker_ver_filter) > 0 else ""
-    )
-    containerd_ver_filter = list(
-        filter(lambda x: x["Name"] == "containerd", version_info["Components"])
+def main():
+    output = {
+        "cpu_count": "",
+        "memory": "",
+        "os_version": "",
+        "kernel_version": "",
+        "docker_version": "",
+        "containerd_version": "",
+        "containers": {},
+    }
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-e",
+        "--exclude",
+        type=str,
+        help="Space delimited list of containers to exclude",
     )
-    containerd_version = (
-        containerd_ver_filter[0]["Version"] if len(containerd_ver_filter) > 0 else ""
-    )
-    if len(containerd_version) > 0 and containerd_version[0] == "v":
-        containerd_version = containerd_version[1:]
-    output["containerd_version"] = containerd_version
-    containers = client.containers.list()
-    containers = list(filter(lambda x: x.name not in exc_containers, containers))
-    for container in containers:
-        container_image = container.image.tags[0] if container.image.tags else ""
-        output["containers"][container.name] = container_image.split(":")[-1]
-print(json.dumps(output))
+    args = parser.parse_args()
+    exc_containers = args.exclude.split(" ") if args.exclude is not None else []
+    output["cpu_count"] = str(multiprocessing.cpu_count())
+    mem_bytes = os.sysconf("SC_PAGE_SIZE") * os.sysconf("SC_PHYS_PAGES")
+    if mem_bytes > 1:
+        output["memory"] = str(round(mem_bytes / (1024.0**3), 2)) + "GiB"
+    name = ""
+    maj_version = ""
+    with open("/etc/os-release") as file:
+        contents = file.read()
+        match = re.search(r"NAME=\"(.*)\"", contents)
+        if match is not None:
+            name = match.group(1)
+        match = re.search(r"VERSION_ID=\"(.*)\"", contents)
+        if match is not None:
+            maj_version = match.group(1).split(".")[0]
+    if name.startswith("Debian"):
+        name = name.split(" ")[0]
+    output["os_version"] = name + " " + maj_version
+    output["kernel_version"] = platform.release()
+    client = docker.from_env()
+    if client is not None:
+        version_info = client.version()
+        docker_ver_filter = list(
+            filter(lambda x: x["Name"] == "Engine", version_info["Components"])
+        )
+        output["docker_version"] = (
+            docker_ver_filter[0]["Version"] if len(docker_ver_filter) > 0 else ""
+        )
+        containerd_ver_filter = list(
+            filter(lambda x: x["Name"] == "containerd", version_info["Components"])
+        )
+        containerd_version = (
+            containerd_ver_filter[0]["Version"]
+            if len(containerd_ver_filter) > 0
+            else ""
+        )
+        if len(containerd_version) > 0 and containerd_version[0] == "v":
+            containerd_version = containerd_version[1:]
+        output["containerd_version"] = containerd_version
+        containers = client.containers.list()
+        containers = list(filter(lambda x: x.name not in exc_containers, containers))
+        for container in containers:
+            container_image = container.image.tags[0] if container.image.tags else ""
+            output["containers"][container.name] = container_image.split(":")[-1]
+    print(json.dumps(output))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/perun/proxy/utils/run_probes.py b/perun/proxy/utils/run_probes.py
index 3599e9bfba1133575c4affc544acdebc6ffbac42..c64a9edb2551d85c1fd121bbb3eed7a1d024900e 100644
--- a/perun/proxy/utils/run_probes.py
+++ b/perun/proxy/utils/run_probes.py
@@ -16,20 +16,24 @@ def open_file(filepath):
             f"Cannot open config with path: {filepath}, error: {e.strerror}",
             file=sys.stderr,
         )
-        exit(2)
+        sys.exit(2)
 
 
 def run_probe(probe_name, command):
-    result = subprocess.run(command, text=True, capture_output=True)
-    search = re.search(r" - .*", result.stdout)
+    result = subprocess.run(
+        command, text=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
+    )
+    output = re.sub("[ \t\n]+", " ", result.stdout)
+    search = re.search(r" - .*", output)
     if search:
         print(f"{result.returncode} {probe_name} {search.group()}")
     else:
-        print(f"{result.returncode} {probe_name} - {result.stdout}")
+        print(f"{result.returncode} {probe_name} - {output}")
     return result.returncode
 
 
-def main(config_filepath):
+def main():
+    config_filepath = "/etc/run_probes_cfg.yaml"
     config = yaml.safe_load(open_file(config_filepath))
     if not config:
         return
@@ -37,7 +41,7 @@ def main(config_filepath):
     for _, options in config.items():
         module = options["module"]
         for name, args in options.get("runs").items():
-            command = ["python", "-m", module]
+            command = ["python3", "-m", module]
             for arg_name, arg_val in args.items():
                 if len(arg_name) == 1:
                     arg_name = "-" + arg_name
@@ -48,10 +52,10 @@ def main(config_filepath):
                 elif arg_val is False:
                     arg_val = "false"
                 command.append(arg_name)
-                command.append(str(arg_val))
+                if arg_val is not None:
+                    command.append(str(arg_val))
             Thread(target=run_probe, args=[name, command]).start()
 
 
 if __name__ == "__main__":
-    config_filepath = "/etc/run_probes_cfg.yaml"
-    main(config_filepath)
+    main()
diff --git a/perun/proxy/utils/run_version_script.py b/perun/proxy/utils/run_version_script.py
index 260f2416a19d9b5f2db01961cd02898daa08c30d..0a83d7ad4d2db1d5cb3fcbc6d6a8b3e4a33fb27c 100644
--- a/perun/proxy/utils/run_version_script.py
+++ b/perun/proxy/utils/run_version_script.py
@@ -50,7 +50,7 @@ def dict_to_md_table(dictionary):
         )
 
 
-async def run_script(user, host):
+async def run_script(user, host, exc_containers):
     try:
         async with asyncssh.connect(host, username=user) as conn:
             await asyncssh.scp("print_docker_versions.py", (conn, "/tmp/"))
@@ -64,8 +64,8 @@ async def run_script(user, host):
         return e, host
 
 
-async def collect_info(hosts):
-    tasks = (run_script(host[0], host[1]) for host in hosts)
+async def collect_info(hosts, exc_containers):
+    tasks = (run_script(host[0], host[1], exc_containers) for host in hosts)
     results = await asyncio.gather(*tasks, return_exceptions=True)
     stdouts = []
     hosts = []
@@ -85,13 +85,21 @@ async def collect_info(hosts):
         dict_to_md_table(jsons_to_dictionary(stdouts, hosts))
 
 
-parser = argparse.ArgumentParser()
-parser.add_argument(
-    "-e", "--exclude", type=str, help="Space delimited list of containers to exclude"
-)
-parser.add_argument("machines", nargs="+", help="Machines to collect the info from")
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "-e",
+        "--exclude",
+        type=str,
+        help="Space delimited list of containers to exclude",
+    )
+    parser.add_argument("machines", nargs="+", help="Machines to collect the info from")
+
+    args = parser.parse_args()
+    exc_containers = args.exclude if args.exclude is not None else ""
+    machines = list(map(lambda x: x.split("@"), args.machines))
+    asyncio.run(collect_info(machines, exc_containers))
+
 
-args = parser.parse_args()
-exc_containers = args.exclude if args.exclude is not None else ""
-machines = list(map(lambda x: x.split("@"), args.machines))
-asyncio.run(collect_info(machines))
+if __name__ == "__main__":
+    main()
diff --git a/perun/proxy/utils/separate_oidc_logs.py b/perun/proxy/utils/separate_oidc_logs.py
index d6aefbd2273806dda7916dc4cf9da3f42ec7944e..4be01cbd91233f8724bfe62caf22f1ff27ef7191 100644
--- a/perun/proxy/utils/separate_oidc_logs.py
+++ b/perun/proxy/utils/separate_oidc_logs.py
@@ -6,66 +6,72 @@ import re
 import sys
 from os import mkdir, path, rename, remove, system
 
-if len(sys.argv) <= 1:
-    print("One argument is expected!")
-    exit(-1)
 
-absolute_file_name = sys.argv[1]
+def main():
+    if len(sys.argv) <= 1:
+        print("One argument is expected!")
+        sys.exit(-1)
 
-if not path.exists(absolute_file_name):
-    print("File with name " + absolute_file_name + " doesn't exists!")
-    exit(-1)
+    absolute_file_name = sys.argv[1]
 
-file_name = path.basename(absolute_file_name)
-dir_name = path.dirname(absolute_file_name)
+    if not path.exists(absolute_file_name):
+        print("File with name " + absolute_file_name + " doesn't exist!")
+        sys.exit(-1)
 
-if len(dir_name) != 0:
-    dir_name += "/"
+    file_name = path.basename(absolute_file_name)
+    dir_name = path.dirname(absolute_file_name)
 
-full_log_dir_name = dir_name + "full_logs/"
-full_log_file_name = "full_" + file_name
-full_log_absolute_name = full_log_dir_name + full_log_file_name
+    if len(dir_name) != 0:
+        dir_name += "/"
 
-if not path.exists(full_log_dir_name):
-    mkdir(full_log_dir_name)
+    full_log_dir_name = dir_name + "full_logs/"
+    full_log_file_name = "full_" + file_name
+    full_log_absolute_name = full_log_dir_name + full_log_file_name
 
-rename(absolute_file_name, full_log_absolute_name)
+    if not path.exists(full_log_dir_name):
+        mkdir(full_log_dir_name)
 
-session_ids = set()
-regex_session_id = r"(?<=\s\[)\w+(?=\]\s+\S+\s+:)"
+    rename(absolute_file_name, full_log_absolute_name)
 
-file = open(full_log_absolute_name, "r")
-for line in file:
-    res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
-    if res is not None:
+    session_ids = set()
+    regex_session_id = r"(?<=\s\[)\w+(?=\]\s+\S+\s+:)"
+
+    file = open(full_log_absolute_name, "r")
+    for line in file:
+        res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
+        if res is not None:
+            session_id = re.search(regex_session_id, line)
+            if session_id is not None:
+                session_ids.add(session_id.group(0))
+    file.close()
+
+    file = open(full_log_absolute_name, "r")
+
+    final_log_file = open(absolute_file_name, "w")
+    last_session_id = ""
+    for line in file:
         session_id = re.search(regex_session_id, line)
         if session_id is not None:
-            session_ids.add(session_id.group(0))
-file.close()
-
-file = open(full_log_absolute_name, "r")
-
-final_log_file = open(absolute_file_name, "w")
-last_session_id = ""
-for line in file:
-    session_id = re.search(regex_session_id, line)
-    if session_id is not None:
-        last_session_id = session_id.group(0)
-    if session_id is None or session_id.group(0) not in session_ids:
-        if last_session_id not in session_ids:
-            final_log_file.write(line)
-
-file.close()
-final_log_file.close()
-
-# Zip old log file
-with open(full_log_absolute_name, "rb") as f_in, gzip.open(
-    full_log_absolute_name + ".gz", "wb"
-) as f_out:
-    shutil.copyfileobj(f_in, f_out)
-
-# Remove unzip file
-remove(full_log_absolute_name)
-
-# Remove old files
-system("find " + full_log_dir_name + " -mtime +7 -delete")
+            last_session_id = session_id.group(0)
+        if session_id is None or session_id.group(0) not in session_ids:
+            if last_session_id not in session_ids:
+                final_log_file.write(line)
+
+    file.close()
+    final_log_file.close()
+
+    # Zip old log file
+    with open(full_log_absolute_name, "rb") as f_in, gzip.open(
+        full_log_absolute_name + ".gz", "wb"
+    ) as f_out:
+        shutil.copyfileobj(f_in, f_out)
+
+    # Remove unzip file
+    remove(full_log_absolute_name)
+
+    # Remove old files
+    system("find " + full_log_dir_name + " -mtime +7 -delete")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/perun/proxy/utils/separate_ssp_logs.py b/perun/proxy/utils/separate_ssp_logs.py
index 462bd3bbd2aa1eb44d7d04656b77b8de812c2863..2223d6c7ea8ef35c4458486e654fabb39860f3e8 100755
--- a/perun/proxy/utils/separate_ssp_logs.py
+++ b/perun/proxy/utils/separate_ssp_logs.py
@@ -6,76 +6,82 @@ import re
 import sys
 from os import mkdir, path, rename, remove, system
 
-if len(sys.argv) <= 1:
-    print("One argument is expected!")
-    exit(-1)
 
-absolute_file_name = sys.argv[1]
+def main():
+    if len(sys.argv) <= 1:
+        print("One argument is expected!")
+        sys.exit(-1)
 
-if not path.exists(absolute_file_name):
+        print("File with name " + absolute_file_name + " doesn't exist!")
-    exit(-1)
+    absolute_file_name = sys.argv[1]
 
-file_name = path.basename(absolute_file_name)
-dir_name = path.dirname(absolute_file_name)
+    if not path.exists(absolute_file_name):
+        print("File with name " + absolute_file_name + " doesn't exists!")
+        sys.exit(-1)
 
-if len(dir_name) != 0:
-    dir_name += "/"
+    file_name = path.basename(absolute_file_name)
+    dir_name = path.dirname(absolute_file_name)
 
-full_log_dir_name = dir_name + "full_logs/"
-full_log_file_name = "full_" + file_name
-full_log_absolute_name = full_log_dir_name + full_log_file_name
+    if len(dir_name) != 0:
+        dir_name += "/"
 
-if not path.exists(full_log_dir_name):
-    mkdir(full_log_dir_name)
+    full_log_dir_name = dir_name + "full_logs/"
+    full_log_file_name = "full_" + file_name
+    full_log_absolute_name = full_log_dir_name + full_log_file_name
 
-rename(absolute_file_name, full_log_absolute_name)
+    if not path.exists(full_log_dir_name):
+        mkdir(full_log_dir_name)
 
-session_ids = set()
-thread_ids = set()
-regex_session_id = r"^.*]:\s\d\s\[(.*?)\].*$"
-regex_thread_id = r"^.*\[(.*?)\]:.*$"
+    rename(absolute_file_name, full_log_absolute_name)
 
-file = open(full_log_absolute_name, "r")
-for line in file:
-    res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
-    if res is not None:
+    session_ids = set()
+    thread_ids = set()
+    regex_session_id = r"^.*]:\s\d\s\[(.*?)\].*$"
+    regex_thread_id = r"^.*\[(.*?)\]:.*$"
+
+    file = open(full_log_absolute_name, "r")
+    for line in file:
+        res = re.search("proxyidptester@cesnet.cz|9006464@muni.cz", line)
+        if res is not None:
+            session_id = re.search(regex_session_id, line)
+            if session_id is not None:
+                session_ids.add(session_id.group(1))
+            thread_id = re.search(regex_thread_id, line)
+            if thread_id is not None:
+                thread_ids.add(thread_id.group(1))
+    file.close()
+
+    file = open(full_log_absolute_name, "r")
+
+    final_log_file = open(absolute_file_name, "w")
+    last_session_id = ""
+    for line in file:
         session_id = re.search(regex_session_id, line)
         if session_id is not None:
-            session_ids.add(session_id.group(1))
-        thread_id = re.search(regex_thread_id, line)
-        if thread_id is not None:
-            thread_ids.add(thread_id.group(1))
-file.close()
-
-file = open(full_log_absolute_name, "r")
-
-final_log_file = open(absolute_file_name, "w")
-last_session_id = ""
-for line in file:
-    session_id = re.search(regex_session_id, line)
-    if session_id is not None:
-        last_session_id = session_id.group(1)
-    if session_id is None or session_id.group(1) not in session_ids:
-        thread_id = re.search(regex_thread_id, line)
-        if (
-            thread_id is None
-            or thread_id.group(1) not in thread_ids
-            or last_session_id not in session_ids
-        ):
-            final_log_file.write(line)
-
-file.close()
-final_log_file.close()
-
-# Zip old log file
-with open(full_log_absolute_name, "rb") as f_in, gzip.open(
-    full_log_absolute_name + ".gz", "wb"
-) as f_out:
-    shutil.copyfileobj(f_in, f_out)
-
-# Remove unzip file
-remove(full_log_absolute_name)
-
-# Remove old files
-system("find " + full_log_dir_name + " -mtime +7 -delete")
+            last_session_id = session_id.group(1)
+        if session_id is None or session_id.group(1) not in session_ids:
+            thread_id = re.search(regex_thread_id, line)
+            if (
+                thread_id is None
+                or thread_id.group(1) not in thread_ids
+                or last_session_id not in session_ids
+            ):
+                final_log_file.write(line)
+
+    file.close()
+    final_log_file.close()
+
+    # Zip old log file
+    with open(full_log_absolute_name, "rb") as f_in, gzip.open(
+        full_log_absolute_name + ".gz", "wb"
+    ) as f_out:
+        shutil.copyfileobj(f_in, f_out)
+
+    # Remove unzip file
+    remove(full_log_absolute_name)
+
+    # Remove old files
+    system("find " + full_log_dir_name + " -mtime +7 -delete")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/setup.py b/setup.py
index 22f569802891c4075dcdde4f43df3aea2c67c85b..4867a4fbd84354ab6128d5640f7525dcd4456ec4 100644
--- a/setup.py
+++ b/setup.py
@@ -19,4 +19,27 @@ setuptools.setup(
         "check_syncrepl_extended~=2020.13",
         "check_nginx_status~=1.0",
     ],
+    entry_points={
+        "console_scripts": [
+            "run_probes=perun.proxy.utils.run_probes:main",
+            "check_custom_command=perun.proxy.utils.nagios.check_custom_command:main",
+            "check_dockers=perun.proxy.utils.nagios.check_dockers:main",
+            "check_exabgp_propagation="
+            "perun.proxy.utils.nagios.check_exabgp_propagation:main",
+            "check_ldap=perun.proxy.utils.nagios.check_ldap:main",
+            "check_ldap_syncrepl=check_syncrepl_extended.check_syncrepl_extended:main",
+            "check_mongodb=perun.proxy.utils.nagios.check_mongodb:main",
+            "check_nginx=check_nginx_status.check_nginx_status:main",
+            "check_rpc_status=perun.proxy.utils.nagios.check_rpc_status:main",
+            "check_saml=perun.proxy.utils.nagios.check_saml:main",
+            "check_user_logins=perun.proxy.utils.nagios.check_user_logins:main",
+            "check_webserver_availability="
+            "perun.proxy.utils.nagios.webserver_availability:main",
+            "metadata_expiration=perun.proxy.utils.metadata_expiration:main",
+            "print_docker_versions=perun.proxy.utils.print_docker_versions:main",
+            "run_version_script=perun.proxy.utils.run_version_script:main",
+            "separate_oidc_logs=perun.proxy.utils.separate_oidc_logs:main",
+            "separate_ssp_logs=perun.proxy.utils.separate_ssp_logs:main",
+        ]
+    },
 )