diff --git a/.github/workflows/mamonsu-tests-dev.yml b/.github/workflows/mamonsu-tests-dev.yml
index c259768..41f1f3b 100644
--- a/.github/workflows/mamonsu-tests-dev.yml
+++ b/.github/workflows/mamonsu-tests-dev.yml
@@ -45,6 +45,9 @@ jobs:
- docker_os: 'ubuntu:24.04'
pg_version: '16'
zabbix_version: '6.4.13'
+ - docker_os: 'ubuntu:24.04'
+ pg_version: '18'
+ zabbix_version: '6.4.13'
exclude:
- docker_os: 'centos:8'
pg_version: '12'
diff --git a/.github/workflows/mamonsu-tests-master.yml b/.github/workflows/mamonsu-tests-master.yml
index af22e98..d1a7943 100644
--- a/.github/workflows/mamonsu-tests-master.yml
+++ b/.github/workflows/mamonsu-tests-master.yml
@@ -45,6 +45,9 @@ jobs:
- docker_os: 'ubuntu:24.04'
pg_version: '16'
zabbix_version: '6.4.13'
+ - docker_os: 'ubuntu:24.04'
+ pg_version: '18'
+ zabbix_version: '6.4.13'
exclude:
# excludes PG 15, 16 on CentOS
- docker_os: 'centos:7'
diff --git a/documentation/metrics.md b/documentation/metrics.md
index d507a8e..ccdaebc 100644
--- a/documentation/metrics.md
+++ b/documentation/metrics.md
@@ -2374,6 +2374,69 @@ Default config:
*Buffers Written During Checkpoints* maps `buffers_written`.
+- **Done Checkpoints**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Checkpoints: Done (in hour) |
+
+
+ | Key |
+ pgsql.checkpoint[count_done] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Speed Per Second |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *Done Checkpoints* maps `num_done`.
+
+- **SLRU Written**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Checkpoints: SLRU Written (in hour) |
+
+
+ | Key |
+ pgsql.checkpoint[slru_written] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Speed Per Second |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *SLRU Written* maps `slru_written`.
### Graphs
@@ -2920,6 +2983,70 @@ Default config:
*Checksum Failures* maps `checksum_failures`.
+- **Parallel Workers To Launch**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Instance: Parallel Workers To Launch |
+
+
+ | Key |
+ pgsql.parallel[instance_to_launch] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Simple Change |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *Parallel Workers To Launch* maps `parallel_workers_to_launch`.
+
+- **Parallel Workers Launched**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Instance: Parallel Workers Launched |
+
+
+ | Key |
+ pgsql.parallel[instance_launched] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Simple Change |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *Parallel Workers Launched* maps `parallel_workers_launched`.
+
### Graphs
@@ -4001,6 +4128,37 @@ Default config:
*Amount of WAL Records* maps `wal_records`.
+- **WAL Buffers Full**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Statements: WAL Buffers Full |
+
+
+ | Key |
+ pgsql.stat[wal_buffers_full] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Simple Change |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *WAL Buffers Full* maps `wal_buffers_full`.
- **Dirty Bytes**
@@ -4298,6 +4456,70 @@ Default config:
*pg_stat_statements.max Exceeding Count* maps `dealloc`.
+- **Parallel Workers To Launch**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Statements: Parallel Workers To Launch |
+
+
+ | Key |
+ pgsql.parallel[statements_to_launch] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Simple Change |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *Parallel Workers To Launch* maps `parallel_workers_to_launch`.
+
+- **Parallel Workers Launched**
+
+ Zabbix item:
+
+
+ | Name |
+ PostgreSQL Statements: Parallel Workers Launched |
+
+
+ | Key |
+ pgsql.parallel[statements_launched] |
+
+
+ | Type |
+ Numeric (float) |
+
+
+ | Units |
+ |
+
+
+ | Delta |
+ Simple Change |
+
+
+ | Supported Version |
+ 18+ |
+
+
+
+ *Parallel Workers Launched* maps `parallel_workers_launched`.
+
### Graphs
diff --git a/github-actions-tests/metrics.sh b/github-actions-tests/metrics.sh
index 5ea6694..c7c37db 100644
--- a/github-actions-tests/metrics.sh
+++ b/github-actions-tests/metrics.sh
@@ -64,6 +64,7 @@ for metric in $(cat ${METRICS_FILE}); do
GREP=$( mamonsu agent metric-get ${metric} | grep "pgsql\|sys\|mamonsu" )
if [ -z "$GREP" ]; then
echo "---> ERROR: Cannot found metric $metric"
+ mamonsu agent metric-list
exit 11
fi
done
diff --git a/github-actions-tests/sources/metrics-linux-18.txt b/github-actions-tests/sources/metrics-linux-18.txt
new file mode 100644
index 0000000..3e0c098
--- /dev/null
+++ b/github-actions-tests/sources/metrics-linux-18.txt
@@ -0,0 +1,123 @@
+mamonsu.memory.rss[max]
+mamonsu.plugin.errors[]
+mamonsu.plugin.keepalive[]
+pgsql.archive_command[archived_files]
+pgsql.archive_command[count_files_to_archive]
+pgsql.archive_command[failed_trying_to_archive]
+pgsql.archive_command[size_files_to_archive]
+pgsql.autovacuum.count[]
+pgsql.autovacuum.utilization[]
+pgsql.bgwriter[buffers_alloc]
+pgsql.bgwriter[buffers_clean]
+pgsql.bgwriter[maxwritten_clean]
+pgsql.blocks[hit]
+pgsql.blocks[read]
+pgsql.checkpoint[checkpoint_sync_time]
+pgsql.checkpoint[count_timed]
+pgsql.checkpoint[count_wal]
+pgsql.checkpoint[write_time]
+pgsql.checkpoint[count_done]
+pgsql.checkpoint[slru_written]
+pgsql.connections[active]
+pgsql.connections[disabled]
+pgsql.connections[fastpath_function_call]
+pgsql.connections[idle]
+pgsql.connections[idle_in_transaction]
+pgsql.connections[idle_in_transaction_aborted]
+pgsql.connections[max_connections]
+pgsql.connections[other]
+pgsql.connections[total]
+pgsql.connections[waiting]
+pgsql.database.discovery[]
+pgsql.database.bloating_tables[mamonsu_test_db]
+pgsql.database.bloating_tables[postgres]
+pgsql.database.invalid_indexes[mamonsu_test_db]
+pgsql.database.invalid_indexes[postgres]
+pgsql.database.max_age[mamonsu_test_db]
+pgsql.database.max_age[postgres]
+pgsql.database.size[mamonsu_test_db]
+pgsql.database.size[postgres]
+pgsql.events[checksum_failures]
+pgsql.events[conflicts]
+pgsql.events[deadlocks]
+pgsql.events[xact_rollback]
+pgsql.oldest[transaction_time]
+pgsql.oldest[xid_age]
+pgsql.ping[]
+pgsql.pg_locks[accessexclusive]
+pgsql.pg_locks[accessshare]
+pgsql.pg_locks[exclusive]
+pgsql.pg_locks[rowexclusive]
+pgsql.pg_locks[rowshare]
+pgsql.pg_locks[share]
+pgsql.pg_locks[sharerowexclusive]
+pgsql.pg_locks[shareupdateexclusive]
+pgsql.parallel[instance_to_launch]
+pgsql.parallel[instance_launched]
+pgsql.parallel[queries]
+pgsql.parallel[statements_to_launch]
+pgsql.parallel[statements_launched]
+pgsql.prepared.count
+pgsql.prepared.oldest
+pgsql.relation.size[]
+pgsql.relation.size[mamonsu_test_db.mamonsu.config]
+pgsql.relation.size[postgres.pg_catalog.pg_class]
+pgsql.replication.non_active_slots[]
+pgsql.replication_lag[sec]
+pgsql.replication_lag[sec]
+pgsql.stat[wal_buffers_full]
+pgsql.temp[bytes]
+pgsql.temp[files]
+pgsql.transactions[committed]
+pgsql.tuples[deleted]
+pgsql.tuples[fetched]
+pgsql.tuples[inserted]
+pgsql.tuples[returned]
+pgsql.tuples[updated]
+pgsql.uptime[]
+pgsql.wal.buffers_full[]
+pgsql.wal.count[]
+pgsql.wal.fpi.count[]
+pgsql.wal.records.count[]
+pgsql.wal.sync_time[]
+pgsql.wal.write[]
+pgsql.wal.write_time[]
+system.cpu[idle]
+system.cpu[iowait]
+system.cpu[irq]
+system.cpu[nice]
+system.cpu[softirq]
+system.cpu[system]
+system.cpu[user]
+system.disk.discovery[]
+system.disk.all_read[]
+system.disk.all_write[]
+system.disk.all_read_b[]
+system.disk.all_write_b[]
+system.la[1]
+system.memory[active]
+system.memory[available]
+system.memory[buffers]
+system.memory[cached]
+system.memory[committed]
+system.memory[inactive]
+system.memory[mapped]
+system.memory[page_tables]
+system.memory[slab]
+system.memory[swap]
+system.memory[swap_cache]
+system.memory[total]
+system.memory[vmalloc_used]
+system.memory[unused]
+system.memory[used]
+system.net.discovery[]
+system.open_files[]
+system.processes[blocked]
+system.processes[forkrate]
+system.processes[running]
+system.vfs.discovery[]
+system.vfs.free[/]
+system.vfs.percent_free[/]
+system.vfs.percent_inode_free[/]
+system.vfs.used[/]
+system.uptime[]
diff --git a/mamonsu/lib/runner.py b/mamonsu/lib/runner.py
index 67135b8..e79c174 100644
--- a/mamonsu/lib/runner.py
+++ b/mamonsu/lib/runner.py
@@ -15,7 +15,7 @@
from mamonsu.lib.zbx_template import ZbxTemplate
from mamonsu.lib.get_keys import GetKeys
-if platform.LINUX:
+if platform.LINUX or platform.DARWIN:
from mamonsu.plugins.system.linux.scripts import Scripts
@@ -224,8 +224,7 @@ def is_any_equal(array):
# extract pg version from input
def define_pg_version(version_args):
if len(version_args) < 4:
- if version_args == "15" or version_args == "14" or version_args == "11" or version_args == "12" or version_args == "13" or version_args == "10" \
- or version_args == "9.6" or version_args == "9.5":
+ if version_args in ["9.5", "9.6", "10", "11", "12", "13", "14", "15", "16", "17", "18"]:
version_number = version_args[0].split('.')
for num in version_number:
if not num.isdigit():
diff --git a/mamonsu/plugins/pgsql/checkpoint.py b/mamonsu/plugins/pgsql/checkpoint.py
index c1ca9ac..bb663db 100644
--- a/mamonsu/plugins/pgsql/checkpoint.py
+++ b/mamonsu/plugins/pgsql/checkpoint.py
@@ -8,6 +8,7 @@
class Checkpoint(Plugin):
AgentPluginType = "pg"
Interval = 60 * 5
+ FactorIndex = 6
key = "pgsql.checkpoint{0}"
@@ -86,6 +87,18 @@ def __init__(self, config):
("PostgreSQL Checkpoints: Write/Sync", "FF5656", 1),
Plugin.UNITS.ms, Plugin.DELTA.speed_per_second, 1)
]
+ if Pooler.server_version_greater("18"):
+ self.Items += [
+ ("num_done", "count_done",
+ "Done (in hour)",
+ ("PostgreSQL Checkpoints: Done (in hour)", "00CC00", 0),
+ Plugin.UNITS.none, Plugin.DELTA.speed_per_second, 60 * 60),
+
+ ("slru_written", "slru_written",
+ "SLRU Written (in hour)",
+ ("PostgreSQL Checkpoints: SLRU Written (in hour)", "00CC00", 0),
+ Plugin.UNITS.none, Plugin.DELTA.speed_per_second, 60 * 60)
+ ]
def run(self, zbx):
columns = [x[0] for x in self.Items]
@@ -174,8 +187,8 @@ def triggers(self, template, dashboard=False):
def keys_and_queries(self, template_zabbix):
result = []
- for num, item in enumerate(self.Items):
- if num > 1:
+ for item in self.Items:
+ if item[self.FactorIndex] == 1:
result.append(
"{0}[*],$2 $1 -c \"{1}\"".format(self.key.format("." + item[1]), self.query.format(item[0])))
else:
diff --git a/mamonsu/plugins/pgsql/instance.py b/mamonsu/plugins/pgsql/instance.py
index c0a3a9c..b7aa74c 100644
--- a/mamonsu/plugins/pgsql/instance.py
+++ b/mamonsu/plugins/pgsql/instance.py
@@ -68,6 +68,17 @@ class Instance(Plugin):
("PostgreSQL Instance: Events", "006AAE", 0),
Plugin.UNITS.none, Plugin.DELTA.simple_change)
]
+ Items_pg_18 = [
+ # key, zbx_key, description,
+ # ('graph name', color, side), units, delta
+ ("parallel_workers_to_launch", "parallel[instance_to_launch]", "",
+ ("PostgreSQL Instance: Parallel Workers To Launch", "00CC00", 0),
+ Plugin.UNITS.none, Plugin.DELTA.simple_change),
+
+ ("parallel_workers_launched", "parallel[instance_launched]", "",
+ ("PostgreSQL Instance: Parallel Workers Launched", "00CC00", 0),
+ Plugin.UNITS.none, Plugin.DELTA.simple_change)
+ ]
key_server_mode = "pgsql.server_mode{0}"
query_server_mode = """
@@ -86,8 +97,9 @@ class Instance(Plugin):
def run(self, zbx):
all_items = self.Items
if Pooler.server_version_greater("12.0"):
- all_items = self.Items + self.Items_pg_12
-
+ all_items += self.Items_pg_12
+ if Pooler.server_version_greater("18.0"):
+ all_items += self.Items_pg_18
columns = ["sum(COALESCE({0}, 0)) as {0}".format(x[0]) for x in all_items]
result = Pooler.query("""
SELECT {0}
@@ -102,7 +114,7 @@ def run(self, zbx):
def items(self, template, dashboard=False):
result = ""
- for num, item in enumerate(self.Items + self.Items_pg_12):
+ for num, item in enumerate(self.Items + self.Items_pg_12 + self.Items_pg_18):
if self.Type == "mamonsu":
delta = Plugin.DELTA.as_is
else:
@@ -154,7 +166,7 @@ def graphs(self, template, dashboard=False):
result = ""
for name in self.graphs_name.values():
items = []
- for num, item in enumerate(self.Items + self.Items_pg_12):
+ for num, item in enumerate(self.Items + self.Items_pg_12 + self.Items_pg_18):
if item[3][0] == name:
# split each item to get values for keys of both agent type and mamonsu type
keys = item[1].split("[")
@@ -201,10 +213,11 @@ def triggers(self, template, dashboard=False):
def keys_and_queries(self, template_zabbix):
result = []
- if Pooler.server_version_less("11"):
- all_items = self.Items
- else:
- all_items = self.Items + self.Items_pg_12
+ all_items = self.Items
+ if Pooler.server_version_greater("12.0"):
+ all_items += self.Items_pg_12
+ if Pooler.server_version_greater("18.0"):
+ all_items += self.Items_pg_18
for item in all_items:
# split each item to get values for keys of both agent type and mamonsu type
keys = item[1].split("[")
diff --git a/mamonsu/plugins/pgsql/statements.py b/mamonsu/plugins/pgsql/statements.py
index dbcc526..aa2f543 100644
--- a/mamonsu/plugins/pgsql/statements.py
+++ b/mamonsu/plugins/pgsql/statements.py
@@ -88,6 +88,29 @@ class Statements(Plugin):
("PostgreSQL Statements Info: Last Statistics Reset Time", "9C8A4E", 0))
]
+ Items_pg_18 = [
+ ("parallel[statements_to_launch]",
+ "sum(parallel_workers_to_launch)",
+ "Number of parallel workers planned to be launched",
+ Plugin.UNITS.none,
+ Plugin.DELTA.simple_change,
+ ("PostgreSQL Statements: Parallel Workers To Launch", "87C2B9", 0)),
+
+ ("parallel[statements_launched]",
+ "sum(parallel_workers_launched)",
+ "Number of parallel workers actually launched",
+ Plugin.UNITS.none,
+ Plugin.DELTA.simple_change,
+ ("PostgreSQL Statements: Parallel Workers Launched", "793F5D", 0)),
+
+ ("stat[wal_buffers_full]",
+ "sum(wal_buffers_full)",
+ "Number of times the WAL buffers became full",
+ Plugin.UNITS.none,
+ Plugin.DELTA.simple_change,
+ ("PostgreSQL Statements: WAL Buffers Full", "9C8A4E", 0)),
+ ]
+
Items_pgpro_stats_1_8 = [
("stat[read_bytes]",
"(sum(shared_blks_read+local_blks_read+temp_blks_read)*8*1024)::bigint",
@@ -168,6 +191,8 @@ def run(self, zbx):
self.Items[3][1] = self.Items[3][1].format("blk_read_time")
self.Items[4][1] = self.Items[4][1].format("blk_write_time")
self.Items[5][1] = self.Items[5][1].format("total_exec_time+total_plan_time", "blk_read_time-blk_write_time")
+ if Pooler.server_version_greater("18"):
+ all_items += self.Items_pg_18
all_items += self.Items_pg_13
info_view = 'pgpro_stats_info'
if self.extension == "pg_stat_statements":
@@ -210,7 +235,7 @@ def items(self, template, dashboard=False):
delta = Plugin.DELTA.as_is
else:
delta = Plugin.DELTA.speed_per_second
- for item in self.Items + self.Items_pg_13 + self.Items_pg_14:
+ for item in self.Items + self.Items_pg_13 + self.Items_pg_14 + self.Items_pg_18:
# split each item to get values for keys of both agent type and mamonsu type
keys = item[0].split("[")
result += template.item({
@@ -229,7 +254,7 @@ def graphs(self, template, dashboard=False):
result = ""
for graph_item in self.all_graphs:
items = []
- for item in self.Items + self.Items_pg_13:
+ for item in self.Items + self.Items_pg_13 + self.Items_pg_18:
if item[5][0] == graph_item[0]:
keys = item[0].split("[")
items.append({
@@ -268,9 +293,15 @@ def keys_and_queries(self, template_zabbix):
if Pooler.server_version_less("12"):
self.Items[5][1] = self.Items[5][1].format("total_time")
else:
- self.Items[5][1] = self.Items[5][1].format("total_exec_time+total_plan_time")
+ if Pooler.server_version_greater("17"):
+ self.Items[5][1] = self.Items[5][1].format("total_exec_time+total_plan_time",
+ "shared_blk_read_time-local_blk_read_time-temp_blk_read_time-shared_blk_write_time-local_blk_write_time-temp_blk_write_time")
+ else:
+ self.Items[5][1] = self.Items[5][1].format("total_exec_time+total_plan_time", "blk_read_time-blk_write_time")
if Pooler.is_pgpro() or Pooler.is_pgpro_ee():
all_items += self.Items_pg_13
+ if Pooler.server_version_greater("18"):
+ all_items += self.Items_pg_18
columns = [x[1] for x in all_items]
@@ -287,14 +318,18 @@ def keys_and_queries(self, template_zabbix):
extension_schema=extension_schema),
i + 1))
+ # Info view
if Pooler.server_version_greater("14"):
+ info_view = 'pgpro_stats_info'
if self.extension == "pg_stat_statements":
- for i, item in enumerate(self.Items_pg_14):
- keys = item[0].split("[")
- result.append(
- "{0}[*],$2 $1 -c \"{1}\" | awk -F '|' '{{print ${2}}}'".format(
- "{0}{1}.{2}".format(self.key, keys[0], keys[1][:-1]),
- self.query_info.format(metrics=(item[1]), extension_schema=extension_schema),
+ info_view = 'pg_stat_statements_info'
+ for i, item in enumerate(self.Items_pg_14):
+ keys = item[0].split("[")
+ result.append(
+ "{0}[*],$2 $1 -c \"{1}\" | awk -F '|' '{{print ${2}}}'".format(
+ "{0}{1}.{2}".format(self.key, keys[0], keys[1][:-1]),
+ self.query_info.format(metrics=(item[1]), extension_schema=extension_schema,
+ info_view_name=info_view),
i + 1))
return template_zabbix.key_and_query(result)
else:
diff --git a/tests/.env b/tests/.env
new file mode 100644
index 0000000..3c752fc
--- /dev/null
+++ b/tests/.env
@@ -0,0 +1,29 @@
+WAIT_MAMONSU_TIMEOUT=180
+DEFAULT_HOSTGROUP="Zabbix servers"
+DEFAULT_TEMPLATE="Mamonsu PostgreSQL Linux"
+POSTGRES_VERSION=15
+
+# creds
+POSTGRES_USER=postgres
+POSTGRES_PASSWORD=postgres
+POSTGRES_DB=mamonsu_test_db
+
+ZABBIX_ADMIN_USER=Admin
+ZABBIX_ADMIN_PASS=zabbix
+
+# hosts
+ZABBIX_EXT_URL=127.0.0.1:1337
+ZABBIX_INT_URL=zabbix-web:8080
+POSTGRES_EXT_HOST=127.0.0.1
+
+# external ports
+POSTGRES_EXT_PORT=15432
+MAMONSU_AGENT_EXT_PORT=11050
+ZABBIX_SERVER_EXT_PORT=11051
+ZABBIX_WEB_EXT_PORT=1337
+
+# internal ports
+POSTGRES_PORT=5432
+MAMONSU_AGENT_PORT=10050
+ZABBIX_SERVER_PORT=10051
+ZABBIX_WEB_PORT=8080
diff --git a/tests/README.md b/tests/README.md
new file mode 100644
index 0000000..d82efad
--- /dev/null
+++ b/tests/README.md
@@ -0,0 +1,32 @@
+
+# Mamonsu autotests
+
+Mamonsu testing with different Postgres versions and different operating systems (not supported yet). Uses docker-compose to run all services.
+
+
+## Installation
+
+
+```bash
+    pip3 install -r requirements.txt
+```
+
+## Usage/Examples
+
+You can simply run tests with only the pytest mark "bash" and they will be run with the Postgres version from the env variable POSTGRES_VERSION which is specified in the .env file
+
+```bash
+pytest -m bash
+```
+
+You can run tests with different Postgres versions with POSTGRES_VERSIONS variable
+
+```bash
+POSTGRES_VERSIONS=12,13 pytest -m bash
+```
+
+To run specific test you have to use -k flag with function name
+
+```bash
+POSTGRES_VERSIONS=12,13 pytest -k test_export_zabbix_params
+```
\ No newline at end of file
diff --git a/tests/config/__init__.py b/tests/config/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/config/config.py b/tests/config/config.py
new file mode 100644
index 0000000..0a88016
--- /dev/null
+++ b/tests/config/config.py
@@ -0,0 +1,29 @@
+import os
+from pathlib import Path
+from typing import Any
+
+from dotenv import load_dotenv
+
+
+class Config:
+ def __init__(self, env_path: Path | None = None):
+ self._root_path = Path(__file__).parent.parent
+ load_dotenv(env_path or self._root_path / ".env")
+
+ def __getattr__(self, name: str) -> Any:
+ value = os.getenv(name)
+ if value is None:
+ return None
+ return self._convert_value(value)
+
+ @staticmethod
+ def _convert_value(value: str) -> Any:
+ if value.lower() in ("true", "false"):
+ return value.lower() == "true"
+ try:
+ return int(value)
+ except ValueError:
+ try:
+ return float(value)
+ except ValueError:
+ return value
diff --git a/tests/config/constants/__init__.py b/tests/config/constants/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/config/constants/containers.py b/tests/config/constants/containers.py
new file mode 100644
index 0000000..40932ef
--- /dev/null
+++ b/tests/config/constants/containers.py
@@ -0,0 +1,8 @@
+from enum import StrEnum
+
+
+class ContainersEnum(StrEnum):
+ POSTGRES = "mamonsu-pg"
+ MAMONSU = "mamonsu-pg"
+ ZABBIX_WEB = "zabbix-web"
+ ZABBIX_SERVER = "zabbix-server"
diff --git a/tests/debian.Dockerfile b/tests/debian.Dockerfile
new file mode 100644
index 0000000..3c0a2e7
--- /dev/null
+++ b/tests/debian.Dockerfile
@@ -0,0 +1,44 @@
+# This stage exists so the entrypoint of the required version can be copied from it later, since env vars cannot be used in --from=
+ARG POSTGRES_VERSION=15
+FROM postgres:${POSTGRES_VERSION} AS postgres_base
+
+FROM debian:bookworm-slim AS builder
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ curl \
+ software-properties-common \
+ make \
+ dpkg-dev \
+ debhelper \
+ build-essential \
+ python3-dev \
+ python3-setuptools && \
+ rm -rf /var/lib/apt/lists/*
+
+WORKDIR /app
+COPY . /app
+RUN make deb
+
+FROM postgres:${POSTGRES_VERSION}
+
+COPY --from=builder /app/mamonsu*.deb /tmp/
+
+RUN apt-get update && \
+ apt-get install -y --no-install-recommends \
+ python3 \
+ python3-setuptools \
+ sudo \
+ && rm -rf /var/lib/apt/lists/*
+RUN dpkg -i /tmp/mamonsu*.deb || apt-get install -f -y && \
+ rm /tmp/mamonsu*.deb
+RUN mkdir -p /var/log/mamonsu && \
+ chown postgres:postgres /var/log/mamonsu && \
+ chmod 755 /var/log/mamonsu
+
+COPY --from=postgres_base /usr/local/bin/docker-entrypoint.sh /usr/local/bin/
+COPY ./tests/service-scripts/mamonsu-pg/mamonsu.conf /etc/mamonsu/agent.conf
+COPY ./tests/service-scripts/mamonsu-pg/entrypoint.sh ./tests/service-scripts/mamonsu-pg/init_mamonsu_in_zbx.sh /app/
+
+RUN chmod +x /app/entrypoint.sh /app/init_mamonsu_in_zbx.sh
+
+ENTRYPOINT ["/app/entrypoint.sh"]
\ No newline at end of file
diff --git a/tests/docker-compose.yaml b/tests/docker-compose.yaml
new file mode 100644
index 0000000..18b3622
--- /dev/null
+++ b/tests/docker-compose.yaml
@@ -0,0 +1,68 @@
+services:
+ mamonsu-pg:
+ build:
+ context: .
+ dockerfile: tests/debian.Dockerfile
+ args:
+ POSTGRES_VERSION: ${POSTGRES_VERSION}
+ container_name: mamonsu-pg
+ hostname: mamonsu-pg
+ image: mamonsu-pg
+ ports:
+ - "${MAMONSU_AGENT_EXT_PORT}:${MAMONSU_AGENT_PORT}"
+ - "${POSTGRES_EXT_PORT}:${POSTGRES_PORT}"
+ environment:
+ POSTGRES_VERSION: ${POSTGRES_VERSION}
+ MAMONSU_AGENT_PORT: ${MAMONSU_AGENT_PORT}
+ POSTGRES_USER: ${POSTGRES_USER}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
+ POSTGRES_DB: postgres
+ POSTGRES_HOST_AUTH_METHOD: md5
+ ZABBIX_USER: ${ZABBIX_ADMIN_USER}
+ ZABBIX_PASSWD: ${ZABBIX_ADMIN_PASS}
+ ZABBIX_URL: http://${ZABBIX_INT_URL}/
+ restart: no
+
+ zabbix:
+ image: zabbix/zabbix-server-pgsql:6.4.13-ubuntu
+ container_name: zabbix
+ hostname: zabbix
+ environment:
+ - DB_SERVER_HOST=mamonsu-pg
+ - POSTGRES_USER=${POSTGRES_USER}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+ - POSTGRES_DB=zabbix
+ - PGPASSWORD=${POSTGRES_PASSWORD}
+ ports:
+ - "${ZABBIX_SERVER_EXT_PORT}:${ZABBIX_SERVER_PORT}"
+ depends_on:
+ - mamonsu-pg
+
+ zabbix-web:
+ image: zabbix/zabbix-web-nginx-pgsql:6.4.13-ubuntu
+ container_name: zabbix-web
+ hostname: zabbix-web
+ environment:
+ - DB_SERVER_HOST=mamonsu-pg
+ - POSTGRES_USER=${POSTGRES_USER}
+ - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
+ - POSTGRES_DB=zabbix
+ - ZBX_SERVER_HOST=zabbix-server
+ - ZBX_SERVER_PORT=${ZABBIX_SERVER_PORT}
+ - ZABBIX_ADMIN_USER=Admin
+ - ZABBIX_ADMIN_PASS=zabbix
+ ports:
+ - "${ZABBIX_WEB_EXT_PORT}:${ZABBIX_WEB_PORT}"
+ depends_on:
+ - zabbix
+ healthcheck:
+ test: |
+ curl -fsS "http://localhost:${ZABBIX_WEB_PORT}/api_jsonrpc.php" \
+ -X POST \
+ -H "Content-Type: application/json-rpc" \
+ -d '{"jsonrpc":"2.0","method":"apiinfo.version","id":1,"auth":null,"params":{}}' \
+ | grep -q '"result"' || exit 1
+ interval: 5s
+ timeout: 5s
+ retries: 15
+ start_period: 30s
diff --git a/tests/pytest.ini b/tests/pytest.ini
new file mode 100644
index 0000000..69c1141
--- /dev/null
+++ b/tests/pytest.ini
@@ -0,0 +1,8 @@
+
+[pytest]
+log_cli=true
+log_level=INFO
+log_format = %(asctime)s %(levelname)s %(message)s
+log_date_format = %Y-%m-%d %H:%M:%S
+markers =
+ bash
diff --git a/tests/requirements.txt b/tests/requirements.txt
new file mode 100644
index 0000000..5f7074a
--- /dev/null
+++ b/tests/requirements.txt
@@ -0,0 +1,5 @@
+pytest==8.3.5
+docker==7.1.0
+zabbix==1.3.1
+python-dotenv==1.1.0
+psycopg2==2.9.10
diff --git a/tests/service-scripts/mamonsu-pg/entrypoint.sh b/tests/service-scripts/mamonsu-pg/entrypoint.sh
new file mode 100644
index 0000000..ba551ce
--- /dev/null
+++ b/tests/service-scripts/mamonsu-pg/entrypoint.sh
@@ -0,0 +1,77 @@
+#!/bin/bash
+set -e
+
+RECOVERY_FILE="standby.signal"
+
+DATA_DIR=/var/lib/postgresql/data
+DATA_SLAVE_PHYSICAL_DIR=/var/lib/postgresql/data_slave_physical
+WAL_DIR=/var/lib/postgresql/wals
+DATA_SLAVE_LOGICAL_DIR=/var/lib/postgresql/data_slave_logical
+
+su postgres -c '/usr/local/bin/docker-entrypoint.sh postgres "$@" &'
+sleep 5
+su postgres -c "pg_ctl stop -D $DATA_DIR"
+
+sudo mkdir -p $DATA_SLAVE_PHYSICAL_DIR
+sudo mkdir -p $WAL_DIR
+sudo chown -R postgres:postgres $DATA_SLAVE_PHYSICAL_DIR $WAL_DIR
+sudo chmod 700 $DATA_SLAVE_PHYSICAL_DIR
+
+sudo -u postgres echo "shared_preload_libraries = 'pg_stat_statements'" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "pg_stat_statements.track = all" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "archive_mode=on" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "archive_command='cp %p $WAL_DIR/%f'" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "wal_level=replica" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "max_wal_senders=4" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "hot_standby=on" >> $DATA_DIR/postgresql.conf
+
+sudo -u postgres echo "track_io_timing = on" >> $DATA_DIR/postgresql.conf
+sudo -u postgres echo "track_functions = all" >> $DATA_DIR/postgresql.conf
+
+sudo -u postgres echo "host replication replicator 127.0.0.1/0 trust" >> $DATA_DIR/pg_hba.conf
+
+su postgres -c "pg_ctl start -D $DATA_DIR"
+sleep 3
+
+sudo -u postgres psql -c "CREATE DATABASE mamonsu_test_db;"
+sudo -u postgres psql -d mamonsu_test_db -c "CREATE EXTENSION pg_stat_statements;"
+sudo -u postgres psql -d mamonsu_test_db -c "CREATE EXTENSION pg_buffercache;"
+sudo -u postgres psql -d mamonsu_test_db -c "CREATE TABLE mamonsu_test_table(id serial, value integer);"
+sudo -u postgres psql -d mamonsu_test_db -c "INSERT INTO mamonsu_test_table(value) SELECT * FROM generate_series(1, 10000);"
+sudo -u postgres psql -c "CREATE USER replicator WITH REPLICATION ENCRYPTED PASSWORD 'secret';"
+sudo -u postgres pg_basebackup -h 127.0.0.1 -U replicator -Fp -Xs -P -R -D $DATA_SLAVE_PHYSICAL_DIR/
+sudo -u postgres sed -i '/^archive_mode/s/^\(.*\)$/#\1/' $DATA_SLAVE_PHYSICAL_DIR/postgresql.conf
+sudo -u postgres sed -i '/^archive_command/s/^\(.*\)$/#\1/' $DATA_SLAVE_PHYSICAL_DIR/postgresql.conf
+sudo -u postgres echo "port=5433" >> $DATA_SLAVE_PHYSICAL_DIR/postgresql.conf
+sudo -u postgres echo "restore_command = 'cp $WAL_DIR/%f %p'" >> $DATA_SLAVE_PHYSICAL_DIR/${RECOVERY_FILE}
+
+su postgres -c "pg_ctl start -D $DATA_SLAVE_PHYSICAL_DIR"
+
+# create logical slave
+if [ "$POSTGRES_VERSION" -ge 100 ]; then # TODO: fix this, disabled for now
+ # create PGDATA directory
+ sudo mkdir -p $DATA_SLAVE_LOGICAL_DIR
+ sudo chown postgres:postgres $DATA_SLAVE_LOGICAL_DIR
+ sudo chmod 700 $DATA_SLAVE_LOGICAL_DIR
+
+ sudo -u postgres sed -i '/^wal_level/s/^\(.*\)$/#\1/' $DATA_DIR/postgresql.conf
+ sudo -u postgres echo "wal_level=logical" >> $DATA_DIR/postgresql.conf
+ su postgres -c "pg_ctl restart -D $DATA_DIR"
+ sleep 3
+ sudo -u postgres psql -c "GRANT ALL PRIVILEGES ON DATABASE mamonsu_test_db TO replicator;"
+ sudo -u postgres psql -d mamonsu_test_db -c "GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA public TO replicator;"
+ sudo -u postgres psql -d mamonsu_test_db -c "CREATE PUBLICATION mamonsu_publication;"
+ sudo -u postgres psql -d mamonsu_test_db -c "ALTER PUBLICATION mamonsu_publication ADD TABLE mamonsu_test_table;"
+ sudo -u postgres echo "host all all 127.0.0.1/0 trust" >> $DATA_SLAVE_LOGICAL_DIR/pg_hba.conf
+ sudo -u postgres echo "port=5434" >> $DATA_SLAVE_LOGICAL_DIR/postgresql.conf
+ su postgres -c "pg_ctl start -D $DATA_SLAVE_LOGICAL_DIR"
+ sleep 3
+ sudo -u postgres psql -p 5434 -c "CREATE DATABASE mamonsu_test_db;"
+ sudo -u postgres psql -p 5434 -d mamonsu_test_db -c "CREATE TABLE mamonsu_test_table(id serial, value integer);"
+ sudo -u postgres psql -p 5434 -d mamonsu_test_db -c "CREATE SUBSCRIPTION mamonsu_subscription CONNECTION 'host=127.0.0.1 port=5432 user=replicator dbname=mamonsu_test_db' PUBLICATION mamonsu_publication;"
+fi
+
+mamonsu bootstrap -x --user postgres -d mamonsu_test_db
+service mamonsu restart
+
+tail -f /dev/null
diff --git a/tests/service-scripts/mamonsu-pg/init_mamonsu_in_zbx.sh b/tests/service-scripts/mamonsu-pg/init_mamonsu_in_zbx.sh
new file mode 100644
index 0000000..543e525
--- /dev/null
+++ b/tests/service-scripts/mamonsu-pg/init_mamonsu_in_zbx.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+INIT_MARKER="/app/.init_done"
+if [ ! -f "$INIT_MARKER" ]; then
+ echo "[INFO] Exporting templates"
+ mamonsu export template template.xml
+ mamonsu zabbix template export template.xml
+
+ echo "[INFO] Adding host in Zabbix"
+ mamonsu zabbix host create "$(hostname)" \
+ "$(mamonsu zabbix hostgroup id "Zabbix servers")" \
+ "$(mamonsu zabbix template id "Mamonsu PostgreSQL Linux")" \
+ "$(getent hosts "$(hostname)" | awk '{print $1}')"
+ service mamonsu start
+
+ echo "[INFO] Waiting for host to appear in Zabbix"
+ sleep 5
+ touch "$INIT_MARKER"
+else
+ echo "[INFO] Initialization already done. Skipping Mamonsu setup"
+fi
+
diff --git a/tests/service-scripts/mamonsu-pg/mamonsu.conf b/tests/service-scripts/mamonsu-pg/mamonsu.conf
new file mode 100644
index 0000000..bf73789
--- /dev/null
+++ b/tests/service-scripts/mamonsu-pg/mamonsu.conf
@@ -0,0 +1,178 @@
+[agent]
+host = 0.0.0.0
+port = 10050
+
+[postgres]
+host = localhost
+port = 5432
+user = postgres
+password = postgres
+database = mamonsu_test_db
+application_name = mamonsu
+query_timeout = 10
+
+[zabbix]
+enabled = true
+address = zabbix
+port = 10051
+
+; ######### General parameters sections ############
+;
+; # enable or disable collection of system metrics.
+;
+[system]
+enabled = True
+;
+; # control the queue size of the data to be sent to the Zabbix server
+;
+[sender]
+queue = 2048
+;
+; # specify the location of mamonsu and whether it is allowed to access metrics from the command line
+;
+; ; [agent]
+; ; enabled = True
+; ; host = 127.0.0.1
+; ; port = 10052
+;
+; # specify custom plugins to be added for metrics collection
+
+[plugins]
+enabled = False
+directory = /etc/mamonsu/plugins
+
+; ######### Individual Plugin Sections ############
+;
+; # to disable any plugin set the enabled option to False.
+; # modify collection interval for each plugin in the interval field.
+; # set customer parameters for some plugins in the individual section.
+; # below listed all available parameters for each plugin to modify.
+
+[archivecommand]
+interval = 60
+
+; # Besides standard autovacuum workers count, mamonsu also counts autovacuum utilization.
+; # But this metric is instantaneous, so recommended to run this plugin frequently
+; # to get a complete picture of autovacuum utilization.
+[autovacuum]
+interval = 30
+
+[bgwriter]
+interval = 60
+
+[cfs]
+force_enable = False
+interval = 60
+
+[checkpoint]
+interval = 300
+
+[connections]
+interval = 60
+
+[databases]
+interval = 300
+
+[pghealth]
+interval = 60
+
+[instance]
+interval = 60
+;
+; # This plugin detects possible memory leaks while working with PostgreSQL using /proc/pid/status and /proc/pid/statm
+; # We use RES and SHR difference to calculate approximate volume of private anonymous backend memory.
+; # If it exceeds private_anon_mem_threshold then that pid will be added to a message. An example is presented below
+; # statm - 'pid: {pid}, RES {RES} - SHR {SHR} more then {private_anon_mem_threshold}\n'
+; # Since Linux 4.5 RssAnon, RssFile and RssShmem have been added.
+; # They allow distinguishing between types of memory such as private anonymous, file-backed, and shared anonymous memory.
+; # We are interested in RssAnon. If its value exceeds private_anon_mem_threshold then that pid will also be added to a message.
+; # By default this plugin is disabled. To enable this plugin, set "enabled = True" below.
+; # interval - monitoring frequency in seconds. 60 seconds by default.
+; # private_anon_mem_threshold - memory volume threshold after which we need an investigation about memory leak. 1GB by default.
+; # Possible values MB, GB, TB. For example 1GB
+[memoryleakdiagnostic]
+enabled = True
+interval = 15
+private_anon_mem_threshold = 1GB
+;
+[oldest]
+interval = 60
+
+[pgbuffercache]
+interval = 60
+
+[pglocks]
+interval = 60
+
+; # Get age (in seconds) of the oldest running prepared transaction and number of all prepared transactions for two-phase commit.
+; # https://www.postgresql.org/docs/current/sql-prepare-transaction.html
+; # https://www.postgresql.org/docs/12/view-pg-prepared-xacts.html
+; # max_prepared_transaction_time - age of prepared transaction in seconds.
+; # If pgsql.prepared.oldest exceeds max_prepared_transaction_time the trigger fires.
+[preparedtransaction]
+interval = 60
+;
+; # Get size of relations defined in this section
+; # Relations - comma separated list of objects - tables and indexes (database_name.schema.relation) used to calculate relations size.
+; # Example:
+; # relations=postgres.pg_catalog.pg_class,postgres.pg_catalog.pg_user
+; # If the relation is blocked by some process such as vacuum full or create index, the result will be -1
+; # By default this plugin is disabled. To enable this plugin, set "enabled = True" below and define a list of relations.
+[relationssize]
+enabled = True
+relations=postgres.pg_catalog.pg_class,mamonsu_test_db.mamonsu.config
+interval = 15
+
+[replication]
+interval = 60
+
+[statstatements]
+interval = 60
+
+[waitsampling]
+interval = 60
+
+[wal]
+interval = 60
+
+[disksizes]
+interval = 60
+
+[diskstats]
+interval = 60
+
+[la]
+interval = 60
+
+[memory]
+interval = 60
+
+[net]
+interval = 60
+
+[openfiles]
+interval = 60
+;
+; # Get size of backup catalogs storing all WAL and backup files using pg_probackup
+; # (https://github.com/postgrespro/pg_probackup)
+; # Trigger fires if some backup has bad status e.g. (ERROR,CORRUPT,ORPHAN).
+[pgprobackup]
+enabled = False
+interval = 300
+backup_dirs = /backup_dir1,/backup_dir2
+pg_probackup_path = /usr/bin/pg_probackup-11
+;
+[procstat]
+interval = 60
+
+[systemuptime]
+interval = 60
+
+[agentapi]
+interval = 60
+
+[logsender]
+interval = 2
+
+[zbxsender]
+interval = 10
diff --git a/tests/src/__init__.py b/tests/src/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/src/core/__init__.py b/tests/src/core/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/src/core/docker.py b/tests/src/core/docker.py
new file mode 100644
index 0000000..fac9b3a
--- /dev/null
+++ b/tests/src/core/docker.py
@@ -0,0 +1,59 @@
+import logging
+
+import docker
+
+from config.constants.containers import ContainersEnum
+
+
class DockerManager:
    """Thin wrapper around a single Docker container used by the test suite.

    Provides shell-command execution inside the container plus helpers for
    inspecting its environment/network settings and tearing it down.
    """

    def __init__(self, container_name: ContainersEnum) -> None:
        self._client = docker.from_env()
        self._container_name = container_name
        self._container = self._client.containers.get(container_name)
        self._logger = logging.getLogger(__name__)

    def __call__(self, command: str) -> tuple[int, str]:
        """Run *command* inside the container; return ``(exit_code, output)``."""
        return self._run_in_container(command)

    @property
    def env_vars(self) -> dict[str, str]:
        """Container environment variables parsed into a dict."""
        env_dict = {}
        for env_line in self._container.attrs['Config']['Env']:
            if '=' in env_line:
                # Split only on the first '=' so values containing '=' survive.
                key, value = env_line.split('=', 1)
                env_dict[key] = value
        return env_dict

    @property
    def ip_address(self) -> str | None:
        """IP address on the first network the container is attached to."""
        networks = self._container.attrs['NetworkSettings']['Networks']
        return list(networks.values())[0]['IPAddress']

    @property
    def hostname(self) -> ContainersEnum:
        """Name the manager was created with (also the container's hostname)."""
        return self._container_name

    def stop(self) -> None:
        """Stop the container; a container that is already gone is not an error."""
        try:
            self._container.stop()
        except docker.errors.NotFound:
            pass

    def remove(self) -> None:
        """Remove the container; a container that is already gone is not an error."""
        try:
            self._container.remove()
        except docker.errors.NotFound:
            pass

    def remove_image(self) -> None:
        """Remove the container's image.

        BUG FIX: docker-py's ``ImageCollection.remove`` takes the image
        reference as the ``image`` argument; the previous call used a
        non-existent ``image_id`` keyword and passed an ``Image`` object
        instead of its id, raising ``TypeError`` before anything happened.
        """
        try:
            self._client.images.remove(image=self._container.image.id)
        except docker.errors.NotFound:
            pass

    def _run_in_container(self, command: str) -> tuple[int, str]:
        """Execute *command* through bash inside the container and log the result."""
        self._logger.info(f"Command: {command}")
        exit_code, output = self._container.exec_run(["/bin/bash", "-c", command])
        formatted_output = output.decode('utf-8').strip()
        self._logger.info(f"Exited with code {exit_code}, output: {formatted_output}")
        return exit_code, formatted_output
diff --git a/tests/src/core/paths.py b/tests/src/core/paths.py
new file mode 100644
index 0000000..a610d70
--- /dev/null
+++ b/tests/src/core/paths.py
@@ -0,0 +1,17 @@
+from pathlib import Path
+
+
def _find_tests_root() -> Path:
    """Walk upward from this file until the tests root directory is found.

    The tests root is recognised by containing either ``debian.Dockerfile``
    or ``pytest.ini``.

    Raises:
        FileNotFoundError: if no ancestor directory carries a marker file.
    """
    here = Path(__file__).absolute()
    markers = ("debian.Dockerfile", "pytest.ini")
    for candidate in here.parents:
        if any((candidate / marker).exists() for marker in markers):
            return candidate
    raise FileNotFoundError("Project root not found")
+
+
class ProjectPaths:
    """Well-known filesystem locations used throughout the test suite."""

    # Tests directory, detected by marker files at import time.
    TESTS_ROOT = _find_tests_root()
    # Repository root, i.e. the parent of the tests directory.
    MAMONSU_ROOT = TESTS_ROOT.parent

    # docker-compose file that spins up the test environment.
    COMPOSE_FILE = TESTS_ROOT / "docker-compose.yaml"
    # Reference metric lists shipped with the repository's CI tests.
    METRICS_PATH = MAMONSU_ROOT / "github-actions-tests" / "sources"
diff --git a/tests/src/services/__init__.py b/tests/src/services/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/src/services/postgres.py b/tests/src/services/postgres.py
new file mode 100644
index 0000000..c679465
--- /dev/null
+++ b/tests/src/services/postgres.py
@@ -0,0 +1,89 @@
+from contextlib import contextmanager
+
+import psycopg2
+
+from config.config import Config
+from src.utils.logger import LoggerClass
+
+config = Config()
+
+
class PostgresManager:
    """Helper for running SQL against the test PostgreSQL instance.

    NOTE(review): SQL is built with f-strings, so identifiers/names must
    come from trusted test code only — never from user input.
    """

    def __init__(
        self,
        user=config.POSTGRES_USER,
        password=config.POSTGRES_PASSWORD,
        dbname=config.POSTGRES_DB,
        host=config.POSTGRES_EXT_HOST,
        port=config.POSTGRES_EXT_PORT,
    ):
        # NOTE(review): defaults are read once at import time from the
        # module-level Config instance — confirm that is intended.
        self._logger = LoggerClass(self.__class__.__name__)
        self.conn_params = {
            "user": user,
            "dbname": dbname,
            "password": password,
            "host": host,
            "port": port,
        }

    @contextmanager
    def connect(self, dbname: str | None = None):
        """Yield an autocommit connection, optionally to a different database."""
        params = self.conn_params.copy()
        if dbname:
            params["dbname"] = dbname
        conn = psycopg2.connect(**params)
        # autocommit so DDL like CREATE/DROP DATABASE runs outside a transaction.
        conn.autocommit = True
        try:
            yield conn
        finally:
            conn.close()

    def run_sql(self, sql: str, dbname: str | None = None) -> list[tuple]:
        """Execute *sql*; return fetched rows, or [] for row-less statements."""
        with self.connect(dbname) as conn:
            with conn.cursor() as cur:
                self._logger.debug(f"EXECUTING SQL: {sql}")
                cur.execute(sql)
                # cur.description is None for statements that return no rows.
                if cur.description:
                    return cur.fetchall()
                return []

    def user_exists(self, username: str) -> bool:
        """True if a role with the given name exists."""
        res = self.run_sql(f"SELECT 1 FROM pg_roles WHERE rolname = '{username}'")
        return bool(res)

    def drop_user(self, username: str, reassigned_to: str = "postgres"):
        """Drop a role, first reassigning/dropping owned objects in every DB.

        Objects must be reassigned (or dropped) per database before DROP ROLE
        will succeed, hence the loop over all non-template databases.
        """
        if not self.user_exists(username):
            return
        dbs = self.run_sql("SELECT datname FROM pg_database WHERE datname NOT IN ('template0','template1')")
        for (db,) in dbs:
            self.run_sql(f"REASSIGN OWNED BY {username} TO {reassigned_to}", db)
            self.run_sql(f"DROP OWNED BY {username}", db)
        self.run_sql(f"DROP ROLE {username}")

    def create_user(self, username: str):
        """Create a superuser role (fixed throwaway password, tests only)."""
        self.run_sql(f"CREATE USER {username} SUPERUSER PASSWORD 'your_password'")

    def create_database(self, dbname: str, owner: str):
        """Create a database owned by *owner*."""
        self.run_sql(f"CREATE DATABASE {dbname} OWNER {owner}")

    def drop_database(self, dbname: str):
        """Drop a database, terminating any other sessions connected to it first."""
        self.run_sql(f"""
            SELECT pg_terminate_backend(pid)
            FROM pg_stat_activity WHERE datname = '{dbname}' AND pid <> pg_backend_pid()
        """)
        self.run_sql(f"DROP DATABASE IF EXISTS {dbname}")

    def check_table_exists(self, table: str, schema: str = "mamonsu", dbname: str | None = None) -> bool:
        """True if schema.table exists in the given (or default) database."""
        result = self.run_sql(
            f"SELECT 1 FROM information_schema.tables WHERE table_schema = '{schema}' AND table_name = '{table}'",
            dbname,
        )
        return bool(result)

    def check_function_exists(self, function: str, schema: str = "mamonsu", dbname: str | None = None) -> bool:
        """True if schema.function exists in the given (or default) database."""
        result = self.run_sql(
            f"SELECT 1 FROM pg_proc p JOIN pg_namespace n ON n.oid = p.pronamespace "
            f"WHERE p.proname = '{function}' AND n.nspname = '{schema}'",
            dbname,
        )
        return bool(result)
diff --git a/tests/src/services/zabbix.py b/tests/src/services/zabbix.py
new file mode 100644
index 0000000..2a52782
--- /dev/null
+++ b/tests/src/services/zabbix.py
@@ -0,0 +1,159 @@
+from typing import Any
+
+from pyzabbix import ZabbixAPI
+
+from config.config import Config
+from src.utils.logger import LoggerClass
+
+config = Config()
+
+
class ZabbixManager:
    """Wrapper around the Zabbix API used by the test suite.

    Tracks every host/hostgroup/template created through it so they can be
    removed in bulk via :meth:`remove_entities` or context-manager exit.
    """

    def __init__(
        self,
        url: str = f"http://{config.ZABBIX_EXT_URL}/",
        username: str = config.ZABBIX_ADMIN_USER,
        password: str = config.ZABBIX_ADMIN_PASS,
    ):
        self.zbx = ZabbixAPI(url)
        self.zbx.login(username, password)
        self._logger = LoggerClass(self.__class__.__name__)

        # Ids of entities created through this manager; deleted on cleanup.
        self.host_ids = []
        self.hostgroup_ids = []
        self.template_ids = []

    @property
    def default_hostgroup_id(self) -> str:
        """Id of the hostgroup named by config.DEFAULT_HOSTGROUP."""
        return self.get_hostgroup_id(config.DEFAULT_HOSTGROUP)

    @property
    def default_template_id(self) -> str:
        """Id of the template named by config.DEFAULT_TEMPLATE."""
        return self.get_template_id(config.DEFAULT_TEMPLATE)

    def remove_entities(self) -> None:
        """Delete all tracked entities (hosts first so groups/templates free up)."""
        for host_id in self.host_ids:
            self.delete_host(host_id)

        for hostgroup_id in self.hostgroup_ids:
            self.delete_hostgroup(hostgroup_id)

        for template_id in self.template_ids:
            self.delete_template(template_id)

        # BUG FIX: clear the tracking lists so a second cleanup (e.g. an
        # explicit call followed by __exit__) does not re-delete stale ids.
        self.host_ids.clear()
        self.hostgroup_ids.clear()
        self.template_ids.clear()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Best-effort cleanup regardless of whether the block raised.
        self.remove_entities()

    def get_host(self, hostname: str) -> dict[str, Any] | None:
        """Return the host dict for *hostname*, or None if it does not exist.

        (Annotation fixed: the method has always returned None on a miss.)
        """
        hosts = self.zbx.host.get(
            filter={"host": hostname},
            selectInterfaces=["ip"],
            selectGroups=["groupid"],
            selectParentTemplates=["templateid"]
        )
        return hosts[0] if hosts else None

    def get_host_id(self, hostname: str) -> str | None:
        """Return the host id for *hostname*, or None if it does not exist."""
        host = self.get_host(hostname)
        return host.get("hostid") if host else None

    def list_hosts(self) -> list[dict[str, Any]]:
        """Return all hosts with their group and template ids."""
        return self.zbx.host.get(
            output=["hostid", "host"],
            selectGroups=["groupid"],
            selectParentTemplates=["templateid"]
        )

    def create_host(
        self,
        hostname: str,
        hostgroup_ids: list[str],
        template_ids: list[str],
        ip_address: str,
        port: int = 10050,
    ) -> str | None:
        """Create an agent-interface host and track its id for cleanup."""
        self._logger.info(f"Creating host: {hostname}")
        # type=1 / main=1 / useip=1: primary Zabbix-agent interface by IP.
        interfaces = [{
            "type": 1,
            "main": 1,
            "useip": 1,
            "ip": ip_address,
            "dns": "",
            "port": str(port)
        }]

        groups = [{"groupid": gid} for gid in hostgroup_ids]
        templates = [{"templateid": tid} for tid in template_ids]
        data = {
            "host": hostname,
            "name": hostname,
            "interfaces": interfaces,
            "groups": groups,
            "templates": templates
        }

        host_id = self.zbx.host.create(data)['hostids'][0]
        self.host_ids.append(host_id)
        return host_id

    def delete_host(self, host_id: str) -> bool:
        """Best-effort host deletion; returns False (and logs) on failure."""
        try:
            self._logger.info(f"Deleting host ID: {host_id}")
            self.zbx.host.delete(host_id)
            return True
        except Exception as e:
            self._logger.warning(f"Failed to delete host: {str(e)}")
            return False

    def list_hostgroups(self) -> list[dict[str, Any]]:
        """Return all hostgroups (id and name only)."""
        return self.zbx.hostgroup.get(output=["groupid", "name"])

    def get_hostgroup(self, name: str) -> dict[str, Any] | None:
        """Return the hostgroup dict for *name*, or None if absent."""
        hostgroups = self.zbx.hostgroup.get(
            filter={"name": name},
            output=["groupid", "name"]
        )
        return hostgroups[0] if hostgroups else None

    def get_hostgroup_id(self, name: str) -> str | None:
        """Return the hostgroup id for *name*, or None if absent."""
        hostgroup = self.get_hostgroup(name)
        return hostgroup.get("groupid") if hostgroup else None

    def create_hostgroup(self, name: str) -> str | None:
        """Create a hostgroup and track its id for cleanup."""
        self._logger.info(f"Creating hostgroup: {name}")
        data = self.zbx.hostgroup.create({"name": name})
        hostgroup_id = data["groupids"][0]
        self.hostgroup_ids.append(hostgroup_id)
        return hostgroup_id

    def delete_hostgroup(self, group_id: str) -> bool:
        """Best-effort hostgroup deletion; returns False (and logs) on failure."""
        try:
            self._logger.info(f"Deleting hostgroup ID: {group_id}")
            self.zbx.hostgroup.delete(group_id)
            return True
        except Exception as e:
            self._logger.warning(f"Failed to delete hostgroup: {str(e)}")
            return False

    def get_template(self, name: str) -> dict[str, Any] | None:
        """Return the template dict for *name*, or None if absent."""
        templates = self.zbx.template.get(filter={"host": name})
        return templates[0] if templates else None

    def get_template_id(self, name: str) -> str | None:
        """Return the template id for *name*, or None if absent."""
        template = self.get_template(name)
        return template.get("templateid") if template else None

    def list_templates(self) -> list[dict[str, Any]]:
        """Return all templates (id and technical name only)."""
        return self.zbx.template.get(output=["templateid", "host"])

    def delete_template(self, template_id: str) -> bool:
        """Best-effort template deletion; returns False (and logs) on failure."""
        try:
            self._logger.info(f"Deleting template ID: {template_id}")
            self.zbx.template.delete(template_id)
            return True
        except Exception as e:
            self._logger.warning(f"Failed to delete template: {str(e)}")
            return False
diff --git a/tests/src/utils/__init__.py b/tests/src/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/src/utils/logger.py b/tests/src/utils/logger.py
new file mode 100644
index 0000000..cd24f27
--- /dev/null
+++ b/tests/src/utils/logger.py
@@ -0,0 +1,40 @@
+import logging
+from logging import DEBUG, Formatter, StreamHandler, basicConfig, getLogger
+from sys import stdout
+from typing import Union
+
+
class LoggerClass:
    """Convenience wrapper around the stdlib ``logging`` module.

    Instantiating it configures the root logging machinery (effective only
    on the first call, since ``basicConfig`` is a no-op once handlers are
    installed) and exposes the usual level-specific helpers.
    """

    def __init__(self, logger_name: str, level: Union[str, int] = "INFO"):
        fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"

        handler = StreamHandler(stdout)
        handler.setLevel(DEBUG)
        handler.setFormatter(Formatter(fmt))

        basicConfig(
            encoding="utf-8",
            format=fmt,
            handlers=[handler],
        )

        self.logger = getLogger(logger_name)
        self.set_level(level)

    def get_logger(self) -> logging.Logger:
        """Return the underlying stdlib logger."""
        return self.logger

    def set_level(self, level: Union[str, int] = "INFO") -> None:
        """Set the threshold of the wrapped logger."""
        self.logger.setLevel(level)

    def info(self, msg: str) -> None:
        self.logger.info(msg)

    def debug(self, msg: str) -> None:
        self.logger.debug(msg)

    def warning(self, msg: str) -> None:
        self.logger.warning(msg)

    def error(self, msg: Union[str, Exception]) -> None:
        self.logger.error(msg)
diff --git a/tests/tests/__init__.py b/tests/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/tests/bash/__init__.py b/tests/tests/bash/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/tests/bash/test_agent.py b/tests/tests/bash/test_agent.py
new file mode 100644
index 0000000..86469c4
--- /dev/null
+++ b/tests/tests/bash/test_agent.py
@@ -0,0 +1,44 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+
+
class TestAgentSuite:
    """Smoke-tests for the `mamonsu agent` CLI subcommands.

    Each test is run twice: with default configuration and with an explicit
    `-c /etc/mamonsu/agent.conf` option.
    """

    @pytest.mark.bash
    @pytest.mark.parametrize(
        "command",
        (
            "",
            "-c /etc/mamonsu/agent.conf"
        )
    )
    def test_agent_version(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, command):
        cmd = f"mamonsu agent version {command}"
        exit_code, _ = mamonsu_container(cmd)
        assert exit_code == 0

    @pytest.mark.bash
    @pytest.mark.parametrize(
        "command",
        (
            "",
            " -c /etc/mamonsu/agent.conf"
        )
    )
    def test_agent_metric_get_disk_all_read(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, command):
        cmd = f"mamonsu agent metric-get system.disk.all_read[] {command}"
        exit_code, _ = mamonsu_container(cmd)
        assert exit_code == 0

    @pytest.mark.bash
    @pytest.mark.parametrize(
        "command",
        (
            "",
            " -c /etc/mamonsu/agent.conf"
        )
    )
    def test_agent_metric_list(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, command):
        cmd = f"mamonsu agent metric-list {command}"
        exit_code, _ = mamonsu_container(cmd)
        assert exit_code == 0
diff --git a/tests/tests/bash/test_bootstrap/__init__.py b/tests/tests/bash/test_bootstrap/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/tests/bash/test_bootstrap/test_bootstrap.py b/tests/tests/bash/test_bootstrap/test_bootstrap.py
new file mode 100644
index 0000000..8349070
--- /dev/null
+++ b/tests/tests/bash/test_bootstrap/test_bootstrap.py
@@ -0,0 +1,96 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+ from src.services.postgres import PostgresManager
+ from config.config import Config
+
+
class TestZabbixCliDashboardTemplateSuite:
    """Bootstrap tests: verify `mamonsu bootstrap` creates the expected
    schema objects (tables and functions in the `mamonsu` schema).

    NOTE(review): the class name duplicates the suite in
    test_template_dashboard.py and does not reflect this file's purpose —
    consider renaming to something like TestBootstrapSuite.
    """

    @staticmethod
    def mamonsu_version(mamonsu_container: 'DockerManager') -> str:
        """Return the installed mamonsu version with dots turned into underscores."""
        _, output = mamonsu_container(
            f'mamonsu --version'
        )
        version = output.split()[1].strip()
        version = version.replace(".", "_")
        return version

    @staticmethod
    def check_db_objects(pg: 'PostgresManager', dbname: str, version: str):
        """Assert that bootstrap created the mamonsu tables and functions."""
        assert pg.check_table_exists("config", dbname=dbname)
        assert pg.check_table_exists(f"timestamp_master_{version}", dbname=dbname)

        functions = [
            "archive_command_files", "archive_stat", "buffer_cache",
            "count_autovacuum", "count_wal_files", "count_wal_lag_lsn",
            # upstream may use "xlog" instead of "wal", but that is only relevant for PG < 10
            "get_connections_states", "get_oldest_transaction", "get_oldest_xid",
            "get_sys_param", "pg_buffercache_pages", "prepared_transaction",
            "timestamp_get", "timestamp_master_update"
        ]

        for func in functions:
            assert pg.check_function_exists(func, dbname=dbname)

    @pytest.mark.bash
    @pytest.mark.parametrize("db_name", ("mamonsu_test", "test_db"))
    def test_mamonsu_bootstrap_postgres(  # TODO: teardown needed — created databases are left behind
        self,
        mamonsu_container: 'DockerManager',
        postgres: 'PostgresManager',
        config: 'Config',
        db_name: str
    ) -> None:
        """Bootstrap into a freshly created database as the default PG user."""
        postgres.drop_user("mamonsu")
        postgres.create_database(db_name, config.POSTGRES_USER)

        exit_code, _ = mamonsu_container(
            f'mamonsu bootstrap -x -U {config.POSTGRES_USER} -d {db_name} --password {config.POSTGRES_PASSWORD}'
        )
        assert exit_code == 0
        self.check_db_objects(postgres, db_name, self.mamonsu_version(mamonsu_container))

    @pytest.mark.bash
    def test_mamonsu_bootstrap_custom_user(
        self,
        mamonsu_container: 'DockerManager',
        postgres: 'PostgresManager',
    ):
        """Bootstrap as a freshly created superuser into its own database."""
        db = user = "test_superuser"
        # Clean up any leftovers from previous runs before creating fixtures.
        postgres.drop_user("mamonsu")
        postgres.drop_database(db)
        postgres.drop_user(user)
        postgres.create_user(user)
        postgres.create_database(db, user)

        exit_code, _ = mamonsu_container(f"mamonsu bootstrap -x -U {user} -d {db}")
        assert exit_code == 0
        self.check_db_objects(postgres, db, self.mamonsu_version(mamonsu_container))

        # Teardown: remove the bootstrap role and the test fixtures.
        postgres.drop_user("mamonsu")
        postgres.drop_user(user)
        postgres.drop_database(db)

    @pytest.mark.bash
    def test_mamonsu_bootstrap_custom_user_custom_host(
        self,
        mamonsu_container: 'DockerManager',
        postgres: 'PostgresManager'
    ):
        """Same as above, but with the host/port passed explicitly on the CLI."""
        db = user = "test_superuser"
        postgres.drop_user("mamonsu")
        postgres.drop_database(db)
        postgres.drop_user(user)
        postgres.create_user(user)
        postgres.create_database(db, user)

        exit_code, _ = mamonsu_container(f"mamonsu bootstrap -x -U {user} -d {db} -h localhost -p 5432")
        assert exit_code == 0
        self.check_db_objects(postgres, db, self.mamonsu_version(mamonsu_container))

        postgres.drop_user("mamonsu")
        postgres.drop_user(user)
        postgres.drop_database(db)
diff --git a/tests/tests/bash/test_export.py b/tests/tests/bash/test_export.py
new file mode 100644
index 0000000..6de8392
--- /dev/null
+++ b/tests/tests/bash/test_export.py
@@ -0,0 +1,46 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+ from config.config import Config
+
+
class TestExportSuite:
    """Checks that `mamonsu export ...` subcommands produce the expected files."""

    @staticmethod
    def _assert_created(mamonsu_container: 'DockerManager', filename: str) -> None:
        # "test -f" is used because `file` exits 0 even for missing paths.
        exit_code, _ = mamonsu_container(f"test -f {filename}")
        assert exit_code == 0

    @pytest.mark.bash
    def test_export_config(self, mamonsu_container: 'DockerManager') -> None:
        exit_code, _ = mamonsu_container("mamonsu export config mamonsu.conf --add-plugins=/etc/mamonsu/plugins")
        assert exit_code == 0
        self._assert_created(mamonsu_container, "mamonsu.conf")

    @pytest.mark.bash
    def test_export_template(self, mamonsu_container: 'DockerManager') -> None:
        exit_code, _ = mamonsu_container("mamonsu export template template.xml --add-plugins=/etc/mamonsu/plugins")
        assert exit_code == 0
        self._assert_created(mamonsu_container, "template.xml")

    @pytest.mark.bash
    def test_export_zabbix_params(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        config: 'Config'
    ) -> None:
        cmd = (
            "mamonsu export zabbix-parameters zabbix.conf"
            " --add-plugins=/etc/mamonsu/plugins --config=/etc/mamonsu/agent.conf"
            f" --pg-version={config.POSTGRES_VERSION}"
        )
        exit_code, _ = mamonsu_container(cmd)
        assert exit_code == 0
        self._assert_created(mamonsu_container, "zabbix.conf")

    @pytest.mark.bash
    def test_export_zabbix_template(self, mamonsu_container: 'DockerManager') -> None:
        cmd = (
            'mamonsu export zabbix-template zabbix_template.xml'
            ' --template-name="mamonsu-zabbix" --add-plugins=/etc/mamonsu/plugins'
            ' --config=/etc/mamonsu/agent.conf'
        )
        exit_code, _ = mamonsu_container(cmd)
        assert exit_code == 0
        self._assert_created(mamonsu_container, "zabbix_template.xml")
diff --git a/tests/tests/bash/test_metrics.py b/tests/tests/bash/test_metrics.py
new file mode 100644
index 0000000..aaa2fe1
--- /dev/null
+++ b/tests/tests/bash/test_metrics.py
@@ -0,0 +1,52 @@
+import time
+from typing import TYPE_CHECKING
+
+import pytest
+
+from config.config import Config
+from src.core.paths import ProjectPaths
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+ from src.services.postgres import PostgresManager
+
+
class TestMetricsSuite:
    """End-to-end check that every metric from the reference list is served."""

    @staticmethod
    def get_metrics_list() -> list[str]:
        """Load the expected metric names for the current PostgreSQL version."""
        pg_ver = Config().POSTGRES_VERSION
        # NOTE(review): this fallback only fires if POSTGRES_VERSION is an
        # int; if Config stores it as a string, `pg_ver == 15` is never
        # true — confirm the config type.
        pg_ver = 14 if pg_ver == 15 else pg_ver  # We have no specific metric list for 15 ver of PG
        metrics_list = []
        with open(ProjectPaths.METRICS_PATH / f"metrics-linux-{pg_ver}.txt", 'r') as metrics_file:
            metrics_list = metrics_file.readlines()
        return metrics_list

    @pytest.mark.bash
    def test_metrics(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        postgres: 'PostgresManager'
    ) -> None:
        """Trigger WAL activity, wait one collection cycle, probe every metric."""
        # Force a WAL segment switch; the function is looked up dynamically
        # because it is named pg_switch_xlog on PG < 10 and pg_switch_wal on 10+.
        postgres.run_sql(
            """
            DO
            $do$
            DECLARE
            func_name varchar;
            BEGIN
            SELECT proname INTO func_name FROM pg_proc WHERE proname LIKE 'pg_switch_%';
            EXECUTE FORMAT('SELECT %s();', func_name);
            END
            $do$;
            """
        )
        # Give mamonsu time to collect at least one full round of metrics.
        time.sleep(120)
        bad_codes = []
        for metric in self.get_metrics_list():
            # grep keeps only lines that look like a metric value; a non-zero
            # exit means the agent returned nothing for this metric.
            exit_code, output = mamonsu_container(
                f'mamonsu agent metric-get {metric.strip()} | grep "pgsql\|sys\|mamonsu"'
            )
            if exit_code != 0:
                bad_codes.append(metric)
        # Collect all failures first so the assertion reports every bad metric.
        assert not bad_codes
diff --git a/tests/tests/bash/test_report.py b/tests/tests/bash/test_report.py
new file mode 100644
index 0000000..e7dc09c
--- /dev/null
+++ b/tests/tests/bash/test_report.py
@@ -0,0 +1,25 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+
+
class TestReportSuite:
    """Smoke-tests `mamonsu report` with a variety of CLI options."""

    @pytest.mark.bash
    @pytest.mark.parametrize(
        "params",
        (
            None,
            " --port 5433",
            " --run-system=false",
            " --run-postgres=false",
            " --disable-sudo",
            " -w rep1.txt",
            " --report-path=rep2.txt",
        )
    )
    def test_report(self, mamonsu_container: 'DockerManager', params) -> None:
        suffix = '' if params is None else params
        exit_code, _ = mamonsu_container("mamonsu report" + suffix)
        assert exit_code == 0
diff --git a/tests/tests/bash/test_zabbix_cli/__init__.py b/tests/tests/bash/test_zabbix_cli/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/tests/bash/test_zabbix_cli/conftest.py b/tests/tests/bash/test_zabbix_cli/conftest.py
new file mode 100644
index 0000000..fac3e3a
--- /dev/null
+++ b/tests/tests/bash/test_zabbix_cli/conftest.py
@@ -0,0 +1,17 @@
+import pytest
+from typing import TYPE_CHECKING
+
+from tests.conftest import mamonsu_container
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+
+
@pytest.fixture()
def zabbix_options(mamonsu_container: 'DockerManager') -> str:
    """CLI credential options for `mamonsu zabbix ...`, read from the container env."""
    env = mamonsu_container.env_vars
    url = env["ZABBIX_URL"]
    user = env["ZABBIX_USER"]
    password = env["ZABBIX_PASSWD"]
    return f"--url={url} --user={user} --password={password}"
diff --git a/tests/tests/bash/test_zabbix_cli/test_host.py b/tests/tests/bash/test_zabbix_cli/test_host.py
new file mode 100644
index 0000000..efc287f
--- /dev/null
+++ b/tests/tests/bash/test_zabbix_cli/test_host.py
@@ -0,0 +1,100 @@
+import uuid
+from typing import TYPE_CHECKING
+
+import pytest
+
+from config.constants.containers import ContainersEnum
+from tests.conftest import mamonsu_container
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+ from src.services.zabbix import ZabbixManager
+
+
class TestZabbixCliHostSuite:
    """CLI checks for `mamonsu zabbix ... host <subcommand>`.

    The `| grep ... || exit 11` pattern turns a missing expected substring
    into exit code 11, distinguishing "wrong output" from "command failed".
    """

    @pytest.mark.bash
    def test_host_list(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, zabbix_options) -> None:
        """`host list` must mention this container's own host entry."""
        exit_code, output = mamonsu_container(
            f"mamonsu zabbix {zabbix_options} host list"
        )
        assert exit_code == 0
        # NOTE(review): relies on ContainersEnum members supporting `in str`
        # — confirm it is a str-based enum.
        assert ContainersEnum.MAMONSU in output

    @pytest.mark.bash
    def test_host_show(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, zabbix_options) -> None:
        """`host show <name>` must echo the host name back."""
        exit_code, output = mamonsu_container(
            f"mamonsu zabbix {zabbix_options} host show $(hostname) | grep $(hostname) || exit 11"
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_host_id(self, mamonsu_container: 'DockerManager', init_mamonsu_in_zbx, zabbix_options) -> None:
        """`host id <name>` must print a purely numeric id."""
        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} host id $(hostname) | grep -x -E "[[:digit:]]+" || exit 11'
        )
        assert exit_code == 0

    @pytest.mark.parametrize(
        "params",
        (
            "templates",
            "hostgroups",
            "graphs",
            "items"
        )
    )
    @pytest.mark.bash
    def test_host_info(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
        zabbix: 'ZabbixManager',
        params,
    ) -> None:
        """`host info <section> <id>` must reference the host in each section."""
        host_id = zabbix.get_host_id(mamonsu_container.hostname)
        exit_code, output = mamonsu_container(
            f"mamonsu zabbix {zabbix_options} host info {params} {host_id} | grep $(hostname) || exit 11"
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_host_create(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix: 'ZabbixManager',
        zabbix_options: str,
    ) -> None:
        """Create a host via the CLI, then verify it resolves to a numeric id.

        NOTE(review): the created host is not removed afterwards — consider
        adding teardown so repeated runs start clean.
        """
        new_host = 'test_create'
        hostgroup_id = zabbix.default_hostgroup_id
        template_id = zabbix.default_template_id

        exit_code, output = mamonsu_container(
            f"mamonsu zabbix {zabbix_options} host create {new_host!r} {hostgroup_id} {template_id} {mamonsu_container.ip_address}"
        )
        assert exit_code == 0

        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} host id {new_host!r} | grep -x -E "[[:digit:]]+" || exit 11'
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_host_delete(
        self,
        mamonsu_container: 'DockerManager',
        zabbix_options: str,
        init_mamonsu_in_zbx,
        zabbix: 'ZabbixManager',
    ) -> None:
        """Create a host via the API, delete it via the CLI, check the response.

        The `with zabbix` context also triggers ZabbixManager cleanup on exit;
        since the CLI already deleted the host, that cleanup is a harmless
        best-effort pass (it logs a warning at most).
        """
        with zabbix as zbx:
            hostgroup_ids = zbx.list_hostgroups()[0]['groupid']
            template_ids = zbx.list_templates()[0]['templateid']

            host = zbx.create_host(str(uuid.uuid4()), [hostgroup_ids], [template_ids], mamonsu_container.ip_address)

            exit_code, output = mamonsu_container(
                f'mamonsu zabbix {zabbix_options} host delete {host} | grep "hostids.*{host}" || exit 11'
            )
            assert exit_code == 0
diff --git a/tests/tests/bash/test_zabbix_cli/test_hostgroup.py b/tests/tests/bash/test_zabbix_cli/test_hostgroup.py
new file mode 100644
index 0000000..6f1ee57
--- /dev/null
+++ b/tests/tests/bash/test_zabbix_cli/test_hostgroup.py
@@ -0,0 +1,73 @@
+import uuid
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+ from src.core.docker import DockerManager
+ from src.services.zabbix import ZabbixManager
+
+
class TestZabbixCliHostgroupSuite:
    """CLI checks for `mamonsu zabbix ... hostgroup <subcommand>`.

    The `| grep ... || exit 11` pattern turns a missing expected substring
    into exit code 11, distinguishing "wrong output" from "command failed".
    """

    @pytest.mark.bash
    def test_hostgroup_list(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
    ) -> None:
        """`hostgroup list` must include the stock "Linux servers" group."""
        exit_code, output = mamonsu_container(
            f"mamonsu zabbix {zabbix_options} hostgroup list | grep Linux || exit 11"
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_hostgroup_show(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
    ) -> None:
        """`hostgroup show <name>` must echo the group name back."""
        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} hostgroup show "Linux servers" | grep Linux || exit 11'
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_hostgroup_id(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
    ) -> None:
        """`hostgroup id <name>` must print a purely numeric id."""
        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} hostgroup id "Linux servers" | grep -x -E "[[:digit:]]+" || exit 11'
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_hostgroup_create(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
    ) -> None:
        """Create a uniquely named hostgroup via the CLI.

        NOTE(review): the created group is not removed afterwards — consider
        adding teardown.
        """
        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} hostgroup create "{str(uuid.uuid4())}"'
        )
        assert exit_code == 0

    @pytest.mark.bash
    def test_hostgroup_delete(
        self,
        mamonsu_container: 'DockerManager',
        init_mamonsu_in_zbx,
        zabbix_options: str,
        zabbix: 'ZabbixManager'
    ) -> None:
        """Create a hostgroup via the API, delete it via the CLI, check response."""
        hostgroup_id = zabbix.create_hostgroup("test")

        exit_code, output = mamonsu_container(
            f'mamonsu zabbix {zabbix_options} hostgroup delete {hostgroup_id} | grep "groupids.*{hostgroup_id}" || exit 11'
        )
        assert exit_code == 0
diff --git a/tests/tests/bash/test_zabbix_cli/test_item.py b/tests/tests/bash/test_zabbix_cli/test_item.py
new file mode 100644
index 0000000..cd8512a
--- /dev/null
+++ b/tests/tests/bash/test_zabbix_cli/test_item.py
@@ -0,0 +1,30 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+if TYPE_CHECKING:
+    from src.core.docker import DockerManager
+
+
+class TestZabbixCliItemSuite:
+    """Tests for the `mamonsu zabbix ... item <subcommand>` CLI commands."""
+
+    @pytest.mark.parametrize(
+        "params",
+        (
+            "error",
+            "lastvalue",
+            "lastclock",
+        )
+    )
+    @pytest.mark.bash
+    def test_item(
+        self,
+        mamonsu_container: 'DockerManager',
+        params: str,
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """Each `item <params>` query against this host must exit with code 0."""
+        exit_code, output = mamonsu_container(f"mamonsu zabbix {zabbix_options} item {params} $(hostname)")
+        assert exit_code == 0
diff --git a/tests/tests/bash/test_zabbix_cli/test_template_dashboard.py b/tests/tests/bash/test_zabbix_cli/test_template_dashboard.py
new file mode 100644
index 0000000..1dfe306
--- /dev/null
+++ b/tests/tests/bash/test_zabbix_cli/test_template_dashboard.py
@@ -0,0 +1,125 @@
+from typing import TYPE_CHECKING
+
+import pytest
+
+from config.config import Config
+
+if TYPE_CHECKING:
+    from src.core.docker import DockerManager
+
+
+class TestZabbixCliDashboardTemplateSuite:
+    """Tests for the `mamonsu zabbix ... dashboard` and `... template` CLI commands."""
+
+    default_template = Config().DEFAULT_TEMPLATE
+
+    @pytest.mark.bash
+    def test_dashboard_upload(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """Uploading the default mamonsu dashboard must succeed."""
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} dashboard upload "{self.default_template}" | grep "True\\|Mamonsu dashboard" || exit 11'
+        )
+        assert exit_code == 0
+
+    @pytest.mark.bash
+    def test_template_list(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """`template list` must include the default template."""
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template list | grep "{self.default_template}" || exit 11'
+        )
+        assert exit_code == 0
+
+    @pytest.mark.bash
+    def test_template_show(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """`template show <name>` must print the default template."""
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template show "{self.default_template}" | grep "{self.default_template}" || exit 11'
+        )
+        assert exit_code == 0
+
+    @pytest.mark.bash
+    def test_template_id(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """`template id <name>` must print a purely numeric id."""
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template id "{self.default_template}" | grep -x -E "[[:digit:]]+" || exit 11'
+        )
+        assert exit_code == 0
+
+    @pytest.mark.bash
+    def test_template_export_import(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """A locally exported template must import into Zabbix and resolve to a numeric id."""
+        exit_code, template_id = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template id "{self.default_template}"'
+        )
+        if exit_code == 0:
+            mamonsu_container(f'mamonsu zabbix {zabbix_options} template delete {template_id.strip()}')
+
+        exit_code, output = mamonsu_container(
+            'mamonsu export template template.xml --template-name="mamonsu-zabbix"'
+        )
+        assert exit_code == 0
+
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template export template.xml'
+        )
+        assert exit_code == 0
+
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template id "mamonsu-zabbix" | grep -x -E "[[:digit:]]+" || exit 11'
+        )
+        assert exit_code == 0
+
+        exit_code, template_id = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template id "mamonsu-zabbix"'
+        )
+        mamonsu_container(f'mamonsu zabbix {zabbix_options} template delete {template_id.strip()}')
+        mamonsu_container('rm -f template.xml')
+
+    @pytest.mark.bash
+    def test_template_delete(
+        self,
+        mamonsu_container: 'DockerManager',
+        zabbix_options: str,
+        init_mamonsu_in_zbx,
+    ) -> None:
+        """Deleting an imported template must report its templateid."""
+        mamonsu_container('mamonsu export template template.xml --template-name="test-template"')
+        mamonsu_container(f'mamonsu zabbix {zabbix_options} template export template.xml')
+
+        exit_code, template_id = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template id "test-template"'
+        )
+        assert exit_code == 0
+        template_id = template_id.strip()
+
+        exit_code, output = mamonsu_container(
+            f'mamonsu zabbix {zabbix_options} template delete {template_id} | grep "templateids.*{template_id}" || exit 11'
+        )
+        assert exit_code == 0
+
+        mamonsu_container('rm -f template.xml')
diff --git a/tests/tests/conftest.py b/tests/tests/conftest.py
new file mode 100644
index 0000000..f249b48
--- /dev/null
+++ b/tests/tests/conftest.py
@@ -0,0 +1,84 @@
+"""Shared pytest fixtures: compose stack lifecycle, mamonsu container, Zabbix/Postgres helpers."""
+import os
+import subprocess
+from typing import Iterator
+
+import pytest
+
+from config.config import Config
+from config.constants.containers import ContainersEnum
+from src.core.docker import DockerManager
+from src.core.paths import ProjectPaths
+from src.services.postgres import PostgresManager
+from src.services.zabbix import ZabbixManager
+from src.utils.logger import LoggerClass
+
+logger = LoggerClass(__name__)
+
+
+@pytest.fixture(scope="session")
+def config() -> Config:
+    """Session-wide test configuration."""
+    return Config()
+
+
+@pytest.fixture(scope="package")
+def init_mamonsu_in_zbx(mamonsu_container: DockerManager) -> None:
+    """Register mamonsu in Zabbix once per test package via the init script."""
+    exit_code, _ = mamonsu_container('./app/init_mamonsu_in_zbx.sh')
+    assert exit_code == 0, "Mamonsu initialization didn't complete successfully"
+
+
+@pytest.fixture(scope="package")
+def mamonsu_container(docker_compose) -> Iterator[DockerManager]:
+    """Yield a manager for the mamonsu container; stop and remove it on teardown."""
+    container = DockerManager(ContainersEnum.MAMONSU)
+    yield container
+    container.stop()
+    container.remove()
+
+
+@pytest.fixture(scope="session", params=os.getenv('POSTGRES_VERSIONS', str(Config().POSTGRES_VERSION)).split(','))
+def docker_compose(config: Config, request) -> Iterator[None]:
+    """Bring the compose stack up for each requested PostgreSQL version; tear it down afterwards."""
+    # Best effort: remove a stale image so the stack is rebuilt; failure is fine if absent.
+    subprocess.run(
+        ["docker", "rmi", f"{ContainersEnum.MAMONSU}:latest"]
+    )
+    os.environ["POSTGRES_VERSION"] = str(request.param)
+
+    subprocess.run(
+        [
+            "docker-compose",
+            "-f", ProjectPaths.COMPOSE_FILE,
+            "--project-directory", ProjectPaths.MAMONSU_ROOT,
+            "up",
+            "-d",
+            "--wait",
+        ],
+        check=True,
+    )
+    yield
+    subprocess.run(
+        [
+            "docker-compose",
+            "-f", ProjectPaths.COMPOSE_FILE,
+            "--project-directory", ProjectPaths.MAMONSU_ROOT,
+            "down",
+        ],
+        check=True,
+    )
+
+
+@pytest.fixture()
+def zabbix() -> Iterator[ZabbixManager]:
+    """Yield a Zabbix API manager; remove any entities it created on teardown."""
+    zbx = ZabbixManager()
+    yield zbx
+    zbx.remove_entities()
+
+
+@pytest.fixture()
+def postgres() -> Iterator[PostgresManager]:
+    """Yield a PostgreSQL manager for direct database access."""
+    yield PostgresManager()