Compare commits
12 commits: c1d70cd9b6...main

cf6d8712fb
17c31ce6c9
d6ecb9e4c9
65f732e497
4690d13cb4
41692b7dc1
7eafb1e68e
11aad79720
64e1ca124f
f1d10458c6
1e3999eee2
2ddcc00cda
.gitignore (vendored): 6 changes

@@ -1,8 +1,4 @@
-bin
-include
-lib
-lib64
-pyvenv.cfg
 __pycache__
 dist/
 kumacli.egg-info
+.pytest_cache
Makefile: 11 changes

@@ -13,13 +13,8 @@ help:
 # Clean build artifacts
 clean:
 	@echo "Cleaning build artifacts..."
-	rm -rf build/
-	rm -rf dist/
-	rm -rf src/kumacli.egg-info/
-	rm -rf src/kumacli/__pycache__/
-	rm -rf src/kumacli/cmd/__pycache__/
-	find . -name "*.pyc" -delete
-	find . -name "*.pyo" -delete
+	rm -rf build/ dist/ src/kumacli.egg-info/
+	find . -name "*.pyc" -o -name "*.pyo" -delete
+	find . -name "__pycache__" -type d -exec rm -rf {} + 2>/dev/null || true
 	@echo "Clean complete."

@@ -42,7 +37,7 @@ test-deps:
 # Test the package
 test:
 	@echo "Running tests..."
-	python3 run_tests.py
+	python3 -m pytest tests/ -v --tb=short

 # Rebuild and reinstall (useful during development)
 dev: clean build
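Note on the consolidated clean target: in `find . -name "*.pyc" -o -name "*.pyo" -delete`, find's implicit "and" binds tighter than `-o`, so `-delete` applies only to the `*.pyo` branch; files matching `*.pyc` are evaluated but not deleted. Grouping the tests, as in `find . \( -name "*.pyc" -o -name "*.pyo" \) -delete`, would apply the action to both patterns.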
README.md: 15 changes

@@ -29,6 +29,9 @@ kumacli --url http://localhost:3001 --username admin --password password monitor
 ### Monitor Commands

 ```bash
+# Show available subcommands
+kumacli monitor
+
 # List all monitors
 kumacli monitor list

@@ -40,11 +43,23 @@ kumacli monitor list --group "production*"

 # Combine filters
 kumacli monitor list --monitor "*api*" --group "web*"
+
+# Pause monitors
+kumacli monitor pause --monitor "*api*"
+kumacli monitor pause --group "production*"
+
+# Resume monitors
+kumacli monitor resume --monitor "*api*"
+kumacli monitor resume --group "production*"
+kumacli monitor resume --all
 ```

 ### Maintenance Commands

 ```bash
+# Show available subcommands
+kumacli maintenance
+
 # Create maintenance for specific monitors (90 minutes, starting now)
 kumacli maintenance add --monitor "*nextcloud*"
pytest.ini: 14 changes (file deleted)

@@ -1,14 +0,0 @@
-[tool:pytest]
-testpaths = tests
-python_files = test_*.py
-python_classes = Test*
-python_functions = test_*
-pythonpath = src
-addopts =
-    -v
-    --tb=short
-    --strict-markers
-    --disable-warnings
-filterwarnings =
-    ignore::DeprecationWarning
-    ignore::PendingDeprecationWarning
run_tests.py: 50 changes (file deleted)

@@ -1,50 +0,0 @@
-#!/usr/bin/env python3
-
-"""
-Test runner script for kumacli
-
-Usage:
-    python run_tests.py                     # Run all tests
-    python run_tests.py --cov               # Run tests with coverage
-    python run_tests.py tests/test_info.py  # Run specific test file
-"""
-
-import sys
-import subprocess
-
-
-def run_tests(args=None):
-    """Run pytest with optional arguments"""
-    # Use python3 explicitly for compatibility
-    cmd = ["python3", "-m", "pytest"]
-
-    if args:
-        cmd.extend(args)
-    else:
-        cmd.extend(["tests/", "-v", "--tb=short"])
-
-    try:
-        result = subprocess.run(cmd, check=True)
-        return result.returncode
-    except subprocess.CalledProcessError as e:
-        print(f"Tests failed with exit code: {e.returncode}")
-        return e.returncode
-    except FileNotFoundError:
-        print("pytest not found. Install with: pip install pytest")
-        return 1
-
-
-def main():
-    """Main entry point"""
-    if len(sys.argv) > 1:
-        # Pass through command line arguments
-        args = sys.argv[1:]
-    else:
-        args = None
-
-    exit_code = run_tests(args)
-    sys.exit(exit_code)
-
-
-if __name__ == "__main__":
-    main()
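With both files gone, the -v and --tb=short options they carried move onto the Makefile's test target, which calls pytest directly. A rough Python equivalent of what `make test` now runs, for illustration only:

```python
import subprocess

# Rough equivalent of the new Makefile test target:
# python3 -m pytest tests/ -v --tb=short
subprocess.run(
    ["python3", "-m", "pytest", "tests/", "-v", "--tb=short"],
    check=True,  # raises CalledProcessError if any test fails
)
```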
setup.py: 16 changes

@@ -1,13 +1,27 @@
 #!/usr/bin/env python3

+import os
+import re
 from setuptools import setup, find_packages

+# Read version directly from version.py file without importing
+def get_version():
+    version_file = os.path.join(os.path.dirname(__file__), 'src', 'kumacli', 'cmd', 'version.py')
+    with open(version_file, 'r', encoding='utf-8') as f:
+        content = f.read()
+    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M)
+    if version_match:
+        return version_match.group(1)
+    raise RuntimeError("Unable to find version string.")
+
+__version__ = get_version()
+
 with open("README.md", "r", encoding="utf-8") as fh:
     long_description = fh.read()

 setup(
     name="kumacli",
-    version="1.2.0",
+    version=__version__,
     author="Uptime Kuma CLI",
     description="A command-line interface for Uptime Kuma",
     long_description=long_description,
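Reading the version with a regex keeps setup.py from importing the package, and therefore uptime_kuma_api, at build time. A quick self-contained check of the pattern, with hypothetical file content standing in for version.py:

```python
import re

# Hypothetical content standing in for src/kumacli/cmd/version.py
content = '__version__ = "1.4.0"\n'
match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", content, re.M)
assert match is not None
assert match.group(1) == "1.4.0"
```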
src/kumacli/__init__.py (path inferred from content)

@@ -2,10 +2,10 @@
 KumaCLI - A command-line interface for Uptime Kuma
 """

-__version__ = "1.0.0"
-from kumacli.kumacli import main
+from kumacli.cmd.version import __version__

 __author__ = "KumaCLI Team"
 __email__ = "info@kumacli.com"

+from kumacli.kumacli import main
 __all__ = ["main"]
@@ -1,12 +1,15 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Uptime Kuma client wrapper for API operations."""
|
||||
|
||||
import fnmatch
|
||||
import re
|
||||
from datetime import datetime, timedelta
|
||||
from uptime_kuma_api import UptimeKumaApi
|
||||
from datetime import datetime
|
||||
from uptime_kuma_api import UptimeKumaApi, UptimeKumaException
|
||||
|
||||
|
||||
class KumaClient:
|
||||
"""Client wrapper for Uptime Kuma API operations."""
|
||||
|
||||
def __init__(self, url, username=None, password=None):
|
||||
self.url = url
|
||||
self.username = username
|
||||
@@ -29,9 +32,9 @@ class KumaClient:
|
||||
|
||||
if unit == "s":
|
||||
return value
|
||||
elif unit == "m":
|
||||
if unit == "m":
|
||||
return value * 60
|
||||
elif unit == "h":
|
||||
if unit == "h":
|
||||
return value * 3600
|
||||
|
||||
raise ValueError(f"Invalid duration unit: {unit}")
|
||||
@@ -71,10 +74,10 @@ class KumaClient:
|
||||
try:
|
||||
self.api = UptimeKumaApi(self.url)
|
||||
if self.username and self.password:
|
||||
result = self.api.login(self.username, self.password)
|
||||
self.api.login(self.username, self.password)
|
||||
print(f"Connected to {self.url}")
|
||||
return True
|
||||
except Exception as e:
|
||||
except UptimeKumaException as e:
|
||||
print(f"Failed to connect: {e}")
|
||||
return False
|
||||
|
||||
@@ -108,7 +111,7 @@ class KumaClient:
|
||||
|
||||
return unique_monitors
|
||||
|
||||
except Exception as e:
|
||||
except UptimeKumaException as e:
|
||||
print(f"Error finding monitors: {e}")
|
||||
return []
|
||||
|
||||
@@ -138,7 +141,7 @@ class KumaClient:
|
||||
|
||||
return unique_groups
|
||||
|
||||
except Exception as e:
|
||||
except UptimeKumaException as e:
|
||||
print(f"Error finding groups: {e}")
|
||||
return []
|
||||
|
||||
@@ -165,6 +168,108 @@ class KumaClient:
|
||||
|
||||
return group_members
|
||||
|
||||
except Exception as e:
|
||||
except UptimeKumaException as e:
|
||||
print(f"Error getting group members: {e}")
|
||||
return []
|
||||
|
||||
def find_monitors_by_globs(self, monitor_patterns=None, group_patterns=None):
|
||||
"""Find monitor IDs by name patterns and/or group patterns.
|
||||
|
||||
Args:
|
||||
monitor_patterns: List of monitor name patterns (supports wildcards)
|
||||
group_patterns: List of group name patterns (supports wildcards)
|
||||
|
||||
Returns:
|
||||
List of monitor IDs (integers) that match the criteria
|
||||
"""
|
||||
try:
|
||||
# Check if we have either monitor patterns or group patterns
|
||||
if not monitor_patterns and not group_patterns:
|
||||
print(
|
||||
"Error: Either monitor or group patterns required. "
|
||||
"Specify at least one pattern."
|
||||
)
|
||||
return []
|
||||
|
||||
matched_monitors = []
|
||||
|
||||
# Find monitors by patterns if specified
|
||||
if monitor_patterns:
|
||||
pattern_monitors = self.find_monitors_by_pattern(monitor_patterns)
|
||||
matched_monitors.extend(pattern_monitors)
|
||||
|
||||
# Find monitors by groups if specified
|
||||
if group_patterns:
|
||||
group_monitors = self.get_monitors_in_groups(group_patterns)
|
||||
# Convert to same format as find_monitors_by_pattern
|
||||
group_monitor_objs = [
|
||||
{"id": m.get("id"), "name": m.get("name")} for m in group_monitors
|
||||
]
|
||||
matched_monitors.extend(group_monitor_objs)
|
||||
|
||||
# Remove duplicates while preserving order
|
||||
seen = set()
|
||||
unique_monitors = []
|
||||
for monitor in matched_monitors:
|
||||
if monitor["id"] not in seen:
|
||||
seen.add(monitor["id"])
|
||||
unique_monitors.append(monitor)
|
||||
|
||||
matched_monitors = unique_monitors
|
||||
|
||||
if not matched_monitors:
|
||||
print(
|
||||
"Error: No monitors found matching the specified patterns or groups"
|
||||
)
|
||||
return []
|
||||
|
||||
# Return list of monitor IDs
|
||||
return [monitor["id"] for monitor in matched_monitors]
|
||||
|
||||
except UptimeKumaException as e:
|
||||
print(f"Error finding monitors by globs: {e}")
|
||||
return []
|
||||
|
||||
def get_monitor_details(self, monitor_ids):
|
||||
"""Get monitor details for display purposes.
|
||||
|
||||
Args:
|
||||
monitor_ids: List of monitor IDs
|
||||
|
||||
Returns:
|
||||
List of dicts with 'id' and 'name' keys
|
||||
"""
|
||||
try:
|
||||
all_monitors = self.api.get_monitors()
|
||||
return [
|
||||
{
|
||||
"id": mid,
|
||||
"name": next(
|
||||
(
|
||||
m.get("name", f"Monitor {mid}")
|
||||
for m in all_monitors
|
||||
if m.get("id") == mid
|
||||
),
|
||||
f"Monitor {mid}",
|
||||
),
|
||||
}
|
||||
for mid in monitor_ids
|
||||
]
|
||||
except UptimeKumaException as e:
|
||||
print(f"Error getting monitor details: {e}")
|
||||
return []
|
||||
|
||||
def find_and_get_monitors(self, monitor_patterns=None, group_patterns=None):
|
||||
"""Find monitors by patterns/groups and return detailed info.
|
||||
|
||||
Args:
|
||||
monitor_patterns: List of monitor name patterns (supports wildcards)
|
||||
group_patterns: List of group name patterns (supports wildcards)
|
||||
|
||||
Returns:
|
||||
List of dicts with 'id' and 'name' keys, or empty list if none found
|
||||
"""
|
||||
monitor_ids = self.find_monitors_by_globs(monitor_patterns, group_patterns)
|
||||
if not monitor_ids:
|
||||
return []
|
||||
return self.get_monitor_details(monitor_ids)
|
||||
|
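Taken together, these new helpers give callers a one-call path from glob patterns to display-ready monitor dicts. A minimal usage sketch, assuming a reachable Uptime Kuma instance (the URL and credentials below are placeholders):

```python
from kumacli.client import KumaClient

# Placeholder URL and credentials; connect() returns True on success
client = KumaClient("http://localhost:3001", username="admin", password="password")
if client.connect():
    # One call resolves name globs and group globs, deduplicates,
    # and returns [{"id": ..., "name": ...}, ...]
    for monitor in client.find_and_get_monitors(["*api*"], ["web*"]):
        print(f"{monitor['id']}: {monitor['name']}")
```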
src/kumacli/cmd/info.py (path inferred from content)

@@ -1,9 +1,13 @@
 #!/usr/bin/env python3
+"""Info command implementations for Uptime Kuma CLI."""

+from uptime_kuma_api import UptimeKumaException
 from ..client import KumaClient


 class InfoCommands:
+    """Commands for retrieving server information."""
+
     def __init__(self, client: KumaClient):
         self.client = client

@@ -19,7 +23,7 @@ class InfoCommands:
             for key, value in info.items():
                 print(f"  {key}: {value}")

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error getting server info: {e}")


@@ -29,7 +33,7 @@ def setup_info_parser(subparsers):
     return info_parser


-def handle_info_command(args, client):
+def handle_info_command(args, client):  # pylint: disable=unused-argument
     """Handle info command execution"""
     info_commands = InfoCommands(client)
     info_commands.get_info()
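Narrowing except Exception to UptimeKumaException, here and throughout the modules below, keeps API and connection failures handled while letting programming errors such as AttributeError or KeyError surface instead of being swallowed by a catch-all.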
src/kumacli/cmd/maintenance.py (path inferred from content)

@@ -1,10 +1,14 @@
 #!/usr/bin/env python3
+"""Maintenance command implementations for Uptime Kuma CLI."""

 from datetime import datetime, timedelta
+from uptime_kuma_api import UptimeKumaException
 from ..client import KumaClient


 class MaintenanceCommands:
+    """Commands for managing maintenance windows."""
+
     def __init__(self, client: KumaClient):
         self.client = client

@@ -32,7 +36,7 @@ class MaintenanceCommands:
                 f"{maintenance_id:<5} {title:<30} {strategy:<15} {active:<10} {description:<50}"
             )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error listing maintenances: {e}")

     def add_maintenance(

@@ -46,45 +50,10 @@ class MaintenanceCommands:
     ):
         """Add a new maintenance"""
         try:
-            # Check if we have either monitor patterns or group patterns
-            if not monitor_patterns and not group_patterns:
-                print(
-                    "Error: Either --monitor or --group flag is required. Specify at least one pattern."
-                )
-                return
-
-            matched_monitors = []
-
-            # Find monitors by patterns if specified
-            if monitor_patterns:
-                pattern_monitors = self.client.find_monitors_by_pattern(
-                    monitor_patterns
-                )
-                matched_monitors.extend(pattern_monitors)
-
-            # Find monitors by groups if specified
-            if group_patterns:
-                group_monitors = self.client.get_monitors_in_groups(group_patterns)
-                # Convert to the same format as find_monitors_by_pattern
-                group_monitor_objs = [
-                    {"id": m.get("id"), "name": m.get("name")} for m in group_monitors
-                ]
-                matched_monitors.extend(group_monitor_objs)
-
-            # Remove duplicates while preserving order
-            seen = set()
-            unique_monitors = []
-            for monitor in matched_monitors:
-                if monitor["id"] not in seen:
-                    seen.add(monitor["id"])
-                    unique_monitors.append(monitor)
-
-            matched_monitors = unique_monitors
-
+            matched_monitors = self.client.find_and_get_monitors(
+                monitor_patterns, group_patterns
+            )
             if not matched_monitors:
-                print(
-                    "Error: No monitors found matching the specified patterns or groups"
-                )
                 return

             print(f"Found {len(matched_monitors)} matching monitors:")

@@ -102,7 +71,8 @@ class MaintenanceCommands:
             end_dt = start_dt + timedelta(seconds=duration_seconds)

             print(
-                f"Maintenance window: {start_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} - {end_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} ({duration})"
+                f"Maintenance window: {start_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} - "
+                f"{end_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} ({duration})"
             )

             # Create the maintenance with single strategy and date range

@@ -135,13 +105,13 @@ class MaintenanceCommands:
                 print(
                     f"Successfully added {len(matched_monitors)} monitors to maintenance"
                 )
                 print(f"API response: {result}")
-            except Exception as e:
+            except UptimeKumaException as e:
                 print(f"Error: Failed to add monitors to maintenance: {e}")
                 print(
                     "This might be due to API compatibility issues or server configuration"
                 )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error adding maintenance: {e}")

     def delete_maintenance(self, maintenance_id=None, delete_all=False):

@@ -157,7 +127,8 @@ class MaintenanceCommands:
                 print(f"Found {len(maintenances)} maintenances to delete:")
                 for maintenance in maintenances:
                     print(
-                        f"  - {maintenance.get('title', 'N/A')} (ID: {maintenance.get('id', 'N/A')})"
+                        f"  - {maintenance.get('title', 'N/A')} "
+                        f"(ID: {maintenance.get('id', 'N/A')})"
                     )

                 # Delete all maintenances

@@ -168,10 +139,11 @@ class MaintenanceCommands:
                             maintenance.get("id")
                         )
                         print(
-                            f"Deleted maintenance '{maintenance.get('title', 'N/A')}' (ID: {maintenance.get('id')})"
+                            f"Deleted maintenance '{maintenance.get('title', 'N/A')}' "
+                            f"(ID: {maintenance.get('id')})"
                         )
                         deleted_count += 1
-                    except Exception as e:
+                    except UptimeKumaException as e:
                         print(
                             f"Failed to delete maintenance '{maintenance.get('title', 'N/A')}': {e}"
                         )

@@ -189,18 +161,19 @@ class MaintenanceCommands:

                 result = self.client.api.delete_maintenance(maintenance_id)
                 print(
-                    f"Successfully deleted maintenance '{maintenance_title}' (ID: {maintenance_id})"
+                    f"Successfully deleted maintenance '{maintenance_title}' "
+                    f"(ID: {maintenance_id})"
                 )
                 print(f"API response: {result}")

-            except Exception as e:
+            except UptimeKumaException as e:
                 print(f"Failed to delete maintenance ID {maintenance_id}: {e}")
         else:
             print(
                 "Error: Either --id or --all flag is required for delete operation"
             )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error during maintenance deletion: {e}")


@@ -209,8 +182,7 @@ def setup_maintenance_parser(subparsers):
     maintenance_parser = subparsers.add_parser(
         "maintenance", help="Maintenance operations"
     )
-    # Store reference to parser for help display
-    setup_maintenance_parser._parser = maintenance_parser
+    setup_maintenance_parser.parser = maintenance_parser
     maintenance_subparsers = maintenance_parser.add_subparsers(
         dest="maintenance_action", help="Maintenance actions"
     )

@@ -250,12 +222,14 @@ def setup_maintenance_parser(subparsers):
     add_maintenance_parser.add_argument(
         "--monitor",
         action="append",
-        help="Monitor name pattern to add to maintenance (supports wildcards like *NextCloud*, can be repeated)",
+        help="Monitor name pattern to add to maintenance "
+        "(supports wildcards like *NextCloud*, can be repeated)",
     )
     add_maintenance_parser.add_argument(
         "--group",
         action="append",
-        help="Group name pattern to add all group members to maintenance (supports wildcards, can be repeated)",
+        help="Group name pattern to add all group members to maintenance "
+        "(supports wildcards, can be repeated)",
     )

     return maintenance_parser

@@ -266,14 +240,9 @@ def handle_maintenance_command(args, client):
     maintenance_commands = MaintenanceCommands(client)

     if not args.maintenance_action:
-        if hasattr(setup_maintenance_parser, "_parser"):
-            setup_maintenance_parser._parser.print_help()
-        else:
-            print(
-                "Error: No maintenance action specified. Use --help for usage information."
-            )
+        setup_maintenance_parser.parser.print_help()
         return False
-    elif args.maintenance_action == "list":
+    if args.maintenance_action == "list":
         maintenance_commands.list_maintenances()
     elif args.maintenance_action == "add":
         title = (
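The window arithmetic that maintenance add performs is plain datetime plus timedelta. A self-contained sketch using the README's 90-minute default (per the s/m/h rules of the duration helper in KumaClient, "90m" converts to 5400 seconds):

```python
from datetime import datetime, timedelta

# 90 minutes, mirroring the README default for `kumacli maintenance add`
duration_seconds = 90 * 60
start_dt = datetime.utcnow()
end_dt = start_dt + timedelta(seconds=duration_seconds)
print(
    f"Maintenance window: {start_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} - "
    f"{end_dt.strftime('%Y-%m-%d %H:%M:%S UTC')} (90m)"
)
```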
src/kumacli/cmd/monitor.py (path inferred from content)

@@ -1,9 +1,13 @@
 #!/usr/bin/env python3
+"""Monitor command implementations for Uptime Kuma CLI."""

+from uptime_kuma_api import UptimeKumaException
 from ..client import KumaClient


 class MonitorCommands:
+    """Commands for managing monitors."""
+
     def __init__(self, client: KumaClient):
         self.client = client

@@ -61,54 +65,20 @@ class MonitorCommands:
                 parent_name = parent_monitor.get("name", f"Group {parent_id}")

             print(
-                f"{monitor_id:<5} {name:<25} {monitor_type:<12} {parent_name:<20} {url:<35} {active:<10}"
+                f"{monitor_id:<5} {name:<25} {monitor_type:<12} "
+                f"{parent_name:<20} {url:<35} {active:<10}"
             )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error listing monitors: {e}")

     def pause_monitors(self, monitor_patterns=None, group_patterns=None):
         """Pause monitors by patterns and/or groups"""
         try:
-            # Check if we have either monitor patterns or group patterns
-            if not monitor_patterns and not group_patterns:
-                print(
-                    "Error: Either --monitor or --group flag is required. Specify at least one pattern."
-                )
-                return
-
-            matched_monitors = []
-
-            # Find monitors by patterns if specified
-            if monitor_patterns:
-                pattern_monitors = self.client.find_monitors_by_pattern(
-                    monitor_patterns
-                )
-                matched_monitors.extend(pattern_monitors)
-
-            # Find monitors by groups if specified
-            if group_patterns:
-                group_monitors = self.client.get_monitors_in_groups(group_patterns)
-                # Convert to the same format as find_monitors_by_pattern
-                group_monitor_objs = [
-                    {"id": m.get("id"), "name": m.get("name")} for m in group_monitors
-                ]
-                matched_monitors.extend(group_monitor_objs)
-
-            # Remove duplicates while preserving order
-            seen = set()
-            unique_monitors = []
-            for monitor in matched_monitors:
-                if monitor["id"] not in seen:
-                    seen.add(monitor["id"])
-                    unique_monitors.append(monitor)
-
-            matched_monitors = unique_monitors
-
+            matched_monitors = self.client.find_and_get_monitors(
+                monitor_patterns, group_patterns
+            )
             if not matched_monitors:
-                print(
-                    "Error: No monitors found matching the specified patterns or groups"
-                )
                 return

             print(f"Found {len(matched_monitors)} matching monitors to pause:")

@@ -119,17 +89,17 @@ class MonitorCommands:
             paused_count = 0
             for monitor in matched_monitors:
                 try:
-                    result = self.client.api.pause_monitor(monitor["id"])
+                    self.client.api.pause_monitor(monitor["id"])
                     print(f"Paused monitor '{monitor['name']}' (ID: {monitor['id']})")
                     paused_count += 1
-                except Exception as e:
+                except UptimeKumaException as e:
                     print(f"Failed to pause monitor '{monitor['name']}': {e}")

             print(
                 f"Successfully paused {paused_count} out of {len(matched_monitors)} monitors"
             )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error pausing monitors: {e}")

     def resume_monitors(

@@ -137,62 +107,36 @@ class MonitorCommands:
     ):
         """Resume monitors by patterns and/or groups, or all paused monitors"""
         try:
-            # Check if we have either monitor patterns, group patterns, or --all flag
-            if not monitor_patterns and not group_patterns and not resume_all:
-                print("Error: Either --monitor, --group, or --all flag is required.")
-                return
-
-            matched_monitors = []
-
             if resume_all:
                 # Get all monitors and filter for inactive (paused) ones
                 all_monitors = self.client.api.get_monitors()
-                paused_monitors = [
-                    {"id": m.get("id"), "name": m.get("name")}
-                    for m in all_monitors
-                    if not m.get("active", True)
+                monitor_ids = [
+                    m.get("id") for m in all_monitors if not m.get("active", True)
                 ]
-                matched_monitors.extend(paused_monitors)
-            else:
-                # Find monitors by patterns if specified
-                if monitor_patterns:
-                    pattern_monitors = self.client.find_monitors_by_pattern(
-                        monitor_patterns
-                    )
-                    matched_monitors.extend(pattern_monitors)
-
-                # Find monitors by groups if specified
-                if group_patterns:
-                    group_monitors = self.client.get_monitors_in_groups(group_patterns)
-                    # Convert to the same format as find_monitors_by_pattern
-                    group_monitor_objs = [
-                        {"id": m.get("id"), "name": m.get("name")}
-                        for m in group_monitors
-                    ]
-                    matched_monitors.extend(group_monitor_objs)
-
-                # Remove duplicates while preserving order
-                seen = set()
-                unique_monitors = []
-                for monitor in matched_monitors:
-                    if monitor["id"] not in seen:
-                        seen.add(monitor["id"])
-                        unique_monitors.append(monitor)
-
-                matched_monitors = unique_monitors
-
-            if not matched_monitors:
-                if resume_all:
+                if not monitor_ids:
                     print("No paused monitors found to resume")
-                else:
-                    print(
-                        "Error: No monitors found matching the specified patterns or groups"
-                    )
-                return
-
-            if resume_all:
+                    return
+                matched_monitors = [
+                    {
+                        "id": mid,
+                        "name": next(
+                            (
+                                m.get("name", f"Monitor {mid}")
+                                for m in all_monitors
+                                if m.get("id") == mid
+                            ),
+                            f"Monitor {mid}",
+                        ),
+                    }
+                    for mid in monitor_ids
+                ]
                 print(f"Found {len(matched_monitors)} paused monitors to resume:")
+            else:
+                matched_monitors = self.client.find_and_get_monitors(
+                    monitor_patterns, group_patterns
+                )
+                if not matched_monitors:
+                    return
+                print(f"Found {len(matched_monitors)} matching monitors to resume:")
             for monitor in matched_monitors:
                 print(f" - {monitor['name']} (ID: {monitor['id']})")

@@ -201,25 +145,24 @@ class MonitorCommands:
             resumed_count = 0
             for monitor in matched_monitors:
                 try:
-                    result = self.client.api.resume_monitor(monitor["id"])
+                    self.client.api.resume_monitor(monitor["id"])
                     print(f"Resumed monitor '{monitor['name']}' (ID: {monitor['id']})")
                     resumed_count += 1
-                except Exception as e:
+                except UptimeKumaException as e:
                     print(f"Failed to resume monitor '{monitor['name']}': {e}")

             print(
                 f"Successfully resumed {resumed_count} out of {len(matched_monitors)} monitors"
             )

-        except Exception as e:
+        except UptimeKumaException as e:
             print(f"Error resuming monitors: {e}")


 def setup_monitor_parser(subparsers):
     """Setup monitor command parser"""
     monitor_parser = subparsers.add_parser("monitor", help="Monitor operations")
-    # Store reference to parser for help display
-    setup_monitor_parser._parser = monitor_parser
+    setup_monitor_parser.parser = monitor_parser
     monitor_subparsers = monitor_parser.add_subparsers(
         dest="monitor_action", help="Monitor actions"
     )

@@ -280,14 +223,9 @@ def handle_monitor_command(args, client):
     monitor_commands = MonitorCommands(client)

     if not args.monitor_action:
-        if hasattr(setup_monitor_parser, "_parser"):
-            setup_monitor_parser._parser.print_help()
-        else:
-            print(
-                "Error: No monitor action specified. Use --help for usage information."
-            )
+        setup_monitor_parser.parser.print_help()
         return False
-    elif args.monitor_action == "list":
+    if args.monitor_action == "list":
         monitor_commands.list_monitors(
             monitor_patterns=args.monitor, group_patterns=args.group
        )
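For resume --all, the command now collects only the IDs of inactive monitors and builds display names afterwards via a next() lookup with a "Monitor {id}" fallback. A self-contained sketch with assumed data:

```python
# Assumed monitor data; the real list comes from client.api.get_monitors()
all_monitors = [
    {"id": 1, "name": "API prod", "active": False},  # paused
    {"id": 2, "name": "Web front", "active": True},  # running
]
# Collect IDs of paused monitors only
monitor_ids = [m.get("id") for m in all_monitors if not m.get("active", True)]
# Rebuild display dicts, falling back to "Monitor {id}" if a name is missing
matched_monitors = [
    {
        "id": mid,
        "name": next(
            (m.get("name", f"Monitor {mid}") for m in all_monitors if m.get("id") == mid),
            f"Monitor {mid}",
        ),
    }
    for mid in monitor_ids
]
print(matched_monitors)  # [{'id': 1, 'name': 'API prod'}]
```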
src/kumacli/cmd/version.py: 16 changes (new file)

@@ -0,0 +1,16 @@
+#!/usr/bin/env python3
+"""Version command implementations for Uptime Kuma CLI."""
+
+__version__ = "1.4.0"
+
+
+def setup_version_parser(subparsers):
+    """Setup version command parser"""
+    version_parser = subparsers.add_parser("version", help="Show version information")
+    return version_parser
+
+
+def handle_version_command(args, client):  # pylint: disable=unused-argument
+    """Handle version command execution"""
+    print(f"kumacli {__version__}")
+    return True
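With this file as the single source of truth, setup.py extracts `__version__` by regex at build time and the package `__init__.py` re-exports it at runtime. A quick runtime check, assuming the package is installed or on sys.path:

```python
from kumacli.cmd.version import __version__

print(__version__)  # "1.4.0" as of this commit range
```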
src/kumacli/kumacli.py (path inferred from content)

@@ -1,9 +1,9 @@
 #!/usr/bin/env python3
+"""Main CLI module for Uptime Kuma."""

 import argparse
 import os
 import sys
-from datetime import datetime

 # Handle both direct execution and package import
 try:

@@ -11,6 +11,7 @@ try:
     from .cmd.monitor import setup_monitor_parser, handle_monitor_command
     from .cmd.maintenance import setup_maintenance_parser, handle_maintenance_command
     from .cmd.info import setup_info_parser, handle_info_command
+    from .cmd.version import setup_version_parser, handle_version_command
 except ImportError:
     # Running directly, add parent directory to path
     sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

@@ -21,9 +22,11 @@ except ImportError:
         handle_maintenance_command,
     )
     from kumacli.cmd.info import setup_info_parser, handle_info_command
+    from kumacli.cmd.version import setup_version_parser, handle_version_command


 def main():
+    """Main entry point for the CLI application."""
     parser = argparse.ArgumentParser(description="Uptime Kuma CLI Client")
     parser.add_argument(
         "--url", help="Uptime Kuma server URL (can also use KUMA_URL env var)"

@@ -40,9 +43,10 @@ def main():
     subparsers = parser.add_subparsers(dest="resource", help="Resource to operate on")

     # Setup command parsers
-    monitor_parser = setup_monitor_parser(subparsers)
-    maintenance_parser = setup_maintenance_parser(subparsers)
-    info_parser = setup_info_parser(subparsers)
+    setup_monitor_parser(subparsers)
+    setup_maintenance_parser(subparsers)
+    setup_info_parser(subparsers)
+    setup_version_parser(subparsers)

     args = parser.parse_args()

@@ -74,6 +78,8 @@ def main():
         success = handle_maintenance_command(args, client)
     elif args.resource == "info":
         success = handle_info_command(args, client)
+    elif args.resource == "version":
+        success = handle_version_command(args, client)
     else:
         parser.print_help()
         sys.exit(1)
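Since each setup_*_parser now stores its subparser on the function object (for example setup_monitor_parser.parser), main() no longer needs to keep the return values, and the handlers can print help for a missing action without the hasattr fallback.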
Tests: CLI integration and error handling (file path not shown in the compare view)

@@ -21,7 +21,7 @@ class TestCLIIntegration:

         # Verify parser is created
         assert monitor_parser is not None
-        assert hasattr(setup_monitor_parser, "_parser")
+        assert monitor_parser.prog.endswith("monitor")

     def test_maintenance_parser_setup(self):
         """Test maintenance parser setup"""

@@ -32,7 +32,7 @@ class TestCLIIntegration:

         # Verify parser is created
         assert maintenance_parser is not None
-        assert hasattr(setup_maintenance_parser, "_parser")
+        assert maintenance_parser.prog.endswith("maintenance")

     def test_info_parser_setup(self):
         """Test info parser setup"""

@@ -50,9 +50,9 @@ class TestCLIIntegration:
         mock_args = Mock()
         mock_args.monitor_action = None

-        # Setup parser reference
+        # Setup parser reference to simulate having called setup_monitor_parser
         mock_parser = Mock()
-        setup_monitor_parser._parser = mock_parser
+        setup_monitor_parser.parser = mock_parser

         # Execute
         result = handle_monitor_command(mock_args, mock_client)

@@ -67,9 +67,9 @@ class TestCLIIntegration:
         mock_args = Mock()
         mock_args.maintenance_action = None

-        # Setup parser reference
+        # Setup parser reference to simulate having called setup_maintenance_parser
         mock_parser = Mock()
-        setup_maintenance_parser._parser = mock_parser
+        setup_maintenance_parser.parser = mock_parser

         # Execute
         result = handle_maintenance_command(mock_args, mock_client)

@@ -87,11 +87,9 @@ class TestCLIIntegration:
         mock_args.group = ["web-services"]

         # Mock client methods
-        mock_client.find_monitors_by_pattern.return_value = [
-            {"id": 1, "name": "test-monitor"}
-        ]
-        mock_client.get_monitors_in_groups.return_value = [
-            {"id": 2, "name": "web-service-monitor"}
+        mock_client.find_and_get_monitors.return_value = [
+            {"id": 1, "name": "test-monitor"},
+            {"id": 2, "name": "web-service-monitor"},
         ]
         mock_client.api.pause_monitor.return_value = {"msg": "Paused Successfully."}

@@ -100,8 +98,9 @@ class TestCLIIntegration:

         # Verify
         assert result is True
-        mock_client.find_monitors_by_pattern.assert_called_once_with(["test*"])
-        mock_client.get_monitors_in_groups.assert_called_once_with(["web-services"])
+        mock_client.find_and_get_monitors.assert_called_once_with(
+            ["test*"], ["web-services"]
+        )
         # Should pause both monitors (deduplicated)
         assert mock_client.api.pause_monitor.call_count == 2

@@ -235,7 +234,11 @@ class TestErrorHandling:
         mock_args.group = None

         # Mock no matches found
-        mock_client.find_monitors_by_pattern.return_value = []
+        def mock_find_and_get_monitors(*args, **kwargs):
+            print("Error: No monitors found matching the specified patterns or groups")
+            return []
+
+        mock_client.find_and_get_monitors.side_effect = mock_find_and_get_monitors

         # Execute
         result = handle_monitor_command(mock_args, mock_client)

@@ -255,7 +258,11 @@ class TestErrorHandling:
         mock_args.maintenance_action = "list"

         # Mock API error
-        mock_client.api.get_maintenances.side_effect = Exception("Connection timeout")
+        from uptime_kuma_api import UptimeKumaException
+
+        mock_client.api.get_maintenances.side_effect = UptimeKumaException(
+            "Connection timeout"
+        )

         # Execute
         result = handle_maintenance_command(mock_args, mock_client)
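Because the "no monitors found" message now comes from the client helper rather than from each command, the integration test reproduces it with a side_effect function that both prints and returns an empty list; a bare return_value=[] would leave captured.out empty and the output assertion unsatisfied.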
Tests: KumaClient (file path not shown)

@@ -171,10 +171,12 @@ class TestKumaClient:

     def test_find_monitors_by_pattern_api_error(self, capsys):
         """Test finding monitors handles API errors"""
+        from uptime_kuma_api import UptimeKumaException
+
         client = KumaClient("http://test.com")
         client.api = Mock()

-        client.api.get_monitors.side_effect = Exception("API Error")
+        client.api.get_monitors.side_effect = UptimeKumaException("API Error")

         result = client.find_monitors_by_pattern(["Web*"])
         assert len(result) == 0

@@ -203,7 +205,9 @@ class TestKumaClient:
     @patch("kumacli.client.UptimeKumaApi")
     def test_connect_failure(self, mock_api_class, capsys):
         """Test connection failure"""
-        mock_api_class.side_effect = Exception("Connection failed")
+        from uptime_kuma_api import UptimeKumaException
+
+        mock_api_class.side_effect = UptimeKumaException("Connection failed")

         client = KumaClient("http://test.com", "user", "pass")
         result = client.connect()
Tests: info commands (file path not shown)

@@ -49,8 +49,10 @@ class TestInfoCommands:

     def test_get_info_api_error(self, mock_client, capsys):
         """Test info command with API error"""
+        from uptime_kuma_api import UptimeKumaException
+
         # Setup
-        mock_client.api.info.side_effect = Exception("Connection failed")
+        mock_client.api.info.side_effect = UptimeKumaException("Connection failed")

         info_commands = InfoCommands(mock_client)

@@ -80,9 +82,11 @@ class TestInfoCommandHandler:

     def test_handle_info_command_with_error(self, mock_client):
         """Test info command handler with error"""
+        from uptime_kuma_api import UptimeKumaException
+
         # Setup
         mock_args = Mock()
-        mock_client.api.info.side_effect = Exception("API Error")
+        mock_client.api.info.side_effect = UptimeKumaException("API Error")

         # Execute
         result = handle_info_command(mock_args, mock_client)
Tests: maintenance commands (file path not shown)

@@ -43,8 +43,10 @@ class TestMaintenanceCommands:

     def test_list_maintenances_api_error(self, mock_client, capsys):
         """Test maintenance listing with API error"""
+        from uptime_kuma_api import UptimeKumaException
+
         # Setup
-        mock_client.api.get_maintenances.side_effect = Exception("API Error")
+        mock_client.api.get_maintenances.side_effect = UptimeKumaException("API Error")

         maintenance_commands = MaintenanceCommands(mock_client)
Tests: monitor commands (file path not shown)

@@ -16,8 +16,7 @@ class TestMonitorCommands:
     def test_pause_monitors_by_pattern(self, mock_client, mock_monitors, capsys):
         """Test pausing monitors by pattern"""
         # Setup
-        mock_client.api.get_monitors.return_value = mock_monitors
-        mock_client.find_monitors_by_pattern.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 1, "name": "Test Monitor 1"},
             {"id": 2, "name": "Test Monitor 2"},
         ]

@@ -29,7 +28,7 @@ class TestMonitorCommands:
         monitor_commands.pause_monitors(monitor_patterns=["Test*"])

         # Verify
-        mock_client.find_monitors_by_pattern.assert_called_once_with(["Test*"])
+        mock_client.find_and_get_monitors.assert_called_once_with(["Test*"], None)
         assert mock_client.api.pause_monitor.call_count == 2
         mock_client.api.pause_monitor.assert_any_call(1)
         mock_client.api.pause_monitor.assert_any_call(2)

@@ -43,7 +42,7 @@ class TestMonitorCommands:
     def test_pause_monitors_by_group(self, mock_client, mock_monitors, capsys):
         """Test pausing monitors by group"""
         # Setup
-        mock_client.get_monitors_in_groups.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 4, "name": "Child Monitor"}
         ]
         mock_client.api.pause_monitor.return_value = {"msg": "Paused Successfully."}

@@ -54,7 +53,7 @@ class TestMonitorCommands:
         monitor_commands.pause_monitors(group_patterns=["Group*"])

         # Verify
-        mock_client.get_monitors_in_groups.assert_called_once_with(["Group*"])
+        mock_client.find_and_get_monitors.assert_called_once_with(None, ["Group*"])
         mock_client.api.pause_monitor.assert_called_once_with(4)

         captured = capsys.readouterr()

@@ -63,19 +62,21 @@ class TestMonitorCommands:

     def test_pause_monitors_no_patterns(self, mock_client, capsys):
         """Test pausing monitors without patterns"""
+        # Setup
+        mock_client.find_and_get_monitors.return_value = []
+
         monitor_commands = MonitorCommands(mock_client)

         # Execute
         monitor_commands.pause_monitors()

         # Verify
         captured = capsys.readouterr()
-        assert "Error: Either --monitor or --group flag is required." in captured.out
+        mock_client.find_and_get_monitors.assert_called_once_with(None, None)

     def test_pause_monitors_no_matches(self, mock_client, capsys):
         """Test pausing monitors with no matches"""
         # Setup
-        mock_client.find_monitors_by_pattern.return_value = []
+        mock_client.find_and_get_monitors.return_value = []

         monitor_commands = MonitorCommands(mock_client)

@@ -83,19 +84,19 @@ class TestMonitorCommands:
         monitor_commands.pause_monitors(monitor_patterns=["NonExistent*"])

         # Verify
         captured = capsys.readouterr()
-        assert (
-            "Error: No monitors found matching the specified patterns or groups"
-            in captured.out
+        mock_client.find_and_get_monitors.assert_called_once_with(
+            ["NonExistent*"], None
         )

     def test_pause_monitors_api_error(self, mock_client, capsys):
         """Test pausing monitors with API error"""
+        from uptime_kuma_api import UptimeKumaException
+
         # Setup
-        mock_client.find_monitors_by_pattern.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 1, "name": "Test Monitor 1"}
         ]
-        mock_client.api.pause_monitor.side_effect = Exception("API Error")
+        mock_client.api.pause_monitor.side_effect = UptimeKumaException("API Error")

         monitor_commands = MonitorCommands(mock_client)

@@ -110,7 +111,7 @@ class TestMonitorCommands:
     def test_resume_monitors_by_pattern(self, mock_client, mock_monitors, capsys):
         """Test resuming monitors by pattern"""
         # Setup
-        mock_client.find_monitors_by_pattern.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 2, "name": "Test Monitor 2"}
         ]
         mock_client.api.resume_monitor.return_value = {"msg": "Resumed Successfully."}

@@ -121,6 +122,7 @@ class TestMonitorCommands:
         monitor_commands.resume_monitors(monitor_patterns=["Test*"])

         # Verify
+        mock_client.find_and_get_monitors.assert_called_once_with(["Test*"], None)
         mock_client.api.resume_monitor.assert_called_once_with(2)

         captured = capsys.readouterr()

@@ -165,17 +167,16 @@ class TestMonitorCommands:

     def test_resume_monitors_no_args(self, mock_client, capsys):
         """Test resuming monitors without any arguments"""
+        # Setup
+        mock_client.find_and_get_monitors.return_value = []
         monitor_commands = MonitorCommands(mock_client)

         # Execute
         monitor_commands.resume_monitors()

         # Verify
         captured = capsys.readouterr()
-        assert (
-            "Error: Either --monitor, --group, or --all flag is required."
-            in captured.out
-        )
+        mock_client.find_and_get_monitors.assert_called_once_with(None, None)


 class TestMonitorCommandHandler:

@@ -204,7 +205,7 @@ class TestMonitorCommandHandler:
         mock_args.monitor = ["test*"]
         mock_args.group = None

-        mock_client.find_monitors_by_pattern.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 1, "name": "test monitor"}
         ]
         mock_client.api.pause_monitor.return_value = {"msg": "Paused Successfully."}

@@ -225,7 +226,7 @@ class TestMonitorCommandHandler:
         mock_args.group = None
         mock_args.all = False

-        mock_client.find_monitors_by_pattern.return_value = [
+        mock_client.find_and_get_monitors.return_value = [
             {"id": 1, "name": "test monitor"}
         ]
         mock_client.api.resume_monitor.return_value = {"msg": "Resumed Successfully."}
tests/test_version.py: 31 changes (new file)

@@ -0,0 +1,31 @@
+#!/usr/bin/env python3
+
+import pytest
+from unittest.mock import Mock
+
+from kumacli.cmd.version import handle_version_command, __version__
+
+
+class TestVersionCommand:
+    def test_handle_version_command(self, mock_client, capsys):
+        """Test version command handler"""
+        # Setup
+        mock_args = Mock()
+
+        # Execute
+        result = handle_version_command(mock_args, mock_client)
+
+        # Verify
+        assert result is True
+        captured = capsys.readouterr()
+        assert f"kumacli {__version__}" in captured.out
+
+    def test_version_is_defined(self):
+        """Test that version is properly defined"""
+        assert __version__ is not None
+        assert isinstance(__version__, str)
+        assert len(__version__) > 0
+        # Version should follow semantic versioning pattern (e.g., "1.4.0")
+        parts = __version__.split(".")
+        assert len(parts) >= 2  # At least major.minor
+        assert all(part.isdigit() for part in parts)  # All parts should be numeric