d5018c2b34
Sprint 1 — Security & correctness:
- Restore all 10 commented-out is_local_request() checks (vault, containers, images, volumes)
- Fix XFF spoofing: only trust the LAST X-Forwarded-For entry (Caddy's append), not all
- Require prefix length in wireguard.address (was accepting bare IPs like 10.0.0.1)
- Validate service_access list in add_peer (valid: calendar/files/mail/webdav)
- Fix dhcp/reservations POST/DELETE: unpack mac/ip/hostname from body (was passing dict as positional arg)
- Fix network/test POST: remove spurious data arg (test_connectivity takes no args)
- Fix remove_peer: clear iptables rules and regenerate DNS ACLs on deletion (was leaving stale rules)
- Fix CoreDNS reload: SIGHUP → SIGUSR1 (SIGHUP kills the process; SIGUSR1 triggers reload plugin)
- Remove local.{domain} block from Corefile template (local.zone doesn't exist, caused log spam)
- Fix routing_manager._remove_nat_rule: targeted -D instead of flushing entire POSTROUTING chain
Sprint 2 — State consistency:
- Atomic config writes in config_manager, ip_utils, firewall_manager, network_manager
(write to .tmp → fsync → os.replace, prevents truncated files on kill)
- backup_config: now also backs up Caddyfile, Corefile, .env, DNS zone files
- restore_config: restores all of the above so config stays consistent after restore
Sprint 3 — Dead code / documentation:
- Remove CellManager instantiation from app startup (was never called, double-instantiated all managers)
- Document routing_manager scope (targets host, not cell-wireguard; methods not called by any active route)
Sprint 4 — Test infrastructure:
- Add tests/conftest.py with shared tmp_dir, tmp_config_dir, tmp_data_dir, flask_client fixtures
- Add tests/test_config_validation.py: 400 paths for ip_range, port, wireguard.address validation
- Add tests/test_ip_utils_caddyfile.py: 14 tests for write_caddyfile (was completely untested)
- Expand test_app_misc.py: 7 new is_local_request tests covering XFF spoofing and cell-network IPs
- Add --cov-fail-under=70 to make test-coverage
- Add pre-commit hook that runs pytest before every commit
414 tests pass (was 372).
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
418 lines
16 KiB
Python
418 lines
16 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Configuration Manager for Personal Internet Cell
|
|
Centralized configuration management for all services
|
|
"""
|
|
|
|
import os
|
|
import json
|
|
import yaml
|
|
import shutil
|
|
import hashlib
|
|
from datetime import datetime
|
|
from typing import Dict, List, Optional, Any
|
|
from pathlib import Path
|
|
import logging
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
class ConfigManager:
|
|
"""Centralized configuration management for all services (unified config)"""
|
|
|
|
def __init__(self, config_file: str = '/app/config/cell_config.json', data_dir: str = '/app/data'):
|
|
config_file = Path(config_file)
|
|
if config_file.is_dir():
|
|
config_file = config_file / 'cell_config.json'
|
|
print(f"[DEBUG] ConfigManager.__init__: config_file = {config_file}")
|
|
self.config_file = config_file
|
|
self.data_dir = Path(data_dir)
|
|
self.backup_dir = self.data_dir / 'config_backups'
|
|
self.secrets_file = self.config_file.parent / 'secrets.yaml'
|
|
try:
|
|
self.backup_dir.mkdir(parents=True, exist_ok=True)
|
|
except (PermissionError, OSError):
|
|
pass
|
|
self.service_schemas = self._load_service_schemas()
|
|
self.configs = self._load_all_configs()
|
|
if not self.config_file.exists():
|
|
self._save_all_configs()
|
|
|
|
def _load_service_schemas(self) -> Dict[str, Dict]:
|
|
"""Load configuration schemas for all services"""
|
|
return {
|
|
'network': {
|
|
'required': ['dns_port', 'dhcp_range', 'ntp_servers'],
|
|
'optional': ['dns_zones', 'dhcp_reservations'],
|
|
'types': {
|
|
'dns_port': int,
|
|
'dhcp_range': str,
|
|
'ntp_servers': list
|
|
}
|
|
},
|
|
'wireguard': {
|
|
'required': ['port', 'private_key', 'address'],
|
|
'optional': ['peers', 'allowed_ips'],
|
|
'types': {
|
|
'port': int,
|
|
'private_key': str,
|
|
'address': str
|
|
}
|
|
},
|
|
'email': {
|
|
'required': ['domain', 'smtp_port', 'imap_port'],
|
|
'optional': ['users', 'ssl_cert', 'ssl_key', 'submission_port', 'webmail_port'],
|
|
'types': {
|
|
'smtp_port': int,
|
|
'submission_port': int,
|
|
'imap_port': int,
|
|
'webmail_port': int,
|
|
'domain': str
|
|
}
|
|
},
|
|
'calendar': {
|
|
'required': ['port', 'data_dir'],
|
|
'optional': ['users', 'calendars'],
|
|
'types': {
|
|
'port': int,
|
|
'data_dir': str
|
|
}
|
|
},
|
|
'files': {
|
|
'required': ['port', 'data_dir'],
|
|
'optional': ['users', 'quota', 'manager_port'],
|
|
'types': {
|
|
'port': int,
|
|
'manager_port': int,
|
|
'data_dir': str,
|
|
'quota': int
|
|
}
|
|
},
|
|
'routing': {
|
|
'required': ['nat_enabled', 'firewall_enabled'],
|
|
'optional': ['nat_rules', 'firewall_rules', 'peer_routes'],
|
|
'types': {
|
|
'nat_enabled': bool,
|
|
'firewall_enabled': bool
|
|
}
|
|
},
|
|
'vault': {
|
|
'required': ['ca_configured', 'fernet_configured'],
|
|
'optional': ['certificates', 'trusted_keys'],
|
|
'types': {
|
|
'ca_configured': bool,
|
|
'fernet_configured': bool
|
|
}
|
|
}
|
|
}
|
|
|
|
def _load_all_configs(self) -> Dict[str, Dict]:
|
|
"""Load all existing service configurations"""
|
|
if self.config_file.exists():
|
|
try:
|
|
with open(self.config_file, 'r') as f:
|
|
return json.load(f)
|
|
except Exception as e:
|
|
logger.error(f"Error loading unified config: {e}")
|
|
return {}
|
|
return {}
|
|
|
|
def _save_all_configs(self):
|
|
"""Save all service configurations to the unified config file (atomic write)."""
|
|
try:
|
|
self.config_file.parent.mkdir(parents=True, exist_ok=True)
|
|
tmp = self.config_file.with_suffix('.tmp')
|
|
with open(tmp, 'w') as f:
|
|
json.dump(self.configs, f, indent=2)
|
|
f.flush()
|
|
os.fsync(f.fileno())
|
|
os.replace(tmp, self.config_file)
|
|
except (PermissionError, OSError):
|
|
pass
|
|
|
|
def get_service_config(self, service: str) -> Dict[str, Any]:
|
|
"""Get configuration for a specific service"""
|
|
if service not in self.service_schemas:
|
|
raise ValueError(f"Unknown service: {service}")
|
|
return self.configs.get(service, {})
|
|
|
|
def update_service_config(self, service: str, config: Dict[str, Any]) -> bool:
|
|
"""Update configuration for a specific service"""
|
|
if service not in self.service_schemas:
|
|
raise ValueError(f"Unknown service: {service}")
|
|
try:
|
|
# Validate types only (required fields are checked by validate_config, not here)
|
|
schema = self.service_schemas[service]
|
|
for field, expected_type in schema['types'].items():
|
|
if field in config and not isinstance(config[field], expected_type):
|
|
logger.error(f"Invalid type for {field}: expected {expected_type.__name__}")
|
|
return False
|
|
|
|
# Backup current config
|
|
self._backup_service_config(service)
|
|
|
|
# Update configuration
|
|
self.configs[service] = config
|
|
self._save_all_configs()
|
|
|
|
logger.info(f"Updated configuration for {service}")
|
|
return True
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error updating config for {service}: {e}")
|
|
return False
|
|
|
|
def validate_config(self, service: str, config: Dict[str, Any]) -> Dict[str, Any]:
|
|
"""Validate configuration for a service"""
|
|
if service not in self.service_schemas:
|
|
return {
|
|
"valid": False,
|
|
"errors": [f"Unknown service: {service}"],
|
|
"warnings": []
|
|
}
|
|
|
|
schema = self.service_schemas[service]
|
|
errors = []
|
|
warnings = []
|
|
|
|
# Check required fields (missing = error, wrong type = error)
|
|
for field in schema['required']:
|
|
if field not in config:
|
|
errors.append(f"Missing required field: {field}")
|
|
elif field in schema['types']:
|
|
expected_type = schema['types'][field]
|
|
if not isinstance(config[field], expected_type):
|
|
errors.append(f"Field {field} must be of type {expected_type.__name__}")
|
|
|
|
# Check optional fields
|
|
for field in schema['optional']:
|
|
if field in config and field in schema['types']:
|
|
expected_type = schema['types'][field]
|
|
if not isinstance(config[field], expected_type):
|
|
warnings.append(f"Field {field} should be of type {expected_type.__name__}")
|
|
|
|
return {
|
|
"valid": len(errors) == 0,
|
|
"errors": errors,
|
|
"warnings": warnings
|
|
}
|
|
|
|
def get_all_configs(self) -> Dict[str, Dict]:
|
|
"""Return all stored service configurations."""
|
|
return dict(self.configs)
|
|
|
|
def get_config_summary(self) -> Dict[str, Any]:
|
|
"""Return a high-level summary of configuration state."""
|
|
backup_count = sum(
|
|
1 for p in self.backup_dir.iterdir() if p.is_dir()
|
|
) if self.backup_dir.exists() else 0
|
|
return {
|
|
'total_services': len(self.service_schemas),
|
|
'configured_services': len(self.configs),
|
|
'backup_count': backup_count,
|
|
}
|
|
|
|
    def backup_config(self) -> str:
        """Create a timestamped backup directory and return its backup_id.

        Copies cell_config.json, secrets.yaml, the Caddyfile, the Corefile,
        the .env file, and all *.zone DNS files into
        <backup_dir>/backup_<timestamp>/, then writes a manifest.json
        describing what was captured.

        Returns:
            The backup_id string ("backup_YYYYMMDD_HHMMSS").

        Raises:
            Re-raises any underlying OS/IO error after logging it.
        """
        try:
            timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
            backup_id = f"backup_{timestamp}"
            backup_path = self.backup_dir / backup_id
            backup_path.mkdir(parents=True, exist_ok=True)

            # Primary config and secrets (skipped silently if absent)
            if self.config_file.exists():
                shutil.copy2(self.config_file, backup_path / 'cell_config.json')
            if self.secrets_file.exists():
                shutil.copy2(self.secrets_file, backup_path / 'secrets.yaml')

            # Runtime-generated files that must match cell_config.json after restore
            config_dir = Path(os.environ.get('CONFIG_DIR', '/app/config'))
            data_dir = Path(os.environ.get('DATA_DIR', '/app/data'))
            env_file = Path(os.environ.get('ENV_FILE', '/app/.env'))

            # (source path, name to use inside the backup directory)
            extra = [
                (config_dir / 'caddy' / 'Caddyfile', 'Caddyfile'),
                (config_dir / 'dns' / 'Corefile', 'Corefile'),
                (env_file, '.env'),
            ]
            for src, dest_name in extra:
                if src.exists():
                    shutil.copy2(src, backup_path / dest_name)

            # DNS zone files
            dns_data = data_dir / 'dns'
            if dns_data.is_dir():
                zones_dir = backup_path / 'dns_zones'
                zones_dir.mkdir(exist_ok=True)
                for zone_file in dns_data.glob('*.zone'):
                    shutil.copy2(zone_file, zones_dir / zone_file.name)

            # Manifest is written last: its presence marks a complete backup
            # (restore_config refuses backups without it).
            manifest = {
                "backup_id": backup_id,
                "timestamp": datetime.now().isoformat(),
                "services": list(self.service_schemas.keys()),
                "files": [f.name for f in backup_path.iterdir()],
            }
            with open(backup_path / 'manifest.json', 'w') as f:
                json.dump(manifest, f, indent=2)

            logger.info(f"Created configuration backup: {backup_id}")
            return backup_id

        except Exception as e:
            logger.error(f"Error creating backup: {e}")
            raise
|
|
|
|
    def restore_config(self, backup_id: str) -> bool:
        """Restore every file captured by backup_config() from *backup_id*.

        Copies cell_config.json, secrets.yaml, Caddyfile, Corefile, .env
        and DNS zone files back to their live locations, then reloads
        self.configs from disk so in-memory state matches.

        Args:
            backup_id: Directory name under backup_dir
                (e.g. "backup_20240101_120000").

        Returns:
            True on success; False on any error (logged, never raised).
        """
        try:
            backup_path = self.backup_dir / backup_id
            if not backup_path.exists():
                raise ValueError(f"Backup {backup_id} not found")
            manifest_file = backup_path / 'manifest.json'
            if not manifest_file.exists():
                # manifest.json is written last during backup; its absence
                # means the backup is incomplete, so refuse to restore it.
                raise ValueError(f"Backup manifest not found")

            # Restore primary config
            config_backup = backup_path / 'cell_config.json'
            if config_backup.exists():
                shutil.copy2(config_backup, self.config_file)
            secrets_backup = backup_path / 'secrets.yaml'
            if secrets_backup.exists():
                shutil.copy2(secrets_backup, self.secrets_file)

            # Restore runtime-generated files so they stay consistent with cell_config.json
            config_dir = Path(os.environ.get('CONFIG_DIR', '/app/config'))
            data_dir = Path(os.environ.get('DATA_DIR', '/app/data'))
            env_file = Path(os.environ.get('ENV_FILE', '/app/.env'))

            # (file inside the backup dir, live destination path)
            restore_map = [
                (backup_path / 'Caddyfile', config_dir / 'caddy' / 'Caddyfile'),
                (backup_path / 'Corefile', config_dir / 'dns' / 'Corefile'),
                (backup_path / '.env', env_file),
            ]
            for src, dest in restore_map:
                if src.exists():
                    dest.parent.mkdir(parents=True, exist_ok=True)
                    shutil.copy2(src, dest)

            # Restore DNS zone files
            zones_backup = backup_path / 'dns_zones'
            if zones_backup.is_dir():
                dns_data = data_dir / 'dns'
                dns_data.mkdir(parents=True, exist_ok=True)
                for zone_file in zones_backup.glob('*.zone'):
                    shutil.copy2(zone_file, dns_data / zone_file.name)

            # Reload so self.configs reflects the restored file contents.
            self.configs = self._load_all_configs()
            logger.info(f"Restored configuration from backup: {backup_id}")
            return True
        except Exception as e:
            logger.error(f"Error restoring backup {backup_id}: {e}")
            return False
|
|
|
|
def list_backups(self) -> List[Dict[str, Any]]:
|
|
"""List all available backups"""
|
|
backups = []
|
|
for backup_dir in self.backup_dir.iterdir():
|
|
if backup_dir.is_dir():
|
|
manifest_file = backup_dir / 'manifest.json'
|
|
if manifest_file.exists():
|
|
try:
|
|
with open(manifest_file, 'r') as f:
|
|
manifest = json.load(f)
|
|
backups.append(manifest)
|
|
except Exception as e:
|
|
logger.error(f"Error reading backup manifest {backup_dir.name}: {e}")
|
|
|
|
return sorted(backups, key=lambda x: x['timestamp'], reverse=True)
|
|
|
|
def delete_backup(self, backup_id: str) -> bool:
|
|
"""Delete a backup"""
|
|
try:
|
|
backup_path = self.backup_dir / backup_id
|
|
if not backup_path.exists():
|
|
raise ValueError(f"Backup {backup_id} not found")
|
|
|
|
shutil.rmtree(backup_path)
|
|
logger.info(f"Deleted backup: {backup_id}")
|
|
return True
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error deleting backup {backup_id}: {e}")
|
|
return False
|
|
|
|
def get_config_hash(self, service: str) -> str:
|
|
"""Get hash of service configuration for change detection"""
|
|
config = self.get_service_config(service)
|
|
config_str = json.dumps(config, sort_keys=True)
|
|
return hashlib.sha256(config_str.encode()).hexdigest()
|
|
|
|
def has_config_changed(self, service: str, previous_hash: str) -> bool:
|
|
"""Check if configuration has changed"""
|
|
current_hash = self.get_config_hash(service)
|
|
return current_hash != previous_hash
|
|
|
|
def export_config(self, format: str = 'json') -> str:
|
|
"""Export all configurations in specified format"""
|
|
try:
|
|
if format == 'json':
|
|
return json.dumps(self.configs, indent=2)
|
|
elif format == 'yaml':
|
|
return yaml.dump(self.configs, default_flow_style=False)
|
|
else:
|
|
raise ValueError(f"Unsupported format: {format}")
|
|
except Exception as e:
|
|
logger.error(f"Error exporting config: {e}")
|
|
raise
|
|
|
|
def import_config(self, config_data: str, format: str = 'json') -> bool:
|
|
"""Import configurations from string"""
|
|
try:
|
|
if format == 'json':
|
|
configs = json.loads(config_data)
|
|
elif format == 'yaml':
|
|
configs = yaml.safe_load(config_data)
|
|
else:
|
|
raise ValueError(f"Unsupported format: {format}")
|
|
# Import only services present in the data — don't fabricate missing ones
|
|
for service, config in configs.items():
|
|
if service in self.service_schemas:
|
|
self.update_service_config(service, config)
|
|
logger.info("Imported configurations successfully")
|
|
return True
|
|
except Exception as e:
|
|
logger.error(f"Error importing config: {e}")
|
|
return False
|
|
|
|
def _backup_service_config(self, service: str):
|
|
"""Create backup of specific service config before update"""
|
|
# No-op for unified config, but keep for compatibility
|
|
pass
|
|
|
|
def get_all_configs(self) -> Dict[str, Dict]:
|
|
"""Get all service configurations"""
|
|
return self.configs.copy()
|
|
|
|
def get_config_summary(self) -> Dict[str, Any]:
|
|
"""Get summary of all configurations"""
|
|
summary = {
|
|
"total_services": len(self.service_schemas),
|
|
"configured_services": [],
|
|
"unconfigured_services": [],
|
|
"backup_count": len(self.list_backups()),
|
|
"last_backup": None
|
|
}
|
|
|
|
backups = self.list_backups()
|
|
if backups:
|
|
summary["last_backup"] = backups[0]["timestamp"]
|
|
|
|
for service in self.service_schemas.keys():
|
|
config = self.get_service_config(service)
|
|
if config and not config.get("error"):
|
|
summary["configured_services"].append(service)
|
|
else:
|
|
summary["unconfigured_services"].append(service)
|
|
|
|
return summary |