feat: persistent container log collection, unified rotation, logs page redesign

- log_manager: add collect_container_logs (appends docker logs to container_<name>.log),
  get_container_log_lines, rotate_container_log, get_all_log_file_infos
- app.py: new endpoints /api/logs/files (all log file sizes), /api/logs/containers/<name>
  (collect+return stored container logs); rotate endpoint now handles both service and container logs
- Logs page: split into API Service Logs tab (python manager logs) and Container Logs tab
  (persistent docker stdout/stderr); Statistics tab shows both kinds with per-row rotate;
  each tab has a description explaining what it shows and where files live
- wireguard_manager: test_connectivity peer_ip=None guard (already in previous commit, now rebuilt)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-04-21 01:54:33 -04:00
parent 50f2200b45
commit 7b39331417
4 changed files with 308 additions and 231 deletions
+30 -4
View File
@@ -619,17 +619,43 @@ def get_log_statistics():
@app.route('/api/logs/rotate', methods=['POST'])
def rotate_logs():
    """Manually rotate logs (API service log or container log).

    JSON body:
        name: log identifier, e.g. 'wireguard' or 'container_cell-api'.
        kind: 'service' (default) or 'container'.

    Returns 200 with a confirmation message, or 500 with the error text.
    """
    try:
        data = request.get_json(silent=True) or {}
        name = data.get('name')  # e.g. 'wireguard' or 'container_cell-api'
        kind = data.get('kind', 'service')  # 'service' or 'container'
        if kind == 'container':
            # Accept either the bare container name or the stored file stem.
            container_name = name[len('container_'):] if name and name.startswith('container_') else name
            log_manager.rotate_container_log(container_name)
        else:
            log_manager.rotate_logs(name)
        return jsonify({"message": "Logs rotated successfully"})
    except Exception as e:
        logger.error(f"Error rotating logs: {e}")
        return jsonify({"error": str(e)}), 500
@app.route('/api/logs/files', methods=['GET'])
def get_log_file_infos():
    """List all stored log files (service + container) with sizes.

    Returns a JSON array of per-file info dicts (name, label, kind, file,
    size, modified) as produced by LogManager.get_all_log_file_infos, or
    500 with the error text on failure.
    """
    try:
        return jsonify(log_manager.get_all_log_file_infos())
    except Exception as e:
        logger.error(f"Error listing log files: {e}")
        return jsonify({"error": str(e)}), 500
@app.route('/api/logs/containers/<container_name>', methods=['GET'])
def get_stored_container_logs(container_name):
    """Collect latest docker logs into file and return last N lines.

    Query params:
        tail: number of trailing lines to return (default 100; a
              non-integer value falls back to 100 instead of a 500).
    """
    try:
        try:
            tail = int(request.args.get('tail', 100))
        except (TypeError, ValueError):
            tail = 100  # malformed ?tail= should not fail the whole request
        log_manager.collect_container_logs(container_name)
        lines = log_manager.get_container_log_lines(container_name, tail)
        return jsonify({'container': container_name, 'lines': lines})
    except Exception as e:
        logger.error(f"Error getting stored container logs for {container_name}: {e}")
        return jsonify({"error": str(e)}), 500
# Network Services API
@app.route('/api/dns/records', methods=['GET'])
def get_dns_records():
+90
View File
@@ -498,6 +498,96 @@ class LogManager:
except Exception as e:
return {'error': str(e)}
def collect_container_logs(self, container_name: str, docker_client=None) -> int:
    """Append new docker container stdout/stderr to a persistent log file.

    Args:
        container_name: docker container whose logs are collected.
        docker_client: unused; kept for interface compatibility.

    Returns:
        Number of new lines written (0 when nothing new), or -1 on error.
    """
    try:
        import subprocess
        log_file = self.log_dir / f'container_{container_name}.log'
        # Determine --since timestamp from last line of existing file so
        # only entries newer than what is already stored are fetched.
        since_arg = []
        last_ts = None
        if log_file.exists() and log_file.stat().st_size > 0:
            line = ''  # guard against a file that yields no lines
            with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                for line in f:
                    pass
            # Parse last timestamp from docker log line (format: 2006-01-02T15:04:05...)
            ts_match = re.match(r'^(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+Z)', line.strip())
            if ts_match:
                last_ts = ts_match.group(1)
                since_arg = ['--since', last_ts]
        result = subprocess.run(
            ['docker', 'logs', '--timestamps'] + since_arg + [container_name],
            capture_output=True, text=True, timeout=30
        )
        output = result.stdout + result.stderr
        lines = [l for l in output.splitlines() if l.strip()]
        if last_ts:
            # `docker logs --since` is inclusive, so the entry at exactly the
            # stored timestamp would be appended again on every collection.
            # RFC3339 UTC stamps are fixed-width, so lexicographic compare is
            # chronological — keep only strictly newer lines.
            lines = [l for l in lines if l[:len(last_ts)] > last_ts]
        if lines:
            with open(log_file, 'a', encoding='utf-8') as f:
                f.write('\n'.join(lines) + '\n')
        # Previously fell through (returning None) when nothing new arrived;
        # the documented contract is a line count, so return 0 explicitly.
        return len(lines)
    except Exception as e:
        logger.error(f"Error collecting container logs for {container_name}: {e}")
        return -1
def get_container_log_lines(self, container_name: str, lines: int = 100) -> List[str]:
    """Read last N lines from stored container log file.

    Args:
        container_name: container whose stored log is read.
        lines: number of trailing lines to return; <= 0 returns all lines.

    Returns:
        List of right-stripped lines; [] when the file is absent or on error.
    """
    try:
        from collections import deque
        log_file = self.log_dir / f'container_{container_name}.log'
        if not log_file.exists():
            return []
        with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
            if lines > 0:
                # deque with maxlen keeps only the tail instead of loading
                # the whole (potentially large) log file into memory.
                tail = deque(f, maxlen=lines)
            else:
                tail = list(f)
        return [l.rstrip() for l in tail]
    except Exception as e:
        logger.error(f"Error reading container log for {container_name}: {e}")
        return []
def rotate_container_log(self, container_name: str):
    """Rotate a stored container log file.

    Uses logrotate-style numbering consistently: '.1' is always the most
    recent backup, higher suffixes are older, and at most
    self.backup_count backups are kept (the oldest is dropped).

    The previous implementation mixed two conventions: it filled the first
    free slot (making low numbers the OLDEST), but once full it deleted
    '.{backup_count}' — the newest backup under that scheme — and then
    shifted with the opposite low-is-newest convention, corrupting the
    backup ordering.
    """
    try:
        log_file = self.log_dir / f'container_{container_name}.log'
        if not log_file.exists():
            return
        # Drop the oldest backup if present, then shift the rest up a slot.
        oldest = self.log_dir / f'container_{container_name}.log.{self.backup_count}'
        oldest.unlink(missing_ok=True)
        for i in range(self.backup_count - 1, 0, -1):
            src = self.log_dir / f'container_{container_name}.log.{i}'
            if src.exists():
                src.rename(self.log_dir / f'container_{container_name}.log.{i + 1}')
        log_file.rename(self.log_dir / f'container_{container_name}.log.1')
        logger.info(f"Rotated container log for {container_name}")
    except Exception as e:
        logger.error(f"Error rotating container log for {container_name}: {e}")
def get_all_log_file_infos(self) -> List[Dict[str, Any]]:
    """Return size/mtime info for all log files (API service logs + container logs).

    Scans self.log_dir for '*.log' files (rotated '.log.N' backups are not
    matched) and returns one dict per file with keys: name (file stem),
    label (stem with the 'container_' prefix stripped), kind ('container'
    or 'service'), file, size (bytes), modified (ISO timestamp).
    Files that cannot be stat'ed are skipped best-effort.
    """
    results = []
    for log_file in sorted(self.log_dir.glob('*.log')):
        try:
            stat = log_file.stat()
            name = log_file.stem  # e.g. 'wireguard' or 'container_cell-api'
            kind = 'container' if name.startswith('container_') else 'service'
            label = name[len('container_'):] if kind == 'container' else name
            results.append({
                'name': name,
                'label': label,
                'kind': kind,
                'file': log_file.name,
                'size': stat.st_size,
                'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(),
            })
        except Exception:
            # Best-effort: a file vanishing mid-scan should not fail the list.
            pass
    return results
def compress_old_logs(self):
"""Compress old log files to save space"""
try: