kin: KIN-021 Аудит-лог для --dangerously-skip-permissions в auto mode

This commit is contained in:
Gros Frumos 2026-03-16 07:13:32 +02:00
parent 67071c757d
commit a0b0976d8d
16 changed files with 1477 additions and 14 deletions

View file

@ -91,7 +91,7 @@ def build_context(
def _slim_task(task: dict) -> dict:
"""Extract only relevant fields from a task for the prompt."""
return {
result = {
"id": task["id"],
"title": task["title"],
"status": task["status"],
@ -100,6 +100,9 @@ def _slim_task(task: dict) -> dict:
"brief": task.get("brief"),
"spec": task.get("spec"),
}
if task.get("revise_comment"):
result["revise_comment"] = task["revise_comment"]
return result
def _slim_project(project: dict) -> dict:

View file

@ -42,6 +42,8 @@ CREATE TABLE IF NOT EXISTS tasks (
forgejo_issue_id INTEGER,
execution_mode TEXT,
blocked_reason TEXT,
dangerously_skipped BOOLEAN DEFAULT 0,
revise_comment TEXT,
created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
);
@ -135,6 +137,20 @@ CREATE TABLE IF NOT EXISTS hook_logs (
created_at TEXT DEFAULT (datetime('now'))
);
-- Audit log for dangerous operations (--dangerously-skip-permissions)
CREATE TABLE IF NOT EXISTS audit_log (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,        -- when the event was recorded
    task_id TEXT REFERENCES tasks(id),                   -- task in whose context the event happened, if any
    step_id TEXT,                                        -- step identifier within the task, if any
    event_type TEXT NOT NULL DEFAULT 'dangerous_skip',   -- 'dangerous_skip' = permissions check was skipped
    reason TEXT,                                         -- free-form justification for the skip
    project_id TEXT REFERENCES projects(id)              -- owning project, if known
);
-- Fast lookup of a task's audit trail, and of events by type ordered by time
CREATE INDEX IF NOT EXISTS idx_audit_log_task ON audit_log(task_id);
CREATE INDEX IF NOT EXISTS idx_audit_log_event ON audit_log(event_type, timestamp);
-- Кросс-проектные зависимости
CREATE TABLE IF NOT EXISTS project_links (
id INTEGER PRIMARY KEY AUTOINCREMENT,
@ -220,6 +236,38 @@ def _migrate(conn: sqlite3.Connection):
conn.execute("ALTER TABLE projects ADD COLUMN autocommit_enabled INTEGER DEFAULT 0")
conn.commit()
if "dangerously_skipped" not in task_cols:
conn.execute("ALTER TABLE tasks ADD COLUMN dangerously_skipped BOOLEAN DEFAULT 0")
conn.commit()
if "revise_comment" not in task_cols:
conn.execute("ALTER TABLE tasks ADD COLUMN revise_comment TEXT")
conn.commit()
if "obsidian_vault_path" not in proj_cols:
conn.execute("ALTER TABLE projects ADD COLUMN obsidian_vault_path TEXT")
conn.commit()
# Migrate audit_log table (KIN-021)
existing_tables = {r[0] for r in conn.execute(
"SELECT name FROM sqlite_master WHERE type='table'"
).fetchall()}
if "audit_log" not in existing_tables:
conn.executescript("""
CREATE TABLE IF NOT EXISTS audit_log (
id INTEGER PRIMARY KEY AUTOINCREMENT,
timestamp DATETIME DEFAULT CURRENT_TIMESTAMP,
task_id TEXT REFERENCES tasks(id),
step_id TEXT,
event_type TEXT NOT NULL DEFAULT 'dangerous_skip',
reason TEXT,
project_id TEXT REFERENCES projects(id)
);
CREATE INDEX IF NOT EXISTS idx_audit_log_task ON audit_log(task_id);
CREATE INDEX IF NOT EXISTS idx_audit_log_event ON audit_log(event_type, timestamp);
""")
conn.commit()
# Rename legacy 'auto' → 'auto_complete' (KIN-063)
conn.execute(
"UPDATE projects SET execution_mode = 'auto_complete' WHERE execution_mode = 'auto'"

View file

@ -207,7 +207,7 @@ def resolve_pending_action(
if choice == "manual_task":
new_id = _next_task_id(conn, project_id)
brief_dict = {"source": f"followup:{task_id}"}
brief_dict = {"source": f"followup:{task_id}", "task_type": "manual_escalation"}
if item.get("type"):
brief_dict["route_type"] = item["type"]
if item.get("brief"):

View file

@ -477,6 +477,58 @@ def list_tickets(
return _rows_to_list(conn.execute(query, params).fetchall())
# ---------------------------------------------------------------------------
# Audit Log
# ---------------------------------------------------------------------------
def log_audit_event(
    conn: sqlite3.Connection,
    event_type: str,
    task_id: str | None = None,
    step_id: str | None = None,
    reason: str | None = None,
    project_id: str | None = None,
) -> dict:
    """Log a security-sensitive event to audit_log.

    event_type='dangerous_skip' is used when --dangerously-skip-permissions is invoked.
    """
    # Insert the event, then read the row back so the caller gets the
    # DB-assigned id and timestamp defaults.
    cursor = conn.execute(
        """INSERT INTO audit_log (event_type, task_id, step_id, reason, project_id)
           VALUES (?, ?, ?, ?, ?)""",
        (event_type, task_id, step_id, reason, project_id),
    )
    conn.commit()
    inserted = conn.execute(
        "SELECT * FROM audit_log WHERE id = ?", (cursor.lastrowid,)
    ).fetchone()
    return _row_to_dict(inserted)
def get_audit_log(
    conn: sqlite3.Connection,
    task_id: str | None = None,
    project_id: str | None = None,
    event_type: str | None = None,
    limit: int = 100,
) -> list[dict]:
    """Query audit log entries with optional filters."""
    # Build the WHERE clause incrementally; only truthy filters are applied.
    clauses: list[str] = []
    params: list = []
    for column, value in (
        ("task_id", task_id),
        ("project_id", project_id),
        ("event_type", event_type),
    ):
        if value:
            clauses.append(f" AND {column} = ?")
            params.append(value)
    query = "SELECT * FROM audit_log WHERE 1=1" + "".join(clauses)
    query += " ORDER BY timestamp DESC LIMIT ?"
    params.append(limit)
    return _rows_to_list(conn.execute(query, params).fetchall())
# ---------------------------------------------------------------------------
# Statistics / Dashboard
# ---------------------------------------------------------------------------

180
core/obsidian_sync.py Normal file
View file

@ -0,0 +1,180 @@
"""
Kin двусторонний sync с Obsidian vault.
Export: decisions .md-файлы с YAML frontmatter
Import: чекбоксы в .md-файлах статус задач
"""
import re
import sqlite3
from pathlib import Path
from typing import Optional
from core import models
def _slug(title: str) -> str:
"""Генерирует slug из заголовка для имени файла."""
s = title.lower()
s = re.sub(r"[^a-zа-я0-9\s-]", "", s)
s = re.sub(r"\s+", "-", s.strip())
return s[:50]
def _decision_to_md(decision: dict) -> str:
"""Форматирует decision как .md файл с YAML frontmatter."""
tags = decision.get("tags") or []
if isinstance(tags, str):
try:
import json
tags = json.loads(tags)
except Exception:
tags = []
tags_str = "[" + ", ".join(str(t) for t in tags) + "]"
created_at = (decision.get("created_at") or "")[:10] # только дата
frontmatter = (
"---\n"
f"kin_decision_id: {decision['id']}\n"
f"project: {decision['project_id']}\n"
f"type: {decision['type']}\n"
f"category: {decision.get('category') or ''}\n"
f"tags: {tags_str}\n"
f"created_at: {created_at}\n"
"---\n"
)
body = f"\n# {decision['title']}\n\n{decision['description']}\n"
return frontmatter + body
def _parse_frontmatter(text: str) -> dict:
"""Парсит YAML frontmatter из .md файла (упрощённый парсер через re)."""
result = {}
match = re.match(r"^---\n(.*?)\n---", text, re.DOTALL)
if not match:
return result
for line in match.group(1).splitlines():
if ":" in line:
key, _, val = line.partition(":")
result[key.strip()] = val.strip()
return result
def export_decisions_to_md(
    project_id: str,
    decisions: list[dict],
    vault_path: Path,
) -> list[Path]:
    """Export decisions as Obsidian .md files; return the list of files written."""
    target_dir = vault_path / project_id / "decisions"
    target_dir.mkdir(parents=True, exist_ok=True)
    written: list[Path] = []
    for decision in decisions:
        slug = _slug(decision["title"])
        file_path = target_dir / f"{decision['id']}-{slug}.md"
        file_path.write_text(_decision_to_md(decision), encoding="utf-8")
        written.append(file_path)
    return written
def parse_task_checkboxes(
    vault_path: Path,
    project_id: str,
) -> list[dict]:
    """Scan *.md files in vault/{project_id}/tasks/ and vault/{project_id}/
    for checkboxes carrying a task ID.

    Returns: [{"task_id": "KIN-013", "done": True, "title": "..."}]
    """
    checkbox_re = re.compile(r"^[-*]\s+\[([xX ])\]\s+([A-Z]+-\d+)\s+(.+)$")
    found: list[dict] = []
    # tasks/ first, then the project root; glob is non-recursive so files
    # are never visited twice.
    for directory in (vault_path / project_id / "tasks", vault_path / project_id):
        if not directory.is_dir():
            continue
        for md_path in directory.glob("*.md"):
            try:
                content = md_path.read_text(encoding="utf-8")
            except OSError:
                # Unreadable file: skip silently, best-effort scan.
                continue
            for raw in content.splitlines():
                match = checkbox_re.match(raw.strip())
                if not match:
                    continue
                mark, tid, label = match.groups()
                found.append({
                    "task_id": tid,
                    "done": mark.lower() == "x",
                    "title": label.strip(),
                })
    return found
def sync_obsidian(conn: sqlite3.Connection, project_id: str) -> dict:
    """Orchestrator: export decisions + import checkboxes.

    Returns:
        {
            "exported_decisions": int,
            "tasks_updated": int,
            "errors": list[str],
            "vault_path": str
        }
    """
    project = models.get_project(conn, project_id)
    if not project:
        raise ValueError(f"Project '{project_id}' not found")
    vault_path_str: Optional[str] = project.get("obsidian_vault_path")
    if not vault_path_str:
        raise ValueError(f"obsidian_vault_path not set for project '{project_id}'")
    vault = Path(vault_path_str)
    errors: list[str] = []

    # --- Export decisions ---
    exported = 0
    if not vault.is_dir():
        errors.append(f"Vault path does not exist or is not a directory: {vault_path_str}")
    else:
        try:
            files = export_decisions_to_md(
                project_id, models.get_decisions(conn, project_id), vault
            )
            exported = len(files)
        except Exception as e:
            errors.append(f"Export error: {e}")

    # --- Import checkboxes ---
    tasks_done = 0
    if vault.is_dir():
        try:
            for entry in parse_task_checkboxes(vault, project_id):
                if not entry["done"]:
                    continue
                task = models.get_task(conn, entry["task_id"])
                # Only complete tasks that exist, belong to this project,
                # and are not already done.
                if task is None or task.get("project_id") != project_id:
                    continue
                if task.get("status") != "done":
                    models.update_task(conn, entry["task_id"], status="done")
                    tasks_done += 1
        except Exception as e:
            errors.append(f"Import error: {e}")

    return {
        "exported_decisions": exported,
        "tasks_updated": tasks_done,
        "errors": errors,
        "vault_path": vault_path_str,
    }