kin: auto-commit after pipeline
This commit is contained in:
parent
17d7806838
commit
eab9e951ab
12 changed files with 1696 additions and 5 deletions
46
core/db.py
46
core/db.py
|
|
@ -29,6 +29,10 @@ CREATE TABLE IF NOT EXISTS projects (
|
|||
ssh_key_path TEXT,
|
||||
ssh_proxy_jump TEXT,
|
||||
description TEXT,
|
||||
deploy_host TEXT,
|
||||
deploy_path TEXT,
|
||||
deploy_runtime TEXT,
|
||||
deploy_restart_cmd TEXT,
|
||||
autocommit_enabled INTEGER DEFAULT 0,
|
||||
obsidian_vault_path TEXT,
|
||||
worktrees_enabled INTEGER DEFAULT 0,
|
||||
|
|
@ -302,6 +306,18 @@ CREATE TABLE IF NOT EXISTS task_attachments (
|
|||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_task_attachments_task ON task_attachments(task_id);
|
||||
|
||||
-- Live console log (KIN-084)
|
||||
CREATE TABLE IF NOT EXISTS pipeline_log (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
pipeline_id INTEGER NOT NULL REFERENCES pipelines(id),
|
||||
ts TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
level TEXT NOT NULL DEFAULT 'INFO',
|
||||
message TEXT NOT NULL,
|
||||
extra_json TEXT
|
||||
);
|
||||
|
||||
CREATE INDEX IF NOT EXISTS idx_pipeline_log_pipeline_id ON pipeline_log(pipeline_id, id);
|
||||
"""
|
||||
|
||||
|
||||
|
|
@ -419,6 +435,22 @@ def _migrate(conn: sqlite3.Connection):
|
|||
conn.execute("ALTER TABLE projects ADD COLUMN description TEXT")
|
||||
conn.commit()
|
||||
|
||||
if "deploy_host" not in proj_cols:
|
||||
conn.execute("ALTER TABLE projects ADD COLUMN deploy_host TEXT")
|
||||
conn.commit()
|
||||
|
||||
if "deploy_path" not in proj_cols:
|
||||
conn.execute("ALTER TABLE projects ADD COLUMN deploy_path TEXT")
|
||||
conn.commit()
|
||||
|
||||
if "deploy_runtime" not in proj_cols:
|
||||
conn.execute("ALTER TABLE projects ADD COLUMN deploy_runtime TEXT")
|
||||
conn.commit()
|
||||
|
||||
if "deploy_restart_cmd" not in proj_cols:
|
||||
conn.execute("ALTER TABLE projects ADD COLUMN deploy_restart_cmd TEXT")
|
||||
conn.commit()
|
||||
|
||||
# Migrate audit_log + project_phases tables
|
||||
existing_tables = {r[0] for r in conn.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table'"
|
||||
|
|
@ -612,6 +644,20 @@ def _migrate(conn: sqlite3.Connection):
|
|||
""")
|
||||
conn.commit()
|
||||
|
||||
if "pipeline_log" not in existing_tables:
|
||||
conn.executescript("""
|
||||
CREATE TABLE IF NOT EXISTS pipeline_log (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
pipeline_id INTEGER NOT NULL REFERENCES pipelines(id),
|
||||
ts TEXT NOT NULL DEFAULT (datetime('now')),
|
||||
level TEXT NOT NULL DEFAULT 'INFO',
|
||||
message TEXT NOT NULL,
|
||||
extra_json TEXT
|
||||
);
|
||||
CREATE INDEX IF NOT EXISTS idx_pipeline_log_pipeline_id ON pipeline_log(pipeline_id, id);
|
||||
""")
|
||||
conn.commit()
|
||||
|
||||
# Migrate pipelines: add parent_pipeline_id and department columns (KIN-098)
|
||||
# Guard: table may not exist in legacy schemas without pipelines (old test fixtures)
|
||||
if "pipelines" in existing_tables:
|
||||
|
|
|
|||
223
core/deploy.py
Normal file
223
core/deploy.py
Normal file
|
|
@ -0,0 +1,223 @@
|
|||
"""
|
||||
Kin — structured deploy module.
|
||||
|
||||
Business logic for project deployments:
|
||||
- Runtime-based step templates (docker/node/python/static)
|
||||
- Local and SSH execution
|
||||
- Dependency chain traversal via project_links
|
||||
"""
|
||||
|
||||
import shlex
import sqlite3
import subprocess
import time
|
||||
|
||||
VALID_RUNTIMES = {"docker", "node", "python", "static"}

# Base command templates per runtime.
# deploy_restart_cmd (if set) is appended as the final step for all runtimes.
RUNTIME_STEPS = {
    "docker": ["git pull", "docker compose up -d --build"],
    "node": ["git pull", "npm install --production", "pm2 restart all"],
    "python": ["git pull", "pip install -r requirements.txt"],
    "static": ["git pull", "nginx -s reload"],
}


def build_deploy_steps(project: dict) -> list[str]:
    """Assemble the ordered deploy commands for *project*.

    The project's deploy_runtime selects a base template from RUNTIME_STEPS;
    a missing or unknown runtime yields an empty list.  A non-blank
    deploy_restart_cmd is appended (stripped) as the final command.
    """
    template = RUNTIME_STEPS.get(project.get("deploy_runtime") or "")
    if template is None:
        return []

    commands = [*template]  # copy so the shared template is never mutated

    restart = (project.get("deploy_restart_cmd") or "").strip()
    if restart:
        commands.append(restart)

    return commands
|
||||
|
||||
|
||||
def _build_ssh_cmd(project: dict, command: str) -> list[str]:
|
||||
"""Build SSH subprocess command list to run a shell command on deploy_host."""
|
||||
deploy_host = project.get("deploy_host") or project.get("ssh_host")
|
||||
ssh_user = project.get("ssh_user") or "root"
|
||||
ssh_key = project.get("ssh_key_path")
|
||||
proxy_jump = project.get("ssh_proxy_jump")
|
||||
deploy_path = project.get("deploy_path")
|
||||
|
||||
full_cmd = f"cd {deploy_path} && {command}" if deploy_path else command
|
||||
|
||||
cmd = ["ssh"]
|
||||
if ssh_key:
|
||||
cmd += ["-i", ssh_key]
|
||||
if proxy_jump:
|
||||
cmd += ["-J", proxy_jump]
|
||||
cmd += ["-o", "StrictHostKeyChecking=no", "-o", "BatchMode=yes"]
|
||||
cmd += [f"{ssh_user}@{deploy_host}", full_cmd]
|
||||
return cmd
|
||||
|
||||
|
||||
def execute_deploy(project: dict, conn: sqlite3.Connection) -> dict:
    """Run the project's deploy steps sequentially, stopping at the first failure.

    Steps run over SSH when a deploy host (deploy_host or ssh_host) is
    configured, otherwise locally in deploy_path (falling back to the
    project path).  Each step gets a 120 second timeout.

    Returns:
        {
            "success": bool,
            "steps": list[str],
            "results": list[{"step", "stdout", "stderr", "exit_code"}],
        }
        plus an "error" key when no steps could be built.
    """
    steps = build_deploy_steps(project)
    if not steps:
        return {
            "success": False,
            "steps": [],
            "results": [],
            "error": "No deploy steps: deploy_runtime not set or invalid",
        }

    remote_host = project.get("deploy_host") or project.get("ssh_host")
    workdir = project.get("deploy_path") or project.get("path") or None

    results: list[dict] = []
    ok = True

    for step in steps:
        try:
            if remote_host:
                proc = subprocess.run(
                    _build_ssh_cmd(project, step),
                    capture_output=True,
                    text=True,
                    timeout=120,
                )
            else:
                # WARNING: shell=True — deploy commands are admin-only,
                # set by the project owner.
                proc = subprocess.run(
                    step,
                    shell=True,  # WARNING: shell=True — command is admin-only
                    cwd=workdir,
                    capture_output=True,
                    text=True,
                    timeout=120,
                )
        except subprocess.TimeoutExpired:
            results.append({
                "step": step,
                "stdout": "",
                "stderr": "Timed out after 120 seconds",
                "exit_code": -1,
            })
            ok = False
            break
        except Exception as e:
            results.append({
                "step": step,
                "stdout": "",
                "stderr": str(e),
                "exit_code": -1,
            })
            ok = False
            break

        results.append({
            "step": step,
            "stdout": proc.stdout,
            "stderr": proc.stderr,
            "exit_code": proc.returncode,
        })

        if proc.returncode != 0:
            ok = False
            break  # Stop on first failure

    return {"success": ok, "steps": steps, "results": results}
|
||||
|
||||
|
||||
def get_deploy_chain(conn: sqlite3.Connection, project_id: str) -> list[str]:
    """Breadth-first list of projects to redeploy when *project_id* changes.

    project_links rows with type='depends_on' mean from_project depends on
    to_project, so when to_project changes every (transitive) dependent
    must be redeployed too.  The result always starts with project_id;
    the seen-set makes cycles safe.
    """
    chain: list[str] = [project_id]
    seen: set[str] = {project_id}
    cursor = 0  # read index into chain — chain doubles as the BFS queue

    while cursor < len(chain):
        node = chain[cursor]
        cursor += 1
        dependents = conn.execute(
            "SELECT from_project FROM project_links"
            " WHERE to_project = ? AND type = 'depends_on'",
            (node,),
        ).fetchall()
        for row in dependents:
            dependent = row[0]
            if dependent not in seen:
                seen.add(dependent)
                chain.append(dependent)

    return chain
|
||||
|
||||
|
||||
def deploy_with_dependents(conn: sqlite3.Connection, project_id: str) -> dict:
    """Deploy *project_id*, then each dependent project in chain order.

    Dependents are only attempted when the main deploy succeeds.  A failing
    dependent marks the overall result as failed but does not stop the
    remaining dependents; unknown dependent ids are skipped silently.

    Returns:
        {
            "success": bool,
            "steps": list[str],        # main project steps
            "results": list[dict],     # main project step results
            "dependents_deployed": list[str],
        }
        plus an "error" key when the project itself is unknown.
    """
    from core.models import get_project

    chain = get_deploy_chain(conn, project_id)
    dependents = chain[1:]  # chain always starts with project_id itself

    main_project = get_project(conn, project_id)
    if not main_project:
        return {
            "success": False,
            "steps": [],
            "results": [],
            "dependents_deployed": [],
            "error": f"Project '{project_id}' not found",
        }

    main_result = execute_deploy(main_project, conn)
    success = main_result["success"]

    deployed: list[str] = []
    if success:
        for dep_id in dependents:
            dep_project = get_project(conn, dep_id)
            if not dep_project:
                continue
            if execute_deploy(dep_project, conn)["success"]:
                deployed.append(dep_id)
            else:
                success = False

    return {
        "success": success,
        "steps": main_result.get("steps", []),
        "results": main_result.get("results", []),
        "dependents_deployed": deployed,
    }
|
||||
121
core/models.py
121
core/models.py
|
|
@ -531,6 +531,86 @@ def get_running_pipelines_with_pid(conn: sqlite3.Connection) -> list[dict]:
|
|||
return _rows_to_list(rows)
|
||||
|
||||
|
||||
def get_pipeline_for_watch(conn: sqlite3.Connection, task_id: str) -> dict | None:
    """Latest top-level pipeline for *task_id* (used by `kin watch`).

    Child pipelines (parent_pipeline_id set) are excluded; ties on
    created_at break toward the higher id.  Returns None when the task
    has no top-level pipelines.
    """
    cursor = conn.execute(
        """SELECT id, task_id, project_id, status, pid, steps, created_at, completed_at
        FROM pipelines
        WHERE task_id = ? AND parent_pipeline_id IS NULL
        ORDER BY created_at DESC, id DESC LIMIT 1""",
        (task_id,),
    )
    return _row_to_dict(cursor.fetchone())
|
||||
|
||||
|
||||
def get_current_agent_log(
    conn: sqlite3.Connection, task_id: str, since_iso: str
) -> dict | None:
    """Most recent agent log for *task_id* created at/after *since_iso*.

    Used by `kin watch` to show the agent currently working the task;
    returns None when no agent has logged since the given timestamp.
    """
    cursor = conn.execute(
        """SELECT agent_role, output_summary, duration_seconds, success, created_at
        FROM agent_logs
        WHERE task_id = ? AND created_at >= ?
        ORDER BY id DESC LIMIT 1""",
        (task_id, since_iso),
    )
    return _row_to_dict(cursor.fetchone())
|
||||
|
||||
|
||||
def write_log(
    conn: sqlite3.Connection,
    pipeline_id: int,
    message: str,
    level: str = "INFO",
    extra: dict | list | None = None,
) -> dict:
    """Insert a pipeline_log entry and return the stored row as a dict.

    *extra*, when given, is serialized to JSON (non-ASCII preserved) into
    the extra_json column; ts defaults server-side to datetime('now').
    """
    serialized = None if extra is None else json.dumps(extra, ensure_ascii=False)
    cursor = conn.execute(
        """INSERT INTO pipeline_log (pipeline_id, message, level, extra_json)
        VALUES (?, ?, ?, ?)""",
        (pipeline_id, message, level, serialized),
    )
    conn.commit()
    # Re-read the row so defaults (id, ts) come back populated.
    inserted = conn.execute(
        "SELECT * FROM pipeline_log WHERE id = ?", (cursor.lastrowid,)
    ).fetchone()
    return _row_to_dict(inserted)
|
||||
|
||||
|
||||
def get_pipeline_logs(
    conn: sqlite3.Connection,
    pipeline_id: int,
    since_id: int = 0,
) -> list[dict]:
    """Pipeline log entries with id > *since_id*, oldest first.

    since_id enables tail-style incremental polling: pass the last id you
    have seen to receive only newer entries.
    """
    cursor = conn.execute(
        """SELECT * FROM pipeline_log
        WHERE pipeline_id = ? AND id > ?
        ORDER BY id ASC""",
        (pipeline_id, since_id),
    )
    return _rows_to_list(cursor.fetchall())
|
||||
|
||||
|
||||
def get_all_running_pipelines(conn: sqlite3.Connection) -> list[dict]:
    """Return all running pipelines with task/project info and current agent (for kin ps).

    Joins each running pipeline to its task title and project name.  The
    correlated subquery picks the newest agent_logs row written for the
    task since the pipeline started and exposes it as ``current_agent``
    (NULL when no agent has logged yet).  Newest pipelines first.
    """
    rows = conn.execute(
        """SELECT p.id, p.task_id, p.status, p.pid, p.created_at,
        p.parent_pipeline_id,
        t.title, proj.name AS project_name,
        (SELECT agent_role FROM agent_logs
        WHERE task_id = p.task_id AND created_at >= p.created_at
        ORDER BY id DESC LIMIT 1) AS current_agent
        FROM pipelines p
        JOIN tasks t ON p.task_id = t.id
        JOIN projects proj ON p.project_id = proj.id
        WHERE p.status = 'running'
        ORDER BY p.created_at DESC"""
    ).fetchall()
    return _rows_to_list(rows)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Support
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -998,6 +1078,47 @@ def get_last_handoff(
|
|||
return _row_to_dict(row)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Project Links (KIN-079)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def create_project_link(
    conn: sqlite3.Connection,
    from_project: str,
    to_project: str,
    type: str,
    description: str | None = None,
) -> dict:
    """Insert a project_links row and return it as a dict.

    NOTE: the parameter is named ``type`` (shadowing the builtin) to match
    the column name; kept as-is for keyword-caller compatibility.
    """
    cursor = conn.execute(
        """INSERT INTO project_links (from_project, to_project, type, description)
        VALUES (?, ?, ?, ?)""",
        (from_project, to_project, type, description),
    )
    conn.commit()
    # Re-read so server-side defaults (id, created_at) are included.
    created = conn.execute(
        "SELECT * FROM project_links WHERE id = ?", (cursor.lastrowid,)
    ).fetchone()
    return _row_to_dict(created)
|
||||
|
||||
|
||||
def get_project_links(conn: sqlite3.Connection, project_id: str) -> list[dict]:
    """All links touching *project_id* on either endpoint, oldest first."""
    cursor = conn.execute(
        "SELECT * FROM project_links WHERE from_project = ? OR to_project = ?"
        " ORDER BY created_at",
        (project_id, project_id),
    )
    return _rows_to_list(cursor.fetchall())
|
||||
|
||||
|
||||
def delete_project_link(conn: sqlite3.Connection, link_id: int) -> bool:
    """Delete the project link with *link_id*; True when a row was removed."""
    removed = conn.execute(
        "DELETE FROM project_links WHERE id = ?", (link_id,)
    ).rowcount
    conn.commit()
    return removed > 0
|
||||
|
||||
|
||||
def get_chat_messages(
|
||||
conn: sqlite3.Connection,
|
||||
project_id: str,
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue