kin: auto-commit after pipeline

This commit is contained in:
Gros Frumos 2026-03-17 18:29:32 +02:00
parent 94591ab7ae
commit 3d9b5766ab
10 changed files with 250 additions and 30 deletions

View file

@@ -1252,6 +1252,9 @@ def run_pipeline(
initial_previous_output: str | None = None,
parent_pipeline_id: int | None = None,
department: str | None = None,
pm_result: dict | None = None,
pm_started_at: str | None = None,
pm_ended_at: str | None = None,
) -> dict:
"""Execute a multi-step pipeline of agents.
@@ -1312,6 +1315,39 @@ def run_pipeline(
)
except Exception:
pass
# KIN-OBS-025: log PM step retrospectively (after create_pipeline FK is satisfied)
# Only for top-level pipelines — sub-pipelines have no PM step.
if pm_result is not None and not parent_pipeline_id:
try:
models.write_log(
conn, pipeline["id"],
"PM start: task planning",
level="INFO",
ts=pm_started_at,
extra={"role": "pm"},
)
except Exception:
pass
try:
_n_steps = len(steps)
_success = pm_result.get("success")
_cost = pm_result.get("cost_usd") or 0.0
_tokens = pm_result.get("tokens_used") or 0
models.write_log(
conn, pipeline["id"],
f"PM done: {_n_steps} steps planned, success={_success}, cost=${_cost:.4f}, tokens={_tokens}",
level="INFO",
ts=pm_ended_at,
extra={
"role": "pm",
"steps_count": _n_steps,
"success": _success,
"cost_usd": _cost,
"tokens_used": _tokens,
},
)
except Exception:
pass
results = []
total_cost = 0.0

View file

@@ -618,11 +618,13 @@ def run_task(ctx, task_id, dry_run, allow_write):
# Step 1: PM decomposes
click.echo("Running PM to decompose task...")
pm_started_at = datetime.utcnow().isoformat()
pm_result = run_agent(
conn, "pm", task_id, project_id,
model="sonnet", dry_run=dry_run,
allow_write=allow_write, noninteractive=is_noninteractive,
)
pm_ended_at = datetime.utcnow().isoformat()
if dry_run:
click.echo("\n--- PM Prompt (dry-run) ---")
@@ -691,7 +693,10 @@ def run_task(ctx, task_id, dry_run, allow_write):
click.echo("\nExecuting pipeline...")
result = run_pipeline(conn, task_id, pipeline_steps,
allow_write=allow_write,
noninteractive=is_noninteractive)
noninteractive=is_noninteractive,
pm_result=pm_result,
pm_started_at=pm_started_at,
pm_ended_at=pm_ended_at)
if result["success"]:
click.echo(f"\nPipeline completed: {result['steps_completed']} steps")

View file

@@ -572,9 +572,21 @@ def write_log(
message: str,
level: str = "INFO",
extra: dict | list | None = None,
ts: str | None = None,
) -> dict:
"""Insert a pipeline log entry. Returns inserted row as dict."""
"""Insert a pipeline log entry. Returns inserted row as dict.
ts: optional ISO-8601 UTC timestamp override (e.g. for retrospective PM entries).
If None, the DB DEFAULT (datetime('now')) is used.
"""
extra_json = json.dumps(extra, ensure_ascii=False) if extra is not None else None
if ts is not None:
cur = conn.execute(
"""INSERT INTO pipeline_log (pipeline_id, ts, message, level, extra_json)
VALUES (?, ?, ?, ?, ?)""",
(pipeline_id, ts, message, level, extra_json),
)
else:
cur = conn.execute(
"""INSERT INTO pipeline_log (pipeline_id, message, level, extra_json)
VALUES (?, ?, ?, ?)""",

View file

@@ -603,6 +603,22 @@ class TestDeployWithDependents:
assert "steps" in result
assert "results" in result
def test_dependent_failure_sets_overall_success_false(self, conn):
    """If a dependent project fails, overall_success becomes False even if main succeeded."""
    self._setup_chain(conn)
    # Canned execute_deploy outcomes, keyed by "is this the main project?".
    outcomes = {
        True: {"success": True, "steps": ["git pull"], "results": [{"step": "git pull", "exit_code": 0}]},
        False: {"success": False, "steps": ["git pull"], "results": [{"step": "git pull", "exit_code": 1}]},
    }
    def fake_execute(project, db_conn):
        # Only the main "api" project deploys cleanly; every dependent fails.
        return outcomes[dict(project)["id"] == "api"]
    with patch("core.deploy.execute_deploy", side_effect=fake_execute):
        result = deploy_with_dependents(conn, "api")
    assert result["success"] is False
    assert result["dependents_deployed"] == []
# ---------------------------------------------------------------------------
# 10. build_deploy_steps — python runtime full steps with restart_cmd

View file

@@ -12,7 +12,7 @@
"""
import pytest
from datetime import datetime, timedelta
from datetime import datetime, timedelta, timezone
from unittest.mock import patch
from core.db import init_db
@@ -51,14 +51,15 @@ def project_and_task(conn):
# ---------------------------------------------------------------------------
def _call_format_elapsed(dt_iso: str, elapsed_seconds: int) -> str:
"""Call _format_elapsed with datetime.utcnow frozen to (started + elapsed_seconds)."""
"""Call _format_elapsed with datetime.now(timezone.utc) frozen to (started + elapsed_seconds)."""
normalized = dt_iso.replace(' ', 'T') if ' ' in dt_iso else dt_iso
started = datetime.fromisoformat(normalized)
frozen_now = started + timedelta(seconds=elapsed_seconds)
with patch("cli.watch.datetime") as mock_dt:
mock_dt.fromisoformat = datetime.fromisoformat
mock_dt.utcnow.return_value = frozen_now
# New code uses datetime.now(timezone.utc).replace(tzinfo=None)
mock_dt.now.return_value = frozen_now.replace(tzinfo=timezone.utc)
return _format_elapsed(dt_iso)

View file

@@ -0,0 +1,152 @@
"""Regression tests for KIN-OBS-022 — three fixes in cli/watch.py:
AC1: models.get_task(conn, task_id) called at the start of each while-True iteration.
AC2: 'Waiting for first agent...' appears exactly once (agent-role line);
output section uses 'No output yet.' instead of a second occurrence.
AC3: datetime.utcnow() absent from source; datetime.now(timezone.utc) used;
'timezone' imported from datetime stdlib.
"""
import inspect
import pytest
from datetime import datetime, timedelta, timezone
from unittest.mock import patch
from core.db import init_db
from core import models
from cli.watch import _format_elapsed, _render_watch, cmd_watch
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
@pytest.fixture
def conn():
    """In-memory SQLite connection, closed again on teardown."""
    db = init_db(db_path=":memory:")
    yield db
    db.close()
@pytest.fixture
def project_and_task(conn):
    """One project plus one task attached to it, both persisted via models."""
    project = models.create_project(conn, "p1", "Obs022 Project", "/obs022", tech_stack=["python"])
    watched_task = models.create_task(conn, "KIN-OBS-022-T1", "p1", "Watch task")
    return project, watched_task
# ---------------------------------------------------------------------------
# AC1: task re-read from DB in each loop iteration
# ---------------------------------------------------------------------------
def test_cmd_watch_calls_get_task_inside_loop(conn, project_and_task):
    """models.get_task must be called at least twice: once before the loop,
    once inside the loop body so status changes in DB are picked up."""
    task = project_and_task[1]
    pipeline = models.create_pipeline(conn, task["id"], "p1", "standard", [])
    # A completed pipeline makes cmd_watch exit after one iteration.
    models.update_pipeline(conn, pipeline["id"], status="completed")
    with patch("cli.watch.time.sleep"), \
            patch("cli.watch._clear_screen"), \
            patch("cli.watch.models.get_task", wraps=models.get_task) as spy:
        cmd_watch(conn, task["id"])
    # ≥2: one call before the existence check, one inside the loop body.
    assert spy.call_count >= 2
def test_cmd_watch_loop_get_task_uses_correct_task_id(conn, project_and_task):
    """Every get_task call in cmd_watch passes the same task_id.

    Robustness fixes over the naive version:
    - guard against a vacuously-passing empty call list;
    - accept task_id passed either positionally or as a keyword
      (bare ``call.args[1]`` would raise IndexError, not a clean
      assertion failure, on a keyword call).
    """
    _, task = project_and_task
    pipeline = models.create_pipeline(conn, task["id"], "p1", "standard", [])
    # Completed pipeline -> cmd_watch terminates after one loop pass.
    models.update_pipeline(conn, pipeline["id"], status="completed")
    with patch("cli.watch.models.get_task", wraps=models.get_task) as mock_get_task, \
            patch("cli.watch._clear_screen"), \
            patch("cli.watch.time.sleep"):
        cmd_watch(conn, task["id"])
    assert mock_get_task.call_count > 0  # the loop must actually consult get_task
    for call in mock_get_task.call_args_list:
        # Positional: get_task(conn, task_id); keyword: get_task(conn, task_id=...)
        passed = call.args[1] if len(call.args) > 1 else call.kwargs.get("task_id")
        assert passed == task["id"]
# ---------------------------------------------------------------------------
# AC2: 'Waiting for first agent...' exactly once; output section → 'No output yet.'
# ---------------------------------------------------------------------------
def test_render_watch_waiting_message_appears_exactly_once(capsys):
    """When pipeline has no log, 'Waiting for first agent...' appears once
    (agent-role line), and 'No output yet.' appears in the output section."""
    task = {"id": "KIN-001", "title": "T", "status": "in_progress"}
    pipeline = {"id": 1, "status": "running", "pid": None,
                "created_at": "2026-03-17 12:00:00", "steps": []}
    # Elapsed-time formatting is irrelevant here; pin it.
    with patch("cli.watch._format_elapsed", return_value="0s"):
        _render_watch(task, pipeline, None, 0, "?")
    rendered = capsys.readouterr().out
    assert rendered.count("Waiting for first agent") == 1
def test_render_watch_output_section_shows_no_output_yet_not_waiting(capsys):
    """Output section must say 'No output yet.' — not 'Waiting for first agent...'."""
    watched = {"id": "KIN-001", "title": "T", "status": "in_progress"}
    pipe = {"id": 1, "status": "running", "pid": None,
            "created_at": "2026-03-17 12:00:00", "steps": []}
    with patch("cli.watch._format_elapsed", return_value="0s"):
        _render_watch(watched, pipe, None, 0, "?")
    screen = capsys.readouterr().out
    assert "No output yet." in screen
def test_render_watch_log_with_empty_output_summary_shows_no_output_yet(capsys):
    """Log present but output_summary is None → 'No output yet.' in output section."""
    task_row = {"id": "KIN-001", "title": "T", "status": "in_progress"}
    pipeline_row = {"id": 1, "status": "running", "pid": 42,
                    "created_at": "2026-03-17 12:00:00", "steps": []}
    # An agent has logged, but has produced no summary yet.
    latest_log = {"agent_role": "dev", "output_summary": None}
    with patch("cli.watch._format_elapsed", return_value="5s"):
        _render_watch(task_row, pipeline_row, latest_log, 1, 3)
    out = capsys.readouterr().out
    assert "No output yet." in out
    assert "Waiting for first agent" not in out
# ---------------------------------------------------------------------------
# AC3: datetime.utcnow() absent; timezone imported; now(timezone.utc) used
# ---------------------------------------------------------------------------
def test_watch_py_does_not_contain_utcnow():
    """cli/watch.py must not reference datetime.utcnow()."""
    import cli.watch as watch_module
    # Any occurrence of the substring counts as a regression.
    assert "utcnow" not in inspect.getsource(watch_module)
def test_watch_py_imports_timezone():
    """cli/watch.py must import 'timezone' from stdlib datetime."""
    import cli.watch as watch_module
    module_source = inspect.getsource(watch_module)
    assert "timezone" in module_source
def test_format_elapsed_mocked_via_now_not_utcnow():
    """_format_elapsed is controllable through datetime.now(), not utcnow().
    Verifies the call path changed to datetime.now(timezone.utc)."""
    dt_iso = "2026-03-17 12:00:00"
    base = datetime.fromisoformat("2026-03-17T12:00:00")
    # Freeze "now" 75 seconds past the start time, as an aware UTC datetime.
    pinned = (base + timedelta(seconds=75)).replace(tzinfo=timezone.utc)
    with patch("cli.watch.datetime") as mock_dt:
        mock_dt.fromisoformat = datetime.fromisoformat
        mock_dt.now.return_value = pinned
        elapsed = _format_elapsed(dt_iso)
    assert elapsed == "1m 15s"
    mock_dt.now.assert_called_once()
    mock_dt.utcnow.assert_not_called()

View file

@@ -9,6 +9,7 @@ import mimetypes
import shutil
import subprocess
import sys
import time
from pathlib import Path
# Ensure project root on sys.path
@@ -27,6 +28,7 @@ from agents.bootstrap import (
detect_tech_stack, detect_modules, extract_decisions_from_claude_md,
find_vault_root, scan_obsidian, save_to_db,
)
from core.deploy import VALID_RUNTIMES, deploy_with_dependents
DB_PATH = Path.home() / ".kin" / "kin.db"
@@ -226,9 +228,6 @@ class ProjectCreate(BaseModel):
return self
VALID_DEPLOY_RUNTIMES = {"docker", "node", "python", "static"}
class ProjectPatch(BaseModel):
execution_mode: str | None = None
autocommit_enabled: bool | None = None
@@ -266,8 +265,8 @@ def patch_project(project_id: str, body: ProjectPatch):
raise HTTPException(400, f"Invalid execution_mode '{body.execution_mode}'. Must be one of: {', '.join(VALID_EXECUTION_MODES)}")
if body.project_type is not None and body.project_type not in VALID_PROJECT_TYPES:
raise HTTPException(400, f"Invalid project_type '{body.project_type}'. Must be one of: {', '.join(VALID_PROJECT_TYPES)}")
if body.deploy_runtime is not None and body.deploy_runtime != "" and body.deploy_runtime not in VALID_DEPLOY_RUNTIMES:
raise HTTPException(400, f"Invalid deploy_runtime '{body.deploy_runtime}'. Must be one of: {', '.join(sorted(VALID_DEPLOY_RUNTIMES))}")
if body.deploy_runtime is not None and body.deploy_runtime != "" and body.deploy_runtime not in VALID_RUNTIMES:
raise HTTPException(400, f"Invalid deploy_runtime '{body.deploy_runtime}'. Must be one of: {', '.join(sorted(VALID_RUNTIMES))}")
conn = get_conn()
p = models.get_project(conn, project_id)
if not p:
@@ -352,9 +351,6 @@ def deploy_project(project_id: str):
WARNING: shell=True deploy commands are admin-only, set in Settings by the project owner.
"""
import time
from core.deploy import deploy_with_dependents
conn = get_conn()
p = models.get_project(conn, project_id)
if not p:

View file

@@ -161,6 +161,7 @@ export interface PipelineStep {
export interface DeployStepResult {
step: string
command: string
stdout: string
stderr: string
exit_code: number
@@ -189,7 +190,7 @@ export interface ProjectLink {
id: number
from_project: string
to_project: string
type: string
link_type: string
description: string | null
created_at: string
}
@@ -198,6 +199,7 @@ export interface TaskFull extends Task {
pipeline_steps: PipelineStep[]
related_decisions: Decision[]
project_deploy_command: string | null
project_deploy_runtime: string | null
pipeline_id: string | null
}
@@ -411,7 +413,7 @@ export const api = {
get<PipelineLog[]>(`/pipelines/${pipelineId}/logs?since_id=${sinceId}`),
projectLinks: (projectId: string) =>
get<ProjectLink[]>(`/projects/${projectId}/links`),
createProjectLink: (data: { from_project: string; to_project: string; type: string; description?: string }) =>
createProjectLink: (data: { from_project: string; to_project: string; link_type: string; description?: string }) =>
post<ProjectLink>('/project-links', data),
deleteProjectLink: (id: number) =>
del<void>(`/project-links/${id}`),

View file

@@ -395,7 +395,7 @@ const links = ref<ProjectLink[]>([])
const linksLoading = ref(false)
const linksError = ref('')
const showAddLink = ref(false)
const linkForm = ref({ to_project: '', type: 'depends_on', description: '' })
const linkForm = ref({ to_project: '', link_type: 'depends_on', description: '' })
const linkFormError = ref('')
const linkSaving = ref(false)
@@ -419,11 +419,11 @@ async function addLink() {
await api.createProjectLink({
from_project: props.id,
to_project: linkForm.value.to_project,
type: linkForm.value.type,
link_type: linkForm.value.link_type,
description: linkForm.value.description || undefined,
})
showAddLink.value = false
linkForm.value = { to_project: '', type: 'depends_on', description: '' }
linkForm.value = { to_project: '', link_type: 'depends_on', description: '' }
await loadLinks()
} catch (e: any) {
linkFormError.value = e.message
@@ -1368,7 +1368,7 @@ async function addDecision() {
<span class="text-gray-400 font-mono text-xs">{{ link.from_project }}</span>
<span class="text-gray-600">-></span>
<span class="text-gray-400 font-mono text-xs">{{ link.to_project }}</span>
<span class="px-1.5 py-0.5 text-[10px] bg-indigo-900/30 text-indigo-400 border border-indigo-800 rounded">{{ link.type }}</span>
<span class="px-1.5 py-0.5 text-[10px] bg-indigo-900/30 text-indigo-400 border border-indigo-800 rounded">{{ link.link_type }}</span>
<span v-if="link.description" class="text-gray-500 text-xs">{{ link.description }}</span>
</div>
<button @click="deleteLink(link.id)"
@@ -1379,7 +1379,7 @@ async function addDecision() {
</div>
<!-- Add Link Modal -->
<Modal v-if="showAddLink" title="Add Link" @close="showAddLink = false; linkForm = { to_project: '', type: 'depends_on', description: '' }; linkFormError = ''">
<Modal v-if="showAddLink" title="Add Link" @close="showAddLink = false; linkForm = { to_project: '', link_type: 'depends_on', description: '' }; linkFormError = ''">
<form @submit.prevent="addLink" class="space-y-3">
<div>
<label class="block text-xs text-gray-500 mb-1">From (current project)</label>
@@ -1396,7 +1396,7 @@ async function addDecision() {
</div>
<div>
<label class="block text-xs text-gray-500 mb-1">Link type</label>
<select v-model="linkForm.type"
<select v-model="linkForm.link_type"
class="w-full bg-gray-800 border border-gray-700 rounded px-3 py-2 text-sm text-gray-300">
<option value="depends_on">depends_on</option>
<option value="triggers">triggers</option>

View file

@@ -50,10 +50,10 @@ async function saveDeployConfig(projectId: string) {
saveDeployConfigStatus.value[projectId] = ''
try {
await api.patchProject(projectId, {
deploy_host: deployHosts.value[projectId] || undefined,
deploy_path: deployPaths.value[projectId] || undefined,
deploy_runtime: deployRuntimes.value[projectId] || undefined,
deploy_restart_cmd: deployRestartCmds.value[projectId] || undefined,
deploy_host: deployHosts.value[projectId],
deploy_path: deployPaths.value[projectId],
deploy_runtime: deployRuntimes.value[projectId],
deploy_restart_cmd: deployRestartCmds.value[projectId],
})
saveDeployConfigStatus.value[projectId] = 'Saved'
} catch (e) {