kin: auto-commit after pipeline
This commit is contained in:
parent
c30a4c0fc4
commit
33fc38b01f
4 changed files with 459 additions and 67 deletions
|
|
@ -2602,3 +2602,165 @@ class TestCheckClaudeAuth:
|
|||
def test_ok_when_timeout(self, mock_run):
    """check_claude_auth() must not raise when the subprocess times out.

    A TimeoutExpired must be swallowed (we never block the caller on a
    timeout); the call completing without an exception is the assertion.
    """
    result = check_claude_auth()  # expected to return None, never raise
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# KIN-OBS-030: PM-шаг инструментирован в pipeline_log
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestPMStepPipelineLog:
    """Verify the PM step is recorded in pipeline_log after run_pipeline.

    KIN-OBS-030: when ``run_pipeline`` receives a ``pm_result`` for a
    top-level pipeline, exactly one ``'PM step: task decomposed'`` row must
    appear in ``pipeline_log``; runs without a ``pm_result`` and
    sub-pipelines (``parent_pipeline_id`` set) must produce none.
    """

    # The exact message the runner writes for the PM step; every query below
    # filters on it, so keep it in one place.
    _PM_MESSAGE = "PM step: task decomposed"

    @staticmethod
    def _mock_success(mock_run, mock_learn):
        """Point the subprocess and learning-extraction mocks at a clean run."""
        mock_run.return_value = _mock_claude_success({"result": "done"})
        mock_learn.return_value = {"added": 0, "skipped": 0}

    def _run_with_pm(self, conn, *, duration, tokens, cost, ended_at, **kwargs):
        """Run a single-step pipeline with a synthetic successful PM result.

        Extra keyword arguments (e.g. ``parent_pipeline_id``) are forwarded
        to ``run_pipeline`` unchanged.
        """
        pm_result = {
            "success": True,
            "duration_seconds": duration,
            "tokens_used": tokens,
            "cost_usd": cost,
        }
        run_pipeline(
            conn, "VDOL-001",
            [{"role": "debugger", "brief": "find bug"}],
            pm_result=pm_result,
            pm_started_at="2026-03-17T10:00:00",
            pm_ended_at=ended_at,
            **kwargs,
        )

    def _pm_log_rows(self, conn):
        """Return every PM-step row currently in pipeline_log."""
        return conn.execute(
            "SELECT * FROM pipeline_log WHERE message=?", (self._PM_MESSAGE,)
        ).fetchall()

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_entry_written_when_pm_result_provided(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """If pm_result is passed to run_pipeline, one PM row appears in pipeline_log."""
        self._mock_success(mock_run, mock_learn)
        self._run_with_pm(
            conn, duration=5, tokens=1000, cost=0.01,
            ended_at="2026-03-17T10:00:05",
        )
        assert len(self._pm_log_rows(conn)) == 1

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_entry_has_correct_pipeline_id(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """pipeline_id of the PM row matches the pipeline actually created."""
        self._mock_success(mock_run, mock_learn)
        self._run_with_pm(
            conn, duration=3, tokens=800, cost=0.008,
            ended_at="2026-03-17T10:00:03",
        )

        pipeline = conn.execute(
            "SELECT * FROM pipelines WHERE task_id='VDOL-001'"
        ).fetchone()
        assert pipeline is not None

        pm_log = conn.execute(
            "SELECT * FROM pipeline_log WHERE message=?", (self._PM_MESSAGE,)
        ).fetchone()
        assert pm_log is not None
        assert pm_log["pipeline_id"] == pipeline["id"]

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_entry_has_step_pm_in_extra(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """extra_json of the PM row carries role='pm' and the correct timing data."""
        self._mock_success(mock_run, mock_learn)
        self._run_with_pm(
            conn, duration=7, tokens=1500, cost=0.02,
            ended_at="2026-03-17T10:00:07",
        )

        row = conn.execute(
            "SELECT extra_json FROM pipeline_log WHERE message=?",
            (self._PM_MESSAGE,),
        ).fetchone()
        assert row is not None
        extra = json.loads(row["extra_json"])
        assert extra["role"] == "pm"
        assert extra["duration_seconds"] == 7
        assert extra["pm_started_at"] == "2026-03-17T10:00:00"
        assert extra["pm_ended_at"] == "2026-03-17T10:00:07"

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_not_written_when_pm_result_is_none(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """Without pm_result (default None) no PM row is written to pipeline_log."""
        self._mock_success(mock_run, mock_learn)
        # pm_result defaults to None — the PM step must not be logged.
        run_pipeline(conn, "VDOL-001", [{"role": "debugger", "brief": "find bug"}])
        assert len(self._pm_log_rows(conn)) == 0

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_not_written_for_sub_pipeline(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """No PM row is written for a sub-pipeline (parent_pipeline_id set)."""
        self._mock_success(mock_run, mock_learn)

        # Create the parent pipeline first; PM rows are top-level only.
        parent_pipeline = models.create_pipeline(conn, "VDOL-001", "vdol", "linear", [])

        self._run_with_pm(
            conn, duration=4, tokens=900, cost=0.009,
            ended_at="2026-03-17T10:00:04",
            parent_pipeline_id=parent_pipeline["id"],
        )
        assert len(self._pm_log_rows(conn)) == 0

    @patch("agents.runner._run_autocommit")
    @patch("agents.runner._run_learning_extraction")
    @patch("agents.runner.subprocess.run")
    def test_pm_log_no_orphan_records(
        self, mock_run, mock_learn, mock_autocommit, conn
    ):
        """FK integrity: every pipeline_log row references an existing pipeline."""
        self._mock_success(mock_run, mock_learn)
        self._run_with_pm(
            conn, duration=2, tokens=500, cost=0.005,
            ended_at="2026-03-17T10:00:02",
        )

        # Check the FK via LEFT JOIN — no row may point at a missing pipeline.
        orphans = conn.execute(
            """SELECT pl.id FROM pipeline_log pl
               LEFT JOIN pipelines p ON pl.pipeline_id = p.id
               WHERE p.id IS NULL"""
        ).fetchall()
        assert len(orphans) == 0
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue