kin: auto-commit after pipeline
This commit is contained in:
parent
4144c521be
commit
3c902eaeab
6 changed files with 1354 additions and 7 deletions
171
tests/test_api_pipeline_logs.py
Normal file
171
tests/test_api_pipeline_logs.py
Normal file
|
|
@ -0,0 +1,171 @@
|
|||
"""Tests for GET /api/pipelines/{id}/logs endpoint (KIN-084 Live Console).
|
||||
|
||||
Convention #418: since_id cursor pagination for append-only tables.
|
||||
Convention #420: 404 for non-existent resources.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
import web.api as api_module
|
||||
from core.db import init_db
|
||||
from core import models
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Fixtures
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
@pytest.fixture
def client(tmp_path):
    """Bare TestClient backed by an isolated DB (no pre-seeded data)."""
    api_module.DB_PATH = tmp_path / "test.db"
    # Import the app only after DB_PATH is patched so it picks up the test DB.
    from web.api import app

    return TestClient(app)
|
||||
|
||||
|
||||
@pytest.fixture
def pipeline_client(tmp_path):
    """TestClient with a seeded project + task + pipeline, ready for log tests.

    Yields a ``(client, pipeline_id, db_path)`` triple.
    """
    db_path = tmp_path / "test.db"
    api_module.DB_PATH = db_path
    from web.api import app

    test_client = TestClient(app)

    # Seed the project and task through the public API.
    test_client.post("/api/projects", json={"id": "p1", "name": "P1", "path": "/p1"})
    test_client.post("/api/tasks", json={"project_id": "p1", "title": "Task 1"})

    # The pipeline is created directly in the DB (no API endpoint needed here).
    conn = init_db(db_path)
    pipeline = models.create_pipeline(conn, "P1-001", "p1", "linear", ["step1"])
    conn.close()

    yield test_client, pipeline["id"], db_path
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест: пустой pipeline → пустой список
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_get_pipeline_logs_empty_returns_empty_list(pipeline_client):
    """GET /api/pipelines/{id}/logs returns [] for a pipeline with no entries."""
    test_client, pipeline_id, _ = pipeline_client

    response = test_client.get(f"/api/pipelines/{pipeline_id}/logs")

    assert response.status_code == 200
    assert response.json() == []
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест: несуществующий pipeline → 404 (Convention #420)
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_get_pipeline_logs_nonexistent_pipeline_returns_404(client):
    """GET /api/pipelines/99999/logs returns 404 for a missing pipeline (Convention #420)."""
    response = client.get("/api/pipelines/99999/logs")
    assert response.status_code == 404
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест: 3 записи → правильные поля (id, ts, level, message, extra_json)
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_get_pipeline_logs_returns_three_entries_with_correct_fields(pipeline_client):
    """After three write_log() calls, GET returns 3 entries carrying
    id, ts, level, message and extra_json."""
    test_client, pipeline_id, db_path = pipeline_client

    conn = init_db(db_path)
    models.write_log(conn, pipeline_id, "PM started", level="INFO")
    models.write_log(conn, pipeline_id, "Running agent", level="DEBUG")
    models.write_log(conn, pipeline_id, "Agent error", level="ERROR", extra={"code": 500})
    conn.close()

    response = test_client.get(f"/api/pipelines/{pipeline_id}/logs")
    assert response.status_code == 200
    entries = response.json()
    assert len(entries) == 3

    # Every mandatory field must be present on an entry.
    head = entries[0]
    for field in ("id", "ts", "level", "message", "extra_json"):
        assert field in head, f"Поле '{field}' отсутствует в ответе"

    assert head["message"] == "PM started"
    assert head["level"] == "INFO"
    assert head["extra_json"] is None

    # The last entry carries the ERROR level and the structured extra payload.
    assert entries[2]["level"] == "ERROR"
    assert entries[2]["extra_json"] == {"code": 500}
|
||||
|
||||
|
||||
def test_get_pipeline_logs_returns_entries_in_chronological_order(pipeline_client):
    """Entries come back in chronological order (id ASC)."""
    test_client, pipeline_id, db_path = pipeline_client

    conn = init_db(db_path)
    for message in ("first", "second", "third"):
        models.write_log(conn, pipeline_id, message)
    conn.close()

    entries = test_client.get(f"/api/pipelines/{pipeline_id}/logs").json()
    assert [entry["message"] for entry in entries] == ["first", "second", "third"]
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест: since_id cursor pagination (Convention #418)
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_get_pipeline_logs_since_id_returns_entries_after_cursor(pipeline_client):
    """GET ?since_id=<id> returns only entries with id > since_id (Convention #418)."""
    test_client, pipeline_id, db_path = pipeline_client

    conn = init_db(db_path)
    for i in range(1, 6):  # five entries
        models.write_log(conn, pipeline_id, f"Message {i}")
    conn.close()

    # Fetch everything first to learn the real row ids.
    all_entries = test_client.get(f"/api/pipelines/{pipeline_id}/logs").json()
    assert len(all_entries) == 5

    # Use the id of the third entry as the pagination cursor.
    cursor = all_entries[2]["id"]

    response = test_client.get(f"/api/pipelines/{pipeline_id}/logs?since_id={cursor}")
    assert response.status_code == 200
    tail = response.json()

    # Only the two entries strictly after the cursor remain.
    assert len(tail) == 2
    assert all(entry["id"] > cursor for entry in tail)
|
||||
|
||||
|
||||
def test_get_pipeline_logs_since_id_zero_returns_all(pipeline_client):
    """GET ?since_id=0 (the default) returns every entry."""
    test_client, pipeline_id, db_path = pipeline_client

    conn = init_db(db_path)
    models.write_log(conn, pipeline_id, "A")
    models.write_log(conn, pipeline_id, "B")
    conn.close()

    response = test_client.get(f"/api/pipelines/{pipeline_id}/logs?since_id=0")

    assert response.status_code == 200
    assert len(response.json()) == 2
|
||||
|
||||
|
||||
def test_get_pipeline_logs_since_id_beyond_last_returns_empty(pipeline_client):
    """GET ?since_id=<last_id> returns [] (no entries after the last one)."""
    test_client, pipeline_id, db_path = pipeline_client

    conn = init_db(db_path)
    models.write_log(conn, pipeline_id, "Only entry")
    conn.close()

    # Look up the id of the final (and only) entry.
    last_id = test_client.get(f"/api/pipelines/{pipeline_id}/logs").json()[-1]["id"]

    response = test_client.get(f"/api/pipelines/{pipeline_id}/logs?since_id={last_id}")

    assert response.status_code == 200
    assert response.json() == []
|
||||
169
tests/test_migrate_pipeline_log.py
Normal file
169
tests/test_migrate_pipeline_log.py
Normal file
|
|
@ -0,0 +1,169 @@
|
|||
"""Tests for core/db._migrate() — pipeline_log table migration (KIN-084).
|
||||
|
||||
Convention #384: three tests for conditional DDL guard.
|
||||
Convention #385: paired schema helper.
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
|
||||
from core.db import SCHEMA, _migrate, init_db
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Helpers (Convention #385)
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def _get_tables(conn: sqlite3.Connection) -> set:
|
||||
return {r[0] for r in conn.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table'"
|
||||
).fetchall()}
|
||||
|
||||
|
||||
def _get_indexes(conn: sqlite3.Connection) -> set:
|
||||
return {r[1] for r in conn.execute(
|
||||
"SELECT * FROM sqlite_master WHERE type='index'"
|
||||
).fetchall()}
|
||||
|
||||
|
||||
def _get_columns(conn: sqlite3.Connection, table: str) -> set:
|
||||
return {r[1] for r in conn.execute(f"PRAGMA table_info({table})").fetchall()}
|
||||
|
||||
|
||||
def _make_db_without_pipeline_log() -> sqlite3.Connection:
    """In-memory DB with the full schema minus pipeline_log (simulates a legacy DB)."""
    conn = sqlite3.connect(":memory:")
    conn.execute("PRAGMA journal_mode=WAL")
    conn.execute("PRAGMA foreign_keys=ON")
    conn.row_factory = sqlite3.Row
    # Keep everything before the pipeline_log section comment, which is the
    # last section in SCHEMA.
    legacy_schema, _, _ = SCHEMA.partition("-- Live console log (KIN-084)")
    conn.executescript(legacy_schema)
    conn.commit()
    return conn
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест 1: таблица отсутствует → _migrate() создаёт её
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_migrate_creates_pipeline_log_table_when_absent():
    """_migrate() creates the pipeline_log table if it is missing."""
    conn = _make_db_without_pipeline_log()
    tables_before = _get_tables(conn)
    assert "pipeline_log" not in tables_before

    _migrate(conn)

    tables_after = _get_tables(conn)
    assert "pipeline_log" in tables_after
    conn.close()
|
||||
|
||||
|
||||
def test_migrate_creates_pipeline_log_index_when_absent():
    """_migrate() creates idx_pipeline_log_pipeline_id when pipeline_log is missing."""
    index_name = "idx_pipeline_log_pipeline_id"
    conn = _make_db_without_pipeline_log()
    assert index_name not in _get_indexes(conn)

    _migrate(conn)

    assert index_name in _get_indexes(conn)
    conn.close()
|
||||
|
||||
|
||||
def test_migrate_created_pipeline_log_has_all_columns():
    """The pipeline_log table created by _migrate() carries every required column."""
    conn = _make_db_without_pipeline_log()

    _migrate(conn)

    required = {"id", "pipeline_id", "ts", "level", "message", "extra_json"}
    assert required <= _get_columns(conn, "pipeline_log")
    conn.close()
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест 2: таблица есть + полная схема → идемпотентность
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_migrate_idempotent_when_pipeline_log_exists():
    """Re-running _migrate() neither breaks pipeline_log nor raises."""
    conn = init_db(":memory:")
    assert "pipeline_log" in _get_tables(conn)

    # A second run must be a no-op, not an error.
    _migrate(conn)

    # Table and index both survive the repeated migration.
    assert "pipeline_log" in _get_tables(conn)
    assert "idx_pipeline_log_pipeline_id" in _get_indexes(conn)
    conn.close()
|
||||
|
||||
|
||||
def test_migrate_idempotent_preserves_existing_pipeline_log_data():
    """_migrate() must not delete rows from an existing pipeline_log."""
    from core import models

    conn = init_db(":memory:")
    # Minimal chain: project -> task -> pipeline -> one log entry.
    models.create_project(conn, "tp", "Test", "/tp")
    models.create_task(conn, "TP-001", "tp", "T")
    pipeline = models.create_pipeline(conn, "TP-001", "tp", "linear", [])
    models.write_log(conn, pipeline["id"], "test-entry")

    _migrate(conn)

    messages = [row[0] for row in conn.execute("SELECT message FROM pipeline_log")]
    assert messages == ["test-entry"]
    conn.close()
|
||||
|
||||
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
# Тест 3: таблица есть без extra_json → _migrate() не падает
|
||||
# ─────────────────────────────────────────────────────────────
|
||||
|
||||
def test_migrate_no_crash_when_pipeline_log_missing_extra_json_column():
    """_migrate() tolerates a pipeline_log that lacks the extra_json column."""
    conn = _make_db_without_pipeline_log()

    # Recreate pipeline_log using the old schema (no extra_json column).
    conn.executescript("""
        CREATE TABLE pipeline_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            pipeline_id INTEGER NOT NULL,
            ts TEXT NOT NULL DEFAULT (datetime('now')),
            level TEXT NOT NULL DEFAULT 'INFO',
            message TEXT NOT NULL
        );
    """)
    conn.commit()

    assert "pipeline_log" in _get_tables(conn)
    assert "extra_json" not in _get_columns(conn, "pipeline_log")

    # Must complete without raising.
    _migrate(conn)

    # The table still exists afterwards.
    assert "pipeline_log" in _get_tables(conn)
    conn.close()
|
||||
|
||||
|
||||
def test_migrate_does_not_add_extra_json_to_existing_pipeline_log():
    """_migrate() does not ALTER an existing pipeline_log to add extra_json."""
    conn = _make_db_without_pipeline_log()

    # Old-schema pipeline_log without the extra_json column.
    conn.executescript("""
        CREATE TABLE pipeline_log (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            pipeline_id INTEGER NOT NULL,
            ts TEXT NOT NULL DEFAULT (datetime('now')),
            level TEXT NOT NULL DEFAULT 'INFO',
            message TEXT NOT NULL
        );
    """)
    conn.commit()

    _migrate(conn)

    # Documents current behaviour: the column is NOT added retroactively.
    assert "extra_json" not in _get_columns(conn, "pipeline_log")
    conn.close()
|
||||
Loading…
Add table
Add a link
Reference in a new issue