kin: auto-commit after pipeline
This commit is contained in:
parent
eab9e951ab
commit
118b0185b1
1 changed files with 454 additions and 0 deletions
454
tests/test_deploy.py
Normal file
454
tests/test_deploy.py
Normal file
|
|
@ -0,0 +1,454 @@
|
|||
"""Tests for KIN-079 — deploy mechanism.
|
||||
|
||||
Covers:
|
||||
- core/deploy.py: build_deploy_steps, execute_deploy, get_deploy_chain, deploy_with_dependents
|
||||
- core/models.py: create_project_link, get_project_links, delete_project_link
|
||||
- web/api.py: POST /deploy, project-links CRUD endpoints
|
||||
- core/db.py: deploy columns migration
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
from core.db import init_db, _migrate
|
||||
from core import models
|
||||
from core.deploy import build_deploy_steps, execute_deploy, get_deploy_chain, deploy_with_dependents
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.fixture
def conn():
    """Yield a brand-new in-memory database per test; closed on teardown."""
    database = init_db(db_path=":memory:")
    yield database
    database.close()
|
||||
|
||||
|
||||
@pytest.fixture
def sample_project(conn):
    """Seed one project ("api") with a python/fastapi tech stack.

    No deploy_* fields are set here; tests configure the runtime they need.
    """
    project = models.create_project(
        conn,
        "api",
        "API Service",
        "/srv/api",
        tech_stack=["python", "fastapi"],
    )
    return project
|
||||
|
||||
|
||||
@pytest.fixture
def client(tmp_path):
    """Build a TestClient bound to a per-test DB file, pre-seeded with project p1."""
    import web.api as api_module

    # Point the API at an isolated database before touching the app.
    api_module.DB_PATH = tmp_path / "test.db"

    from web.api import app
    from fastapi.testclient import TestClient

    test_client = TestClient(app)
    test_client.post(
        "/api/projects",
        json={"id": "p1", "name": "P1", "path": "/p1"},
    )
    return test_client
|
||||
|
||||
|
||||
def _make_proc(returncode=0, stdout="ok", stderr=""):
    """Build a MagicMock shaped like a subprocess.CompletedProcess result."""
    return MagicMock(returncode=returncode, stdout=stdout, stderr=stderr)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 1. build_deploy_steps
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestBuildDeploySteps:
    """build_deploy_steps maps a project's deploy_runtime onto a command list."""

    def test_docker_runtime(self):
        project = {"deploy_runtime": "docker", "deploy_path": "/srv/app"}
        assert build_deploy_steps(project) == [
            "git pull",
            "docker compose up -d --build",
        ]

    def test_node_runtime(self):
        commands = build_deploy_steps({"deploy_runtime": "node"})
        assert commands == ["git pull", "npm install --production", "pm2 restart all"]

    def test_python_runtime(self):
        commands = build_deploy_steps({"deploy_runtime": "python"})
        assert commands == ["git pull", "pip install -r requirements.txt"]

    def test_static_runtime(self):
        commands = build_deploy_steps({"deploy_runtime": "static"})
        assert commands == ["git pull", "nginx -s reload"]

    def test_custom_restart_cmd_appended(self):
        project = {
            "deploy_runtime": "docker",
            "deploy_restart_cmd": "docker compose restart worker",
        }
        commands = build_deploy_steps(project)
        # The restart command is added on top of the two docker steps, last.
        assert len(commands) == 3
        assert commands[-1] == "docker compose restart worker"

    def test_custom_restart_cmd_stripped(self):
        # Surrounding whitespace in the configured command is trimmed.
        project = {
            "deploy_runtime": "python",
            "deploy_restart_cmd": " systemctl restart myapp ",
        }
        assert build_deploy_steps(project)[-1] == "systemctl restart myapp"

    def test_none_runtime_returns_empty(self):
        assert build_deploy_steps({"deploy_runtime": None}) == []

    def test_invalid_runtime_returns_empty(self):
        assert build_deploy_steps({"deploy_runtime": "ruby"}) == []

    def test_missing_runtime_returns_empty(self):
        assert build_deploy_steps({}) == []

    def test_empty_restart_cmd_not_appended(self):
        # A whitespace-only restart command is treated as absent.
        project = {"deploy_runtime": "static", "deploy_restart_cmd": " "}
        assert build_deploy_steps(project) == ["git pull", "nginx -s reload"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 2. execute_deploy
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestExecuteDeploy:
    """execute_deploy runs the step list and reports per-step results."""

    def test_local_deploy_success(self, conn, sample_project):
        models.update_project(conn, "api", deploy_runtime="docker", deploy_path="/srv/api")
        project = models.get_project(conn, "api")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()) as run_mock:
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is True
        # Docker runtime has exactly two steps; each one was executed.
        assert len(outcome["steps"]) == 2
        assert len(outcome["results"]) == 2
        assert run_mock.call_count == 2

    def test_local_deploy_stops_on_failure(self, conn, sample_project):
        models.update_project(conn, "api", deploy_runtime="python", deploy_path="/srv/api")
        project = models.get_project(conn, "api")
        failing = _make_proc(returncode=1, stderr="git error")
        with patch("core.deploy.subprocess.run", return_value=failing):
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        # Pipeline aborts at the first non-zero exit: only one result recorded.
        assert len(outcome["results"]) == 1

    def test_no_runtime_returns_error(self, conn, sample_project):
        # The fixture project has no deploy_runtime configured.
        project = models.get_project(conn, "api")
        outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        assert "error" in outcome
        assert outcome["steps"] == []

    def test_ssh_deploy_uses_ssh_cmd(self, conn, sample_project):
        models.update_project(
            conn,
            "api",
            deploy_runtime="docker",
            deploy_host="10.0.0.5",
            deploy_path="/srv/api",
        )
        project = models.get_project(conn, "api")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()) as run_mock:
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is True
        # Remote deploys run as argv lists (shell=False), starting with ssh.
        first_cmd = run_mock.call_args_list[0][0][0]
        assert isinstance(first_cmd, list)
        assert "ssh" in first_cmd[0]

    def test_timeout_marks_failure(self, conn, sample_project):
        import subprocess

        models.update_project(conn, "api", deploy_runtime="static", deploy_path="/srv")
        project = models.get_project(conn, "api")
        timeout = subprocess.TimeoutExpired("git", 120)
        with patch("core.deploy.subprocess.run", side_effect=timeout):
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        # Timeouts are recorded with sentinel exit code -1.
        assert outcome["results"][0]["exit_code"] == -1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 3. get_deploy_chain
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestGetDeployChain:
    """get_deploy_chain walks reverse 'depends_on' edges from the deployed project."""

    def _create_projects(self, conn, *ids):
        # Register bare projects so links can reference them.
        for project_id in ids:
            models.create_project(conn, project_id, project_id.upper(), f"/{project_id}")

    def test_no_dependents_returns_self(self, conn):
        self._create_projects(conn, "api")
        assert get_deploy_chain(conn, "api") == ["api"]

    def test_simple_chain_a_depends_on_b(self, conn):
        """A project's dependents are redeployed together with it."""
        self._create_projects(conn, "api", "frontend")
        # frontend depends_on api
        models.create_project_link(conn, "frontend", "api", "depends_on")
        chain = get_deploy_chain(conn, "api")
        assert chain[0] == "api"
        assert "frontend" in chain

    def test_deep_chain_a_b_c(self, conn):
        """Transitive dependents (a -> b -> c) appear in dependency order."""
        self._create_projects(conn, "c", "b", "a")
        models.create_project_link(conn, "b", "c", "depends_on")  # b depends on c
        models.create_project_link(conn, "a", "b", "depends_on")  # a depends on b
        chain = get_deploy_chain(conn, "c")
        assert chain[0] == "c"
        assert chain.index("b") < chain.index("a")

    def test_cycle_does_not_loop_forever(self, conn):
        """Mutual dependencies must terminate with each node visited once."""
        self._create_projects(conn, "x", "y")
        models.create_project_link(conn, "x", "y", "depends_on")
        models.create_project_link(conn, "y", "x", "depends_on")
        chain = get_deploy_chain(conn, "x")
        assert len(set(chain)) == len(chain)

    def test_non_depends_on_links_ignored(self, conn):
        """Only 'depends_on' edges contribute to the deploy chain."""
        self._create_projects(conn, "api", "docs")
        models.create_project_link(conn, "docs", "api", "references")
        assert get_deploy_chain(conn, "api") == ["api"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 4. project_links CRUD
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestProjectLinksCRUD:
    """models-level create/read/delete for project link rows."""

    def _setup(self, conn):
        # Three projects to link between.
        for pid in ("a", "b", "c"):
            models.create_project(conn, pid, pid.upper(), f"/{pid}")

    def test_create_and_get_link(self, conn):
        self._setup(conn)
        created = models.create_project_link(conn, "a", "b", "depends_on", "A uses B")
        assert created["id"] is not None
        assert created["from_project"] == "a"
        assert created["to_project"] == "b"
        assert created["type"] == "depends_on"
        assert created["description"] == "A uses B"

    def test_get_project_links_includes_both_directions(self, conn):
        self._setup(conn)
        models.create_project_link(conn, "a", "b", "depends_on")
        models.create_project_link(conn, "c", "a", "depends_on")
        links = models.get_project_links(conn, "a")
        # Both the outgoing (a->b) and incoming (c->a) links are returned.
        assert len(links) == 2
        sources = {link["from_project"] for link in links}
        targets = {link["to_project"] for link in links}
        assert "a" in sources or "a" in targets

    def test_delete_link_returns_true(self, conn):
        self._setup(conn)
        created = models.create_project_link(conn, "a", "b", "depends_on")
        assert models.delete_project_link(conn, created["id"]) is True

    def test_delete_link_removes_it(self, conn):
        self._setup(conn)
        created = models.create_project_link(conn, "a", "b", "depends_on")
        models.delete_project_link(conn, created["id"])
        assert models.get_project_links(conn, "a") == []

    def test_delete_nonexistent_link_returns_false(self, conn):
        assert models.delete_project_link(conn, 99999) is False

    def test_get_links_for_project_with_no_links(self, conn):
        self._setup(conn)
        assert models.get_project_links(conn, "c") == []
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 5. API endpoint: POST /api/projects/{id}/deploy
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestDeployEndpoint:
    """POST /api/projects/{id}/deploy behaviour."""

    def _configure_project(self, **fields):
        # Open the API's DB file directly and tweak the seeded project p1.
        import web.api as api_module
        from core.db import init_db as _init

        conn = _init(api_module.DB_PATH)
        models.update_project(conn, "p1", **fields)
        conn.close()

    def test_deploy_with_runtime_returns_structured_result(self, client, tmp_path):
        self._configure_project(deploy_runtime="docker", deploy_path="/srv/p1")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()):
            response = client.post("/api/projects/p1/deploy")
        assert response.status_code == 200
        payload = response.json()
        assert "success" in payload
        assert "steps" in payload
        assert "results" in payload

    def test_deploy_legacy_fallback_with_deploy_command(self, client, tmp_path):
        # Only deploy_command set: the endpoint falls back to the legacy path.
        self._configure_project(deploy_command="echo deployed")
        with patch("web.api.subprocess.run", return_value=_make_proc(stdout="deployed\n")):
            response = client.post("/api/projects/p1/deploy")
        assert response.status_code == 200
        payload = response.json()
        assert "exit_code" in payload
        assert "stdout" in payload

    def test_deploy_without_runtime_or_command_returns_400(self, client):
        assert client.post("/api/projects/p1/deploy").status_code == 400

    def test_deploy_nonexistent_project_returns_404(self, client):
        assert client.post("/api/projects/nonexistent/deploy").status_code == 404
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 6. API endpoints: project-links CRUD
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestProjectLinksAPI:
    """HTTP CRUD surface for /api/project-links."""

    def _create_second_project(self, client):
        client.post("/api/projects", json={"id": "p2", "name": "P2", "path": "/p2"})

    def test_create_project_link(self, client):
        self._create_second_project(client)
        body = {
            "from_project": "p1",
            "to_project": "p2",
            "type": "depends_on",
            "description": "P1 depends on P2",
        }
        response = client.post("/api/project-links", json=body)
        assert response.status_code == 200
        payload = response.json()
        assert payload["from_project"] == "p1"
        assert payload["to_project"] == "p2"

    def test_create_link_nonexistent_from_project_returns_404(self, client):
        self._create_second_project(client)
        response = client.post(
            "/api/project-links",
            json={"from_project": "ghost", "to_project": "p2", "type": "depends_on"},
        )
        assert response.status_code == 404

    def test_create_link_nonexistent_to_project_returns_404(self, client):
        response = client.post(
            "/api/project-links",
            json={"from_project": "p1", "to_project": "ghost", "type": "depends_on"},
        )
        assert response.status_code == 404

    def test_get_project_links(self, client):
        self._create_second_project(client)
        client.post(
            "/api/project-links",
            json={"from_project": "p1", "to_project": "p2", "type": "depends_on"},
        )
        response = client.get("/api/projects/p1/links")
        assert response.status_code == 200
        payload = response.json()
        assert len(payload) == 1
        assert payload[0]["from_project"] == "p1"

    def test_get_links_nonexistent_project_returns_404(self, client):
        assert client.get("/api/projects/ghost/links").status_code == 404

    def test_delete_project_link(self, client):
        self._create_second_project(client)
        created = client.post(
            "/api/project-links",
            json={"from_project": "p1", "to_project": "p2", "type": "depends_on"},
        )
        link_id = created.json()["id"]
        assert client.delete(f"/api/project-links/{link_id}").status_code == 204

    def test_delete_link_nonexistent_returns_404(self, client):
        assert client.delete("/api/project-links/99999").status_code == 404
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# 7. Migration: deploy columns (KIN-079)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _cols(conn, table: str) -> set[str]:
    """Return the column names of *table* via PRAGMA table_info."""
    rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
    return {row["name"] for row in rows}
|
||||
|
||||
|
||||
def _old_schema_no_deploy() -> "sqlite3.Connection":
    """Build a pre-KIN-079 schema: projects/tasks tables, no deploy columns."""
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    conn.executescript("""
        CREATE TABLE projects (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            path TEXT,
            status TEXT DEFAULT 'active',
            language TEXT DEFAULT 'ru',
            execution_mode TEXT NOT NULL DEFAULT 'review',
            project_type TEXT DEFAULT 'development'
        );
        CREATE TABLE tasks (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL,
            title TEXT NOT NULL,
            status TEXT DEFAULT 'pending'
        );
    """)
    conn.commit()
    return conn
|
||||
|
||||
|
||||
class TestDeployColumnsMigration:
    """KIN-079 migration: deploy_* columns and the project_links table."""

    def _fresh_cols(self):
        # Column set of the projects table in a freshly initialized DB.
        conn = init_db(db_path=":memory:")
        columns = _cols(conn, "projects")
        conn.close()
        return columns

    def test_fresh_schema_has_deploy_host(self):
        assert "deploy_host" in self._fresh_cols()

    def test_fresh_schema_has_deploy_path(self):
        assert "deploy_path" in self._fresh_cols()

    def test_fresh_schema_has_deploy_runtime(self):
        assert "deploy_runtime" in self._fresh_cols()

    def test_fresh_schema_has_deploy_restart_cmd(self):
        assert "deploy_restart_cmd" in self._fresh_cols()

    def test_migrate_adds_deploy_columns_to_old_schema(self):
        conn = _old_schema_no_deploy()
        _migrate(conn)
        expected = {"deploy_host", "deploy_path", "deploy_runtime", "deploy_restart_cmd"}
        assert expected.issubset(_cols(conn, "projects"))
        conn.close()

    def test_migrate_deploy_columns_idempotent(self):
        # Re-running _migrate on an up-to-date DB must change nothing.
        conn = init_db(db_path=":memory:")
        before = _cols(conn, "projects")
        _migrate(conn)
        assert _cols(conn, "projects") == before
        conn.close()

    def test_fresh_schema_has_project_links_table(self):
        conn = init_db(db_path=":memory:")
        rows = conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        assert "project_links" in {row["name"] for row in rows}
        conn.close()
|
||||
Loading…
Add table
Add a link
Reference in a new issue