# kin/tests/test_deploy.py

"""Tests for KIN-079 — deploy mechanism.
Covers:
- core/deploy.py: build_deploy_steps, execute_deploy, get_deploy_chain, deploy_with_dependents
- core/models.py: create_project_link, get_project_links, delete_project_link
- web/api.py: POST /deploy, project-links CRUD endpoints
- core/db.py: deploy columns migration
"""
import shlex
import pytest
from unittest.mock import patch, MagicMock
from core.db import init_db, _migrate
from core import models
from core.deploy import (
build_deploy_steps, execute_deploy, get_deploy_chain, deploy_with_dependents,
_build_ssh_cmd,
)
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
@pytest.fixture
def conn():
    """Yield a brand-new in-memory database, closed again on teardown."""
    connection = init_db(db_path=":memory:")
    yield connection
    connection.close()
@pytest.fixture
def sample_project(conn):
    """Seed a single project ("api") that deploy tests configure further."""
    return models.create_project(
        conn,
        "api",
        "API Service",
        "/srv/api",
        tech_stack=["python", "fastapi"],
    )
@pytest.fixture
def client(tmp_path):
    """FastAPI TestClient backed by an isolated DB, pre-seeded with project p1."""
    import web.api as api_module

    # Point the app at a throwaway database before importing the app object.
    api_module.DB_PATH = tmp_path / "test.db"
    from web.api import app
    from fastapi.testclient import TestClient

    test_client = TestClient(app)
    test_client.post("/api/projects", json={"id": "p1", "name": "P1", "path": "/p1"})
    return test_client
def _make_proc(returncode=0, stdout="ok", stderr=""):
    """Build a MagicMock that mimics a subprocess.CompletedProcess result."""
    proc = MagicMock()
    proc.returncode = returncode
    proc.stdout = stdout
    proc.stderr = stderr
    return proc
# ---------------------------------------------------------------------------
# 1. build_deploy_steps
# ---------------------------------------------------------------------------
class TestBuildDeploySteps:
    """build_deploy_steps: maps a project's runtime to an ordered command list."""

    def test_docker_runtime(self):
        project = {"deploy_runtime": "docker", "deploy_path": "/srv/app"}
        assert build_deploy_steps(project) == ["git pull", "docker compose up -d --build"]

    def test_node_runtime(self):
        project = {"deploy_runtime": "node"}
        expected = ["git pull", "npm install --production", "pm2 restart all"]
        assert build_deploy_steps(project) == expected

    def test_python_runtime(self):
        project = {"deploy_runtime": "python"}
        assert build_deploy_steps(project) == ["git pull", "pip install -r requirements.txt"]

    def test_static_runtime(self):
        project = {"deploy_runtime": "static"}
        assert build_deploy_steps(project) == ["git pull", "nginx -s reload"]

    def test_custom_restart_cmd_appended(self):
        project = {"deploy_runtime": "docker", "deploy_restart_cmd": "docker compose restart worker"}
        commands = build_deploy_steps(project)
        # The custom restart command lands last, after the two docker defaults.
        assert commands[-1] == "docker compose restart worker"
        assert len(commands) == 3

    def test_custom_restart_cmd_stripped(self):
        project = {"deploy_runtime": "python", "deploy_restart_cmd": " systemctl restart myapp "}
        commands = build_deploy_steps(project)
        # Surrounding whitespace must be trimmed from the configured command.
        assert commands[-1] == "systemctl restart myapp"

    def test_none_runtime_returns_empty(self):
        assert build_deploy_steps({"deploy_runtime": None}) == []

    def test_invalid_runtime_returns_empty(self):
        assert build_deploy_steps({"deploy_runtime": "ruby"}) == []

    def test_missing_runtime_returns_empty(self):
        assert build_deploy_steps({}) == []

    def test_empty_restart_cmd_not_appended(self):
        # A whitespace-only restart command is treated as absent.
        project = {"deploy_runtime": "static", "deploy_restart_cmd": " "}
        assert build_deploy_steps(project) == ["git pull", "nginx -s reload"]
# ---------------------------------------------------------------------------
# 2. execute_deploy
# ---------------------------------------------------------------------------
class TestExecuteDeploy:
    """execute_deploy: runs the step list locally or over SSH."""

    def test_local_deploy_success(self, conn, sample_project):
        models.update_project(conn, "api", deploy_runtime="docker", deploy_path="/srv/api")
        project = models.get_project(conn, "api")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()) as run_mock:
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is True
        assert len(outcome["steps"]) == 2
        assert len(outcome["results"]) == 2
        assert run_mock.call_count == 2

    def test_local_deploy_stops_on_failure(self, conn, sample_project):
        models.update_project(conn, "api", deploy_runtime="python", deploy_path="/srv/api")
        project = models.get_project(conn, "api")
        failing = _make_proc(returncode=1, stderr="git error")  # first step fails
        with patch("core.deploy.subprocess.run", return_value=failing):
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        # Execution halts right after the first failing step.
        assert len(outcome["results"]) == 1

    def test_no_runtime_returns_error(self, conn, sample_project):
        project = models.get_project(conn, "api")  # deploy_runtime was never configured
        outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        assert "error" in outcome
        assert outcome["steps"] == []

    def test_ssh_deploy_uses_ssh_cmd(self, conn, sample_project):
        models.update_project(
            conn, "api",
            deploy_runtime="docker",
            deploy_host="10.0.0.5",
            deploy_path="/srv/api",
        )
        project = models.get_project(conn, "api")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()) as run_mock:
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is True
        # Remote steps are invoked as an argv list (never shell=True).
        first_call = run_mock.call_args_list[0]
        argv = first_call[0][0]
        assert isinstance(argv, list)
        assert "ssh" in argv[0]

    def test_timeout_marks_failure(self, conn, sample_project):
        import subprocess
        models.update_project(conn, "api", deploy_runtime="static", deploy_path="/srv")
        project = models.get_project(conn, "api")
        timeout = subprocess.TimeoutExpired("git", 120)
        with patch("core.deploy.subprocess.run", side_effect=timeout):
            outcome = execute_deploy(project, conn)
        assert outcome["success"] is False
        # A timed-out step is reported with the sentinel exit code -1.
        assert outcome["results"][0]["exit_code"] == -1
# ---------------------------------------------------------------------------
# 3. get_deploy_chain
# ---------------------------------------------------------------------------
class TestGetDeployChain:
    """get_deploy_chain: orders a project plus everything that depends on it."""

    def _create_projects(self, conn, *ids):
        for project_id in ids:
            models.create_project(conn, project_id, project_id.upper(), f"/{project_id}")

    def test_no_dependents_returns_self(self, conn):
        self._create_projects(conn, "api")
        assert get_deploy_chain(conn, "api") == ["api"]

    def test_simple_chain_a_depends_on_b(self, conn):
        """Deploying B must also redeploy A when A depends_on B."""
        self._create_projects(conn, "api", "frontend")
        models.create_project_link(conn, "frontend", "api", "depends_on")
        chain = get_deploy_chain(conn, "api")
        assert chain[0] == "api"
        assert "frontend" in chain

    def test_deep_chain_a_b_c(self, conn):
        """a → b → c: deploying c must pull in b before a."""
        self._create_projects(conn, "c", "b", "a")
        models.create_project_link(conn, "b", "c", "depends_on")  # b needs c
        models.create_project_link(conn, "a", "b", "depends_on")  # a needs b
        chain = get_deploy_chain(conn, "c")
        assert chain[0] == "c"
        assert chain.index("b") < chain.index("a")

    def test_cycle_does_not_loop_forever(self, conn):
        """Mutual dependency x ↔ y must terminate, each node at most once."""
        self._create_projects(conn, "x", "y")
        models.create_project_link(conn, "x", "y", "depends_on")
        models.create_project_link(conn, "y", "x", "depends_on")
        chain = get_deploy_chain(conn, "x")
        assert len(chain) == len(set(chain))

    def test_non_depends_on_links_ignored(self, conn):
        """Only 'depends_on' links contribute to the deploy chain."""
        self._create_projects(conn, "api", "docs")
        models.create_project_link(conn, "docs", "api", "references")
        assert get_deploy_chain(conn, "api") == ["api"]
# ---------------------------------------------------------------------------
# 4. project_links CRUD
# ---------------------------------------------------------------------------
class TestProjectLinksCRUD:
    """models.create_project_link / get_project_links / delete_project_link."""

    def _setup(self, conn):
        models.create_project(conn, "a", "A", "/a")
        models.create_project(conn, "b", "B", "/b")
        models.create_project(conn, "c", "C", "/c")

    def test_create_and_get_link(self, conn):
        self._setup(conn)
        link = models.create_project_link(conn, "a", "b", "depends_on", "A uses B")
        assert link["from_project"] == "a"
        assert link["to_project"] == "b"
        assert link["type"] == "depends_on"
        assert link["description"] == "A uses B"
        assert link["id"] is not None

    def test_get_project_links_includes_both_directions(self, conn):
        self._setup(conn)
        models.create_project_link(conn, "a", "b", "depends_on")  # "a" as source
        models.create_project_link(conn, "c", "a", "depends_on")  # "a" as target
        links = models.get_project_links(conn, "a")
        from_ids = {link["from_project"] for link in links}
        to_ids = {link["to_project"] for link in links}
        # FIX: the old `"a" in from_ids or "a" in to_ids` was vacuous — any
        # single-direction result satisfied it. With the two links above, "a"
        # must appear on BOTH sides, otherwise one direction is missing.
        assert "a" in from_ids
        assert "a" in to_ids
        assert len(links) == 2

    def test_delete_link_returns_true(self, conn):
        self._setup(conn)
        link = models.create_project_link(conn, "a", "b", "depends_on")
        assert models.delete_project_link(conn, link["id"]) is True

    def test_delete_link_removes_it(self, conn):
        self._setup(conn)
        link = models.create_project_link(conn, "a", "b", "depends_on")
        models.delete_project_link(conn, link["id"])
        assert models.get_project_links(conn, "a") == []

    def test_delete_nonexistent_link_returns_false(self, conn):
        assert models.delete_project_link(conn, 99999) is False

    def test_get_links_for_project_with_no_links(self, conn):
        self._setup(conn)
        assert models.get_project_links(conn, "c") == []
# ---------------------------------------------------------------------------
# 5. API endpoint: POST /api/projects/{id}/deploy
# ---------------------------------------------------------------------------
class TestDeployEndpoint:
    """POST /api/projects/{id}/deploy."""

    def test_deploy_with_runtime_returns_structured_result(self, client, tmp_path):
        import web.api as api_module
        from core.db import init_db as _init
        db = _init(api_module.DB_PATH)
        models.update_project(db, "p1", deploy_runtime="docker", deploy_path="/srv/p1")
        db.close()
        with patch("core.deploy.subprocess.run", return_value=_make_proc()):
            resp = client.post("/api/projects/p1/deploy")
        assert resp.status_code == 200
        payload = resp.json()
        assert "success" in payload
        assert "steps" in payload
        assert "results" in payload

    def test_deploy_legacy_fallback_with_deploy_command(self, client, tmp_path):
        import web.api as api_module
        from core.db import init_db as _init
        db = _init(api_module.DB_PATH)
        models.update_project(db, "p1", deploy_command="echo deployed")
        db.close()
        with patch("web.api.subprocess.run", return_value=_make_proc(stdout="deployed\n")):
            resp = client.post("/api/projects/p1/deploy")
        assert resp.status_code == 200
        payload = resp.json()
        # Legacy deploy_command path returns the raw command result shape.
        assert "exit_code" in payload
        assert "stdout" in payload

    def test_deploy_without_runtime_or_command_returns_400(self, client):
        assert client.post("/api/projects/p1/deploy").status_code == 400

    def test_deploy_nonexistent_project_returns_404(self, client):
        assert client.post("/api/projects/nonexistent/deploy").status_code == 404
# ---------------------------------------------------------------------------
# 6. API endpoints: project-links CRUD
# ---------------------------------------------------------------------------
class TestProjectLinksAPI:
    """REST CRUD endpoints for project links."""

    def _create_second_project(self, client):
        client.post("/api/projects", json={"id": "p2", "name": "P2", "path": "/p2"})

    def test_create_project_link(self, client):
        self._create_second_project(client)
        payload = {
            "from_project": "p1",
            "to_project": "p2",
            "type": "depends_on",
            "description": "P1 depends on P2",
        }
        resp = client.post("/api/project-links", json=payload)
        assert resp.status_code == 201
        body = resp.json()
        assert body["from_project"] == "p1"
        assert body["to_project"] == "p2"

    def test_create_link_nonexistent_from_project_returns_404(self, client):
        self._create_second_project(client)
        resp = client.post("/api/project-links", json={
            "from_project": "ghost",
            "to_project": "p2",
            "type": "depends_on",
        })
        assert resp.status_code == 404

    def test_create_link_nonexistent_to_project_returns_404(self, client):
        resp = client.post("/api/project-links", json={
            "from_project": "p1",
            "to_project": "ghost",
            "type": "depends_on",
        })
        assert resp.status_code == 404

    def test_get_project_links(self, client):
        self._create_second_project(client)
        client.post("/api/project-links", json={
            "from_project": "p1", "to_project": "p2", "type": "depends_on"
        })
        resp = client.get("/api/projects/p1/links")
        assert resp.status_code == 200
        body = resp.json()
        assert len(body) == 1
        assert body[0]["from_project"] == "p1"

    def test_get_links_nonexistent_project_returns_404(self, client):
        assert client.get("/api/projects/ghost/links").status_code == 404

    def test_delete_project_link(self, client):
        self._create_second_project(client)
        created = client.post("/api/project-links", json={
            "from_project": "p1", "to_project": "p2", "type": "depends_on"
        })
        link_id = created.json()["id"]
        assert client.delete(f"/api/project-links/{link_id}").status_code == 204

    def test_delete_link_nonexistent_returns_404(self, client):
        assert client.delete("/api/project-links/99999").status_code == 404
# ---------------------------------------------------------------------------
# 7. Migration: deploy columns (KIN-079)
# ---------------------------------------------------------------------------
def _cols(conn, table: str) -> set[str]:
    """Return the set of column names of *table* via PRAGMA table_info."""
    rows = conn.execute(f"PRAGMA table_info({table})").fetchall()
    return {row["name"] for row in rows}
def _old_schema_no_deploy() -> "sqlite3.Connection":
    """Build a legacy-style in-memory schema that predates the deploy columns."""
    import sqlite3

    db = sqlite3.connect(":memory:")
    db.row_factory = sqlite3.Row
    db.executescript("""
        CREATE TABLE projects (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            path TEXT,
            status TEXT DEFAULT 'active',
            language TEXT DEFAULT 'ru',
            execution_mode TEXT NOT NULL DEFAULT 'review',
            project_type TEXT DEFAULT 'development'
        );
        CREATE TABLE tasks (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL,
            title TEXT NOT NULL,
            status TEXT DEFAULT 'pending'
        );
    """)
    db.commit()
    return db
class TestDeployColumnsMigration:
    """KIN-079 migration: deploy_* columns and the project_links table."""

    def test_fresh_schema_has_deploy_host(self):
        db = init_db(db_path=":memory:")
        assert "deploy_host" in _cols(db, "projects")
        db.close()

    def test_fresh_schema_has_deploy_path(self):
        db = init_db(db_path=":memory:")
        assert "deploy_path" in _cols(db, "projects")
        db.close()

    def test_fresh_schema_has_deploy_runtime(self):
        db = init_db(db_path=":memory:")
        assert "deploy_runtime" in _cols(db, "projects")
        db.close()

    def test_fresh_schema_has_deploy_restart_cmd(self):
        db = init_db(db_path=":memory:")
        assert "deploy_restart_cmd" in _cols(db, "projects")
        db.close()

    def test_migrate_adds_deploy_columns_to_old_schema(self):
        db = _old_schema_no_deploy()
        _migrate(db)
        expected = {"deploy_host", "deploy_path", "deploy_runtime", "deploy_restart_cmd"}
        assert expected.issubset(_cols(db, "projects"))
        db.close()

    def test_migrate_deploy_columns_idempotent(self):
        db = init_db(db_path=":memory:")
        before = _cols(db, "projects")
        _migrate(db)
        # Re-running migration on an up-to-date schema must change nothing.
        assert _cols(db, "projects") == before
        db.close()

    def test_fresh_schema_has_project_links_table(self):
        db = init_db(db_path=":memory:")
        tables = {
            row["name"]
            for row in db.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()
        }
        assert "project_links" in tables
        db.close()
# ---------------------------------------------------------------------------
# 8. _build_ssh_cmd — security: shlex.quote on deploy_path (fix #426)
# ---------------------------------------------------------------------------
class TestSSHBuildCmd:
    """_build_ssh_cmd security: deploy_path goes through shlex.quote (fix #426)."""

    def test_deploy_path_is_shlex_quoted(self):
        """The remote path must be shlex-quoted to block command injection."""
        proj = {
            "deploy_host": "10.0.0.1",
            "deploy_path": "/srv/my app/v2",
        }
        argv = _build_ssh_cmd(proj, "git pull")
        remote = argv[-1]
        assert shlex.quote("/srv/my app/v2") in remote

    def test_deploy_path_with_spaces_no_raw_unquoted(self):
        """A path containing spaces must never appear bare after 'cd '."""
        proj = {
            "deploy_host": "myserver",
            "deploy_path": "/srv/app with spaces",
        }
        remote = _build_ssh_cmd(proj, "git pull")[-1]
        assert "cd /srv/app with spaces " not in remote

    def test_deploy_path_with_dollar_sign_is_quoted(self):
        """A '$' in the path must be quoted so the remote shell cannot expand it."""
        proj = {
            "deploy_host": "myserver",
            "deploy_path": "/srv/$PROJECT",
        }
        remote = _build_ssh_cmd(proj, "git pull")[-1]
        assert "cd /srv/$PROJECT " not in remote
        assert shlex.quote("/srv/$PROJECT") in remote

    def test_normal_path_still_works(self):
        """A plain path still yields a usable cd + command string."""
        proj = {
            "deploy_host": "10.0.0.5",
            "deploy_path": "/srv/api",
        }
        remote = _build_ssh_cmd(proj, "git pull")[-1]
        assert "/srv/api" in remote
        assert "git pull" in remote

    def test_no_deploy_path_uses_command_directly(self):
        """With deploy_path=None the command passes through without any cd."""
        proj = {
            "deploy_host": "myserver",
            "deploy_path": None,
        }
        remote = _build_ssh_cmd(proj, "git pull")[-1]
        assert "cd" not in remote
        assert remote == "git pull"

    def test_deploy_path_with_semicolon_injection_is_escaped(self):
        """A ';' in the path must stay inside quotes — no second command injected."""
        proj = {
            "deploy_host": "myserver",
            "deploy_path": "/srv/api; rm -rf /",
        }
        remote = _build_ssh_cmd(proj, "git pull")[-1]
        # The hostile path may only appear as one quoted token, never bare.
        assert "cd /srv/api; rm -rf /" not in remote
        # shlex.quote wraps it in single quotes, keeping the ';' inert.
        assert shlex.quote("/srv/api; rm -rf /") in remote

    def test_deploy_restart_cmd_is_not_shlex_quoted(self):
        """deploy_restart_cmd must arrive as a plain shell command.

        Quoting it would collapse 'docker compose restart worker' into a single
        literal token the remote shell cannot execute. The field is
        admin-controlled, so it is deliberately left unquoted.
        """
        proj = {
            "deploy_host": "myserver",
            "deploy_path": "/srv/api",
            "deploy_runtime": "docker",
            "deploy_restart_cmd": "docker compose restart worker",
        }
        # Feed one restart step straight into _build_ssh_cmd.
        restart = "docker compose restart worker"
        remote = _build_ssh_cmd(proj, restart)[-1]
        # Must appear verbatim — not as one quoted token.
        assert "docker compose restart worker" in remote
        assert remote != shlex.quote("docker compose restart worker")
# ---------------------------------------------------------------------------
# 9. deploy_with_dependents — cascade deploy unit tests
# ---------------------------------------------------------------------------
class TestDeployWithDependents:
    """deploy_with_dependents: cascade deploy of a project plus its dependents."""

    def _setup_chain(self, conn):
        """api + fe, where fe depends_on api."""
        models.create_project(conn, "api", "API", "/srv/api")
        models.create_project(conn, "fe", "Frontend", "/srv/fe")
        models.update_project(conn, "api", deploy_runtime="docker", deploy_path="/srv/api")
        models.update_project(conn, "fe", deploy_runtime="static", deploy_path="/srv/fe")
        models.create_project_link(conn, "fe", "api", "depends_on")

    def test_deploys_main_and_dependent_on_success(self, conn):
        """A successful main deploy cascades to the dependent project."""
        self._setup_chain(conn)
        with patch("core.deploy.subprocess.run", return_value=_make_proc()):
            outcome = deploy_with_dependents(conn, "api")
        assert outcome["success"] is True
        assert "fe" in outcome["dependents_deployed"]

    def test_main_failure_skips_dependents(self, conn):
        """When the main deploy fails, dependents must be left untouched."""
        self._setup_chain(conn)
        with patch("core.deploy.subprocess.run", return_value=_make_proc(returncode=1)):
            outcome = deploy_with_dependents(conn, "api")
        assert outcome["success"] is False
        assert outcome["dependents_deployed"] == []

    def test_unknown_project_returns_error(self, conn):
        """An unknown project id yields success=False plus an 'error' key."""
        outcome = deploy_with_dependents(conn, "nonexistent")
        assert outcome["success"] is False
        assert "error" in outcome

    def test_no_dependents_returns_empty_list(self, conn):
        """A standalone project reports an empty dependents_deployed list."""
        models.create_project(conn, "solo", "Solo", "/srv/solo")
        models.update_project(conn, "solo", deploy_runtime="python", deploy_path="/srv/solo")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()):
            outcome = deploy_with_dependents(conn, "solo")
        assert outcome["success"] is True
        assert outcome["dependents_deployed"] == []

    def test_result_contains_main_steps_and_results(self, conn):
        """'steps' and 'results' from the main project are always present."""
        models.create_project(conn, "api2", "API2", "/srv/api2")
        models.update_project(conn, "api2", deploy_runtime="node", deploy_path="/srv/api2")
        with patch("core.deploy.subprocess.run", return_value=_make_proc()):
            outcome = deploy_with_dependents(conn, "api2")
        assert "steps" in outcome
        assert "results" in outcome

    def test_dependent_failure_sets_overall_success_false(self, conn):
        """A failing dependent flips overall success to False even if main passed."""
        self._setup_chain(conn)
        ok = {"success": True, "steps": ["git pull"], "results": [{"step": "git pull", "exit_code": 0}]}
        bad = {"success": False, "steps": ["git pull"], "results": [{"step": "git pull", "exit_code": 1}]}

        def fake_execute(project, db_conn):
            # Main project succeeds; the dependent ("fe") fails.
            return ok if dict(project)["id"] == "api" else bad

        with patch("core.deploy.execute_deploy", side_effect=fake_execute):
            outcome = deploy_with_dependents(conn, "api")
        assert outcome["success"] is False
        assert outcome["dependents_deployed"] == []
# ---------------------------------------------------------------------------
# 10. build_deploy_steps — python runtime full steps with restart_cmd
# ---------------------------------------------------------------------------
class TestBuildDeployStepsPythonRestartCmd:
    """build_deploy_steps: full step lists when a restart command is configured."""

    def test_python_with_restart_cmd_full_steps(self):
        """Python runtime plus restart_cmd yields the full three-step list."""
        project = {"deploy_runtime": "python", "deploy_restart_cmd": "systemctl restart myapp"}
        assert build_deploy_steps(project) == [
            "git pull", "pip install -r requirements.txt", "systemctl restart myapp",
        ]

    def test_node_with_custom_restart_cmd_appends_as_fourth_step(self):
        """Node defaults end with 'pm2 restart all'; the custom command comes after."""
        project = {"deploy_runtime": "node", "deploy_restart_cmd": "pm2 restart myservice"}
        assert build_deploy_steps(project) == [
            "git pull", "npm install --production", "pm2 restart all", "pm2 restart myservice",
        ]
# ---------------------------------------------------------------------------
# 11. Migration: project_links indexes (KIN-INFRA-008)
# Convention #433: set-assert all columns/indexes after migration
# Convention #384: three test cases for conditional guard in _migrate()
# ---------------------------------------------------------------------------
def _schema_with_project_links_no_indexes():
    """Minimal in-memory schema: project_links exists but its indexes do not."""
    import sqlite3 as _sqlite3

    db = _sqlite3.connect(":memory:")
    db.row_factory = _sqlite3.Row
    db.executescript("""
        CREATE TABLE projects (
            id TEXT PRIMARY KEY,
            name TEXT NOT NULL,
            path TEXT,
            status TEXT DEFAULT 'active',
            language TEXT DEFAULT 'ru',
            execution_mode TEXT NOT NULL DEFAULT 'review',
            project_type TEXT DEFAULT 'development'
        );
        CREATE TABLE tasks (
            id TEXT PRIMARY KEY,
            project_id TEXT NOT NULL,
            title TEXT NOT NULL,
            status TEXT DEFAULT 'pending'
        );
        CREATE TABLE project_links (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            from_project TEXT NOT NULL REFERENCES projects(id),
            to_project TEXT NOT NULL REFERENCES projects(id),
            type TEXT NOT NULL,
            description TEXT,
            created_at DATETIME DEFAULT CURRENT_TIMESTAMP
        );
    """)
    db.commit()
    return db
def _get_indexes(conn) -> set:
    """Collect the names of every index currently defined in *conn*."""
    rows = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='index'"
    ).fetchall()
    return {row[0] for row in rows}
class TestProjectLinksIndexMigration:
    """KIN-INFRA-008: indexes idx_project_links_to / idx_project_links_from."""

    # --- fresh schema ---
    def test_fresh_schema_has_idx_project_links_to(self):
        db = init_db(db_path=":memory:")
        assert "idx_project_links_to" in _get_indexes(db)
        db.close()

    def test_fresh_schema_has_idx_project_links_from(self):
        db = init_db(db_path=":memory:")
        assert "idx_project_links_from" in _get_indexes(db)
        db.close()

    # Convention #433: assert the full project_links column set after fresh init.
    def test_fresh_schema_project_links_columns(self):
        db = init_db(db_path=":memory:")
        cols = {row["name"] for row in db.execute("PRAGMA table_info(project_links)").fetchall()}
        assert cols == {"id", "from_project", "to_project", "type", "description", "created_at"}
        db.close()

    # --- Convention #384: three cases for the guard inside _migrate() ---
    # Case 1: table absent — guard must not crash, no indexes are created.
    def test_migrate_without_project_links_table_no_error(self):
        db = _old_schema_no_deploy()  # project_links is missing here
        _migrate(db)  # must not raise
        created = _get_indexes(db)
        assert "idx_project_links_to" not in created
        assert "idx_project_links_from" not in created
        db.close()

    # Case 2: table present without indexes — _migrate() creates both.
    def test_migrate_creates_both_indexes_when_table_exists(self):
        db = _schema_with_project_links_no_indexes()
        _migrate(db)
        created = _get_indexes(db)
        assert "idx_project_links_to" in created
        assert "idx_project_links_from" in created
        db.close()

    # Case 3: indexes already present — _migrate() is idempotent.
    def test_migrate_is_idempotent_when_indexes_already_exist(self):
        db = init_db(db_path=":memory:")
        before = _get_indexes(db)
        _migrate(db)
        after = _get_indexes(db)
        assert "idx_project_links_to" in after
        assert "idx_project_links_from" in after
        assert before == after
        db.close()
# ---------------------------------------------------------------------------
# 12. Migration: UNIQUE(from_project, to_project, type) (KIN-INFRA-013)
# Convention #433: set-assert unique constraint after fresh init
# Convention #434: negative test — ALTER TABLE cannot add UNIQUE in SQLite
# ---------------------------------------------------------------------------
class TestProjectLinksUniqueMigration:
    """KIN-INFRA-013: UNIQUE(from_project, to_project, type) on project_links."""

    # --- fresh schema ---
    def test_fresh_schema_project_links_has_unique_constraint(self):
        """A fresh schema must carry a UNIQUE constraint on (from_project, to_project, type)."""
        db = init_db(db_path=":memory:")
        unique_rows = [
            row for row in db.execute("PRAGMA index_list(project_links)").fetchall()
            if row[2] == 1  # index_list column 2 ("unique") is 1 for unique indexes
        ]
        assert len(unique_rows) >= 1
        db.close()

    # --- model level ---
    def test_create_duplicate_link_raises_integrity_error(self, conn):
        """Inserting the same (from, to, type) twice must raise IntegrityError."""
        import sqlite3 as _sqlite3
        models.create_project(conn, "dup_a", "A", "/a")
        models.create_project(conn, "dup_b", "B", "/b")
        models.create_project_link(conn, "dup_a", "dup_b", "depends_on")
        with pytest.raises(_sqlite3.IntegrityError):
            models.create_project_link(conn, "dup_a", "dup_b", "depends_on")

    # --- migration guard: 3 cases (Convention #384) ---
    # Case 1: table absent — the guard must not crash.
    def test_migrate_without_project_links_table_no_error_unique(self):
        db = _old_schema_no_deploy()  # project_links is missing here
        _migrate(db)  # must not raise
        db.close()

    # Case 2: table without UNIQUE — _migrate() adds the constraint.
    def test_migrate_adds_unique_constraint_to_old_schema(self):
        db = _schema_with_project_links_no_indexes()  # no UNIQUE here
        _migrate(db)
        row = db.execute(
            "SELECT sql FROM sqlite_master WHERE type='table' AND name='project_links'"
        ).fetchone()
        assert "UNIQUE" in (row[0] or "").upper()
        db.close()

    # Case 3: UNIQUE already in place — _migrate() is idempotent.
    def test_migrate_unique_constraint_is_idempotent(self):
        db = init_db(db_path=":memory:")
        before_sql = db.execute(
            "SELECT sql FROM sqlite_master WHERE type='table' AND name='project_links'"
        ).fetchone()[0]
        _migrate(db)
        after_sql = db.execute(
            "SELECT sql FROM sqlite_master WHERE type='table' AND name='project_links'"
        ).fetchone()[0]
        assert before_sql == after_sql
        db.close()

    # Convention #434: document why ALTER TABLE cannot be used here.
    def test_alter_table_cannot_add_unique_constraint(self):
        """SQLite does not support ALTER TABLE ... ADD CONSTRAINT.

        This is exactly why _migrate() recreates the table instead of altering it.
        """
        import sqlite3 as _sqlite3
        db = _sqlite3.connect(":memory:")
        db.execute("CREATE TABLE t (a TEXT, b TEXT)")
        with pytest.raises(_sqlite3.OperationalError):
            db.execute("ALTER TABLE t ADD CONSTRAINT uq UNIQUE (a, b)")
        db.close()
# ---------------------------------------------------------------------------
# 13. API: POST /api/project-links возвращает 409 при дублировании
# ---------------------------------------------------------------------------
class TestProjectLinksDuplicateAPI:
    """POST /api/project-links returns 409 on duplicate links."""

    def _create_projects(self, client):
        client.post("/api/projects", json={"id": "dup_p2", "name": "P2", "path": "/p2"})

    def test_create_duplicate_link_returns_409(self, client):
        self._create_projects(client)
        payload = {"from_project": "p1", "to_project": "dup_p2", "type": "depends_on"}
        client.post("/api/project-links", json=payload)
        resp = client.post("/api/project-links", json=payload)
        assert resp.status_code == 409
        assert "already exists" in resp.json()["detail"].lower()

    def test_same_projects_different_type_not_duplicate(self, client):
        """The same project pair with a different link type is not a duplicate."""
        self._create_projects(client)
        first = client.post("/api/project-links", json={
            "from_project": "p1", "to_project": "dup_p2", "type": "depends_on"
        })
        second = client.post("/api/project-links", json={
            "from_project": "p1", "to_project": "dup_p2", "type": "references"
        })
        assert first.status_code == 201
        assert second.status_code == 201