2026-03-01 15:35:30 +08:00
|
|
|
import asyncio
|
|
|
|
|
from io import BytesIO
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
|
|
|
|
|
|
|
|
from fastapi import UploadFile
|
|
|
|
|
|
refactor: split backend into harness (deerflow.*) and app (app.*) (#1131)
* refactor: extract shared utils to break harness→app cross-layer imports
Move _validate_skill_frontmatter to src/skills/validation.py and
CONVERTIBLE_EXTENSIONS + convert_file_to_markdown to src/utils/file_conversion.py.
This eliminates the two reverse dependencies from client.py (harness layer)
into gateway/routers/ (app layer), preparing for the harness/app package split.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* refactor: split backend/src into harness (deerflow.*) and app (app.*)
Physically split the monolithic backend/src/ package into two layers:
- **Harness** (`packages/harness/deerflow/`): publishable agent framework
package with import prefix `deerflow.*`. Contains agents, sandbox, tools,
models, MCP, skills, config, and all core infrastructure.
- **App** (`app/`): unpublished application code with import prefix `app.*`.
Contains gateway (FastAPI REST API) and channels (IM integrations).
Key changes:
- Move 13 harness modules to packages/harness/deerflow/ via git mv
- Move gateway + channels to app/ via git mv
- Rename all imports: src.* → deerflow.* (harness) / app.* (app layer)
- Set up uv workspace with deerflow-harness as workspace member
- Update langgraph.json, config.example.yaml, all scripts, Docker files
- Add build-system (hatchling) to harness pyproject.toml
- Add PYTHONPATH=. to gateway startup commands for app.* resolution
- Update ruff.toml with known-first-party for import sorting
- Update all documentation to reflect new directory structure
Boundary rule enforced: harness code never imports from app.
All 429 tests pass. Lint clean.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* chore: add harness→app boundary check test and update docs
Add test_harness_boundary.py that scans all Python files in
packages/harness/deerflow/ and fails if any `from app.*` or
`import app.*` statement is found. This enforces the architectural
rule that the harness layer never depends on the app layer.
Update CLAUDE.md to document the harness/app split architecture,
import conventions, and the boundary enforcement test.
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* feat: add config versioning with auto-upgrade on startup
When config.example.yaml schema changes, developers' local config.yaml
files can silently become outdated. This adds a config_version field and
auto-upgrade mechanism so breaking changes (like src.* → deerflow.*
renames) are applied automatically before services start.
- Add config_version: 1 to config.example.yaml
- Add startup version check warning in AppConfig.from_file()
- Add scripts/config-upgrade.sh with migration registry for value replacements
- Add `make config-upgrade` target
- Auto-run config-upgrade in serve.sh and start-daemon.sh before starting services
- Add config error hints in service failure messages
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* fix comments
* fix: update src.* import in test_sandbox_tools_security to deerflow.*
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* fix: handle empty config and search parent dirs for config.example.yaml
Address Copilot review comments on PR #1131:
- Guard against yaml.safe_load() returning None for empty config files
- Search parent directories for config.example.yaml instead of only
looking next to config.yaml, fixing detection in common setups
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
* fix: correct skills root path depth and config_version type coercion
- loader.py: fix get_skills_root_path() to use 5 parent levels (was 3)
after harness split, file lives at packages/harness/deerflow/skills/
so parent×3 resolved to backend/packages/harness/ instead of backend/
- app_config.py: coerce config_version to int() before comparison in
_check_config_version() to prevent TypeError when YAML stores value
as string (e.g. config_version: "1")
- tests: add regression tests for both fixes
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
* fix: update test imports from src.* to deerflow.*/app.* after harness refactor
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
---------
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-14 22:55:52 +08:00
|
|
|
from app.gateway.routers import uploads
|
2026-03-01 15:35:30 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_writes_thread_storage_and_skips_local_sandbox_sync(tmp_path):
    """An upload to a thread on the "local" sandbox is persisted to the thread's
    uploads directory, but no explicit sandbox sync (update_file) is performed,
    since the local sandbox already shares the host filesystem."""
    uploads_dir = tmp_path / "uploads"
    uploads_dir.mkdir(parents=True)

    mock_sandbox = MagicMock()
    mock_provider = MagicMock()
    # "local" signals the router that the sandbox sees the same filesystem.
    mock_provider.acquire.return_value = "local"
    mock_provider.get.return_value = mock_sandbox

    with (
        patch.object(uploads, "get_uploads_dir", return_value=uploads_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=mock_provider),
    ):
        upload = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads"))
        result = asyncio.run(uploads.upload_files("thread-local", files=[upload]))

    # The upload succeeded and was recorded exactly once.
    assert result.success is True
    assert len(result.files) == 1
    assert result.files[0]["filename"] == "notes.txt"

    # Bytes landed in thread storage on disk.
    assert (uploads_dir / "notes.txt").read_bytes() == b"hello uploads"

    # No sync into the sandbox for the local case.
    mock_sandbox.update_file.assert_not_called()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_syncs_non_local_sandbox_and_marks_markdown_file(tmp_path):
    """When the sandbox is NOT local, an uploaded convertible file (PDF) is
    converted to markdown, both the original and the generated .md are stored
    on disk, and both are pushed into the sandbox via update_file. The result
    entry carries the companion markdown filename."""
    uploads_dir = tmp_path / "uploads"
    uploads_dir.mkdir(parents=True)

    mock_sandbox = MagicMock()
    mock_provider = MagicMock()
    # A non-"local" id means the sandbox has its own filesystem and needs sync.
    mock_provider.acquire.return_value = "aio-1"
    mock_provider.get.return_value = mock_sandbox

    async def fake_convert(file_path: Path) -> Path:
        # Stand-in for the real converter: emit a sibling .md with fixed text.
        converted = file_path.with_suffix(".md")
        converted.write_text("converted", encoding="utf-8")
        return converted

    with (
        patch.object(uploads, "get_uploads_dir", return_value=uploads_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=mock_provider),
        patch.object(uploads, "convert_file_to_markdown", AsyncMock(side_effect=fake_convert)),
    ):
        upload = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes"))
        result = asyncio.run(uploads.upload_files("thread-aio", files=[upload]))

    assert result.success is True
    assert len(result.files) == 1

    entry = result.files[0]
    assert entry["filename"] == "report.pdf"
    # The response advertises the generated markdown companion.
    assert entry["markdown_file"] == "report.md"

    # Both the original and the converted file exist in thread storage.
    assert (uploads_dir / "report.pdf").read_bytes() == b"pdf-bytes"
    assert (uploads_dir / "report.md").read_text(encoding="utf-8") == "converted"

    # Both files were synced into the remote sandbox.
    mock_sandbox.update_file.assert_any_call("/mnt/user-data/uploads/report.pdf", b"pdf-bytes")
    mock_sandbox.update_file.assert_any_call("/mnt/user-data/uploads/report.md", b"converted")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_rejects_dotdot_and_dot_filenames(tmp_path):
    """Filenames that are exactly ".." or "." are silently skipped (success with
    an empty file list), while a path-traversal name like "../etc/passwd" is
    reduced to its basename and stored safely."""
    uploads_dir = tmp_path / "uploads"
    uploads_dir.mkdir(parents=True)

    mock_sandbox = MagicMock()
    mock_provider = MagicMock()
    mock_provider.acquire.return_value = "local"
    mock_provider.get.return_value = mock_sandbox

    with (
        patch.object(uploads, "get_uploads_dir", return_value=uploads_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=mock_provider),
    ):
        # These filenames must be rejected outright
        for unsafe in ["..", "."]:
            upload = UploadFile(filename=unsafe, file=BytesIO(b"data"))
            outcome = asyncio.run(uploads.upload_files("thread-local", files=[upload]))
            assert outcome.success is True
            assert outcome.files == [], f"Expected no files for unsafe filename {unsafe!r}"

        # Path-traversal prefixes are stripped to the basename and accepted safely
        upload = UploadFile(filename="../etc/passwd", file=BytesIO(b"data"))
        outcome = asyncio.run(uploads.upload_files("thread-local", files=[upload]))
        assert outcome.success is True
        assert len(outcome.files) == 1
        assert outcome.files[0]["filename"] == "passwd"

    # Only the safely normalised file should exist
    assert [f.name for f in uploads_dir.iterdir()] == ["passwd"]
|
2026-03-18 16:31:26 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_delete_uploaded_file_removes_generated_markdown_companion(tmp_path):
    """Deleting an uploaded file also removes its generated .md companion so no
    stale converted artifact lingers in the thread's uploads directory."""
    uploads_dir = tmp_path / "uploads"
    uploads_dir.mkdir(parents=True)

    # Seed storage with an upload and its converted markdown sibling.
    (uploads_dir / "report.pdf").write_bytes(b"pdf-bytes")
    (uploads_dir / "report.md").write_text("converted", encoding="utf-8")

    with patch.object(uploads, "get_uploads_dir", return_value=uploads_dir):
        outcome = asyncio.run(uploads.delete_uploaded_file("thread-aio", "report.pdf"))

    assert outcome == {"success": True, "message": "Deleted report.pdf"}

    # Both the original and the companion markdown are gone.
    assert not (uploads_dir / "report.pdf").exists()
    assert not (uploads_dir / "report.md").exists()
|