2026-03-01 15:35:30 +08:00
|
|
|
import asyncio
|
2026-03-27 17:37:44 +08:00
|
|
|
import stat
|
2026-03-01 15:35:30 +08:00
|
|
|
from io import BytesIO
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
|
|
|
|
|
|
|
|
from fastapi import UploadFile
|
|
|
|
|
|
from app.gateway.routers import uploads
|
2026-03-01 15:35:30 +08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_writes_thread_storage_and_skips_local_sandbox_sync(tmp_path):
    """An upload to the "local" sandbox is persisted to thread storage only.

    The local sandbox shares the host filesystem, so no explicit file sync
    into the sandbox should happen.
    """
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)

    sandbox_provider = MagicMock()
    sandbox_provider.acquire.return_value = "local"
    fake_sandbox = MagicMock()
    sandbox_provider.get.return_value = fake_sandbox

    with (
        patch.object(uploads, "get_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "ensure_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=sandbox_provider),
    ):
        upload = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads"))
        result = asyncio.run(uploads.upload_files("thread-local", files=[upload]))

    assert result.success is True
    assert len(result.files) == 1
    assert result.files[0]["filename"] == "notes.txt"
    assert (storage_dir / "notes.txt").read_bytes() == b"hello uploads"

    # Local sandbox: the file already lives on the shared filesystem.
    fake_sandbox.update_file.assert_not_called()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_syncs_non_local_sandbox_and_marks_markdown_file(tmp_path):
    """A non-local sandbox receives both the raw upload and its markdown twin.

    Convertible files (e.g. PDFs) are converted to markdown; the result is
    recorded in the response and synced to the sandbox alongside the original.
    """
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)

    sandbox_provider = MagicMock()
    sandbox_provider.acquire.return_value = "aio-1"
    fake_sandbox = MagicMock()
    sandbox_provider.get.return_value = fake_sandbox

    async def fake_convert(file_path: Path) -> Path:
        # Stand-in for the real converter: emit a sibling .md file.
        converted = file_path.with_suffix(".md")
        converted.write_text("converted", encoding="utf-8")
        return converted

    with (
        patch.object(uploads, "get_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "ensure_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=sandbox_provider),
        patch.object(uploads, "convert_file_to_markdown", AsyncMock(side_effect=fake_convert)),
    ):
        upload = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes"))
        result = asyncio.run(uploads.upload_files("thread-aio", files=[upload]))

    assert result.success is True
    assert len(result.files) == 1
    file_info = result.files[0]
    assert file_info["filename"] == "report.pdf"
    assert file_info["markdown_file"] == "report.md"

    # Both the original and the generated markdown land in thread storage...
    assert (storage_dir / "report.pdf").read_bytes() == b"pdf-bytes"
    assert (storage_dir / "report.md").read_text(encoding="utf-8") == "converted"

    # ...and both are pushed into the remote sandbox.
    fake_sandbox.update_file.assert_any_call("/mnt/user-data/uploads/report.pdf", b"pdf-bytes")
    fake_sandbox.update_file.assert_any_call("/mnt/user-data/uploads/report.md", b"converted")
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_makes_non_local_files_sandbox_writable(tmp_path):
    """Files synced to a non-local sandbox get their host copies made writable."""
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)

    sandbox_provider = MagicMock()
    sandbox_provider.acquire.return_value = "aio-1"
    fake_sandbox = MagicMock()
    sandbox_provider.get.return_value = fake_sandbox

    async def fake_convert(file_path: Path) -> Path:
        # Stand-in converter producing a sibling .md file.
        converted = file_path.with_suffix(".md")
        converted.write_text("converted", encoding="utf-8")
        return converted

    with (
        patch.object(uploads, "get_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "ensure_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=sandbox_provider),
        patch.object(uploads, "convert_file_to_markdown", AsyncMock(side_effect=fake_convert)),
        patch.object(uploads, "_make_file_sandbox_writable") as make_writable,
    ):
        upload = UploadFile(filename="report.pdf", file=BytesIO(b"pdf-bytes"))
        result = asyncio.run(uploads.upload_files("thread-aio", files=[upload]))

    assert result.success is True
    # Both the raw upload and the generated markdown have permissions adjusted.
    make_writable.assert_any_call(storage_dir / "report.pdf")
    make_writable.assert_any_call(storage_dir / "report.md")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_does_not_adjust_permissions_for_local_sandbox(tmp_path):
    """Uploads to the "local" sandbox never go through the chmod helper."""
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)

    sandbox_provider = MagicMock()
    sandbox_provider.acquire.return_value = "local"
    fake_sandbox = MagicMock()
    sandbox_provider.get.return_value = fake_sandbox

    with (
        patch.object(uploads, "get_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "ensure_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=sandbox_provider),
        patch.object(uploads, "_make_file_sandbox_writable") as make_writable,
    ):
        upload = UploadFile(filename="notes.txt", file=BytesIO(b"hello uploads"))
        result = asyncio.run(uploads.upload_files("thread-local", files=[upload]))

    assert result.success is True
    make_writable.assert_not_called()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_make_file_sandbox_writable_adds_write_bits_for_regular_files(tmp_path):
    """A read-only regular file gains user, group, and other write bits."""
    target = tmp_path / "report.pdf"
    target.write_bytes(b"pdf-bytes")

    # Start from a read-only mode so the write bits must come from the helper.
    read_only_mode = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
    target.chmod(read_only_mode)

    uploads._make_file_sandbox_writable(target)

    updated_mode = stat.S_IMODE(target.stat().st_mode)
    assert updated_mode & stat.S_IWUSR
    assert updated_mode & stat.S_IWGRP
    assert updated_mode & stat.S_IWOTH
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def test_make_file_sandbox_writable_skips_symlinks(tmp_path):
    """Symlinks are left untouched — chmod would act on the link target."""
    target = tmp_path / "target-link.txt"
    target.write_text("hello", encoding="utf-8")

    # Fake lstat result reporting a symlink file type.
    symlink_stat = MagicMock(st_mode=stat.S_IFLNK)

    with (
        patch.object(uploads.os, "lstat", return_value=symlink_stat),
        patch.object(uploads.os, "chmod") as chmod,
    ):
        uploads._make_file_sandbox_writable(target)

    chmod.assert_not_called()
|
|
|
|
|
|
|
|
|
|
|
def test_upload_files_rejects_dotdot_and_dot_filenames(tmp_path):
    """'.' and '..' are dropped; traversal prefixes collapse to the basename."""
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)

    sandbox_provider = MagicMock()
    sandbox_provider.acquire.return_value = "local"
    fake_sandbox = MagicMock()
    sandbox_provider.get.return_value = fake_sandbox

    with (
        patch.object(uploads, "get_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "ensure_uploads_dir", return_value=storage_dir),
        patch.object(uploads, "get_sandbox_provider", return_value=sandbox_provider),
    ):
        # These filenames must be rejected outright
        for bad_name in ["..", "."]:
            upload = UploadFile(filename=bad_name, file=BytesIO(b"data"))
            result = asyncio.run(uploads.upload_files("thread-local", files=[upload]))
            assert result.success is True
            assert result.files == [], f"Expected no files for unsafe filename {bad_name!r}"

        # Path-traversal prefixes are stripped to the basename and accepted safely
        upload = UploadFile(filename="../etc/passwd", file=BytesIO(b"data"))
        result = asyncio.run(uploads.upload_files("thread-local", files=[upload]))
        assert result.success is True
        assert len(result.files) == 1
        assert result.files[0]["filename"] == "passwd"

    # Only the safely normalised file should exist
    assert [f.name for f in storage_dir.iterdir()] == ["passwd"]
|
def test_delete_uploaded_file_removes_generated_markdown_companion(tmp_path):
    """Deleting an upload also removes its generated markdown companion file."""
    storage_dir = tmp_path / "uploads"
    storage_dir.mkdir(parents=True)
    (storage_dir / "report.pdf").write_bytes(b"pdf-bytes")
    (storage_dir / "report.md").write_text("converted", encoding="utf-8")

    with patch.object(uploads, "get_uploads_dir", return_value=storage_dir):
        result = asyncio.run(uploads.delete_uploaded_file("thread-aio", "report.pdf"))

    assert result == {"success": True, "message": "Deleted report.pdf"}
    # Neither the original nor the companion markdown file survives.
    assert not (storage_dir / "report.pdf").exists()
    assert not (storage_dir / "report.md").exists()
|