mirror of https://github.com/langgenius/dify.git

commit cb12ada689 (parent 806016244f)

refactor all

Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,10 +1,12 @@
from flask import abort
from flask_restx import Resource
from pydantic import BaseModel

from controllers.cli_api import cli_api_ns
from controllers.cli_api.plugin.wraps import get_cli_user_tenant, plugin_data
from controllers.cli_api.wraps import cli_api_only
from controllers.console.wraps import setup_required
from core.app.entities.app_invoke_entities import InvokeFrom
from core.file.helpers import get_signed_file_url_for_plugin
from core.plugin.backwards_invocation.app import PluginAppBackwardsInvocation
from core.plugin.backwards_invocation.base import BaseBackwardsInvocationResponse

@@ -16,12 +18,24 @@ from core.plugin.entities.request import (
    RequestInvokeTool,
    RequestRequestUploadFile,
)
from core.sandbox.bash.dify_cli import DifyCliToolConfig
from core.session.cli_api import CliContext
from core.skill.entities import ToolInvocationRequest
from core.tools.entities.tool_entities import ToolProviderType
from core.tools.tool_manager import ToolManager
from libs.helper import length_prefixed_response
from models import Account, Tenant
from models.model import EndUser
from models.account import Account
from models.model import EndUser, Tenant


class FetchToolItem(BaseModel):
    tool_provider: str
    tool_name: str
    credential_id: str | None = None


class RequestFetchToolsBatch(BaseModel):
    tools: list[FetchToolItem]


@cli_api_ns.route("/invoke/llm")

@@ -106,7 +120,6 @@ class CliUploadFileRequestApi(Resource):
    @setup_required
    @plugin_data(payload_type=RequestRequestUploadFile)
    def post(self, user_model: Account | EndUser, tenant_model: Tenant, payload: RequestRequestUploadFile):
        # generate signed url
        url = get_signed_file_url_for_plugin(
            filename=payload.filename,
            mimetype=payload.mimetype,

@@ -116,42 +129,46 @@ class CliUploadFileRequestApi(Resource):
        return BaseBackwardsInvocationResponse(data={"url": url}).model_dump()


@cli_api_ns.route("/fetch/tools/list")
class CliFetchToolsListApi(Resource):
@cli_api_ns.route("/fetch/tools/batch")
class CliFetchToolsBatchApi(Resource):
    @cli_api_only
    @get_cli_user_tenant
    @setup_required
    def post(self, user_model: Account | EndUser, tenant_model: Tenant):
        from sqlalchemy.orm import Session
    @plugin_data(payload_type=RequestFetchToolsBatch)
    def post(
        self,
        user_model: Account | EndUser,
        tenant_model: Tenant,
        payload: RequestFetchToolsBatch,
        cli_context: CliContext,
    ):
        tools: list[dict] = []

        from extensions.ext_database import db
        from services.tools.api_tools_manage_service import ApiToolManageService
        from services.tools.builtin_tools_manage_service import BuiltinToolManageService
        from services.tools.mcp_tools_manage_service import MCPToolManageService
        from services.tools.workflow_tools_manage_service import WorkflowToolManageService
        for item in payload.tools:
            provider_type = _resolve_provider_type(cli_context, item.tool_provider, item.tool_name)
            if provider_type is None:
                continue

        providers = []
            try:
                tool_runtime = ToolManager.get_tool_runtime(
                    tenant_id=tenant_model.id,
                    provider_type=provider_type,
                    provider_id=item.tool_provider,
                    tool_name=item.tool_name,
                    invoke_from=InvokeFrom.AGENT,
                    credential_id=item.credential_id,
                )
                tool_config = DifyCliToolConfig.create_from_tool(tool_runtime)
                tools.append(tool_config.model_dump())
            except Exception:
                continue

        # Get builtin tools
        builtin_providers = BuiltinToolManageService.list_builtin_tools(user_model.id, tenant_model.id)
        for provider in builtin_providers:
            providers.append(provider.to_dict())
        return BaseBackwardsInvocationResponse(data={"tools": tools}).model_dump()

        # Get API tools
        api_providers = ApiToolManageService.list_api_tools(tenant_model.id)
        for provider in api_providers:
            providers.append(provider.to_dict())

        # Get workflow tools
        workflow_providers = WorkflowToolManageService.list_tenant_workflow_tools(user_model.id, tenant_model.id)
        for provider in workflow_providers:
            providers.append(provider.to_dict())

        # Get MCP tools
        with Session(db.engine) as session:
            mcp_service = MCPToolManageService(session)
            mcp_providers = mcp_service.list_providers(tenant_id=tenant_model.id, for_list=True)
            for provider in mcp_providers:
                providers.append(provider.to_dict())

        return BaseBackwardsInvocationResponse(data={"providers": providers}).model_dump()


def _resolve_provider_type(cli_context: CliContext, tool_provider: str, tool_name: str) -> ToolProviderType | None:
    if cli_context.tool_access and cli_context.tool_access.allowed_tools:
        for tool_id, tool_desc in cli_context.tool_access.allowed_tools.items():
            if tool_desc.provider == tool_provider and tool_desc.tool_name == tool_name:
                return tool_desc.tool_type
    return None

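A minimal sketch of the payload the new batch endpoint accepts, using local copies of the pydantic models above (the provider and tool names are hypothetical):

from pydantic import BaseModel


class FetchToolItem(BaseModel):  # mirrors the model defined above
    tool_provider: str
    tool_name: str
    credential_id: str | None = None


class RequestFetchToolsBatch(BaseModel):  # mirrors the model defined above
    tools: list[FetchToolItem]


payload = RequestFetchToolsBatch(tools=[FetchToolItem(tool_provider="time", tool_name="current_time")])
print(payload.model_dump())
# {'tools': [{'tool_provider': 'time', 'tool_name': 'current_time', 'credential_id': None}]}
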
@@ -1,13 +1,13 @@
from .constants import AppAssetsAttrs
from .entities import (
    AssetItem,
    FileAsset,
    SkillAsset,
)
from .storage import AssetPaths

__all__ = [
    "AppAssetsAttrs",
    "AssetItem",
    "FileAsset",
    "AssetPaths",
    "SkillAsset",
]

@@ -1,6 +1,6 @@
from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode
from core.app_assets.entities import AssetItem, FileAsset
from core.app_assets.storage import AssetPath
from core.app_assets.entities import AssetItem
from core.app_assets.storage import AssetPaths

from .base import BuildContext

@@ -19,12 +19,12 @@ class FileBuilder:

    def build(self, tree: AppAssetFileTree, ctx: BuildContext) -> list[AssetItem]:
        return [
            FileAsset(
            AssetItem(
                asset_id=node.id,
                path=path,
                file_name=node.name,
                extension=node.extension or "",
                storage_key=AssetPath.draft(ctx.tenant_id, ctx.app_id, node.id).get_storage_key(),
                storage_key=AssetPaths.draft(ctx.tenant_id, ctx.app_id, node.id),
            )
            for node, path in self._nodes
        ]

@@ -4,11 +4,12 @@ from dataclasses import dataclass
from typing import Any, cast

from core.app.entities.app_asset_entities import AppAssetFileTree, AppAssetNode
from core.app_assets.entities import AssetItem, FileAsset
from core.app_assets.storage import AppAssetStorage, AssetPath, AssetPathBase
from core.app_assets.entities import AssetItem
from core.app_assets.storage import AssetPaths
from core.skill.entities.skill_bundle import SkillBundle
from core.skill.entities.skill_document import SkillDocument
from core.skill.skill_compiler import SkillCompiler
from extensions.storage.cached_presign_storage import CachedPresignStorage

from .base import BuildContext

@@ -25,7 +26,6 @@ class _LoadedSkill:
class _CompiledSkill:
    node: AppAssetNode
    path: str
    ref: AssetPathBase
    storage_key: str
    content_bytes: bytes

@@ -34,9 +34,9 @@ class _CompiledSkill:
class SkillBuilder:
    _nodes: list[tuple[AppAssetNode, str]]
    _max_workers: int
    _storage: AppAssetStorage
    _storage: CachedPresignStorage

    def __init__(self, storage: AppAssetStorage, max_workers: int = 8) -> None:
    def __init__(self, storage: CachedPresignStorage, max_workers: int = 8) -> None:
        self._nodes = []
        self._max_workers = max_workers
        self._storage = storage

@@ -70,13 +70,11 @@ class SkillBuilder:
            artifact = artifact_set.get(skill.node.id)
            if artifact is None:
                continue
            resolved_ref = AssetPath.resolved(ctx.tenant_id, ctx.app_id, ctx.build_id, skill.node.id)
            to_upload.append(
                _CompiledSkill(
                    node=skill.node,
                    path=skill.path,
                    ref=resolved_ref,
                    storage_key=resolved_ref.get_storage_key(),
                    storage_key=AssetPaths.resolved(ctx.tenant_id, ctx.app_id, ctx.build_id, skill.node.id),
                    content_bytes=artifact.content.encode("utf-8"),
                )
            )

@@ -84,9 +82,9 @@ class SkillBuilder:
        # 5. Upload all compiled skills (parallel IO)
        self._upload_all(to_upload)

        # 6. Return FileAssets
        # 6. Return AssetItems
        return [
            FileAsset(
            AssetItem(
                asset_id=s.node.id,
                path=s.path,
                file_name=s.node.name,

@@ -99,8 +97,8 @@ class SkillBuilder:
    def _load_all(self, ctx: BuildContext) -> list[_LoadedSkill]:
        def load_one(node: AppAssetNode, path: str) -> _LoadedSkill:
            try:
                draft_ref = AssetPath.draft(ctx.tenant_id, ctx.app_id, node.id)
                data = json.loads(self._storage.load(draft_ref))
                key = AssetPaths.draft(ctx.tenant_id, ctx.app_id, node.id)
                data = json.loads(self._storage.load_once(key))
                content = ""
                metadata: dict[str, Any] = {}
                if isinstance(data, dict):

@@ -121,7 +119,7 @@ class SkillBuilder:

    def _upload_all(self, skills: list[_CompiledSkill]) -> None:
        def upload_one(skill: _CompiledSkill) -> None:
            self._storage.save(skill.ref, skill.content_bytes)
            self._storage.save(skill.storage_key, skill.content_bytes)

        with ThreadPoolExecutor(max_workers=self._max_workers) as executor:
            futures = [executor.submit(upload_one, skill) for skill in skills]

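The upload step above is a plain fan-out of IO-bound saves over a thread pool; a self-contained sketch of the same shape (the save callable is hypothetical):

from concurrent.futures import ThreadPoolExecutor


def upload_all(items: list[tuple[str, bytes]], save, max_workers: int = 8) -> None:
    """Submit one save per (key, bytes) pair and surface the first failure."""
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        futures = [executor.submit(save, key, data) for key, data in items]
        for future in futures:
            future.result()  # re-raises if the save failed
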
@@ -1,39 +1,20 @@
from __future__ import annotations

from core.app.entities.app_asset_entities import AppAssetFileTree, AssetNodeType
from core.app_assets.entities import FileAsset
from core.app_assets.entities.assets import AssetItem
from core.app_assets.storage import AssetPath
from core.app_assets.entities import AssetItem
from core.app_assets.storage import AssetPaths


def tree_to_asset_items(
    tree: AppAssetFileTree,
    tenant_id: str,
    app_id: str,
) -> list[AssetItem]:
    """
    Convert AppAssetFileTree to list of FileAsset for packaging.

    Args:
        tree: The asset file tree to convert
        tenant_id: Tenant ID for storage key generation
        app_id: App ID for storage key generation

    Returns:
        List of FileAsset items ready for packaging
    """
    items: list[AssetItem] = []
    for node in tree.nodes:
        if node.node_type == AssetNodeType.FILE:
            path = tree.get_path(node.id)
            asset_path = AssetPath.draft(tenant_id, app_id, node.id)
            items.append(
                FileAsset(
                    asset_id=node.id,
                    path=path,
                    file_name=node.name,
                    extension=node.extension or "",
                    storage_key=asset_path.get_storage_key(),
                )
            )
    return items
def tree_to_asset_items(tree: AppAssetFileTree, tenant_id: str, app_id: str) -> list[AssetItem]:
    """Convert AppAssetFileTree to list of AssetItem for packaging."""
    return [
        AssetItem(
            asset_id=node.id,
            path=tree.get_path(node.id),
            file_name=node.name,
            extension=node.extension or "",
            storage_key=AssetPaths.draft(tenant_id, app_id, node.id),
        )
        for node in tree.nodes
        if node.node_type == AssetNodeType.FILE
    ]

@@ -1,8 +1,7 @@
from .assets import AssetItem, FileAsset
from .assets import AssetItem
from .skill import SkillAsset

__all__ = [
    "AssetItem",
    "FileAsset",
    "SkillAsset",
]

@@ -1,22 +1,10 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass


@dataclass
class AssetItem(ABC):
class AssetItem:
    asset_id: str
    path: str
    file_name: str
    extension: str

    @abstractmethod
    def get_storage_key(self) -> str:
        raise NotImplementedError


@dataclass
class FileAsset(AssetItem):
    storage_key: str

    def get_storage_key(self) -> str:
        return self.storage_key

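A minimal sketch of the flattened entity in use (a local copy for illustration; that storage_key stays on AssetItem as a plain field is an assumption, though the call sites in later hunks pass it directly):

from dataclasses import dataclass


@dataclass
class AssetItem:  # local illustrative copy of the entity above
    asset_id: str
    path: str
    file_name: str
    extension: str
    storage_key: str  # assumption: retained as a plain field once the ABC is gone


item = AssetItem("a1", "/docs/guide.md", "guide", "md", "app_assets/t/a/draft/a1")
print(item.storage_key)  # callers read it directly instead of via get_storage_key()
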
@@ -7,8 +7,4 @@ from .assets import AssetItem

@dataclass
class SkillAsset(AssetItem):
    storage_key: str
    metadata: Mapping[str, Any] = field(default_factory=dict)

    def get_storage_key(self) -> str:
        return self.storage_key

@@ -1,264 +1,77 @@
"""App assets storage layer.
"""App assets storage key generation.

This module provides storage abstractions for app assets (draft files, build zips,
resolved assets, skill bundles, source zips, bundle exports/imports).

Key components:
- AssetPath: Factory for creating typed storage paths
- AppAssetStorage: High-level storage operations with presign support

All presign operations use the unified FilePresignStorage wrapper, which automatically
falls back to Dify's file proxy when the underlying storage doesn't support presigned URLs.
Provides AssetPaths facade for generating storage keys for app assets.
Storage instances are obtained via AppAssetService.get_storage().
"""

from __future__ import annotations

from abc import ABC, abstractmethod
from collections.abc import Generator, Iterable
from dataclasses import dataclass
from typing import Any, ClassVar
from uuid import UUID

from extensions.storage.base_storage import BaseStorage
from extensions.storage.cached_presign_storage import CachedPresignStorage
from extensions.storage.file_presign_storage import FilePresignStorage
from extensions.storage.silent_storage import SilentStorage

_ASSET_BASE = "app_assets"
_SILENT_STORAGE_NOT_FOUND = b"File Not Found"
_ASSET_PATH_REGISTRY: dict[str, tuple[bool, Any]] = {}
_BASE = "app_assets"


def _require_uuid(value: str, field_name: str) -> None:
def _check_uuid(value: str, name: str) -> None:
    try:
        UUID(value)
    except (ValueError, TypeError) as exc:
        raise ValueError(f"{field_name} must be a UUID") from exc
    except (ValueError, TypeError) as e:
        raise ValueError(f"{name} must be a valid UUID") from e


def register_asset_path(asset_type: str, *, requires_node: bool, factory: Any) -> None:
    _ASSET_PATH_REGISTRY[asset_type] = (requires_node, factory)


@dataclass(frozen=True)
class AssetPathBase(ABC):
    """Base class for all asset paths."""

    asset_type: ClassVar[str]
    tenant_id: str
    app_id: str
    resource_id: str

    def __post_init__(self) -> None:
        _require_uuid(self.tenant_id, "tenant_id")
        _require_uuid(self.app_id, "app_id")
        _require_uuid(self.resource_id, "resource_id")

    @abstractmethod
    def get_storage_key(self) -> str:
        raise NotImplementedError


@dataclass(frozen=True)
class _DraftAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "draft"

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/draft/{self.resource_id}"


@dataclass(frozen=True)
class _BuildZipAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "build-zip"

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/artifacts/{self.resource_id}.zip"


@dataclass(frozen=True)
class _ResolvedAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "resolved"
    node_id: str

    def __post_init__(self) -> None:
        super().__post_init__()
        _require_uuid(self.node_id, "node_id")

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/artifacts/{self.resource_id}/resolved/{self.node_id}"


@dataclass(frozen=True)
class _SkillBundleAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "skill-bundle"

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/artifacts/{self.resource_id}/skill_artifact_set.json"


@dataclass(frozen=True)
class _SourceZipAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "source-zip"

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/sources/{self.resource_id}.zip"


@dataclass(frozen=True)
class _BundleExportZipAssetPath(AssetPathBase):
    asset_type: ClassVar[str] = "bundle-export-zip"

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/{self.app_id}/bundle_exports/{self.resource_id}.zip"


@dataclass(frozen=True)
class BundleImportZipPath:
    """Path for temporary import zip files."""

    tenant_id: str
    import_id: str

    def __post_init__(self) -> None:
        _require_uuid(self.tenant_id, "tenant_id")

    def get_storage_key(self) -> str:
        return f"{_ASSET_BASE}/{self.tenant_id}/imports/{self.import_id}.zip"


class AssetPath:
    """Factory for creating typed asset paths."""
class AssetPaths:
    """Facade for generating app asset storage keys."""

    @staticmethod
    def draft(tenant_id: str, app_id: str, node_id: str) -> AssetPathBase:
        return _DraftAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=node_id)
    def draft(tenant_id: str, app_id: str, node_id: str) -> str:
        """app_assets/{tenant}/{app}/draft/{node_id}"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(node_id, "node_id")
        return f"{_BASE}/{tenant_id}/{app_id}/draft/{node_id}"

    @staticmethod
    def build_zip(tenant_id: str, app_id: str, assets_id: str) -> AssetPathBase:
        return _BuildZipAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=assets_id)
    def build_zip(tenant_id: str, app_id: str, assets_id: str) -> str:
        """app_assets/{tenant}/{app}/artifacts/{assets_id}.zip"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(assets_id, "assets_id")
        return f"{_BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}.zip"

    @staticmethod
    def resolved(tenant_id: str, app_id: str, assets_id: str, node_id: str) -> AssetPathBase:
        return _ResolvedAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=assets_id, node_id=node_id)
    def resolved(tenant_id: str, app_id: str, assets_id: str, node_id: str) -> str:
        """app_assets/{tenant}/{app}/artifacts/{assets_id}/resolved/{node_id}"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(assets_id, "assets_id")
        _check_uuid(node_id, "node_id")
        return f"{_BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}/resolved/{node_id}"

    @staticmethod
    def skill_bundle(tenant_id: str, app_id: str, assets_id: str) -> AssetPathBase:
        return _SkillBundleAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=assets_id)
    def skill_bundle(tenant_id: str, app_id: str, assets_id: str) -> str:
        """app_assets/{tenant}/{app}/artifacts/{assets_id}/skill_artifact_set.json"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(assets_id, "assets_id")
        return f"{_BASE}/{tenant_id}/{app_id}/artifacts/{assets_id}/skill_artifact_set.json"

    @staticmethod
    def source_zip(tenant_id: str, app_id: str, workflow_id: str) -> AssetPathBase:
        return _SourceZipAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=workflow_id)
    def source_zip(tenant_id: str, app_id: str, workflow_id: str) -> str:
        """app_assets/{tenant}/{app}/sources/{workflow_id}.zip"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(workflow_id, "workflow_id")
        return f"{_BASE}/{tenant_id}/{app_id}/sources/{workflow_id}.zip"

    @staticmethod
    def bundle_export_zip(tenant_id: str, app_id: str, export_id: str) -> AssetPathBase:
        return _BundleExportZipAssetPath(tenant_id=tenant_id, app_id=app_id, resource_id=export_id)
    def bundle_export(tenant_id: str, app_id: str, export_id: str) -> str:
        """app_assets/{tenant}/{app}/bundle_exports/{export_id}.zip"""
        _check_uuid(tenant_id, "tenant_id")
        _check_uuid(app_id, "app_id")
        _check_uuid(export_id, "export_id")
        return f"{_BASE}/{tenant_id}/{app_id}/bundle_exports/{export_id}.zip"

    @staticmethod
    def bundle_import_zip(tenant_id: str, import_id: str) -> BundleImportZipPath:
        return BundleImportZipPath(tenant_id=tenant_id, import_id=import_id)

    @staticmethod
    def from_components(
        asset_type: str,
        tenant_id: str,
        app_id: str,
        resource_id: str,
        sub_resource_id: str | None = None,
    ) -> AssetPathBase:
        entry = _ASSET_PATH_REGISTRY.get(asset_type)
        if not entry:
            raise ValueError(f"Unsupported asset type: {asset_type}")
        requires_node, factory = entry
        if requires_node and not sub_resource_id:
            raise ValueError("resolved assets require node_id")
        if not requires_node and sub_resource_id:
            raise ValueError(f"{asset_type} assets do not accept node_id")
        if requires_node:
            return factory(tenant_id, app_id, resource_id, sub_resource_id)
        return factory(tenant_id, app_id, resource_id)


register_asset_path("draft", requires_node=False, factory=AssetPath.draft)
register_asset_path("build-zip", requires_node=False, factory=AssetPath.build_zip)
register_asset_path("resolved", requires_node=True, factory=AssetPath.resolved)
register_asset_path("skill-bundle", requires_node=False, factory=AssetPath.skill_bundle)
register_asset_path("source-zip", requires_node=False, factory=AssetPath.source_zip)
register_asset_path("bundle-export-zip", requires_node=False, factory=AssetPath.bundle_export_zip)


class AppAssetStorage:
    """High-level storage operations for app assets.

    Wraps BaseStorage with:
    - FilePresignStorage for presign fallback support
    - CachedPresignStorage for URL caching

    Usage:
        storage = AppAssetStorage(base_storage, redis_client=redis)
        storage.save(asset_path, content)
        url = storage.get_download_url(asset_path)
    """

    _storage: CachedPresignStorage

    def __init__(self, storage: BaseStorage) -> None:
        # Wrap with FilePresignStorage for fallback support, then CachedPresignStorage for caching
        presign_storage = FilePresignStorage(SilentStorage(storage))
        self._storage = CachedPresignStorage(
            storage=presign_storage,
            cache_key_prefix="app_assets",
        )

    @property
    def storage(self) -> BaseStorage:
        return self._storage

    def save(self, asset_path: AssetPathBase, content: bytes) -> None:
        self._storage.save(asset_path.get_storage_key(), content)

    def load(self, asset_path: AssetPathBase) -> bytes:
        return self._storage.load_once(asset_path.get_storage_key())

    def load_stream(self, asset_path: AssetPathBase) -> Generator[bytes, None, None]:
        return self._storage.load_stream(asset_path.get_storage_key())

    def load_or_none(self, asset_path: AssetPathBase) -> bytes | None:
        try:
            data = self._storage.load_once(asset_path.get_storage_key())
        except FileNotFoundError:
            return None
        if data == _SILENT_STORAGE_NOT_FOUND:
            return None
        return data

    def exists(self, asset_path: AssetPathBase) -> bool:
        return self._storage.exists(asset_path.get_storage_key())

    def delete(self, asset_path: AssetPathBase) -> None:
        self._storage.delete(asset_path.get_storage_key())

    def get_download_url(self, asset_path: AssetPathBase, expires_in: int = 3600) -> str:
        return self._storage.get_download_url(asset_path.get_storage_key(), expires_in)

    def get_download_urls(self, asset_paths: Iterable[AssetPathBase], expires_in: int = 3600) -> list[str]:
        storage_keys = [p.get_storage_key() for p in asset_paths]
        return self._storage.get_download_urls(storage_keys, expires_in)

    def get_upload_url(self, asset_path: AssetPathBase, expires_in: int = 3600) -> str:
        return self._storage.get_upload_url(asset_path.get_storage_key(), expires_in)

    # Bundle import convenience methods
    def get_import_upload_url(self, path: BundleImportZipPath, expires_in: int = 3600) -> str:
        return self._storage.get_upload_url(path.get_storage_key(), expires_in)

    def get_import_download_url(self, path: BundleImportZipPath, expires_in: int = 3600) -> str:
        return self._storage.get_download_url(path.get_storage_key(), expires_in)

    def delete_import_zip(self, path: BundleImportZipPath) -> None:
        """Delete import zip file. Errors are logged but not raised."""
        try:
            self._storage.delete(path.get_storage_key())
        except Exception:
            import logging

            logging.getLogger(__name__).debug("Failed to delete import zip: %s", path.get_storage_key())
    def bundle_import(tenant_id: str, import_id: str) -> str:
        """app_assets/{tenant}/imports/{import_id}.zip"""
        _check_uuid(tenant_id, "tenant_id")
        return f"{_BASE}/{tenant_id}/imports/{import_id}.zip"

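A quick illustration of the new facade: keys are plain strings, and UUID validation happens inline (the IDs below are generated for the example):

from uuid import uuid4

from core.app_assets.storage import AssetPaths

tenant_id, app_id, node_id = str(uuid4()), str(uuid4()), str(uuid4())
key = AssetPaths.draft(tenant_id, app_id, node_id)
assert key == f"app_assets/{tenant_id}/{app_id}/draft/{node_id}"
# Non-UUID inputs raise ValueError via _check_uuid, e.g.:
# AssetPaths.draft("not-a-uuid", app_id, node_id)
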
@@ -5,12 +5,10 @@ from typing import TYPE_CHECKING, Any

from pydantic import BaseModel, Field

from core.app.entities.app_invoke_entities import InvokeFrom
from core.model_runtime.utils.encoders import jsonable_encoder
from core.session.cli_api import CliApiSession
from core.skill.entities import ToolDependencies, ToolReference
from core.tools.entities.tool_entities import ToolParameter, ToolProviderType
from core.tools.tool_manager import ToolManager
from core.virtual_environment.__base.entities import Arch, OperatingSystem

from ..entities import DifyCli

@@ -138,20 +136,6 @@ class DifyCliConfig(BaseModel):

        cli_api_url = dify_config.CLI_API_URL

        tools: list[DifyCliToolConfig] = []
        for dependency in tool_deps.dependencies:
            tool = DifyCliToolConfig.create_from_tool(
                ToolManager.get_tool_runtime(
                    tenant_id=tenant_id,
                    provider_type=dependency.type,
                    provider_id=dependency.provider,
                    tool_name=dependency.tool_name,
                    invoke_from=InvokeFrom.AGENT,
                )
            )
            tool.enabled = dependency.enabled
            tools.append(tool)

        return cls(
            env=DifyCliEnvConfig(
                files_url=dify_config.FILES_URL,

@@ -160,7 +144,7 @@ class DifyCliConfig(BaseModel):
                cli_api_secret=session.secret,
            ),
            tool_references=[DifyCliToolReference.create_from_tool_reference(ref) for ref in tool_deps.references],
            tools=tools,
            tools=[],
        )

@@ -1,6 +1,6 @@
import logging

from core.app_assets.storage import AssetPath
from core.app_assets.storage import AssetPaths
from core.sandbox.sandbox import Sandbox
from core.virtual_environment.__base.helpers import pipeline

@@ -24,8 +24,8 @@ class AppAssetsInitializer(AsyncSandboxInitializer):
        # Load published app assets and unzip the artifact bundle.
        vm = sandbox.vm
        asset_storage = AppAssetService.get_storage()
        zip_ref = AssetPath.build_zip(self._tenant_id, self._app_id, self._assets_id)
        download_url = asset_storage.get_download_url(zip_ref)
        key = AssetPaths.build_zip(self._tenant_id, self._app_id, self._assets_id)
        download_url = asset_storage.get_download_url(key)

        (
            pipeline(vm)

@@ -1,7 +1,7 @@
import logging

from core.app_assets.constants import AppAssetsAttrs
from core.app_assets.storage import AssetPath
from core.app_assets.storage import AssetPaths
from core.sandbox.entities import AppAssets
from core.sandbox.sandbox import Sandbox
from core.sandbox.services import AssetDownloadService

@@ -13,8 +13,7 @@ from .base import AsyncSandboxInitializer

logger = logging.getLogger(__name__)

DRAFT_ASSETS_DOWNLOAD_TIMEOUT = 60 * 10
DRAFT_ASSETS_EXPIRES_IN = 60 * 10
_TIMEOUT = 600  # 10 minutes


class DraftAppAssetsInitializer(AsyncSandboxInitializer):

@@ -24,7 +23,6 @@ class DraftAppAssetsInitializer(AsyncSandboxInitializer):
        self._assets_id = assets_id

    def initialize(self, sandbox: Sandbox) -> None:
        # Load published app assets and unzip the artifact bundle.
        vm = sandbox.vm
        build_id = self._assets_id
        tree = sandbox.attrs.get(AppAssetsAttrs.FILE_TREE)

@@ -33,19 +31,19 @@ class DraftAppAssetsInitializer(AsyncSandboxInitializer):
        if not nodes:
            return
        # FIXME(Mairuis): should be more graceful
        refs = [
            AssetPath.resolved(self._tenant_id, self._app_id, build_id, node.id)
        keys = [
            AssetPaths.resolved(self._tenant_id, self._app_id, build_id, node.id)
            if node.extension == "md"
            else AssetPath.draft(self._tenant_id, self._app_id, node.id)
            else AssetPaths.draft(self._tenant_id, self._app_id, node.id)
            for node in nodes
        ]
        urls = asset_storage.get_download_urls(refs, DRAFT_ASSETS_EXPIRES_IN)
        urls = asset_storage.get_download_urls(keys, _TIMEOUT)
        items = [AssetDownloadItem(path=tree.get_path(node.id).lstrip("/"), url=url) for node, url in zip(nodes, urls)]
        script = AssetDownloadService.build_download_script(items, AppAssets.PATH)
        pipeline(vm).add(
            ["sh", "-c", script],
            error_message="Failed to download draft assets",
        ).execute(timeout=DRAFT_ASSETS_DOWNLOAD_TIMEOUT, raise_on_error=True)
        ).execute(timeout=_TIMEOUT, raise_on_error=True)

        logger.info(
            "Draft app assets initialized for app_id=%s, assets_id=%s",

@@ -3,17 +3,14 @@ from __future__ import annotations
import json
import os
from typing import TYPE_CHECKING
from uuid import UUID, uuid4
from uuid import uuid4

from core.sandbox.entities.files import SandboxFileDownloadTicket, SandboxFileNode
from core.sandbox.inspector.base import SandboxFileSource
from core.sandbox.storage import sandbox_file_storage
from core.sandbox.storage.archive_storage import SandboxArchivePath
from core.sandbox.storage.sandbox_file_storage import SandboxFilePath
from core.sandbox.storage import SandboxFilePaths
from core.virtual_environment.__base.exec import CommandExecutionError
from core.virtual_environment.__base.helpers import execute
from extensions.ext_storage import storage
from extensions.storage.silent_storage import SilentStorage

if TYPE_CHECKING:
    from core.zip_sandbox import ZipSandbox

@@ -71,11 +68,10 @@ print(json.dumps(entries))
        """Get a pre-signed download URL for the sandbox archive."""
        from extensions.storage.file_presign_storage import FilePresignStorage

        archive_path = SandboxArchivePath(tenant_id=UUID(self._tenant_id), sandbox_id=UUID(self._sandbox_id))
        storage_key = archive_path.get_storage_key()
        storage_key = f"sandbox_archives/{self._tenant_id}/{self._sandbox_id}.tar.gz"
        if not storage.exists(storage_key):
            raise ValueError("Sandbox archive not found")
        presign_storage = FilePresignStorage(SilentStorage(storage.storage_runner))
        presign_storage = FilePresignStorage(storage.storage_runner)
        return presign_storage.get_download_url(storage_key, self._EXPORT_EXPIRES_IN_SECONDS)

    def _create_zip_sandbox(self) -> ZipSandbox:

@@ -188,29 +184,33 @@ raise SystemExit(2)
        if kind not in ("dir", "file"):
            raise ValueError("File not found in sandbox archive")

        from services.sandbox.sandbox_file_service import SandboxFileService

        sandbox_storage = SandboxFileService.get_storage()

        if kind == "file":
            # Download file content from sandbox
            file_data = zs.read_file(target_path)
            export_path = SandboxFilePath(
                tenant_id=UUID(self._tenant_id),
                sandbox_id=UUID(self._sandbox_id),
                export_id=export_id,
                filename=os.path.basename(path) or "file",
            export_key = SandboxFilePaths.export(
                self._tenant_id,
                self._sandbox_id,
                export_id,
                os.path.basename(path) or "file",
            )
            sandbox_file_storage.save(export_path, file_data)
            sandbox_storage.save(export_key, file_data)
        else:
            # Create tar.gz archive of the directory
            tar_file = zs.tar(target_path, include_base=True, compress=True)
            tar_data = zs.read_file(tar_file.path)
            export_path = SandboxFilePath(
                tenant_id=UUID(self._tenant_id),
                sandbox_id=UUID(self._sandbox_id),
                export_id=export_id,
                filename=f"{export_name}.tar.gz",
            export_key = SandboxFilePaths.export(
                self._tenant_id,
                self._sandbox_id,
                export_id,
                f"{export_name}.tar.gz",
            )
            sandbox_file_storage.save(export_path, tar_data)
            sandbox_storage.save(export_key, tar_data)

        download_url = sandbox_file_storage.get_download_url(export_path, expires_in=self._EXPORT_EXPIRES_IN_SECONDS)
        download_url = sandbox_storage.get_download_url(export_key, self._EXPORT_EXPIRES_IN_SECONDS)
        return SandboxFileDownloadTicket(
            download_url=download_url,
            expires_in=self._EXPORT_EXPIRES_IN_SECONDS,

@@ -3,12 +3,11 @@ from __future__ import annotations
import json
import logging
import os
from uuid import UUID, uuid4
from uuid import uuid4

from core.sandbox.entities.files import SandboxFileDownloadTicket, SandboxFileNode
from core.sandbox.inspector.base import SandboxFileSource
from core.sandbox.storage import sandbox_file_storage
from core.sandbox.storage.sandbox_file_storage import SandboxFilePath
from core.sandbox.storage import SandboxFilePaths
from core.virtual_environment.__base.exec import CommandExecutionError
from core.virtual_environment.__base.helpers import execute
from core.virtual_environment.__base.virtual_environment import VirtualEnvironment

@@ -110,19 +109,22 @@ print(json.dumps(entries))
        return entries

    def download_file(self, *, path: str) -> SandboxFileDownloadTicket:
        from services.sandbox.sandbox_file_service import SandboxFileService

        kind = self._detect_path_kind(path)

        export_name = os.path.basename(path.rstrip("/")) or "workspace"
        filename = f"{export_name}.tar.gz" if kind == "dir" else (os.path.basename(path) or "file")
        export_id = uuid4().hex
        export_path = SandboxFilePath(
            tenant_id=UUID(self._tenant_id),
            sandbox_id=UUID(self._sandbox_id),
            export_id=export_id,
            filename=filename,
        export_key = SandboxFilePaths.export(
            self._tenant_id,
            self._sandbox_id,
            export_id,
            filename,
        )

        upload_url = sandbox_file_storage.get_upload_url(export_path, expires_in=self._EXPORT_EXPIRES_IN_SECONDS)
        sandbox_storage = SandboxFileService.get_storage()
        upload_url = sandbox_storage.get_upload_url(export_key, self._EXPORT_EXPIRES_IN_SECONDS)

        if kind == "dir":
            archive_path = f"/tmp/{export_id}.tar.gz"

@@ -163,7 +165,7 @@ print(json.dumps(entries))
        except CommandExecutionError as exc:
            raise RuntimeError(str(exc)) from exc

        download_url = sandbox_file_storage.get_download_url(export_path, expires_in=self._EXPORT_EXPIRES_IN_SECONDS)
        download_url = sandbox_storage.get_download_url(export_key, self._EXPORT_EXPIRES_IN_SECONDS)
        return SandboxFileDownloadTicket(
            download_url=download_url,
            expires_in=self._EXPORT_EXPIRES_IN_SECONDS,

@@ -1,14 +1,11 @@
from .archive_storage import ArchiveSandboxStorage, SandboxArchivePath
from .archive_storage import ArchiveSandboxStorage
from .noop_storage import NoopSandboxStorage
from .sandbox_file_storage import SandboxFilePath, SandboxFileStorage, sandbox_file_storage
from .sandbox_file_storage import SandboxFilePaths
from .sandbox_storage import SandboxStorage

__all__ = [
    "ArchiveSandboxStorage",
    "NoopSandboxStorage",
    "SandboxArchivePath",
    "SandboxFilePath",
    "SandboxFileStorage",
    "SandboxFilePaths",
    "SandboxStorage",
    "sandbox_file_storage",
]

@@ -1,19 +1,8 @@
"""Archive-based sandbox storage for persisting sandbox state.

This module provides storage operations for sandbox workspace archives (tar.gz),
enabling state persistence across sandbox sessions.

Storage key format: sandbox_archives/{tenant_id}/{sandbox_id}.tar.gz

All presign operations use the unified FilePresignStorage wrapper, which automatically
falls back to Dify's file proxy when the underlying storage doesn't support presigned URLs.
"""
"""Archive-based sandbox storage for persisting sandbox state."""

from __future__ import annotations

import logging
from dataclasses import dataclass
from uuid import UUID

from core.virtual_environment.__base.exec import PipelineExecutionError
from core.virtual_environment.__base.helpers import pipeline

@@ -21,43 +10,16 @@ from core.virtual_environment.__base.virtual_environment import VirtualEnvironment
from extensions.storage.base_storage import BaseStorage
from extensions.storage.cached_presign_storage import CachedPresignStorage
from extensions.storage.file_presign_storage import FilePresignStorage
from extensions.storage.silent_storage import SilentStorage

from .sandbox_storage import SandboxStorage

logger = logging.getLogger(__name__)

WORKSPACE_DIR = "."
ARCHIVE_DOWNLOAD_TIMEOUT = 60 * 5
ARCHIVE_UPLOAD_TIMEOUT = 60 * 5


def build_tar_exclude_args(patterns: list[str]) -> list[str]:
    return [f"--exclude={p}" for p in patterns]


@dataclass(frozen=True)
class SandboxArchivePath:
    """Path for sandbox workspace archives."""

    tenant_id: UUID
    sandbox_id: UUID

    def get_storage_key(self) -> str:
        return f"sandbox_archives/{self.tenant_id}/{self.sandbox_id}.tar.gz"
_ARCHIVE_TIMEOUT = 300  # 5 minutes


class ArchiveSandboxStorage(SandboxStorage):
    """Archive-based storage for sandbox workspace persistence.

    Uses tar.gz archives to save and restore sandbox workspace state.
    Requires a presign-capable storage wrapper for generating download/upload URLs.
    """

    _tenant_id: str
    _sandbox_id: str
    _exclude_patterns: list[str]
    _storage: BaseStorage
    """Archive-based storage for sandbox workspace persistence."""

    def __init__(
        self,

@@ -66,53 +28,35 @@ class ArchiveSandboxStorage(SandboxStorage):
        storage: BaseStorage,
        exclude_patterns: list[str] | None = None,
    ):
        self._tenant_id = tenant_id
        self._sandbox_id = sandbox_id
        self._exclude_patterns = exclude_patterns or []
        # Wrap with FilePresignStorage for presign fallback support
        self._storage_key = f"sandbox_archives/{tenant_id}/{sandbox_id}.tar.gz"
        self._storage = CachedPresignStorage(
            storage=FilePresignStorage(SilentStorage(storage)),
            storage=FilePresignStorage(storage),
            cache_key_prefix="sandbox_archives",
        )

    @property
    def _archive_path(self) -> SandboxArchivePath:
        return SandboxArchivePath(UUID(self._tenant_id), UUID(self._sandbox_id))

    @property
    def _storage_key(self) -> str:
        return self._archive_path.get_storage_key()

    @property
    def _archive_name(self) -> str:
        return f"{self._sandbox_id}.tar.gz"

    @property
    def _archive_tmp_path(self) -> str:
        return f"/tmp/{self._archive_name}"

    def mount(self, sandbox: VirtualEnvironment) -> bool:
        """Load archive from storage into sandbox workspace."""
        if not self.exists():
            logger.debug("No archive found for sandbox %s, skipping mount", self._sandbox_id)
            return False

        download_url = self._storage.get_download_url(self._storage_key, ARCHIVE_DOWNLOAD_TIMEOUT)
        archive_name = self._archive_name
        download_url = self._storage.get_download_url(self._storage_key, _ARCHIVE_TIMEOUT)
        archive = "archive.tar.gz"

        try:
            (
                pipeline(sandbox)
                .add(["curl", "-fsSL", download_url, "-o", archive_name], error_message="Failed to download archive")
                .add(["curl", "-fsSL", download_url, "-o", archive], error_message="Failed to download archive")
                .add(
                    ["sh", "-c", 'tar -xzf "$1" 2>/dev/null; exit $?', "sh", archive_name],
                    error_message="Failed to extract archive",
                    ["sh", "-c", 'tar -xzf "$1" 2>/dev/null; exit $?', "sh", archive], error_message="Failed to extract"
                )
                .add(["rm", archive_name], error_message="Failed to cleanup archive")
                .execute(timeout=ARCHIVE_DOWNLOAD_TIMEOUT, raise_on_error=True)
                .add(["rm", archive], error_message="Failed to cleanup")
                .execute(timeout=_ARCHIVE_TIMEOUT, raise_on_error=True)
            )
        except PipelineExecutionError:
            logger.exception("Failed to extract archive")
            logger.exception("Failed to mount archive for sandbox %s", self._sandbox_id)
            return False

        logger.info("Mounted archive for sandbox %s", self._sandbox_id)

@@ -120,38 +64,23 @@ class ArchiveSandboxStorage(SandboxStorage):

    def unmount(self, sandbox: VirtualEnvironment) -> bool:
        """Save sandbox workspace to storage as archive."""
        upload_url = self._storage.get_upload_url(self._storage_key, ARCHIVE_UPLOAD_TIMEOUT)
        archive_path = self._archive_tmp_path
        upload_url = self._storage.get_upload_url(self._storage_key, _ARCHIVE_TIMEOUT)
        archive = f"/tmp/{self._sandbox_id}.tar.gz"
        exclude_args = [f"--exclude={p}" for p in self._exclude_patterns]

        (
            pipeline(sandbox)
            .add(
                [
                    "tar",
                    "-czf",
                    archive_path,
                    *build_tar_exclude_args(self._exclude_patterns),
                    "-C",
                    WORKSPACE_DIR,
                    ".",
                ],
                error_message="Failed to create archive",
            )
            .add(
                ["curl", "-s", "-f", "-X", "PUT", "-T", archive_path, upload_url],
                error_message="Failed to upload archive",
            )
            .execute(timeout=ARCHIVE_UPLOAD_TIMEOUT, raise_on_error=True)
            .add(["tar", "-czf", archive, *exclude_args, "-C", ".", "."], error_message="Failed to create archive")
            .add(["curl", "-sf", "-X", "PUT", "-T", archive, upload_url], error_message="Failed to upload archive")
            .execute(timeout=_ARCHIVE_TIMEOUT, raise_on_error=True)
        )
        logger.info("Unmounted archive for sandbox %s", self._sandbox_id)
        return True

    def exists(self) -> bool:
        """Check if archive exists in storage."""
        return self._storage.exists(self._storage_key)

    def delete(self) -> None:
        """Delete archive from storage."""
        try:
            self._storage.delete(self._storage_key)
            logger.info("Deleted archive for sandbox %s", self._sandbox_id)

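For reference, the inlined exclude-args expression in unmount() is equivalent to the removed build_tar_exclude_args helper (the patterns below are examples, not from the commit):

exclude_patterns = ["node_modules", ".git"]
exclude_args = [f"--exclude={p}" for p in exclude_patterns]
assert exclude_args == ["--exclude=node_modules", "--exclude=.git"]
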
@@ -1,101 +1,18 @@
"""Sandbox file storage for exporting files from sandbox environments.
"""Sandbox file storage key generation.

This module provides storage operations for files exported from sandbox environments,
including download tickets for both runtime and archive-based file sources.

Storage key format: sandbox_file_downloads/{tenant_id}/{sandbox_id}/{export_id}/{filename}

All presign operations use the unified FilePresignStorage wrapper, which automatically
falls back to Dify's file proxy when the underlying storage doesn't support presigned URLs.
Provides SandboxFilePaths facade for generating storage keys for sandbox file exports.
Storage instances are obtained via SandboxFileService.get_storage().
"""

from __future__ import annotations

from dataclasses import dataclass
from typing import Any
from uuid import UUID

from extensions.storage.base_storage import BaseStorage
from extensions.storage.cached_presign_storage import CachedPresignStorage
from extensions.storage.file_presign_storage import FilePresignStorage
from extensions.storage.silent_storage import SilentStorage
_BASE = "sandbox_files"


@dataclass(frozen=True)
class SandboxFilePath:
    """Path for sandbox file exports."""
class SandboxFilePaths:
    """Facade for generating sandbox file export storage keys."""

    tenant_id: UUID
    sandbox_id: UUID
    export_id: str
    filename: str

    def get_storage_key(self) -> str:
        return f"sandbox_files/{self.tenant_id}/{self.sandbox_id}/{self.export_id}/{self.filename}"


class SandboxFileStorage:
    """Storage operations for sandbox file exports.

    Wraps BaseStorage with:
    - FilePresignStorage for presign fallback support
    - CachedPresignStorage for URL caching

    Usage:
        storage = SandboxFileStorage(base_storage, redis_client=redis)
        storage.save(download_path, content)
        url = storage.get_download_url(download_path)
    """

    _storage: CachedPresignStorage

    def __init__(self, storage: BaseStorage, *, redis_client: Any) -> None:
        # Wrap with FilePresignStorage for fallback support, then CachedPresignStorage for caching
        presign_storage = FilePresignStorage(SilentStorage(storage))
        self._storage = CachedPresignStorage(
            storage=presign_storage,
            cache_key_prefix="sandbox_files",
        )

    def save(self, file_path: SandboxFilePath, content: bytes) -> None:
        self._storage.save(file_path.get_storage_key(), content)

    def get_download_url(self, file_path: SandboxFilePath, expires_in: int = 3600) -> str:
        return self._storage.get_download_url(file_path.get_storage_key(), expires_in)

    def get_upload_url(self, file_path: SandboxFilePath, expires_in: int = 3600) -> str:
        return self._storage.get_upload_url(file_path.get_storage_key(), expires_in)


class _LazySandboxFileStorage:
    """Lazy initializer for singleton SandboxFileStorage.

    Delays storage initialization until first access, ensuring Flask app
    context is available.
    """

    _instance: SandboxFileStorage | None

    def __init__(self) -> None:
        self._instance = None

    def _get_instance(self) -> SandboxFileStorage:
        if self._instance is None:
            from extensions.ext_redis import redis_client
            from extensions.ext_storage import storage

            if not hasattr(storage, "storage_runner"):
                raise RuntimeError(
                    "Storage is not initialized; call storage.init_app before using sandbox_file_storage"
                )
            self._instance = SandboxFileStorage(
                storage=storage.storage_runner,
                redis_client=redis_client,
            )
        return self._instance

    def __getattr__(self, name: str):
        return getattr(self._get_instance(), name)


sandbox_file_storage: SandboxFileStorage = _LazySandboxFileStorage()  # type: ignore[assignment]
    @staticmethod
    def export(tenant_id: str, sandbox_id: str, export_id: str, filename: str) -> str:
        """sandbox_files/{tenant}/{sandbox}/{export_id}/{filename}"""
        return f"{_BASE}/{tenant_id}/{sandbox_id}/{export_id}/{filename}"

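A quick check of the export key layout (the IDs are illustrative; SandboxFilePaths.export does no UUID validation, so plain strings pass through):

from core.sandbox.storage import SandboxFilePaths

key = SandboxFilePaths.export("tenant-1", "sbx-1", "exp-42", "report.tar.gz")
assert key == "sandbox_files/tenant-1/sbx-1/exp-42/report.tar.gz"
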
@@ -1,53 +1,33 @@
import logging

from core.app_assets.storage import AssetPath
from core.app_assets.storage import AssetPaths
from core.skill.entities.skill_bundle import SkillBundle
from extensions.ext_redis import redis_client
from services.app_asset_service import AppAssetService

logger = logging.getLogger(__name__)

_CACHE_PREFIX = "skill_bundle"
_CACHE_TTL = 86400  # 24 hours


class SkillManager:
    _CACHE_KEY_PREFIX = "skill_bundle"
    _CACHE_TTL_SECONDS = 60 * 60 * 24

    @staticmethod
    def get_cache_key(
        tenant_id: str,
        app_id: str,
        assets_id: str,
    ) -> str:
        return f"{SkillManager._CACHE_KEY_PREFIX}:{tenant_id}:{app_id}:{assets_id}"

    @staticmethod
    def load_bundle(
        tenant_id: str,
        app_id: str,
        assets_id: str,
    ) -> SkillBundle:
        cache_key = SkillManager.get_cache_key(tenant_id, app_id, assets_id)
    def load_bundle(tenant_id: str, app_id: str, assets_id: str) -> SkillBundle:
        cache_key = f"{_CACHE_PREFIX}:{tenant_id}:{app_id}:{assets_id}"
        data = redis_client.get(cache_key)
        if data:
            return SkillBundle.model_validate_json(data)

        asset_path = AssetPath.skill_bundle(tenant_id, app_id, assets_id)
        data = AppAssetService.get_storage().load(asset_path)
        key = AssetPaths.skill_bundle(tenant_id, app_id, assets_id)
        data = AppAssetService.get_storage().load_once(key)
        bundle = SkillBundle.model_validate_json(data)
        redis_client.setex(cache_key, SkillManager._CACHE_TTL_SECONDS, bundle.model_dump_json(indent=2).encode("utf-8"))
        redis_client.setex(cache_key, _CACHE_TTL, bundle.model_dump_json(indent=2).encode("utf-8"))
        return bundle

    @staticmethod
    def save_bundle(
        tenant_id: str,
        app_id: str,
        assets_id: str,
        bundle: SkillBundle,
    ) -> None:
        asset_path = AssetPath.skill_bundle(tenant_id, app_id, assets_id)
        AppAssetService.get_storage().save(
            asset_path,
            bundle.model_dump_json(indent=2).encode("utf-8"),
        )
        cache_key = SkillManager.get_cache_key(tenant_id, app_id, assets_id)
    def save_bundle(tenant_id: str, app_id: str, assets_id: str, bundle: SkillBundle) -> None:
        key = AssetPaths.skill_bundle(tenant_id, app_id, assets_id)
        AppAssetService.get_storage().save(key, bundle.model_dump_json(indent=2).encode("utf-8"))
        cache_key = f"{_CACHE_PREFIX}:{tenant_id}:{app_id}:{assets_id}"
        redis_client.delete(cache_key)

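load_bundle follows the standard cache-aside shape; a generic, self-contained sketch of the same pattern (helper and argument names are hypothetical):

def cache_aside_load(redis_client, cache_key: str, ttl_seconds: int, load_from_storage):
    """Return the cached value if present; otherwise load it, cache it with a TTL, and return it."""
    data = redis_client.get(cache_key)
    if data:
        return data
    data = load_from_storage()
    redis_client.setex(cache_key, ttl_seconds, data)
    return data
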
@@ -17,8 +17,7 @@ class CachedPresignStorage(StorageWrapper):

    Example:
        cached_storage = CachedPresignStorage(
            storage=FilePresignStorage(SilentStorage(base_storage)),
            redis_client=redis_client,
            storage=FilePresignStorage(base_storage),
            cache_key_prefix="app_asset:draft_download",
        )
        url = cached_storage.get_download_url("path/to/file.txt", expires_in=3600)

@@ -1,60 +0,0 @@
"""Storage wrapper that returns empty values instead of raising on get operations."""

import logging
from collections.abc import Generator
from typing import Any

from extensions.storage.storage_wrapper import StorageWrapper

logger = logging.getLogger(__name__)


class SilentStorage(StorageWrapper):
    """Storage wrapper that silently returns empty values when get operations fail.

    Wraps any storage and catches exceptions on read operations (load_once, load_stream,
    download, exists), returning empty/default values instead of raising.

    Example:
        silent_storage = SilentGetStorage(
            storage=CachedPresignStorage(...),
        )
        content = silent_storage.load_once("path/to/file.txt")  # Returns b"" if not found
    """

    def load_once(self, filename: str) -> bytes:
        """Load file content, returning empty bytes if not found."""
        try:
            return super().load_once(filename)
        except FileNotFoundError:
            logger.debug("File not found: %s", filename)
            return b"File Not Found"

    def load_once_or_none(self, filename: str) -> bytes | None:
        """Load file content, returning None if not found."""
        try:
            return super().load_once(filename)
        except FileNotFoundError:
            logger.debug("File not found: %s", filename)
            return b"File Not Found"

    def load_stream(self, filename: str) -> Generator[bytes, None, None]:
        """Load file as stream, yielding nothing if not found."""
        try:
            yield from super().load_stream(filename)
        except FileNotFoundError:
            logger.debug("File not found: %s", filename)
            yield b"File Not Found"

    def download(self, filename: str, target_filepath: str) -> bool:
        """Download file to target, returning False if not found."""
        try:
            super().download(filename, target_filepath)
            return True
        except FileNotFoundError:
            logger.debug("File not found or download failed: %s", filename)
            return False

    def __getattr__(self, name: str) -> Any:
        """Delegate any other attributes to the wrapped storage."""
        return getattr(self._storage, name)

@ -17,8 +17,8 @@ from core.app.entities.app_asset_entities import AppAssetFileTree
|
|||
from core.app_assets.builder import AssetBuildPipeline, BuildContext
|
||||
from core.app_assets.builder.file_builder import FileBuilder
|
||||
from core.app_assets.builder.skill_builder import SkillBuilder
|
||||
from core.app_assets.entities.assets import AssetItem, FileAsset
|
||||
from core.app_assets.storage import AssetPath
|
||||
from core.app_assets.entities.assets import AssetItem
|
||||
from core.app_assets.storage import AssetPaths
|
||||
from core.zip_sandbox import SandboxDownloadItem, ZipSandbox
|
||||
from models.app_asset import AppAssets
|
||||
from models.model import App
|
||||
|
|
@ -62,12 +62,12 @@ class AppAssetPackageService:
|
|||
"""Convert file tree to asset items for packaging."""
|
||||
files = file_tree.walk_files()
|
||||
return [
|
||||
FileAsset(
|
||||
AssetItem(
|
||||
asset_id=f.id,
|
||||
path=file_tree.get_path(f.id),
|
||||
file_name=f.name,
|
||||
extension=f.extension,
|
||||
storage_key=AssetPath.draft(tenant_id, app_id, f.id).get_storage_key(),
|
||||
storage_key=AssetPaths.draft(tenant_id, app_id, f.id),
|
||||
)
|
||||
for f in files
|
||||
]
|
||||
|
|
@ -98,8 +98,8 @@ class AppAssetPackageService:
|
|||
return
|
||||
|
||||
asset_storage = AppAssetService.get_storage()
|
||||
asset_paths = [AssetPath.draft(tenant_id, app_id, asset.asset_id) for asset in assets]
|
||||
download_urls = asset_storage.get_download_urls(asset_paths)
|
||||
keys = [AssetPaths.draft(tenant_id, app_id, asset.asset_id) for asset in assets]
|
||||
download_urls = asset_storage.get_download_urls(keys)
|
||||
download_items = [
|
||||
SandboxDownloadItem(url=url, path=asset.path) for asset, url in zip(assets, download_urls, strict=True)
|
||||
]
|
||||
|
|
@ -139,8 +139,8 @@ class AppAssetPackageService:
|
|||
ctx = BuildContext(tenant_id=tenant_id, app_id=app_id, build_id=publish_id)
|
||||
built_assets = AssetBuildPipeline([SkillBuilder(storage=asset_storage), FileBuilder()]).build_all(tree, ctx)
|
||||
|
||||
runtime_zip_path = AssetPath.build_zip(tenant_id, app_id, publish_id)
|
||||
runtime_upload_url = asset_storage.get_upload_url(runtime_zip_path)
|
||||
runtime_zip_key = AssetPaths.build_zip(tenant_id, app_id, publish_id)
|
||||
runtime_upload_url = asset_storage.get_upload_url(runtime_zip_key)
|
||||
AppAssetPackageService.package_and_upload(
|
||||
assets=built_assets,
|
||||
upload_url=runtime_upload_url,
|
||||
|
|
@ -150,8 +150,8 @@ class AppAssetPackageService:
|
|||
)
|
||||
|
||||
source_items = AppAssetService.get_draft_assets(tenant_id, app_id)
|
||||
source_zip_path = AssetPath.source_zip(tenant_id, app_id, workflow_id)
|
||||
source_upload_url = asset_storage.get_upload_url(source_zip_path)
|
||||
source_key = AssetPaths.source_zip(tenant_id, app_id, workflow_id)
|
||||
source_upload_url = asset_storage.get_upload_url(source_key)
|
||||
AppAssetPackageService.package_and_upload(
|
||||
assets=source_items,
|
||||
upload_url=source_upload_url,
|
||||
|
|
@ -176,8 +176,8 @@ class AppAssetPackageService:
|
|||
).build_all(tree, ctx)
|
||||
|
||||
user_id = getattr(assets, "updated_by", None) or getattr(assets, "created_by", None) or "system"
|
||||
zip_path = AssetPath.build_zip(tenant_id, app_id, assets.id)
|
||||
upload_url = asset_storage.get_upload_url(zip_path)
|
||||
key = AssetPaths.build_zip(tenant_id, app_id, assets.id)
|
||||
upload_url = asset_storage.get_upload_url(key)
|
||||
AppAssetPackageService.package_and_upload(
|
||||
assets=built_assets,
|
||||
upload_url=upload_url,
|
||||
|
|
|
|||
|
|
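
All of the hunks above make the same substitution: an AssetPath object plus a get_storage_key() call collapses into a plain string key returned by AssetPaths. Condensed, the download flow now reads as below (names taken from the diff; shown out of context as a sketch):

keys = [AssetPaths.draft(tenant_id, app_id, asset.asset_id) for asset in assets]
download_urls = asset_storage.get_download_urls(keys)

# strict=True makes zip() raise if the URL list and the asset list ever diverge.
download_items = [
    SandboxDownloadItem(url=url, path=asset.path)
    for asset, url in zip(assets, download_urls, strict=True)
]
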
@@ -13,11 +13,13 @@ from core.app.entities.app_asset_entities import (
     TreeParentNotFoundError,
     TreePathConflictError,
 )
-from core.app_assets.entities.assets import AssetItem, FileAsset
-from core.app_assets.storage import AppAssetStorage, AssetPath
+from core.app_assets.entities.assets import AssetItem
+from core.app_assets.storage import AssetPaths
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
 from extensions.ext_storage import storage
+from extensions.storage.cached_presign_storage import CachedPresignStorage
+from extensions.storage.file_presign_storage import FilePresignStorage
 from models.app_asset import AppAssets
 from models.model import App

@@ -34,20 +36,17 @@ logger = logging.getLogger(__name__)
 class AppAssetService:
     MAX_PREVIEW_CONTENT_SIZE = 5 * 1024 * 1024  # 5MB
     _LOCK_TIMEOUT_SECONDS = 60
     _DRAFT_CACHE_KEY_PREFIX = "app_asset:draft_download"

     @staticmethod
-    def get_storage() -> AppAssetStorage:
-        """Get a lazily-initialized AppAssetStorage instance.
+    def get_storage() -> CachedPresignStorage:
+        """Get a lazily-initialized storage instance for app assets.

-        This method creates an AppAssetStorage each time it's called,
-        ensuring storage.storage_runner is only accessed after init_app.
-
-        The storage is wrapped with FilePresignStorage for presign fallback support
-        and CachedPresignStorage for URL caching.
+        Returns a CachedPresignStorage wrapping FilePresignStorage,
+        providing presign fallback and URL caching.
         """
-        return AppAssetStorage(
-            storage=storage.storage_runner,
+        return CachedPresignStorage(
+            storage=FilePresignStorage(storage.storage_runner),
+            cache_key_prefix="app_assets",
         )

     @staticmethod
@@ -90,12 +89,12 @@ class AppAssetService:
     def get_draft_asset_items(tenant_id: str, app_id: str, file_tree: AppAssetFileTree) -> list[AssetItem]:
         files = file_tree.walk_files()
         return [
-            FileAsset(
+            AssetItem(
                 asset_id=f.id,
                 path=file_tree.get_path(f.id),
                 file_name=f.name,
                 extension=f.extension,
-                storage_key=AssetPath.draft(tenant_id, app_id, f.id).get_storage_key(),
+                storage_key=AssetPaths.draft(tenant_id, app_id, f.id),
             )
             for f in files
         ]
@@ -218,8 +217,8 @@ class AppAssetService:
             raise AppAssetNodeTooLargeError(f"File node {node_id} size exceeded the limit: {max_size_mb} MB")

         asset_storage = AppAssetService.get_storage()
-        asset_path = AssetPath.draft(app_model.tenant_id, app_model.id, node_id)
-        return asset_storage.load(asset_path)
+        key = AssetPaths.draft(app_model.tenant_id, app_model.id, node_id)
+        return asset_storage.load_once(key)

     @staticmethod
     def update_file_content(
@@ -239,8 +238,8 @@ class AppAssetService:
             raise AppAssetNodeNotFoundError(str(e)) from e

         asset_storage = AppAssetService.get_storage()
-        asset_path = AssetPath.draft(app_model.tenant_id, app_model.id, node_id)
-        asset_storage.save(asset_path, content)
+        key = AssetPaths.draft(app_model.tenant_id, app_model.id, node_id)
+        asset_storage.save(key, content)

         assets.asset_tree = tree
         assets.updated_by = account_id
@@ -340,15 +339,11 @@ class AppAssetService:
         def _delete_file_from_storage(tenant_id: str, app_id: str, node_ids: list[str]) -> None:
             asset_storage = AppAssetService.get_storage()
             for nid in node_ids:
-                asset_path = AssetPath.draft(tenant_id, app_id, nid)
+                key = AssetPaths.draft(tenant_id, app_id, nid)
                 try:
-                    asset_storage.delete(asset_path)
+                    asset_storage.delete(key)
                 except Exception:
-                    logger.warning(
-                        "Failed to delete storage file %s",
-                        asset_path.get_storage_key(),
-                        exc_info=True,
-                    )
+                    logger.warning("Failed to delete storage file %s", key, exc_info=True)

         threading.Thread(
             target=lambda: _delete_file_from_storage(app_model.tenant_id, app_model.id, removed_ids)
@@ -370,17 +365,18 @@ class AppAssetService:
             raise AppAssetNodeNotFoundError(f"File node {node_id} not found")

         asset_storage = AppAssetService.get_storage()
-        asset_path = AssetPath.draft(app_model.tenant_id, app_model.id, node_id)
-        return asset_storage.get_download_url(asset_path, expires_in)
+        key = AssetPaths.draft(app_model.tenant_id, app_model.id, node_id)
+        return asset_storage.get_download_url(key, expires_in)

     @staticmethod
     def get_source_zip_bytes(tenant_id: str, app_id: str, workflow_id: str) -> bytes | None:
         asset_storage = AppAssetService.get_storage()
-        asset_path = AssetPath.source_zip(tenant_id, app_id, workflow_id)
-        source_zip = asset_storage.load_or_none(asset_path)
-        if source_zip is None:
-            logger.warning("Source zip not found: %s", asset_path.get_storage_key())
-        return source_zip
+        key = AssetPaths.source_zip(tenant_id, app_id, workflow_id)
+        try:
+            return asset_storage.load_once(key)
+        except FileNotFoundError:
+            logger.warning("Source zip not found: %s", key)
+            return None

     @staticmethod
     def set_draft_assets(
@@ -434,15 +430,15 @@ class AppAssetService:
         assets.updated_by = account_id
         session.commit()

-        asset_path = AssetPath.draft(app_model.tenant_id, app_model.id, node_id)
+        key = AssetPaths.draft(app_model.tenant_id, app_model.id, node_id)
         asset_storage = AppAssetService.get_storage()

         # Save empty content first to create the file record; this avoids a
         # file-not-found error (and an inconsistent state) if the presigned
         # upload URL is never used.
-        asset_storage.save(asset_path, b"")
+        asset_storage.save(key, b"")

-        upload_url = asset_storage.get_upload_url(asset_path, expires_in)
+        upload_url = asset_storage.get_upload_url(key, expires_in)

         return node, upload_url
@@ -481,8 +477,8 @@ class AppAssetService:

         def fill_urls(node: BatchUploadNode) -> None:
             if node.node_type == AssetNodeType.FILE and node.id:
-                asset_path = AssetPath.draft(app_model.tenant_id, app_model.id, node.id)
-                node.upload_url = asset_storage.get_upload_url(asset_path, expires_in)
+                key = AssetPaths.draft(app_model.tenant_id, app_model.id, node.id)
+                node.upload_url = asset_storage.get_upload_url(key, expires_in)
             for child in node.children:
                 fill_urls(child)
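
The new get_storage() composes two generic wrappers instead of the bespoke AppAssetStorage class. A sketch of what a caller gets back (the helper names and cache prefix come from the diff; the expires_in value is illustrative):

# CachedPresignStorage caches presigned URLs in Redis under the "app_assets"
# prefix; FilePresignStorage falls back to ticket URLs when the backing
# storage cannot presign natively.
asset_storage = CachedPresignStorage(
    storage=FilePresignStorage(storage.storage_runner),
    cache_key_prefix="app_assets",
)
key = AssetPaths.draft(tenant_id, app_id, node_id)
url = asset_storage.get_download_url(key, expires_in=300)  # 300s is illustrative
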
@@ -36,10 +36,11 @@ from core.app.entities.app_bundle_entities import (
     BundleFormatError,
     BundleManifest,
 )
-from core.app_assets.storage import AppAssetStorage, AssetPath, BundleImportZipPath
+from core.app_assets.storage import AssetPaths
 from core.zip_sandbox import SandboxDownloadItem, SandboxUploadItem, ZipSandbox
 from extensions.ext_database import db
 from extensions.ext_redis import redis_client
+from extensions.storage.cached_presign_storage import CachedPresignStorage
 from models.account import Account
 from models.model import App

@@ -108,9 +109,9 @@ class AppBundleService:
         manifest = BundleManifest.from_tree(app_assets.asset_tree, dsl_filename)

         export_id = uuid4().hex
-        export_path = AssetPath.bundle_export_zip(tenant_id, app_id, export_id)
+        export_key = AssetPaths.bundle_export(tenant_id, app_id, export_id)
         asset_storage = AppAssetService.get_storage()
-        upload_url = asset_storage.get_upload_url(export_path, expires_in)
+        upload_url = asset_storage.get_upload_url(export_key, expires_in)

         dsl_content = AppDslService.export_dsl(
             app_model=app_model,
@@ -123,15 +124,15 @@ class AppBundleService:
             zs.write_file(f"bundle_root/{MANIFEST_FILENAME}", manifest.model_dump_json(indent=2).encode("utf-8"))

             if workflow_id is not None:
-                source_zip_path = AssetPath.source_zip(tenant_id, app_id, workflow_id)
-                source_url = asset_storage.get_download_url(source_zip_path, expires_in)
+                source_key = AssetPaths.source_zip(tenant_id, app_id, workflow_id)
+                source_url = asset_storage.get_download_url(source_key, expires_in)
                 zs.download_archive(source_url, path="tmp/source_assets.zip")
                 zs.unzip(archive_path="tmp/source_assets.zip", dest_dir=f"bundle_root/{safe_name}")
             else:
                 asset_items = AppAssetService.get_draft_assets(tenant_id, app_id)
                 if asset_items:
                     asset_urls = asset_storage.get_download_urls(
-                        [AssetPath.draft(tenant_id, app_id, a.asset_id) for a in asset_items], expires_in
+                        [AssetPaths.draft(tenant_id, app_id, a.asset_id) for a in asset_items], expires_in
                     )
                     zs.download_items(
                         [
@@ -144,7 +145,7 @@ class AppBundleService:
             archive = zs.zip(src="bundle_root", include_base=False)
             zs.upload(archive, upload_url)

-        download_url = asset_storage.get_download_url(export_path, expires_in)
+        download_url = asset_storage.get_download_url(export_key, expires_in)
         return BundleExportResult(download_url=download_url, filename=f"{safe_name}.zip")

     # ========== Import ==========
@@ -153,9 +154,9 @@ class AppBundleService:
     def prepare_import(tenant_id: str, account_id: str) -> ImportPrepareResult:
         """Prepare import: generate import_id and upload URL."""
         import_id = uuid4().hex
-        import_path = AssetPath.bundle_import_zip(tenant_id, import_id)
+        import_key = AssetPaths.bundle_import(tenant_id, import_id)
         asset_storage = AppAssetService.get_storage()
-        upload_url = asset_storage.get_import_upload_url(import_path, _IMPORT_TTL_SECONDS)
+        upload_url = asset_storage.get_upload_url(import_key, _IMPORT_TTL_SECONDS)

         redis_client.setex(
             f"{_IMPORT_REDIS_PREFIX}{import_id}",
@@ -188,14 +189,14 @@ class AppBundleService:
         if tenant_id != account.current_tenant_id:
             raise BundleFormatError("Import session tenant mismatch")

-        import_path = AssetPath.bundle_import_zip(tenant_id, import_id)
+        import_key = AssetPaths.bundle_import(tenant_id, import_id)
         asset_storage = AppAssetService.get_storage()

         try:
             result = AppBundleService.import_bundle(
                 tenant_id=tenant_id,
                 account=account,
-                import_path=import_path,
+                import_key=import_key,
                 asset_storage=asset_storage,
                 name=name,
                 description=description,
@@ -205,7 +206,10 @@ class AppBundleService:
             )
         finally:
             redis_client.delete(redis_key)
-            asset_storage.delete_import_zip(import_path)
+            try:
+                asset_storage.delete(import_key)
+            except Exception:  # noqa: S110
+                pass

         return result

@@ -214,8 +218,8 @@ class AppBundleService:
         *,
         tenant_id: str,
         account: Account,
-        import_path: BundleImportZipPath,
-        asset_storage: AppAssetStorage,
+        import_key: str,
+        asset_storage: CachedPresignStorage,
         name: str | None,
         description: str | None,
         icon_type: str | None,
@@ -223,7 +227,7 @@ class AppBundleService:
         icon_background: str | None,
     ) -> Import:
         """Execute import in sandbox."""
-        download_url = asset_storage.get_import_download_url(import_path, _IMPORT_TTL_SECONDS)
+        download_url = asset_storage.get_download_url(import_key, _IMPORT_TTL_SECONDS)

         with ZipSandbox(tenant_id=tenant_id, user_id=account.id, app_id="app-bundle-import") as zs:
             zs.download_archive(download_url, path="import.zip")
@@ -260,8 +264,8 @@ class AppBundleService:

             upload_items: list[SandboxUploadItem] = []
             for file_entry in manifest.files:
-                asset_path = AssetPath.draft(tenant_id, app_id, file_entry.node_id)
-                file_upload_url = asset_storage.get_upload_url(asset_path, _IMPORT_TTL_SECONDS)
+                key = AssetPaths.draft(tenant_id, app_id, file_entry.node_id)
+                file_upload_url = asset_storage.get_upload_url(key, _IMPORT_TTL_SECONDS)
                 src_path = f"{manifest.assets_prefix}/{file_entry.path}"
                 upload_items.append(SandboxUploadItem(path=src_path, url=file_upload_url))
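
Read together, the import hunks give the session roughly this shape (a sketch assembled from the diff; the steps in comments are paraphrased, and error handling for the happy path is elided):

import_id = uuid4().hex
import_key = AssetPaths.bundle_import(tenant_id, import_id)  # app_assets/<tenant>/imports/<id>.zip
upload_url = asset_storage.get_upload_url(import_key, _IMPORT_TTL_SECONDS)
# ... the client uploads the bundle zip to upload_url, then calls the completion endpoint ...
download_url = asset_storage.get_download_url(import_key, _IMPORT_TTL_SECONDS)
# ... the sandbox downloads and unpacks the archive, then uploads draft assets ...
try:
    asset_storage.delete(import_key)  # best-effort cleanup, mirroring the finally block
except Exception:
    pass
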
@@ -2,9 +2,24 @@ from __future__ import annotations

 from core.sandbox.entities.files import SandboxFileDownloadTicket, SandboxFileNode
 from core.sandbox.inspector import SandboxFileBrowser
+from extensions.ext_storage import storage
+from extensions.storage.cached_presign_storage import CachedPresignStorage
+from extensions.storage.file_presign_storage import FilePresignStorage


 class SandboxFileService:
+    @staticmethod
+    def get_storage() -> CachedPresignStorage:
+        """Get a lazily-initialized storage instance for sandbox files.
+
+        Returns a CachedPresignStorage wrapping FilePresignStorage,
+        providing presign fallback and URL caching.
+        """
+        return CachedPresignStorage(
+            storage=FilePresignStorage(storage.storage_runner),
+            cache_key_prefix="sandbox_files",
+        )
+
     @classmethod
     def list_files(
         cls,
@@ -1,11 +1,15 @@
 """Tests for app assets storage layer."""

 from unittest.mock import MagicMock, patch
 from uuid import uuid4

 import pytest

 from configs import dify_config
-from core.app_assets.storage import AppAssetStorage, AssetPath
+from core.app_assets.storage import AssetPaths
 from extensions.storage.base_storage import BaseStorage
+from extensions.storage.cached_presign_storage import CachedPresignStorage
+from extensions.storage.file_presign_storage import FilePresignStorage
+from services.storage_ticket_service import StorageTicket, StorageTicketService

@@ -55,19 +59,86 @@ class DummyRedis:
         return None


-def test_asset_path_validation():
+# --- AssetPaths validation tests ---
+
+
+def test_asset_paths_draft_validation():
     tenant_id = str(uuid4())
     app_id = str(uuid4())
     resource_id = str(uuid4())

-    ref = AssetPath.draft(tenant_id=tenant_id, app_id=app_id, node_id=resource_id)
-    assert "/draft/" in ref.get_storage_key()
+    key = AssetPaths.draft(tenant_id=tenant_id, app_id=app_id, node_id=resource_id)
+    assert "/draft/" in key

     with pytest.raises(ValueError):
-        AssetPath.draft(tenant_id="not-a-uuid", app_id=app_id, node_id=resource_id)
+        AssetPaths.draft(tenant_id="not-a-uuid", app_id=app_id, node_id=resource_id)

     with pytest.raises(ValueError):
-        AssetPath.draft(tenant_id=tenant_id, app_id=app_id, node_id="not-a-uuid")
+        AssetPaths.draft(tenant_id=tenant_id, app_id=app_id, node_id="not-a-uuid")


+def test_asset_paths_resolved_requires_node_id():
+    """Test that AssetPaths.resolved() requires a valid node_id."""
+    tenant_id = str(uuid4())
+    app_id = str(uuid4())
+    assets_id = str(uuid4())
+
+    # Missing node_id should raise
+    with pytest.raises(TypeError):
+        AssetPaths.resolved(tenant_id, app_id, assets_id)  # type: ignore[call-arg]
+
+    # Invalid node_id should raise
+    with pytest.raises(ValueError, match="node_id must be a valid UUID"):
+        AssetPaths.resolved(tenant_id, app_id, assets_id, node_id="not-a-uuid")
+
+
+# --- Storage key format tests (must match existing paths exactly) ---
+
+
+def test_draft_storage_key():
+    tid, aid, nid = str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.draft(tid, aid, nid)
+    assert key == f"app_assets/{tid}/{aid}/draft/{nid}"
+
+
+def test_build_zip_storage_key():
+    tid, aid, assets_id = str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.build_zip(tid, aid, assets_id)
+    assert key == f"app_assets/{tid}/{aid}/artifacts/{assets_id}.zip"
+
+
+def test_resolved_storage_key():
+    tid, aid, assets_id, nid = str(uuid4()), str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.resolved(tid, aid, assets_id, nid)
+    assert key == f"app_assets/{tid}/{aid}/artifacts/{assets_id}/resolved/{nid}"
+
+
+def test_skill_bundle_storage_key():
+    tid, aid, assets_id = str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.skill_bundle(tid, aid, assets_id)
+    assert key == f"app_assets/{tid}/{aid}/artifacts/{assets_id}/skill_artifact_set.json"
+
+
+def test_source_zip_storage_key():
+    tid, aid, workflow_id = str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.source_zip(tid, aid, workflow_id)
+    assert key == f"app_assets/{tid}/{aid}/sources/{workflow_id}.zip"
+
+
+def test_bundle_export_zip_storage_key():
+    tid, aid, export_id = str(uuid4()), str(uuid4()), str(uuid4())
+    key = AssetPaths.bundle_export(tid, aid, export_id)
+    assert key == f"app_assets/{tid}/{aid}/bundle_exports/{export_id}.zip"
+
+
+def test_bundle_import_zip_storage_key():
+    tid = str(uuid4())
+    import_id = "abc123"
+    key = AssetPaths.bundle_import(tid, import_id)
+    assert key == f"app_assets/{tid}/imports/{import_id}.zip"
+
+
+# --- Storage ticket service tests ---
+
+
 def test_storage_ticket_service(monkeypatch: pytest.MonkeyPatch):
@@ -87,27 +158,22 @@ def test_storage_ticket_service(monkeypatch: pytest.MonkeyPatch):
     mock_redis.get = mock_get

     with patch("services.storage_ticket_service.redis_client", mock_redis):
-        # Test download URL creation
         url = StorageTicketService.create_download_url("test/path/file.txt", expires_in=300, filename="file.txt")
-
         assert url.startswith("http://files.local/files/storage-files/")
         token = url.split("/")[-1]

-        # Verify ticket was stored
         ticket = StorageTicketService.get_ticket(token)
         assert ticket is not None
         assert ticket.op == "download"
         assert ticket.storage_key == "test/path/file.txt"
         assert ticket.filename == "file.txt"

-        # Test upload URL creation
         upload_url = StorageTicketService.create_upload_url("test/upload.txt", expires_in=300, max_bytes=1024)
-
         upload_token = upload_url.split("/")[-1]
         upload_ticket = StorageTicketService.get_ticket(upload_token)
         assert upload_ticket is not None
         assert upload_ticket.op == "upload"
         assert upload_ticket.storage_key == "test/upload.txt"
         assert upload_ticket.max_bytes == 1024

@@ -122,47 +188,56 @@ def test_storage_ticket_not_found(monkeypatch: pytest.MonkeyPatch):


 def test_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
-    """Test that AppAssetStorage generates correct ticket URLs when presign is not supported."""
+    """Test that CachedPresignStorage generates correct ticket URLs when presign is not supported."""
     tenant_id = str(uuid4())
     app_id = str(uuid4())
     resource_id = str(uuid4())
-    asset_path = AssetPath.draft(tenant_id, app_id, resource_id)
+    key = AssetPaths.draft(tenant_id, app_id, resource_id)

     monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)

     mock_redis = MagicMock()
     mock_redis.setex = MagicMock()
+    mock_redis.mget = MagicMock(return_value=[None])

-    with patch("services.storage_ticket_service.redis_client", mock_redis):
-        storage = AppAssetStorage(DummyStorage(), redis_client=DummyRedis())
-        url = storage.get_download_url(asset_path, expires_in=120)
+    with (
+        patch("services.storage_ticket_service.redis_client", mock_redis),
+        patch("extensions.storage.cached_presign_storage.redis_client", mock_redis),
+    ):
+        storage = CachedPresignStorage(
+            storage=FilePresignStorage(DummyStorage()),
+            cache_key_prefix="app_assets",
+        )
+        url = storage.get_download_url(key, expires_in=120)

     # URL should be a ticket URL since DummyStorage doesn't support presign
     assert url.startswith("http://files.local/files/storage-files/")
     # Token should be a UUID
     token = url.split("/")[-1]
     assert len(token) == 36  # UUID format


 def test_upload_ticket_url_generation(monkeypatch: pytest.MonkeyPatch):
-    """Test that AppAssetStorage generates correct upload ticket URLs."""
+    """Test that CachedPresignStorage generates correct upload ticket URLs."""
     tenant_id = str(uuid4())
     app_id = str(uuid4())
     resource_id = str(uuid4())
-    asset_path = AssetPath.draft(tenant_id, app_id, resource_id)
+    key = AssetPaths.draft(tenant_id, app_id, resource_id)

     monkeypatch.setattr(dify_config, "FILES_URL", "http://files.local", raising=False)

     mock_redis = MagicMock()
     mock_redis.setex = MagicMock()

-    with patch("services.storage_ticket_service.redis_client", mock_redis):
-        storage = AppAssetStorage(DummyStorage(), redis_client=DummyRedis())
-        url = storage.get_upload_url(asset_path, expires_in=120)
+    with (
+        patch("services.storage_ticket_service.redis_client", mock_redis),
+        patch("extensions.storage.cached_presign_storage.redis_client", mock_redis),
+    ):
+        storage = CachedPresignStorage(
+            storage=FilePresignStorage(DummyStorage()),
+            cache_key_prefix="app_assets",
+        )
+        url = storage.get_upload_url(key, expires_in=120)

     # URL should be a ticket URL since DummyStorage doesn't support presign
     assert url.startswith("http://files.local/files/storage-files/")
     # Token should be a UUID
     token = url.split("/")[-1]
     assert len(token) == 36  # UUID format

@@ -183,7 +258,6 @@ def test_storage_ticket_pydantic():
         "max_bytes": None,
     }

-    # Test JSON serialization
     json_str = ticket.model_dump_json()
     restored = StorageTicket.model_validate_json(json_str)
     assert restored.op == ticket.op
@@ -191,7 +265,6 @@ def test_storage_ticket_pydantic():
     assert restored.filename == ticket.filename
     assert restored.max_bytes is None

-    # Test upload ticket with max_bytes
     upload_ticket = StorageTicket(
         op="upload",
         storage_key="path/to/upload.txt",
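
Since the format tests pin the key layout exactly, the AssetPaths helper they exercise can be reconstructed as the sketch below. This is a hypothetical reading of the tests, not the committed implementation; the real module may validate more fields.

# Hypothetical AssetPaths reconstructed from the assertions above.
from uuid import UUID


def _require_uuid(value: str, field: str) -> None:
    """Raise ValueError if `value` is not a valid UUID string."""
    try:
        UUID(value)
    except ValueError:
        raise ValueError(f"{field} must be a valid UUID") from None


class AssetPaths:
    @staticmethod
    def draft(tenant_id: str, app_id: str, node_id: str) -> str:
        # The draft validation test expects ValueError on any malformed id.
        for field, value in (("tenant_id", tenant_id), ("app_id", app_id), ("node_id", node_id)):
            _require_uuid(value, field)
        return f"app_assets/{tenant_id}/{app_id}/draft/{node_id}"

    @staticmethod
    def build_zip(tenant_id: str, app_id: str, assets_id: str) -> str:
        return f"app_assets/{tenant_id}/{app_id}/artifacts/{assets_id}.zip"

    @staticmethod
    def resolved(tenant_id: str, app_id: str, assets_id: str, node_id: str) -> str:
        # node_id is a required positional parameter, so omitting it raises
        # TypeError, matching test_asset_paths_resolved_requires_node_id.
        _require_uuid(node_id, "node_id")
        return f"app_assets/{tenant_id}/{app_id}/artifacts/{assets_id}/resolved/{node_id}"

    @staticmethod
    def skill_bundle(tenant_id: str, app_id: str, assets_id: str) -> str:
        return f"app_assets/{tenant_id}/{app_id}/artifacts/{assets_id}/skill_artifact_set.json"

    @staticmethod
    def source_zip(tenant_id: str, app_id: str, workflow_id: str) -> str:
        return f"app_assets/{tenant_id}/{app_id}/sources/{workflow_id}.zip"

    @staticmethod
    def bundle_export(tenant_id: str, app_id: str, export_id: str) -> str:
        return f"app_assets/{tenant_id}/{app_id}/bundle_exports/{export_id}.zip"

    @staticmethod
    def bundle_import(tenant_id: str, import_id: str) -> str:
        # import_id is not required to be a UUID (the test uses "abc123").
        return f"app_assets/{tenant_id}/imports/{import_id}.zip"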