diff --git a/.gemini/config.yaml b/.gemini/config.yaml new file mode 100644 index 0000000000..15c697730a --- /dev/null +++ b/.gemini/config.yaml @@ -0,0 +1,13 @@ +have_fun: false +memory_config: + disabled: false +code_review: + disable: true + comment_severity_threshold: MEDIUM + max_review_comments: -1 + pull_request_opened: + help: false + summary: false + code_review: false + include_drafts: false +ignore_patterns: [] diff --git a/.github/actions/setup-web/action.yml b/.github/actions/setup-web/action.yml index 6f3b3c08b4..24af948732 100644 --- a/.github/actions/setup-web/action.yml +++ b/.github/actions/setup-web/action.yml @@ -4,10 +4,9 @@ runs: using: composite steps: - name: Setup Vite+ - uses: voidzero-dev/setup-vp@4a524139920f87f9f7080d3b8545acac019e1852 # v1.0.0 + uses: voidzero-dev/setup-vp@20553a7a7429c429a74894104a2835d7fed28a72 # v1.3.0 with: - node-version-file: web/.nvmrc + working-directory: web + node-version-file: .nvmrc cache: true - cache-dependency-path: web/pnpm-lock.yaml - run-install: | - cwd: ./web + run-install: true diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index 657a481f74..23ae36f7b1 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -84,20 +84,20 @@ jobs: if: steps.changed-files.outputs.any_changed == 'true' uses: ./.github/actions/setup-web + - name: Restore ESLint cache + if: steps.changed-files.outputs.any_changed == 'true' + id: eslint-cache-restore + uses: actions/cache/restore@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + with: + path: web/.eslintcache + key: ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}-${{ github.sha }} + restore-keys: | + ${{ runner.os }}-web-eslint-${{ hashFiles('web/package.json', 'web/pnpm-lock.yaml', 'web/eslint.config.mjs', 'web/eslint.constants.mjs', 'web/plugins/eslint/**') }}- + - name: Web style check if: steps.changed-files.outputs.any_changed == 'true' working-directory: ./web - run: | - vp run lint:ci - # pnpm run lint:report - # continue-on-error: true - - # - name: Annotate Code - # if: steps.changed-files.outputs.any_changed == 'true' && github.event_name == 'pull_request' - # uses: DerLev/eslint-annotations@51347b3a0abfb503fc8734d5ae31c4b151297fae - # with: - # eslint-report: web/eslint_report.json - # github-token: ${{ secrets.GITHUB_TOKEN }} + run: vp run lint:ci - name: Web tsslint if: steps.changed-files.outputs.any_changed == 'true' @@ -114,6 +114,13 @@ jobs: working-directory: ./web run: vp run knip + - name: Save ESLint cache + if: steps.changed-files.outputs.any_changed == 'true' && success() && steps.eslint-cache-restore.outputs.cache-hit != 'true' + uses: actions/cache/save@668228422ae6a00e4ad889ee87cd7109ec5666a7 # v5.0.4 + with: + path: web/.eslintcache + key: ${{ steps.eslint-cache-restore.outputs.cache-primary-key }} + superlinter: name: SuperLinter runs-on: ubuntu-latest diff --git a/.github/workflows/translate-i18n-claude.yml b/.github/workflows/translate-i18n-claude.yml index 84f8000a01..1869254295 100644 --- a/.github/workflows/translate-i18n-claude.yml +++ b/.github/workflows/translate-i18n-claude.yml @@ -120,7 +120,7 @@ jobs: - name: Run Claude Code for Translation Sync if: steps.detect_changes.outputs.CHANGED_FILES != '' - uses: anthropics/claude-code-action@6062f3709600659be5e47fcddf2cf76993c235c2 # v1.0.76 + uses: anthropics/claude-code-action@ff9acae5886d41a99ed4ec14b7dc147d55834722 # v1.0.77 with: 
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }} github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/api/.env.example b/api/.env.example index 40e1c2dfdf..9672a99d55 100644 --- a/api/.env.example +++ b/api/.env.example @@ -353,6 +353,9 @@ BAIDU_VECTOR_DB_SHARD=1 BAIDU_VECTOR_DB_REPLICAS=3 BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE +BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500 +BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05 +BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300 # Upstash configuration UPSTASH_VECTOR_URL=your-server-url diff --git a/api/commands/vector.py b/api/commands/vector.py index 4cf11c9ad1..bef18bf73b 100644 --- a/api/commands/vector.py +++ b/api/commands/vector.py @@ -10,6 +10,7 @@ from configs import dify_config from core.rag.datasource.vdb.vector_factory import Vector from core.rag.datasource.vdb.vector_type import VectorType from core.rag.index_processor.constant.built_in_field import BuiltInField +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.models.document import ChildDocument, Document from extensions.ext_database import db from models.dataset import Dataset, DatasetCollectionBinding, DatasetMetadata, DatasetMetadataBinding, DocumentSegment @@ -269,7 +270,7 @@ def migrate_knowledge_vector_database(): "dataset_id": segment.dataset_id, }, ) - if dataset_document.doc_form == "hierarchical_model": + if dataset_document.doc_form == IndexStructureType.PARENT_CHILD_INDEX: child_chunks = segment.get_child_chunks() if child_chunks: child_documents = [] diff --git a/api/configs/middleware/vdb/baidu_vector_config.py b/api/configs/middleware/vdb/baidu_vector_config.py index 8f956745b1..c8e4f7309f 100644 --- a/api/configs/middleware/vdb/baidu_vector_config.py +++ b/api/configs/middleware/vdb/baidu_vector_config.py @@ -51,3 +51,18 @@ class BaiduVectorDBConfig(BaseSettings): description="Parser mode for inverted index in Baidu Vector Database (default is COARSE_MODE)", default="COARSE_MODE", ) + + BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT: int = Field( + description="Auto build row count increment threshold (default is 500)", + default=500, + ) + + BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO: float = Field( + description="Auto build row count increment ratio threshold (default is 0.05)", + default=0.05, + ) + + BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS: int = Field( + description="Timeout in seconds for rebuilding the index in Baidu Vector Database (default is 300 seconds)", + default=300, + ) diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py index 6c54be84a8..783cb5c444 100644 --- a/api/controllers/console/apikey.py +++ b/api/controllers/console/apikey.py @@ -9,6 +9,7 @@ from extensions.ext_database import db from libs.helper import TimestampField from libs.login import current_account_with_tenant, login_required from models.dataset import Dataset +from models.enums import ApiTokenType from models.model import ApiToken, App from services.api_token_service import ApiTokenCache @@ -47,7 +48,7 @@ def _get_resource(resource_id, tenant_id, resource_model): class BaseApiKeyListResource(Resource): method_decorators = [account_initialization_required, login_required, setup_required] - resource_type: str | None = None + resource_type: ApiTokenType | None = None resource_model: type | None = None resource_id_field: str | None = None token_prefix: str | None = None @@ -91,6 +92,7 @@ class 
BaseApiKeyListResource(Resource): ) key = ApiToken.generate_api_key(self.token_prefix or "", 24) + assert self.resource_type is not None, "resource_type must be set" api_token = ApiToken() setattr(api_token, self.resource_id_field, resource_id) api_token.tenant_id = current_tenant_id @@ -104,7 +106,7 @@ class BaseApiKeyListResource(Resource): class BaseApiKeyResource(Resource): method_decorators = [account_initialization_required, login_required, setup_required] - resource_type: str | None = None + resource_type: ApiTokenType | None = None resource_model: type | None = None resource_id_field: str | None = None @@ -159,7 +161,7 @@ class AppApiKeyListResource(BaseApiKeyListResource): """Create a new API key for an app""" return super().post(resource_id) - resource_type = "app" + resource_type = ApiTokenType.APP resource_model = App resource_id_field = "app_id" token_prefix = "app-" @@ -175,7 +177,7 @@ class AppApiKeyResource(BaseApiKeyResource): """Delete an API key for an app""" return super().delete(resource_id, api_key_id) - resource_type = "app" + resource_type = ApiTokenType.APP resource_model = App resource_id_field = "app_id" @@ -199,7 +201,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource): """Create a new API key for a dataset""" return super().post(resource_id) - resource_type = "dataset" + resource_type = ApiTokenType.DATASET resource_model = Dataset resource_id_field = "dataset_id" token_prefix = "ds-" @@ -215,6 +217,6 @@ class DatasetApiKeyResource(BaseApiKeyResource): """Delete an API key for a dataset""" return super().delete(resource_id, api_key_id) - resource_type = "dataset" + resource_type = ApiTokenType.DATASET resource_model = Dataset resource_id_field = "dataset_id" diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py index 5ac0e342e6..7e41260eeb 100644 --- a/api/controllers/console/app/app.py +++ b/api/controllers/console/app/app.py @@ -95,7 +95,7 @@ class CreateAppPayload(BaseModel): name: str = Field(..., min_length=1, description="App name") description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400) mode: Literal["chat", "agent-chat", "advanced-chat", "workflow", "completion"] = Field(..., description="App mode") - icon_type: str | None = Field(default=None, description="Icon type") + icon_type: IconType | None = Field(default=None, description="Icon type") icon: str | None = Field(default=None, description="Icon") icon_background: str | None = Field(default=None, description="Icon background color") @@ -103,7 +103,7 @@ class CreateAppPayload(BaseModel): class UpdateAppPayload(BaseModel): name: str = Field(..., min_length=1, description="App name") description: str | None = Field(default=None, description="App description (max 400 chars)", max_length=400) - icon_type: str | None = Field(default=None, description="Icon type") + icon_type: IconType | None = Field(default=None, description="Icon type") icon: str | None = Field(default=None, description="Icon") icon_background: str | None = Field(default=None, description="Icon background color") use_icon_as_answer_icon: bool | None = Field(default=None, description="Use icon as answer icon") @@ -113,7 +113,7 @@ class UpdateAppPayload(BaseModel): class CopyAppPayload(BaseModel): name: str | None = Field(default=None, description="Name for the copied app") description: str | None = Field(default=None, description="Description for the copied app", max_length=400) - icon_type: str | None = Field(default=None, description="Icon 
type") + icon_type: IconType | None = Field(default=None, description="Icon type") icon: str | None = Field(default=None, description="Icon") icon_background: str | None = Field(default=None, description="Icon background color") @@ -594,7 +594,7 @@ class AppApi(Resource): args_dict: AppService.ArgsDict = { "name": args.name, "description": args.description or "", - "icon_type": args.icon_type or "", + "icon_type": args.icon_type, "icon": args.icon or "", "icon_background": args.icon_background or "", "use_icon_as_answer_icon": args.use_icon_as_answer_icon or False, diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index 74750981dd..d329d22309 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -458,9 +458,7 @@ class ChatConversationApi(Resource): args = ChatConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore subquery = ( - db.session.query( - Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id") - ) + sa.select(Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id")) .outerjoin(EndUser, Conversation.from_end_user_id == EndUser.id) .subquery() ) @@ -595,10 +593,8 @@ class ChatConversationDetailApi(Resource): def _get_conversation(app_model, conversation_id): current_user, _ = current_account_with_tenant() - conversation = ( - db.session.query(Conversation) - .where(Conversation.id == conversation_id, Conversation.app_id == app_model.id) - .first() + conversation = db.session.scalar( + sa.select(Conversation).where(Conversation.id == conversation_id, Conversation.app_id == app_model.id).limit(1) ) if not conversation: diff --git a/api/controllers/console/app/generator.py b/api/controllers/console/app/generator.py index af4ac450bb..442d0d2324 100644 --- a/api/controllers/console/app/generator.py +++ b/api/controllers/console/app/generator.py @@ -168,7 +168,7 @@ class InstructionGenerateApi(Resource): try: # Generate from nothing for a workflow node if (args.current in (code_template, "")) and args.node_id != "": - app = db.session.query(App).where(App.id == args.flow_id).first() + app = db.session.get(App, args.flow_id) if not app: return {"error": f"app {args.flow_id} not found"}, 400 workflow = WorkflowService().get_draft_workflow(app_model=app) diff --git a/api/controllers/console/app/mcp_server.py b/api/controllers/console/app/mcp_server.py index 4b20418b53..412fc8795a 100644 --- a/api/controllers/console/app/mcp_server.py +++ b/api/controllers/console/app/mcp_server.py @@ -2,6 +2,7 @@ import json from flask_restx import Resource, marshal_with from pydantic import BaseModel, Field +from sqlalchemy import select from werkzeug.exceptions import NotFound from controllers.console import console_ns @@ -47,7 +48,7 @@ class AppMCPServerController(Resource): @get_app_model @marshal_with(app_server_model) def get(self, app_model): - server = db.session.query(AppMCPServer).where(AppMCPServer.app_id == app_model.id).first() + server = db.session.scalar(select(AppMCPServer).where(AppMCPServer.app_id == app_model.id).limit(1)) return server @console_ns.doc("create_app_mcp_server") @@ -98,7 +99,7 @@ class AppMCPServerController(Resource): @edit_permission_required def put(self, app_model): payload = MCPServerUpdatePayload.model_validate(console_ns.payload or {}) - server = db.session.query(AppMCPServer).where(AppMCPServer.id == payload.id).first() + server = db.session.get(AppMCPServer, 
payload.id) if not server: raise NotFound() @@ -135,11 +136,10 @@ class AppMCPServerRefreshController(Resource): @edit_permission_required def get(self, server_id): _, current_tenant_id = current_account_with_tenant() - server = ( - db.session.query(AppMCPServer) - .where(AppMCPServer.id == server_id) - .where(AppMCPServer.tenant_id == current_tenant_id) - .first() + server = db.session.scalar( + select(AppMCPServer) + .where(AppMCPServer.id == server_id, AppMCPServer.tenant_id == current_tenant_id) + .limit(1) ) if not server: raise NotFound() diff --git a/api/controllers/console/app/message.py b/api/controllers/console/app/message.py index 4fb73f61f3..736e7dbe17 100644 --- a/api/controllers/console/app/message.py +++ b/api/controllers/console/app/message.py @@ -4,7 +4,7 @@ from typing import Literal from flask import request from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field, field_validator -from sqlalchemy import exists, select +from sqlalchemy import exists, func, select from werkzeug.exceptions import InternalServerError, NotFound from controllers.common.schema import register_schema_models @@ -244,27 +244,25 @@ class ChatMessageListApi(Resource): def get(self, app_model): args = ChatMessagesQuery.model_validate(request.args.to_dict()) - conversation = ( - db.session.query(Conversation) + conversation = db.session.scalar( + select(Conversation) .where(Conversation.id == args.conversation_id, Conversation.app_id == app_model.id) - .first() + .limit(1) ) if not conversation: raise NotFound("Conversation Not Exists.") if args.first_id: - first_message = ( - db.session.query(Message) - .where(Message.conversation_id == conversation.id, Message.id == args.first_id) - .first() + first_message = db.session.scalar( + select(Message).where(Message.conversation_id == conversation.id, Message.id == args.first_id).limit(1) ) if not first_message: raise NotFound("First message not found") - history_messages = ( - db.session.query(Message) + history_messages = db.session.scalars( + select(Message) .where( Message.conversation_id == conversation.id, Message.created_at < first_message.created_at, @@ -272,16 +270,14 @@ class ChatMessageListApi(Resource): ) .order_by(Message.created_at.desc()) .limit(args.limit) - .all() - ) + ).all() else: - history_messages = ( - db.session.query(Message) + history_messages = db.session.scalars( + select(Message) .where(Message.conversation_id == conversation.id) .order_by(Message.created_at.desc()) .limit(args.limit) - .all() - ) + ).all() # Initialize has_more based on whether we have a full page if len(history_messages) == args.limit: @@ -326,7 +322,9 @@ class MessageFeedbackApi(Resource): message_id = str(args.message_id) - message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first() + message = db.session.scalar( + select(Message).where(Message.id == message_id, Message.app_id == app_model.id).limit(1) + ) if not message: raise NotFound("Message Not Exists.") @@ -375,7 +373,9 @@ class MessageAnnotationCountApi(Resource): @login_required @account_initialization_required def get(self, app_model): - count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count() + count = db.session.scalar( + select(func.count(MessageAnnotation.id)).where(MessageAnnotation.app_id == app_model.id) + ) return {"count": count} @@ -479,7 +479,9 @@ class MessageApi(Resource): def get(self, app_model, message_id: str): message_id = str(message_id) - message = 
db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first() + message = db.session.scalar( + select(Message).where(Message.id == message_id, Message.app_id == app_model.id).limit(1) + ) if not message: raise NotFound("Message Not Exists.") diff --git a/api/controllers/console/app/model_config.py b/api/controllers/console/app/model_config.py index a85e54fb51..e9bd30ba7e 100644 --- a/api/controllers/console/app/model_config.py +++ b/api/controllers/console/app/model_config.py @@ -69,9 +69,7 @@ class ModelConfigResource(Resource): if app_model.mode == AppMode.AGENT_CHAT or app_model.is_agent: # get original app model config - original_app_model_config = ( - db.session.query(AppModelConfig).where(AppModelConfig.id == app_model.app_model_config_id).first() - ) + original_app_model_config = db.session.get(AppModelConfig, app_model.app_model_config_id) if original_app_model_config is None: raise ValueError("Original app model config not found") agent_mode = original_app_model_config.agent_mode_dict diff --git a/api/controllers/console/app/site.py b/api/controllers/console/app/site.py index db218d8b81..7f44a99ff1 100644 --- a/api/controllers/console/app/site.py +++ b/api/controllers/console/app/site.py @@ -2,6 +2,7 @@ from typing import Literal from flask_restx import Resource, marshal_with from pydantic import BaseModel, Field, field_validator +from sqlalchemy import select from werkzeug.exceptions import NotFound from constants.languages import supported_language @@ -75,7 +76,7 @@ class AppSite(Resource): def post(self, app_model): args = AppSiteUpdatePayload.model_validate(console_ns.payload or {}) current_user, _ = current_account_with_tenant() - site = db.session.query(Site).where(Site.app_id == app_model.id).first() + site = db.session.scalar(select(Site).where(Site.app_id == app_model.id).limit(1)) if not site: raise NotFound @@ -124,7 +125,7 @@ class AppSiteAccessTokenReset(Resource): @marshal_with(app_site_model) def post(self, app_model): current_user, _ = current_account_with_tenant() - site = db.session.query(Site).where(Site.app_id == app_model.id).first() + site = db.session.scalar(select(Site).where(Site.app_id == app_model.id).limit(1)) if not site: raise NotFound diff --git a/api/controllers/console/app/wraps.py b/api/controllers/console/app/wraps.py index e687d980fa..493022ffea 100644 --- a/api/controllers/console/app/wraps.py +++ b/api/controllers/console/app/wraps.py @@ -2,6 +2,8 @@ from collections.abc import Callable from functools import wraps from typing import ParamSpec, TypeVar, Union +from sqlalchemy import select + from controllers.console.app.error import AppNotFoundError from extensions.ext_database import db from libs.login import current_account_with_tenant @@ -15,16 +17,14 @@ R1 = TypeVar("R1") def _load_app_model(app_id: str) -> App | None: _, current_tenant_id = current_account_with_tenant() - app_model = ( - db.session.query(App) - .where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal") - .first() + app_model = db.session.scalar( + select(App).where(App.id == app_id, App.tenant_id == current_tenant_id, App.status == "normal").limit(1) ) return app_model def _load_app_model_with_trial(app_id: str) -> App | None: - app_model = db.session.query(App).where(App.id == app_id, App.status == "normal").first() + app_model = db.session.scalar(select(App).where(App.id == app_id, App.status == "normal").limit(1)) return app_model diff --git a/api/controllers/console/auth/oauth.py 
b/api/controllers/console/auth/oauth.py index 112e152432..5c9023f27b 100644 --- a/api/controllers/console/auth/oauth.py +++ b/api/controllers/console/auth/oauth.py @@ -1,4 +1,5 @@ import logging +import urllib.parse import httpx from flask import current_app, redirect, request @@ -112,6 +113,9 @@ class OAuthCallback(Resource): error_text = e.response.text logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text) return {"error": "OAuth process failed"}, 400 + except ValueError as e: + logger.warning("OAuth error with %s", provider, exc_info=True) + return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={urllib.parse.quote(str(e))}") if invite_token and RegisterService.is_valid_invite_token(invite_token): invitation = RegisterService.get_invitation_by_token(token=invite_token) diff --git a/api/controllers/console/datasets/datasets.py b/api/controllers/console/datasets/datasets.py index 725a8380cd..fb98932269 100644 --- a/api/controllers/console/datasets/datasets.py +++ b/api/controllers/console/datasets/datasets.py @@ -54,7 +54,7 @@ from fields.document_fields import document_status_fields from libs.login import current_account_with_tenant, login_required from models import ApiToken, Dataset, Document, DocumentSegment, UploadFile from models.dataset import DatasetPermission, DatasetPermissionEnum -from models.enums import SegmentStatus +from models.enums import ApiTokenType, SegmentStatus from models.provider_ids import ModelProviderID from services.api_token_service import ApiTokenCache from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService @@ -777,7 +777,7 @@ class DatasetIndexingStatusApi(Resource): class DatasetApiKeyApi(Resource): max_keys = 10 token_prefix = "dataset-" - resource_type = "dataset" + resource_type = ApiTokenType.DATASET @console_ns.doc("get_dataset_api_keys") @console_ns.doc(description="Get dataset API keys") @@ -826,7 +826,7 @@ class DatasetApiKeyApi(Resource): @console_ns.route("/datasets/api-keys/") class DatasetApiDeleteApi(Resource): - resource_type = "dataset" + resource_type = ApiTokenType.DATASET @console_ns.doc("delete_dataset_api_key") @console_ns.doc(description="Delete dataset API key") diff --git a/api/core/app/apps/workflow/generate_task_pipeline.py b/api/core/app/apps/workflow/generate_task_pipeline.py index 96dd8c5445..bd6e2a0302 100644 --- a/api/core/app/apps/workflow/generate_task_pipeline.py +++ b/api/core/app/apps/workflow/generate_task_pipeline.py @@ -705,7 +705,7 @@ class WorkflowAppGenerateTaskPipeline(GraphRuntimeStateSupport): app_id=self._application_generate_entity.app_config.app_id, workflow_id=self._workflow.id, workflow_run_id=workflow_run_id, - created_from=created_from.value, + created_from=created_from, created_by_role=self._created_by_role, created_by=self._user_id, ) diff --git a/api/core/app/features/rate_limiting/rate_limit.py b/api/core/app/features/rate_limiting/rate_limit.py index 2ca1275a8a..e0f1759e5e 100644 --- a/api/core/app/features/rate_limiting/rate_limit.py +++ b/api/core/app/features/rate_limiting/rate_limit.py @@ -19,6 +19,7 @@ class RateLimit: _REQUEST_MAX_ALIVE_TIME = 10 * 60 # 10 minutes _ACTIVE_REQUESTS_COUNT_FLUSH_INTERVAL = 5 * 60 # recalculate request_count from request_detail every 5 minutes _instance_dict: dict[str, "RateLimit"] = {} + max_active_requests: int def __new__(cls, client_id: str, max_active_requests: int): if client_id not in cls._instance_dict: @@ -27,7 +28,13 @@ class RateLimit: return 
cls._instance_dict[client_id] def __init__(self, client_id: str, max_active_requests: int): + flush_cache = hasattr(self, "max_active_requests") and self.max_active_requests != max_active_requests self.max_active_requests = max_active_requests + # Only flush here if this instance has already been fully initialized, + # i.e. the Redis key attributes exist. Otherwise, rely on the flush at + # the end of initialization below. + if flush_cache and hasattr(self, "active_requests_key") and hasattr(self, "max_active_requests_key"): + self.flush_cache(use_local_value=True) # must be called after max_active_requests is set if self.disabled(): return @@ -41,8 +48,6 @@ class RateLimit: self.flush_cache(use_local_value=True) def flush_cache(self, use_local_value=False): - if self.disabled(): - return self.last_recalculate_time = time.time() # flush max active requests if use_local_value or not redis_client.exists(self.max_active_requests_key): @@ -50,7 +55,8 @@ class RateLimit: else: self.max_active_requests = int(redis_client.get(self.max_active_requests_key).decode("utf-8")) redis_client.expire(self.max_active_requests_key, timedelta(days=1)) - + if self.disabled(): + return # flush max active requests (in-transit request list) if not redis_client.exists(self.active_requests_key): return diff --git a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py index 7cb54b2c88..f54461e99a 100644 --- a/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py +++ b/api/core/ops/arize_phoenix_trace/arize_phoenix_trace.py @@ -181,10 +181,6 @@ class ArizePhoenixDataTrace(BaseTraceInstance): arize_phoenix_config: ArizeConfig | PhoenixConfig, ): super().__init__(arize_phoenix_config) - import logging - - logging.basicConfig() - logging.getLogger().setLevel(logging.DEBUG) self.arize_phoenix_config = arize_phoenix_config self.tracer, self.processor = setup_tracer(arize_phoenix_config) self.project = arize_phoenix_config.project diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py index 3c3fbd6dd2..6d2be0ab7a 100644 --- a/api/core/provider_manager.py +++ b/api/core/provider_manager.py @@ -918,11 +918,11 @@ class ProviderManager: trail_pool = CreditPoolService.get_pool( tenant_id=tenant_id, - pool_type=ProviderQuotaType.TRIAL.value, + pool_type=ProviderQuotaType.TRIAL, ) paid_pool = CreditPoolService.get_pool( tenant_id=tenant_id, - pool_type=ProviderQuotaType.PAID.value, + pool_type=ProviderQuotaType.PAID, ) else: trail_pool = None diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py index 144d834495..9f5842e449 100644 --- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py +++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py @@ -13,6 +13,7 @@ from pymochow.exception import ServerError # type: ignore from pymochow.model.database import Database from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, ServerErrCode, TableState # type: ignore from pymochow.model.schema import ( + AutoBuildRowCountIncrement, Field, FilteringIndex, HNSWParams, @@ -51,6 +52,9 @@ class BaiduConfig(BaseModel): replicas: int = 3 inverted_index_analyzer: str = "DEFAULT_ANALYZER" inverted_index_parser_mode: str = "COARSE_MODE" + auto_build_row_count_increment: int = 500 + auto_build_row_count_increment_ratio: float = 0.05 + rebuild_index_timeout_in_seconds: int = 300 @model_validator(mode="before") @classmethod @@ -107,18 +111,6 @@ class BaiduVector(BaseVector): rows.append(row) 
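# --- Illustrative sketch (not part of the patch) -----------------------------------
# Why: the new auto_build_row_count_increment / auto_build_row_count_increment_ratio
# fields on BaiduConfig above exist so that the Mochow server rebuilds the vector index
# on its own once enough rows accumulate, which is why the manual rebuild_index polling
# loop after every upsert is dropped below. The index name, field and metric here are
# illustrative assumptions; only the auto_build arguments mirror the index-creation
# hunk further down.
from pymochow.model.enum import IndexType, MetricType
from pymochow.model.schema import AutoBuildRowCountIncrement, HNSWParams, VectorIndex

auto_build_policy = AutoBuildRowCountIncrement(
    row_count_increment=500,         # BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT
    row_count_increment_ratio=0.05,  # BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO
)
vector_index = VectorIndex(
    index_name="vector_idx",                      # illustrative; the real code uses self.vector_index
    index_type=IndexType.HNSW,
    field="vector",                               # illustrative; the real code uses VDBField.VECTOR
    metric_type=MetricType.L2,
    params=HNSWParams(m=16, efconstruction=200),
    auto_build=True,
    auto_build_index_policy=auto_build_policy,    # server-side rebuild, no client polling needed
)
# ------------------------------------------------------------------------------------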
table.upsert(rows=rows) - # rebuild vector index after upsert finished - table.rebuild_index(self.vector_index) - timeout = 3600 # 1 hour timeout - start_time = time.time() - while True: - time.sleep(1) - index = table.describe_index(self.vector_index) - if index.state == IndexState.NORMAL: - break - if time.time() - start_time > timeout: - raise TimeoutError(f"Index rebuild timeout after {timeout} seconds") - def text_exists(self, id: str) -> bool: res = self._db.table(self._collection_name).query(primary_key={VDBField.PRIMARY_KEY: id}) if res and res.code == 0: @@ -232,8 +224,14 @@ class BaiduVector(BaseVector): return self._client.database(self._client_config.database) def _table_existed(self) -> bool: - tables = self._db.list_table() - return any(table.table_name == self._collection_name for table in tables) + try: + table = self._db.table(self._collection_name) + except ServerError as e: + if e.code == ServerErrCode.TABLE_NOT_EXIST: + return False + else: + raise + return True def _create_table(self, dimension: int): # Try to grab distributed lock and create table @@ -287,6 +285,11 @@ class BaiduVector(BaseVector): field=VDBField.VECTOR, metric_type=metric_type, params=HNSWParams(m=16, efconstruction=200), + auto_build=True, + auto_build_index_policy=AutoBuildRowCountIncrement( + row_count_increment=self._client_config.auto_build_row_count_increment, + row_count_increment_ratio=self._client_config.auto_build_row_count_increment_ratio, + ), ) ) @@ -335,7 +338,7 @@ class BaiduVector(BaseVector): ) # Wait for table created - timeout = 300 # 5 minutes timeout + timeout = self._client_config.rebuild_index_timeout_in_seconds # default 5 minutes timeout start_time = time.time() while True: time.sleep(1) @@ -345,6 +348,20 @@ class BaiduVector(BaseVector): if time.time() - start_time > timeout: raise TimeoutError(f"Table creation timeout after {timeout} seconds") redis_client.set(table_exist_cache_key, 1, ex=3600) + # rebuild vector index immediately after table created, make sure index is ready + table.rebuild_index(self.vector_index) + timeout = 3600 # 1 hour timeout + self._wait_for_index_ready(table, timeout) + + def _wait_for_index_ready(self, table, timeout: int = 3600): + start_time = time.time() + while True: + time.sleep(1) + index = table.describe_index(self.vector_index) + if index.state == IndexState.NORMAL: + break + if time.time() - start_time > timeout: + raise TimeoutError(f"Index rebuild timeout after {timeout} seconds") class BaiduVectorFactory(AbstractVectorFactory): @@ -369,5 +386,8 @@ class BaiduVectorFactory(AbstractVectorFactory): replicas=dify_config.BAIDU_VECTOR_DB_REPLICAS, inverted_index_analyzer=dify_config.BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER, inverted_index_parser_mode=dify_config.BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE, + auto_build_row_count_increment=dify_config.BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT, + auto_build_row_count_increment_ratio=dify_config.BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO, + rebuild_index_timeout_in_seconds=dify_config.BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS, ), ) diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py index 71b6fa0a9b..3c1d5e015f 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_on_qdrant_vector.py @@ -33,6 +33,7 @@ from core.rag.models.document import Document from extensions.ext_database import db from 
extensions.ext_redis import redis_client from models.dataset import Dataset, TidbAuthBinding +from models.enums import TidbAuthBindingStatus if TYPE_CHECKING: from qdrant_client import grpc # noqa @@ -452,7 +453,7 @@ class TidbOnQdrantVectorFactory(AbstractVectorFactory): password=new_cluster["password"], tenant_id=dataset.tenant_id, active=True, - status="ACTIVE", + status=TidbAuthBindingStatus.ACTIVE, ) db.session.add(new_tidb_auth_binding) db.session.commit() diff --git a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py index 754c149241..06b17b9e62 100644 --- a/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py +++ b/api/core/rag/datasource/vdb/tidb_on_qdrant/tidb_service.py @@ -9,6 +9,7 @@ from configs import dify_config from extensions.ext_database import db from extensions.ext_redis import redis_client from models.dataset import TidbAuthBinding +from models.enums import TidbAuthBindingStatus class TidbService: @@ -170,7 +171,7 @@ class TidbService: userPrefix = item["userPrefix"] if state == "ACTIVE" and len(userPrefix) > 0: cluster_info = tidb_serverless_list_map[item["clusterId"]] - cluster_info.status = "ACTIVE" + cluster_info.status = TidbAuthBindingStatus.ACTIVE cluster_info.account = f"{userPrefix}.root" db.session.add(cluster_info) db.session.commit() diff --git a/api/core/rag/extractor/firecrawl/firecrawl_app.py b/api/core/rag/extractor/firecrawl/firecrawl_app.py index 371f7b0865..e1ddd2dd96 100644 --- a/api/core/rag/extractor/firecrawl/firecrawl_app.py +++ b/api/core/rag/extractor/firecrawl/firecrawl_app.py @@ -95,15 +95,11 @@ class FirecrawlApp: if response.status_code == 200: crawl_status_response = response.json() if crawl_status_response.get("status") == "completed": - total = crawl_status_response.get("total", 0) - if total == 0: + # Normalize to avoid None bypassing the zero-guard when the API returns null. + total = crawl_status_response.get("total") or 0 + if total <= 0: raise Exception("Failed to check crawl status. Error: No page found") - data = crawl_status_response.get("data", []) - url_data_list: list[FirecrawlDocumentData] = [] - for item in data: - if isinstance(item, dict) and "metadata" in item and "markdown" in item: - url_data = self._extract_common_fields(item) - url_data_list.append(url_data) + url_data_list = self._collect_all_crawl_pages(crawl_status_response, headers) if url_data_list: file_key = "website_files/" + job_id + ".txt" try: @@ -120,6 +116,36 @@ class FirecrawlApp: self._handle_error(response, "check crawl status") raise RuntimeError("unreachable: _handle_error always raises") + def _collect_all_crawl_pages( + self, first_page: dict[str, Any], headers: dict[str, str] + ) -> list[FirecrawlDocumentData]: + """Collect all crawl result pages by following pagination links. + + Raises an exception if any paginated request fails, to avoid returning + partial data that is inconsistent with the reported total. + + The number of pages processed is capped at ``total`` (the + server-reported page count) to guard against infinite loops caused by + a misbehaving server that keeps returning a ``next`` URL. 
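    Illustrative shape of each crawl-status payload this method walks (only the keys
    the code reads are shown: ``total``, ``next``, ``data``, and per-item
    ``markdown``/``metadata``; all values are made up)::

        {
            "total": 25,
            "next": "https://.../next-page",  # absent or null on the last page
            "data": [{"markdown": "...", "metadata": {...}}],
        }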
+ """ + total: int = first_page.get("total") or 0 + url_data_list: list[FirecrawlDocumentData] = [] + current_page = first_page + pages_processed = 0 + while True: + for item in current_page.get("data", []): + if isinstance(item, dict) and "metadata" in item and "markdown" in item: + url_data_list.append(self._extract_common_fields(item)) + next_url: str | None = current_page.get("next") + pages_processed += 1 + if not next_url or pages_processed >= total: + break + response = self._get_request(next_url, headers) + if response.status_code != 200: + self._handle_error(response, "fetch next crawl page") + current_page = response.json() + return url_data_list + def _format_crawl_status_response( self, status: str, diff --git a/api/core/tools/builtin_tool/tool.py b/api/core/tools/builtin_tool/tool.py index 00f5931088..bcf58394ba 100644 --- a/api/core/tools/builtin_tool/tool.py +++ b/api/core/tools/builtin_tool/tool.py @@ -50,7 +50,7 @@ class BuiltinTool(Tool): return ModelInvocationUtils.invoke( user_id=user_id, tenant_id=self.runtime.tenant_id or "", - tool_type="builtin", + tool_type=ToolProviderType.BUILT_IN, tool_name=self.entity.identity.name, prompt_messages=prompt_messages, ) diff --git a/api/core/tools/tool_label_manager.py b/api/core/tools/tool_label_manager.py index 90d5a647e9..250dd91bfd 100644 --- a/api/core/tools/tool_label_manager.py +++ b/api/core/tools/tool_label_manager.py @@ -38,7 +38,7 @@ class ToolLabelManager: db.session.add( ToolLabelBinding( tool_id=provider_id, - tool_type=controller.provider_type.value, + tool_type=controller.provider_type, label_name=label, ) ) @@ -58,7 +58,7 @@ class ToolLabelManager: raise ValueError("Unsupported tool type") stmt = select(ToolLabelBinding.label_name).where( ToolLabelBinding.tool_id == provider_id, - ToolLabelBinding.tool_type == controller.provider_type.value, + ToolLabelBinding.tool_type == controller.provider_type, ) labels = db.session.scalars(stmt).all() diff --git a/api/core/tools/utils/model_invocation_utils.py b/api/core/tools/utils/model_invocation_utils.py index 8f958563bd..373bd1b1c8 100644 --- a/api/core/tools/utils/model_invocation_utils.py +++ b/api/core/tools/utils/model_invocation_utils.py @@ -9,6 +9,7 @@ from decimal import Decimal from typing import cast from core.model_manager import ModelManager +from core.tools.entities.tool_entities import ToolProviderType from dify_graph.model_runtime.entities.llm_entities import LLMResult from dify_graph.model_runtime.entities.message_entities import PromptMessage from dify_graph.model_runtime.entities.model_entities import ModelPropertyKey, ModelType @@ -78,7 +79,7 @@ class ModelInvocationUtils: @staticmethod def invoke( - user_id: str, tenant_id: str, tool_type: str, tool_name: str, prompt_messages: list[PromptMessage] + user_id: str, tenant_id: str, tool_type: ToolProviderType, tool_name: str, prompt_messages: list[PromptMessage] ) -> LLMResult: """ invoke model with parameters in user's own context diff --git a/api/dify_graph/nodes/llm/llm_utils.py b/api/dify_graph/nodes/llm/llm_utils.py index 2be391a424..8682c3682c 100644 --- a/api/dify_graph/nodes/llm/llm_utils.py +++ b/api/dify_graph/nodes/llm/llm_utils.py @@ -1,6 +1,9 @@ from __future__ import annotations -from collections.abc import Sequence +import json +import logging +import re +from collections.abc import Mapping, Sequence from typing import Any, cast from core.model_manager import ModelInstance @@ -36,6 +39,11 @@ from .exc import ( ) from .protocols import TemplateRenderer +logger = logging.getLogger(__name__) + 
+VARIABLE_PATTERN = re.compile(r"\{\{#[^#]+#\}\}") +MAX_RESOLVED_VALUE_LENGTH = 1024 + def fetch_model_schema(*, model_instance: ModelInstance) -> AIModelEntity: model_schema = cast(LargeLanguageModel, model_instance.model_type_instance).get_model_schema( @@ -475,3 +483,61 @@ def _append_file_prompts( prompt_messages[-1] = UserPromptMessage(content=file_prompts + existing_contents) else: prompt_messages.append(UserPromptMessage(content=file_prompts)) + + +def _coerce_resolved_value(raw: str) -> int | float | bool | str: + """Try to restore the original type from a resolved template string. + + Variable references are always resolved to text, but completion params may + expect numeric or boolean values (e.g. a variable that holds "0.7" mapped to + the ``temperature`` parameter). This helper attempts a JSON parse so that + ``"0.7"`` → ``0.7``, ``"true"`` → ``True``, etc. Plain strings that are not + valid JSON literals are returned as-is. + """ + stripped = raw.strip() + if not stripped: + return raw + + try: + parsed: object = json.loads(stripped) + except (json.JSONDecodeError, ValueError): + return raw + + if isinstance(parsed, (int, float, bool)): + return parsed + return raw + + +def resolve_completion_params_variables( + completion_params: Mapping[str, Any], + variable_pool: VariablePool, +) -> dict[str, Any]: + """Resolve variable references (``{{#node_id.var#}}``) in string-typed completion params. + + Security notes: + - Resolved values are length-capped to ``MAX_RESOLVED_VALUE_LENGTH`` to + prevent denial-of-service through excessively large variable payloads. + - This follows the same ``VariablePool.convert_template`` pattern used across + Dify (Answer Node, HTTP Request Node, Agent Node, etc.). The downstream + model plugin receives these values as structured JSON key-value pairs — they + are never concatenated into raw HTTP headers or SQL queries. + - Numeric/boolean coercion is applied so that variables holding ``"0.7"`` are + restored to their native type rather than sent as a bare string. 
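    Illustrative example (hypothetical node and variable names; assumes
    ``variable_pool`` resolves ``{{#start.temp#}}`` to the text ``"0.7"``)::

        params = {"temperature": "{{#start.temp#}}", "max_tokens": 512, "stop": ["END"]}
        resolve_completion_params_variables(params, variable_pool)
        # -> {"temperature": 0.7, "max_tokens": 512, "stop": ["END"]}
        # Only string values containing a variable reference are rewritten; the int and
        # the list pass through untouched, and "0.7" is coerced back to a float.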
+ """ + resolved: dict[str, Any] = {} + for key, value in completion_params.items(): + if isinstance(value, str) and VARIABLE_PATTERN.search(value): + segment_group = variable_pool.convert_template(value) + text = segment_group.text + if len(text) > MAX_RESOLVED_VALUE_LENGTH: + logger.warning( + "Resolved value for param '%s' truncated from %d to %d chars", + key, + len(text), + MAX_RESOLVED_VALUE_LENGTH, + ) + text = text[:MAX_RESOLVED_VALUE_LENGTH] + resolved[key] = _coerce_resolved_value(text) + else: + resolved[key] = value + return resolved diff --git a/api/dify_graph/nodes/llm/node.py b/api/dify_graph/nodes/llm/node.py index 5ed90ed7e3..a5492aee6b 100644 --- a/api/dify_graph/nodes/llm/node.py +++ b/api/dify_graph/nodes/llm/node.py @@ -202,6 +202,10 @@ class LLMNode(Node[LLMNodeData]): # fetch model config model_instance = self._model_instance + # Resolve variable references in string-typed completion params + model_instance.parameters = llm_utils.resolve_completion_params_variables( + model_instance.parameters, variable_pool + ) model_name = model_instance.model_name model_provider = model_instance.provider model_stop = model_instance.stop diff --git a/api/dify_graph/nodes/parameter_extractor/parameter_extractor_node.py b/api/dify_graph/nodes/parameter_extractor/parameter_extractor_node.py index 3913a27697..e6e8a44d06 100644 --- a/api/dify_graph/nodes/parameter_extractor/parameter_extractor_node.py +++ b/api/dify_graph/nodes/parameter_extractor/parameter_extractor_node.py @@ -164,6 +164,10 @@ class ParameterExtractorNode(Node[ParameterExtractorNodeData]): ) model_instance = self._model_instance + # Resolve variable references in string-typed completion params + model_instance.parameters = llm_utils.resolve_completion_params_variables( + model_instance.parameters, variable_pool + ) if not isinstance(model_instance.model_type_instance, LargeLanguageModel): raise InvalidModelTypeError("Model is not a Large Language Model") diff --git a/api/dify_graph/nodes/question_classifier/question_classifier_node.py b/api/dify_graph/nodes/question_classifier/question_classifier_node.py index 59d0a2a4d8..928618fdbc 100644 --- a/api/dify_graph/nodes/question_classifier/question_classifier_node.py +++ b/api/dify_graph/nodes/question_classifier/question_classifier_node.py @@ -114,6 +114,10 @@ class QuestionClassifierNode(Node[QuestionClassifierNodeData]): variables = {"query": query} # fetch model instance model_instance = self._model_instance + # Resolve variable references in string-typed completion params + model_instance.parameters = llm_utils.resolve_completion_params_variables( + model_instance.parameters, variable_pool + ) memory = self._memory # fetch instruction node_data.instruction = node_data.instruction or "" diff --git a/api/libs/login.py b/api/libs/login.py index bd5cb5f30d..dce332b01d 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -18,15 +18,23 @@ if TYPE_CHECKING: from models.model import EndUser +def _resolve_current_user() -> EndUser | Account | None: + """ + Resolve the current user proxy to its underlying user object. + This keeps unit tests working when they patch `current_user` directly + instead of bootstrapping a full Flask-Login manager. + """ + user_proxy = current_user + get_current_object = getattr(user_proxy, "_get_current_object", None) + return get_current_object() if callable(get_current_object) else user_proxy # type: ignore + + def current_account_with_tenant(): """ Resolve the underlying account for the current user proxy and ensure tenant context exists. 
Allows tests to supply plain Account mocks without the LocalProxy helper. """ - user_proxy = current_user - - get_current_object = getattr(user_proxy, "_get_current_object", None) - user = get_current_object() if callable(get_current_object) else user_proxy # type: ignore + user = _resolve_current_user() if not isinstance(user, Account): raise ValueError("current_user must be an Account instance") @@ -79,9 +87,10 @@ def login_required(func: Callable[P, R]) -> Callable[P, R | ResponseReturnValue] if request.method in EXEMPT_METHODS or dify_config.LOGIN_DISABLED: return current_app.ensure_sync(func)(*args, **kwargs) - user = _get_user() + user = _resolve_current_user() if user is None or not user.is_authenticated: return current_app.login_manager.unauthorized() # type: ignore + g._login_user = user # we put csrf validation here for less conflicts # TODO: maybe find a better place for it. check_csrf_token(request, user.id) diff --git a/api/libs/oauth.py b/api/libs/oauth.py index efce13f6f1..1afb42304d 100644 --- a/api/libs/oauth.py +++ b/api/libs/oauth.py @@ -1,16 +1,19 @@ +import logging import sys import urllib.parse from dataclasses import dataclass from typing import NotRequired import httpx -from pydantic import TypeAdapter +from pydantic import TypeAdapter, ValidationError if sys.version_info >= (3, 12): from typing import TypedDict else: from typing_extensions import TypedDict +logger = logging.getLogger(__name__) + JsonObject = dict[str, object] JsonObjectList = list[JsonObject] @@ -30,8 +33,8 @@ class GitHubEmailRecord(TypedDict, total=False): class GitHubRawUserInfo(TypedDict): id: int | str login: str - name: NotRequired[str] - email: NotRequired[str] + name: NotRequired[str | None] + email: NotRequired[str | None] class GoogleRawUserInfo(TypedDict): @@ -127,9 +130,14 @@ class GitHubOAuth(OAuth): response.raise_for_status() user_info = GITHUB_RAW_USER_INFO_ADAPTER.validate_python(_json_object(response)) - email_response = httpx.get(self._EMAIL_INFO_URL, headers=headers) - email_info = GITHUB_EMAIL_RECORDS_ADAPTER.validate_python(_json_list(email_response)) - primary_email = next((email for email in email_info if email.get("primary") is True), None) + try: + email_response = httpx.get(self._EMAIL_INFO_URL, headers=headers) + email_response.raise_for_status() + email_info = GITHUB_EMAIL_RECORDS_ADAPTER.validate_python(_json_list(email_response)) + primary_email = next((email for email in email_info if email.get("primary") is True), None) + except (httpx.HTTPStatusError, ValidationError): + logger.warning("Failed to retrieve email from GitHub /user/emails endpoint", exc_info=True) + primary_email = None return {**user_info, "email": primary_email.get("email", "") if primary_email else ""} @@ -137,8 +145,11 @@ class GitHubOAuth(OAuth): payload = GITHUB_RAW_USER_INFO_ADAPTER.validate_python(raw_info) email = payload.get("email") if not email: - email = f"{payload['id']}+{payload['login']}@users.noreply.github.com" - return OAuthUserInfo(id=str(payload["id"]), name=str(payload.get("name", "")), email=email) + raise ValueError( + 'Dify currently does not support the "Keep my email addresses private" feature,' + " please disable it and log in again" + ) + return OAuthUserInfo(id=str(payload["id"]), name=str(payload.get("name") or ""), email=email) class GoogleOAuth(OAuth): diff --git a/api/models/dataset.py b/api/models/dataset.py index d0163e6984..b4fb03a7f4 100644 --- a/api/models/dataset.py +++ b/api/models/dataset.py @@ -43,7 +43,9 @@ from .enums import ( IndexingStatus, ProcessRuleMode, 
SegmentStatus, + SegmentType, SummaryStatus, + TidbAuthBindingStatus, ) from .model import App, Tag, TagBinding, UploadFile from .types import AdjustedJSON, BinaryData, EnumText, LongText, StringUUID, adjusted_json_index @@ -494,7 +496,9 @@ class Document(Base): ) doc_type = mapped_column(EnumText(DocumentDocType, length=40), nullable=True) doc_metadata = mapped_column(AdjustedJSON, nullable=True) - doc_form = mapped_column(String(255), nullable=False, server_default=sa.text("'text_model'")) + doc_form: Mapped[IndexStructureType] = mapped_column( + EnumText(IndexStructureType, length=255), nullable=False, server_default=sa.text("'text_model'") + ) doc_language = mapped_column(String(255), nullable=True) need_summary: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) @@ -998,7 +1002,9 @@ class ChildChunk(Base): # indexing fields index_node_id = mapped_column(String(255), nullable=True) index_node_hash = mapped_column(String(255), nullable=True) - type = mapped_column(String(255), nullable=False, server_default=sa.text("'automatic'")) + type: Mapped[SegmentType] = mapped_column( + EnumText(SegmentType, length=255), nullable=False, server_default=sa.text("'automatic'") + ) created_by = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column(DateTime, nullable=False, server_default=sa.func.current_timestamp()) updated_by = mapped_column(StringUUID, nullable=True) @@ -1239,7 +1245,9 @@ class TidbAuthBinding(TypeBase): cluster_id: Mapped[str] = mapped_column(String(255), nullable=False) cluster_name: Mapped[str] = mapped_column(String(255), nullable=False) active: Mapped[bool] = mapped_column(sa.Boolean, nullable=False, server_default=sa.text("false")) - status: Mapped[str] = mapped_column(sa.String(255), nullable=False, server_default=sa.text("'CREATING'")) + status: Mapped[TidbAuthBindingStatus] = mapped_column( + EnumText(TidbAuthBindingStatus, length=255), nullable=False, server_default=sa.text("'CREATING'") + ) account: Mapped[str] = mapped_column(String(255), nullable=False) password: Mapped[str] = mapped_column(String(255), nullable=False) created_at: Mapped[datetime] = mapped_column( diff --git a/api/models/enums.py b/api/models/enums.py index 4849099d30..cdec7b2f12 100644 --- a/api/models/enums.py +++ b/api/models/enums.py @@ -222,6 +222,13 @@ class DatasetMetadataType(StrEnum): TIME = "time" +class SegmentType(StrEnum): + """Document segment type""" + + AUTOMATIC = "automatic" + CUSTOMIZED = "customized" + + class SegmentStatus(StrEnum): """Document segment status""" @@ -323,3 +330,10 @@ class ProviderQuotaType(StrEnum): if member.value == value: return member raise ValueError(f"No matching enum found for value '{value}'") + + +class ApiTokenType(StrEnum): + """API Token type""" + + APP = "app" + DATASET = "dataset" diff --git a/api/models/execution_extra_content.py b/api/models/execution_extra_content.py index d0bd34efec..b2d09a7732 100644 --- a/api/models/execution_extra_content.py +++ b/api/models/execution_extra_content.py @@ -66,8 +66,8 @@ class HumanInputContent(ExecutionExtraContent): form_id: Mapped[str] = mapped_column(StringUUID, nullable=True) @classmethod - def new(cls, form_id: str, message_id: str | None) -> "HumanInputContent": - return cls(form_id=form_id, message_id=message_id) + def new(cls, *, workflow_run_id: str, form_id: str, message_id: str | None) -> "HumanInputContent": + return cls(workflow_run_id=workflow_run_id, form_id=form_id, message_id=message_id) form: Mapped["HumanInputForm"] = 
relationship( "HumanInputForm", diff --git a/api/models/model.py b/api/models/model.py index a08e43d128..05233f8711 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -21,7 +21,7 @@ from configs import dify_config from constants import DEFAULT_FILE_NUMBER_LIMITS from core.tools.signature import sign_tool_file from dify_graph.enums import WorkflowExecutionStatus -from dify_graph.file import FILE_MODEL_IDENTITY, File, FileTransferMethod +from dify_graph.file import FILE_MODEL_IDENTITY, File, FileTransferMethod, FileType from dify_graph.file import helpers as file_helpers from extensions.storage.storage_type import StorageType from libs.helper import generate_string # type: ignore[import-not-found] @@ -31,6 +31,7 @@ from .account import Account, Tenant from .base import Base, TypeBase, gen_uuidv4_string from .engine import db from .enums import ( + ApiTokenType, AppMCPServerStatus, AppStatus, BannerStatus, @@ -43,6 +44,8 @@ from .enums import ( MessageChainType, MessageFileBelongsTo, MessageStatus, + ProviderQuotaType, + TagType, ) from .provider_ids import GenericProviderID from .types import EnumText, LongText, StringUUID @@ -1782,7 +1785,7 @@ class MessageFile(TypeBase): StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False ) message_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - type: Mapped[str] = mapped_column(String(255), nullable=False) + type: Mapped[FileType] = mapped_column(EnumText(FileType, length=255), nullable=False) transfer_method: Mapped[FileTransferMethod] = mapped_column( EnumText(FileTransferMethod, length=255), nullable=False ) @@ -2094,7 +2097,7 @@ class ApiToken(Base): # bug: this uses setattr so idk the field. id = mapped_column(StringUUID, default=lambda: str(uuid4())) app_id = mapped_column(StringUUID, nullable=True) tenant_id = mapped_column(StringUUID, nullable=True) - type = mapped_column(String(16), nullable=False) + type: Mapped[ApiTokenType] = mapped_column(EnumText(ApiTokenType, length=16), nullable=False) token: Mapped[str] = mapped_column(String(255), nullable=False) last_used_at = mapped_column(sa.DateTime, nullable=True) created_at = mapped_column(sa.DateTime, nullable=False, server_default=func.current_timestamp()) @@ -2404,7 +2407,7 @@ class Tag(TypeBase): StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False ) tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) - type: Mapped[str] = mapped_column(String(16), nullable=False) + type: Mapped[TagType] = mapped_column(EnumText(TagType, length=16), nullable=False) name: Mapped[str] = mapped_column(String(255), nullable=False) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( @@ -2489,7 +2492,9 @@ class TenantCreditPool(TypeBase): StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False ) tenant_id: Mapped[str] = mapped_column(StringUUID, nullable=False) - pool_type: Mapped[str] = mapped_column(String(40), nullable=False, default="trial", server_default="trial") + pool_type: Mapped[ProviderQuotaType] = mapped_column( + EnumText(ProviderQuotaType, length=40), nullable=False, default=ProviderQuotaType.TRIAL, server_default="trial" + ) quota_limit: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0) quota_used: Mapped[int] = mapped_column(BigInteger, nullable=False, default=0) created_at: Mapped[datetime] = mapped_column( diff --git a/api/models/tools.py 
b/api/models/tools.py index c09f054e7d..63b27b9413 100644 --- a/api/models/tools.py +++ b/api/models/tools.py @@ -13,12 +13,16 @@ from sqlalchemy.orm import Mapped, mapped_column from core.tools.entities.common_entities import I18nObject from core.tools.entities.tool_bundle import ApiToolBundle -from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration +from core.tools.entities.tool_entities import ( + ApiProviderSchemaType, + ToolProviderType, + WorkflowToolParameterConfiguration, +) from .base import TypeBase from .engine import db from .model import Account, App, Tenant -from .types import LongText, StringUUID +from .types import EnumText, LongText, StringUUID if TYPE_CHECKING: from core.entities.mcp_provider import MCPProviderEntity @@ -141,7 +145,9 @@ class ApiToolProvider(TypeBase): icon: Mapped[str] = mapped_column(String(255), nullable=False) # original schema schema: Mapped[str] = mapped_column(LongText, nullable=False) - schema_type_str: Mapped[str] = mapped_column(String(40), nullable=False) + schema_type_str: Mapped[ApiProviderSchemaType] = mapped_column( + EnumText(ApiProviderSchemaType, length=40), nullable=False + ) # who created this tool user_id: Mapped[str] = mapped_column(StringUUID, nullable=False) # tenant id @@ -208,7 +214,7 @@ class ToolLabelBinding(TypeBase): # tool id tool_id: Mapped[str] = mapped_column(String(64), nullable=False) # tool type - tool_type: Mapped[str] = mapped_column(String(40), nullable=False) + tool_type: Mapped[ToolProviderType] = mapped_column(EnumText(ToolProviderType, length=40), nullable=False) # label name label_name: Mapped[str] = mapped_column(String(40), nullable=False) @@ -386,7 +392,7 @@ class ToolModelInvoke(TypeBase): # provider provider: Mapped[str] = mapped_column(String(255), nullable=False) # type - tool_type: Mapped[str] = mapped_column(String(40), nullable=False) + tool_type: Mapped[ToolProviderType] = mapped_column(EnumText(ToolProviderType, length=40), nullable=False) # tool name tool_name: Mapped[str] = mapped_column(String(128), nullable=False) # invoke parameters diff --git a/api/models/workflow.py b/api/models/workflow.py index 6e8dda429d..334ec42058 100644 --- a/api/models/workflow.py +++ b/api/models/workflow.py @@ -1221,7 +1221,9 @@ class WorkflowAppLog(TypeBase): app_id: Mapped[str] = mapped_column(StringUUID) workflow_id: Mapped[str] = mapped_column(StringUUID, nullable=False) workflow_run_id: Mapped[str] = mapped_column(StringUUID) - created_from: Mapped[str] = mapped_column(String(255), nullable=False) + created_from: Mapped[WorkflowAppLogCreatedFrom] = mapped_column( + EnumText(WorkflowAppLogCreatedFrom, length=255), nullable=False + ) created_by_role: Mapped[CreatorUserRole] = mapped_column(EnumText(CreatorUserRole, length=255), nullable=False) created_by: Mapped[str] = mapped_column(StringUUID, nullable=False) created_at: Mapped[datetime] = mapped_column( @@ -1301,10 +1303,14 @@ class WorkflowArchiveLog(TypeBase): log_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True) log_created_at: Mapped[datetime | None] = mapped_column(DateTime, nullable=True) - log_created_from: Mapped[str | None] = mapped_column(String(255), nullable=True) + log_created_from: Mapped[WorkflowAppLogCreatedFrom | None] = mapped_column( + EnumText(WorkflowAppLogCreatedFrom, length=255), nullable=True + ) run_version: Mapped[str] = mapped_column(String(255), nullable=False) - run_status: Mapped[str] = mapped_column(String(255), nullable=False) + run_status: 
Mapped[WorkflowExecutionStatus] = mapped_column( + EnumText(WorkflowExecutionStatus, length=255), nullable=False + ) run_triggered_from: Mapped[WorkflowRunTriggeredFrom] = mapped_column( EnumText(WorkflowRunTriggeredFrom, length=255), nullable=False ) diff --git a/api/pyproject.toml b/api/pyproject.toml index fb71f3cd6c..d4d0ebcf7f 100644 --- a/api/pyproject.toml +++ b/api/pyproject.toml @@ -8,7 +8,7 @@ dependencies = [ "arize-phoenix-otel~=0.15.0", "azure-identity==1.25.3", "beautifulsoup4==4.14.3", - "boto3==1.42.68", + "boto3==1.42.73", "bs4~=0.0.1", "cachetools~=5.3.0", "celery~=5.6.2", @@ -23,7 +23,7 @@ dependencies = [ "gevent~=25.9.1", "gmpy2~=2.3.0", "google-api-core>=2.19.1", - "google-api-python-client==2.192.0", + "google-api-python-client==2.193.0", "google-auth>=2.47.0", "google-auth-httplib2==0.3.0", "google-cloud-aiplatform>=1.123.0", @@ -40,7 +40,7 @@ dependencies = [ "numpy~=1.26.4", "openpyxl~=3.1.5", "opik~=1.10.37", - "litellm==1.82.2", # Pinned to avoid madoka dependency issue + "litellm==1.82.6", # Pinned to avoid madoka dependency issue "opentelemetry-api==1.28.0", "opentelemetry-distro==0.49b0", "opentelemetry-exporter-otlp==1.28.0", @@ -72,10 +72,10 @@ dependencies = [ "pyyaml~=6.0.1", "readabilipy~=0.3.0", "redis[hiredis]~=7.3.0", - "resend~=2.23.0", - "sentry-sdk[flask]~=2.54.0", + "resend~=2.26.0", + "sentry-sdk[flask]~=2.55.0", "sqlalchemy~=2.0.29", - "starlette==0.52.1", + "starlette==1.0.0", "tiktoken~=0.12.0", "transformers~=5.3.0", "unstructured[docx,epub,md,ppt,pptx]~=0.21.5", @@ -92,7 +92,7 @@ dependencies = [ "apscheduler>=3.11.0", "weave>=0.52.16", "fastopenapi[flask]>=0.7.0", - "bleach~=6.2.0", + "bleach~=6.3.0", ] # Before adding new dependency, consider place it in # alphabet order (a-z) and suitable group. @@ -119,7 +119,7 @@ dev = [ "ruff~=0.15.5", "pytest~=9.0.2", "pytest-benchmark~=5.2.3", - "pytest-cov~=7.0.0", + "pytest-cov~=7.1.0", "pytest-env~=1.6.0", "pytest-mock~=3.15.1", "testcontainers~=4.14.1", @@ -203,7 +203,7 @@ tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"] # Required by vector store clients ############################################################ vdb = [ - "alibabacloud_gpdb20160503~=3.8.0", + "alibabacloud_gpdb20160503~=5.1.0", "alibabacloud_tea_openapi~=0.4.3", "chromadb==0.5.20", "clickhouse-connect~=0.14.1", diff --git a/api/schedule/create_tidb_serverless_task.py b/api/schedule/create_tidb_serverless_task.py index 8b9d973d6d..6ceb3ef856 100644 --- a/api/schedule/create_tidb_serverless_task.py +++ b/api/schedule/create_tidb_serverless_task.py @@ -8,6 +8,7 @@ from configs import dify_config from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService from extensions.ext_database import db from models.dataset import TidbAuthBinding +from models.enums import TidbAuthBindingStatus @app.celery.task(queue="dataset") @@ -57,7 +58,7 @@ def create_clusters(batch_size): account=new_cluster["account"], password=new_cluster["password"], active=False, - status="CREATING", + status=TidbAuthBindingStatus.CREATING, ) db.session.add(tidb_auth_binding) db.session.commit() diff --git a/api/schedule/update_tidb_serverless_status_task.py b/api/schedule/update_tidb_serverless_status_task.py index 1befa0e8b5..10003b1b97 100644 --- a/api/schedule/update_tidb_serverless_status_task.py +++ b/api/schedule/update_tidb_serverless_status_task.py @@ -9,6 +9,7 @@ from configs import dify_config from core.rag.datasource.vdb.tidb_on_qdrant.tidb_service import TidbService from extensions.ext_database import db from models.dataset import 
TidbAuthBinding +from models.enums import TidbAuthBindingStatus @app.celery.task(queue="dataset") @@ -18,7 +19,10 @@ def update_tidb_serverless_status_task(): try: # check the number of idle tidb serverless tidb_serverless_list = db.session.scalars( - select(TidbAuthBinding).where(TidbAuthBinding.active == False, TidbAuthBinding.status == "CREATING") + select(TidbAuthBinding).where( + TidbAuthBinding.active == False, + TidbAuthBinding.status == TidbAuthBindingStatus.CREATING, + ) ).all() if len(tidb_serverless_list) == 0: return diff --git a/api/services/app_service.py b/api/services/app_service.py index c5d1479a20..69c7c0c95a 100644 --- a/api/services/app_service.py +++ b/api/services/app_service.py @@ -241,7 +241,7 @@ class AppService: class ArgsDict(TypedDict): name: str description: str - icon_type: str + icon_type: IconType | str | None icon: str icon_background: str use_icon_as_answer_icon: bool @@ -257,7 +257,13 @@ class AppService: assert current_user is not None app.name = args["name"] app.description = args["description"] - app.icon_type = IconType(args["icon_type"]) if args["icon_type"] else None + icon_type = args.get("icon_type") + if icon_type is None: + resolved_icon_type = app.icon_type + else: + resolved_icon_type = IconType(icon_type) + + app.icon_type = resolved_icon_type app.icon = args["icon"] app.icon_background = args["icon_background"] app.use_icon_as_answer_icon = args.get("use_icon_as_answer_icon", False) diff --git a/api/services/auth/api_key_auth_base.py b/api/services/auth/api_key_auth_base.py index dd74a8f1b5..2e1b723e82 100644 --- a/api/services/auth/api_key_auth_base.py +++ b/api/services/auth/api_key_auth_base.py @@ -1,8 +1,16 @@ from abc import ABC, abstractmethod +from typing import Any + +from typing_extensions import TypedDict + + +class AuthCredentials(TypedDict): + auth_type: str + config: dict[str, Any] class ApiKeyAuthBase(ABC): - def __init__(self, credentials: dict): + def __init__(self, credentials: AuthCredentials): self.credentials = credentials @abstractmethod diff --git a/api/services/auth/api_key_auth_factory.py b/api/services/auth/api_key_auth_factory.py index 7ae31b0768..6e183b70e3 100644 --- a/api/services/auth/api_key_auth_factory.py +++ b/api/services/auth/api_key_auth_factory.py @@ -1,9 +1,9 @@ -from services.auth.api_key_auth_base import ApiKeyAuthBase +from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials from services.auth.auth_type import AuthType class ApiKeyAuthFactory: - def __init__(self, provider: str, credentials: dict): + def __init__(self, provider: str, credentials: AuthCredentials): auth_factory = self.get_apikey_auth_factory(provider) self.auth = auth_factory(credentials) diff --git a/api/services/auth/firecrawl/firecrawl.py b/api/services/auth/firecrawl/firecrawl.py index b002706931..c9e5610aea 100644 --- a/api/services/auth/firecrawl/firecrawl.py +++ b/api/services/auth/firecrawl/firecrawl.py @@ -2,11 +2,11 @@ import json import httpx -from services.auth.api_key_auth_base import ApiKeyAuthBase +from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials class FirecrawlAuth(ApiKeyAuthBase): - def __init__(self, credentials: dict): + def __init__(self, credentials: AuthCredentials): super().__init__(credentials) auth_type = credentials.get("auth_type") if auth_type != "bearer": diff --git a/api/services/auth/jina.py b/api/services/auth/jina.py index afaed28ac9..e5e2319ce1 100644 --- a/api/services/auth/jina.py +++ b/api/services/auth/jina.py @@ -2,11 +2,11 @@ import json import httpx 
-from services.auth.api_key_auth_base import ApiKeyAuthBase +from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials class JinaAuth(ApiKeyAuthBase): - def __init__(self, credentials: dict): + def __init__(self, credentials: AuthCredentials): super().__init__(credentials) auth_type = credentials.get("auth_type") if auth_type != "bearer": diff --git a/api/services/auth/jina/jina.py b/api/services/auth/jina/jina.py index afaed28ac9..e5e2319ce1 100644 --- a/api/services/auth/jina/jina.py +++ b/api/services/auth/jina/jina.py @@ -2,11 +2,11 @@ import json import httpx -from services.auth.api_key_auth_base import ApiKeyAuthBase +from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials class JinaAuth(ApiKeyAuthBase): - def __init__(self, credentials: dict): + def __init__(self, credentials: AuthCredentials): super().__init__(credentials) auth_type = credentials.get("auth_type") if auth_type != "bearer": diff --git a/api/services/auth/watercrawl/watercrawl.py b/api/services/auth/watercrawl/watercrawl.py index b2d28a83d1..cbdc908690 100644 --- a/api/services/auth/watercrawl/watercrawl.py +++ b/api/services/auth/watercrawl/watercrawl.py @@ -3,11 +3,11 @@ from urllib.parse import urljoin import httpx -from services.auth.api_key_auth_base import ApiKeyAuthBase +from services.auth.api_key_auth_base import ApiKeyAuthBase, AuthCredentials class WatercrawlAuth(ApiKeyAuthBase): - def __init__(self, credentials: dict): + def __init__(self, credentials: AuthCredentials): super().__init__(credentials) auth_type = credentials.get("auth_type") if auth_type != "x-api-key": diff --git a/api/services/credit_pool_service.py b/api/services/credit_pool_service.py index 1954602571..2894826935 100644 --- a/api/services/credit_pool_service.py +++ b/api/services/credit_pool_service.py @@ -7,6 +7,7 @@ from configs import dify_config from core.errors.error import QuotaExceededError from extensions.ext_database import db from models import TenantCreditPool +from models.enums import ProviderQuotaType logger = logging.getLogger(__name__) @@ -16,7 +17,10 @@ class CreditPoolService: def create_default_pool(cls, tenant_id: str) -> TenantCreditPool: """create default credit pool for new tenant""" credit_pool = TenantCreditPool( - tenant_id=tenant_id, quota_limit=dify_config.HOSTED_POOL_CREDITS, quota_used=0, pool_type="trial" + tenant_id=tenant_id, + quota_limit=dify_config.HOSTED_POOL_CREDITS, + quota_used=0, + pool_type=ProviderQuotaType.TRIAL, ) db.session.add(credit_pool) db.session.commit() diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index cdab90a3dc..65e112f1e9 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -58,6 +58,7 @@ from models.enums import ( IndexingStatus, ProcessRuleMode, SegmentStatus, + SegmentType, ) from models.model import UploadFile from models.provider_ids import ModelProviderID @@ -1439,7 +1440,7 @@ class DocumentService: .filter( Document.id.in_(document_id_list), Document.dataset_id == dataset_id, - Document.doc_form != "qa_model", # Skip qa_model documents + Document.doc_form != IndexStructureType.QA_INDEX, # Skip qa_model documents ) .update({Document.need_summary: need_summary}, synchronize_session=False) ) @@ -2039,7 +2040,7 @@ class DocumentService: document.dataset_process_rule_id = dataset_process_rule.id document.updated_at = naive_utc_now() document.created_from = created_from - document.doc_form = knowledge_config.doc_form + document.doc_form = 
IndexStructureType(knowledge_config.doc_form) document.doc_language = knowledge_config.doc_language document.data_source_info = json.dumps(data_source_info) document.batch = batch @@ -2639,7 +2640,7 @@ class DocumentService: document.splitting_completed_at = None document.updated_at = naive_utc_now() document.created_from = created_from - document.doc_form = document_data.doc_form + document.doc_form = IndexStructureType(document_data.doc_form) db.session.add(document) db.session.commit() # update document segment @@ -3100,7 +3101,7 @@ class DocumentService: class SegmentService: @classmethod def segment_create_args_validate(cls, args: dict, document: Document): - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: if "answer" not in args or not args["answer"]: raise ValueError("Answer is required") if not args["answer"].strip(): @@ -3157,7 +3158,7 @@ class SegmentService: completed_at=naive_utc_now(), created_by=current_user.id, ) - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: segment_document.word_count += len(args["answer"]) segment_document.answer = args["answer"] @@ -3231,7 +3232,7 @@ class SegmentService: tokens = 0 if dataset.indexing_technique == "high_quality" and embedding_model: # calc embedding use tokens - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: tokens = embedding_model.get_text_embedding_num_tokens( texts=[content + segment_item["answer"]] )[0] @@ -3254,7 +3255,7 @@ class SegmentService: completed_at=naive_utc_now(), created_by=current_user.id, ) - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: segment_document.answer = segment_item["answer"] segment_document.word_count += len(segment_item["answer"]) increment_word_count += segment_document.word_count @@ -3321,7 +3322,7 @@ class SegmentService: content = args.content or segment.content if segment.content == content: segment.word_count = len(content) - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: segment.answer = args.answer segment.word_count += len(args.answer) if args.answer else 0 word_count_change = segment.word_count - word_count_change @@ -3418,7 +3419,7 @@ class SegmentService: ) # calc embedding use tokens - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: segment.answer = args.answer tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] # type: ignore else: @@ -3435,7 +3436,7 @@ class SegmentService: segment.enabled = True segment.disabled_at = None segment.disabled_by = None - if document.doc_form == "qa_model": + if document.doc_form == IndexStructureType.QA_INDEX: segment.answer = args.answer segment.word_count += len(args.answer) if args.answer else 0 word_count_change = segment.word_count - word_count_change @@ -3786,7 +3787,7 @@ class SegmentService: child_chunk.word_count = len(child_chunk.content) child_chunk.updated_by = current_user.id child_chunk.updated_at = naive_utc_now() - child_chunk.type = "customized" + child_chunk.type = SegmentType.CUSTOMIZED update_child_chunks.append(child_chunk) else: new_child_chunks_args.append(child_chunk_update_args) @@ -3845,7 +3846,7 @@ class SegmentService: child_chunk.word_count = len(content) child_chunk.updated_by = current_user.id child_chunk.updated_at = naive_utc_now() - child_chunk.type = "customized" + child_chunk.type = SegmentType.CUSTOMIZED 
db.session.add(child_chunk) VectorService.update_child_chunk_vector([], [child_chunk], [], dataset) db.session.commit() diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py b/api/services/rag_pipeline/rag_pipeline_transform_service.py index 1d0aafd5fd..7dcfecdd1d 100644 --- a/api/services/rag_pipeline/rag_pipeline_transform_service.py +++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py @@ -9,6 +9,7 @@ from flask_login import current_user from constants import DOCUMENT_EXTENSIONS from core.plugin.impl.plugin import PluginInstaller +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.retrieval.retrieval_methods import RetrievalMethod from extensions.ext_database import db from factories import variable_factory @@ -79,9 +80,9 @@ class RagPipelineTransformService: pipeline = self._create_pipeline(pipeline_yaml) # save chunk structure to dataset - if doc_form == "hierarchical_model": + if doc_form == IndexStructureType.PARENT_CHILD_INDEX: dataset.chunk_structure = "hierarchical_model" - elif doc_form == "text_model": + elif doc_form == IndexStructureType.PARAGRAPH_INDEX: dataset.chunk_structure = "text_model" else: raise ValueError("Unsupported doc form") @@ -101,7 +102,7 @@ class RagPipelineTransformService: def _get_transform_yaml(self, doc_form: str, datasource_type: str, indexing_technique: str | None): pipeline_yaml = {} - if doc_form == "text_model": + if doc_form == IndexStructureType.PARAGRAPH_INDEX: match datasource_type: case DataSourceType.UPLOAD_FILE: if indexing_technique == "high_quality": @@ -132,7 +133,7 @@ class RagPipelineTransformService: pipeline_yaml = yaml.safe_load(f) case _: raise ValueError("Unsupported datasource type") - elif doc_form == "hierarchical_model": + elif doc_form == IndexStructureType.PARENT_CHILD_INDEX: match datasource_type: case DataSourceType.UPLOAD_FILE: # get graph from transform.file-parentchild.yml diff --git a/api/services/tag_service.py b/api/services/tag_service.py index bd3585acf4..70bf7f16f2 100644 --- a/api/services/tag_service.py +++ b/api/services/tag_service.py @@ -7,6 +7,7 @@ from werkzeug.exceptions import NotFound from extensions.ext_database import db from models.dataset import Dataset +from models.enums import TagType from models.model import App, Tag, TagBinding @@ -83,7 +84,7 @@ class TagService: raise ValueError("Tag name already exists") tag = Tag( name=args["name"], - type=args["type"], + type=TagType(args["type"]), created_by=current_user.id, tenant_id=current_user.current_tenant_id, ) diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 49dee00919..7f810129ef 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -11,6 +11,7 @@ from sqlalchemy import func from core.db.session_factory import session_factory from core.model_manager import ModelManager +from core.rag.index_processor.constant.index_type import IndexStructureType from dify_graph.model_runtime.entities.model_entities import ModelType from extensions.ext_redis import redis_client from extensions.ext_storage import storage @@ -109,7 +110,7 @@ def batch_create_segment_to_index_task( df = pd.read_csv(file_path) content = [] for _, row in df.iterrows(): - if document_config["doc_form"] == "qa_model": + if document_config["doc_form"] == IndexStructureType.QA_INDEX: data = {"content": row.iloc[0], "answer": row.iloc[1]} else: data = {"content": row.iloc[0]} @@ -159,7 +160,7 @@ 
def batch_create_segment_to_index_task( status="completed", completed_at=naive_utc_now(), ) - if document_config["doc_form"] == "qa_model": + if document_config["doc_form"] == IndexStructureType.QA_INDEX: segment_document.answer = segment["answer"] segment_document.word_count += len(segment["answer"]) word_count_change += segment_document.word_count diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py index e05d63426c..b5794e33e2 100644 --- a/api/tasks/document_indexing_task.py +++ b/api/tasks/document_indexing_task.py @@ -10,6 +10,7 @@ from configs import dify_config from core.db.session_factory import session_factory from core.entities.document_task import DocumentTask from core.indexing_runner import DocumentIsPausedError, IndexingRunner +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.pipeline.queue import TenantIsolatedTaskQueue from enums.cloud_plan import CloudPlan from libs.datetime_utils import naive_utc_now @@ -150,7 +151,7 @@ def _document_indexing(dataset_id: str, document_ids: Sequence[str]): ) if ( document.indexing_status == IndexingStatus.COMPLETED - and document.doc_form != "qa_model" + and document.doc_form != IndexStructureType.QA_INDEX and document.need_summary is True ): try: diff --git a/api/tasks/regenerate_summary_index_task.py b/api/tasks/regenerate_summary_index_task.py index 39c2f4103e..ac5d23408a 100644 --- a/api/tasks/regenerate_summary_index_task.py +++ b/api/tasks/regenerate_summary_index_task.py @@ -9,6 +9,7 @@ from celery import shared_task from sqlalchemy import or_, select from core.db.session_factory import session_factory +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, DocumentSegment, DocumentSegmentSummary from models.dataset import Document as DatasetDocument from services.summary_index_service import SummaryIndexService @@ -106,7 +107,7 @@ def regenerate_summary_index_task( ), DatasetDocument.enabled == True, # Document must be enabled DatasetDocument.archived == False, # Document must not be archived - DatasetDocument.doc_form != "qa_model", # Skip qa_model documents + DatasetDocument.doc_form != IndexStructureType.QA_INDEX, # Skip qa_model documents ) .order_by(DocumentSegment.document_id.asc(), DocumentSegment.position.asc()) .all() @@ -209,7 +210,7 @@ def regenerate_summary_index_task( for dataset_document in dataset_documents: # Skip qa_model documents - if dataset_document.doc_form == "qa_model": + if dataset_document.doc_form == IndexStructureType.QA_INDEX: continue try: diff --git a/api/tasks/trigger_processing_tasks.py b/api/tasks/trigger_processing_tasks.py index 75ae1f6316..f8c7964805 100644 --- a/api/tasks/trigger_processing_tasks.py +++ b/api/tasks/trigger_processing_tasks.py @@ -179,7 +179,7 @@ def _record_trigger_failure_log( app_id=workflow.app_id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from=WorkflowAppLogCreatedFrom.SERVICE_API.value, + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=created_by_role, created_by=created_by, ) diff --git a/api/tests/integration_tests/libs/test_api_token_cache_integration.py b/api/tests/integration_tests/libs/test_api_token_cache_integration.py index 1d7b835fd2..a942690cbd 100644 --- a/api/tests/integration_tests/libs/test_api_token_cache_integration.py +++ b/api/tests/integration_tests/libs/test_api_token_cache_integration.py @@ -13,6 +13,7 @@ from unittest.mock import patch import pytest from extensions.ext_redis 
import redis_client +from models.enums import ApiTokenType from models.model import ApiToken from services.api_token_service import ApiTokenCache, CachedApiToken @@ -279,7 +280,7 @@ class TestEndToEndCacheFlow: test_token = ApiToken() test_token.id = "test-e2e-id" test_token.token = test_token_value - test_token.type = test_scope + test_token.type = ApiTokenType.APP test_token.app_id = "test-app" test_token.tenant_id = "test-tenant" test_token.last_used_at = None diff --git a/api/tests/test_containers_integration_tests/controllers/console/app/test_message.py b/api/tests/test_containers_integration_tests/controllers/console/app/test_message.py new file mode 100644 index 0000000000..6b51ec98bc --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/app/test_message.py @@ -0,0 +1,342 @@ +"""Authenticated controller integration tests for console message APIs.""" + +from datetime import timedelta +from decimal import Decimal +from unittest.mock import patch +from uuid import uuid4 + +import pytest +from flask.testing import FlaskClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from controllers.console.app.message import ChatMessagesQuery, FeedbackExportQuery, MessageFeedbackPayload +from controllers.console.app.message import attach_message_extra_contents as _attach_message_extra_contents +from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError +from libs.datetime_utils import naive_utc_now +from models.enums import ConversationFromSource, FeedbackRating +from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback +from services.errors.conversation import ConversationNotExistsError +from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + create_console_account_and_tenant, + create_console_app, +) + + +def _create_conversation(db_session: Session, app_id: str, account_id: str, mode: AppMode) -> Conversation: + conversation = Conversation( + app_id=app_id, + app_model_config_id=None, + model_provider=None, + model_id="", + override_model_configs=None, + mode=mode, + name="Test Conversation", + inputs={}, + introduction="", + system_instruction="", + system_instruction_tokens=0, + status="normal", + from_source=ConversationFromSource.CONSOLE, + from_account_id=account_id, + ) + db_session.add(conversation) + db_session.commit() + return conversation + + +def _create_message( + db_session: Session, + app_id: str, + conversation_id: str, + account_id: str, + *, + created_at_offset_seconds: int = 0, +) -> Message: + created_at = naive_utc_now() + timedelta(seconds=created_at_offset_seconds) + message = Message( + app_id=app_id, + model_provider=None, + model_id="", + override_model_configs=None, + conversation_id=conversation_id, + inputs={}, + query="Hello", + message={"type": "text", "content": "Hello"}, + message_tokens=1, + message_unit_price=Decimal("0.0001"), + message_price_unit=Decimal("0.001"), + answer="Hi there", + answer_tokens=1, + answer_unit_price=Decimal("0.0001"), + answer_price_unit=Decimal("0.001"), + parent_message_id=None, + provider_response_latency=0, + total_price=Decimal("0.0002"), + currency="USD", + from_source=ConversationFromSource.CONSOLE, + from_account_id=account_id, + created_at=created_at, + updated_at=created_at, + app_mode=AppMode.CHAT, + ) + db_session.add(message) + 
db_session.commit() + return message + + +class TestMessageValidators: + def test_chat_messages_query_validators(self) -> None: + assert ChatMessagesQuery.empty_to_none("") is None + assert ChatMessagesQuery.empty_to_none("val") == "val" + assert ChatMessagesQuery.validate_uuid(None) is None + assert ( + ChatMessagesQuery.validate_uuid("123e4567-e89b-12d3-a456-426614174000") + == "123e4567-e89b-12d3-a456-426614174000" + ) + + def test_message_feedback_validators(self) -> None: + assert ( + MessageFeedbackPayload.validate_message_id("123e4567-e89b-12d3-a456-426614174000") + == "123e4567-e89b-12d3-a456-426614174000" + ) + + def test_feedback_export_validators(self) -> None: + assert FeedbackExportQuery.parse_bool(None) is None + assert FeedbackExportQuery.parse_bool(True) is True + assert FeedbackExportQuery.parse_bool("1") is True + assert FeedbackExportQuery.parse_bool("0") is False + assert FeedbackExportQuery.parse_bool("off") is False + + with pytest.raises(ValueError): + FeedbackExportQuery.parse_bool("invalid") + + +def test_chat_message_list_not_found( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/chat-messages", + query_string={"conversation_id": str(uuid4())}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 404 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "not_found" + + +def test_chat_message_list_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode) + _create_message(db_session_with_containers, app.id, conversation.id, account.id, created_at_offset_seconds=0) + second = _create_message( + db_session_with_containers, + app.id, + conversation.id, + account.id, + created_at_offset_seconds=1, + ) + + with patch( + "controllers.console.app.message.attach_message_extra_contents", + side_effect=_attach_message_extra_contents, + ): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/chat-messages", + query_string={"conversation_id": conversation.id, "limit": 1}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["limit"] == 1 + assert payload["has_more"] is True + assert len(payload["data"]) == 1 + assert payload["data"][0]["id"] == second.id + + +def test_message_feedback_not_found( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + + response = test_client_with_containers.post( + f"/console/api/apps/{app.id}/feedbacks", + json={"message_id": str(uuid4()), "rating": "like"}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert 
response.status_code == 404 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "not_found" + + +def test_message_feedback_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode) + message = _create_message(db_session_with_containers, app.id, conversation.id, account.id) + + response = test_client_with_containers.post( + f"/console/api/apps/{app.id}/feedbacks", + json={"message_id": message.id, "rating": "like"}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"result": "success"} + + feedback = db_session_with_containers.scalar( + select(MessageFeedback).where(MessageFeedback.message_id == message.id) + ) + assert feedback is not None + assert feedback.rating == FeedbackRating.LIKE + assert feedback.from_account_id == account.id + + +def test_message_annotation_count( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode) + message = _create_message(db_session_with_containers, app.id, conversation.id, account.id) + db_session_with_containers.add( + MessageAnnotation( + app_id=app.id, + conversation_id=conversation.id, + message_id=message.id, + question="Q", + content="A", + account_id=account.id, + ) + ) + db_session_with_containers.commit() + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/annotations/count", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"count": 1} + + +def test_message_suggested_questions_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + message_id = str(uuid4()) + + with patch( + "controllers.console.app.message.MessageService.get_suggested_questions_after_answer", + return_value=["q1", "q2"], + ): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/chat-messages/{message_id}/suggested-questions", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"data": ["q1", "q2"]} + + +@pytest.mark.parametrize( + ("exc", "expected_status", "expected_code"), + [ + (MessageNotExistsError(), 404, "not_found"), + (ConversationNotExistsError(), 404, "not_found"), + (ProviderTokenNotInitError(), 400, "provider_not_initialize"), + (QuotaExceededError(), 400, "provider_quota_exceeded"), + (ModelCurrentlyNotSupportError(), 400, "model_currently_not_support"), + (SuggestedQuestionsAfterAnswerDisabledError(), 403, "app_suggested_questions_after_answer_disabled"), + (Exception(), 500, "internal_server_error"), + ], +) +def 
test_message_suggested_questions_errors( + exc: Exception, + expected_status: int, + expected_code: str, + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + message_id = str(uuid4()) + + with patch( + "controllers.console.app.message.MessageService.get_suggested_questions_after_answer", + side_effect=exc, + ): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/chat-messages/{message_id}/suggested-questions", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == expected_status + payload = response.get_json() + assert payload is not None + assert payload["code"] == expected_code + + +def test_message_feedback_export_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + + with patch("services.feedback_service.FeedbackService.export_feedbacks", return_value={"exported": True}): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/feedbacks/export", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"exported": True} + + +def test_message_api_get_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode) + message = _create_message(db_session_with_containers, app.id, conversation.id, account.id) + + with patch( + "controllers.console.app.message.attach_message_extra_contents", + side_effect=_attach_message_extra_contents, + ): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/messages/{message.id}", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["id"] == message.id diff --git a/api/tests/test_containers_integration_tests/controllers/console/app/test_statistic.py b/api/tests/test_containers_integration_tests/controllers/console/app/test_statistic.py new file mode 100644 index 0000000000..963cfe53e5 --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/app/test_statistic.py @@ -0,0 +1,334 @@ +"""Controller integration tests for console statistic routes.""" + +from datetime import timedelta +from decimal import Decimal +from unittest.mock import patch +from uuid import uuid4 + +from flask.testing import FlaskClient +from sqlalchemy.orm import Session + +from core.app.entities.app_invoke_entities import InvokeFrom +from libs.datetime_utils import naive_utc_now +from models.enums import ConversationFromSource, FeedbackFromSource, FeedbackRating +from models.model import AppMode, Conversation, Message, MessageFeedback +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + 
create_console_account_and_tenant, + create_console_app, +) + + +def _create_conversation( + db_session: Session, + app_id: str, + account_id: str, + *, + mode: AppMode, + created_at_offset_days: int = 0, +) -> Conversation: + created_at = naive_utc_now() + timedelta(days=created_at_offset_days) + conversation = Conversation( + app_id=app_id, + app_model_config_id=None, + model_provider=None, + model_id="", + override_model_configs=None, + mode=mode, + name="Stats Conversation", + inputs={}, + introduction="", + system_instruction="", + system_instruction_tokens=0, + status="normal", + from_source=ConversationFromSource.CONSOLE, + from_account_id=account_id, + created_at=created_at, + updated_at=created_at, + ) + db_session.add(conversation) + db_session.commit() + return conversation + + +def _create_message( + db_session: Session, + app_id: str, + conversation_id: str, + *, + from_account_id: str | None, + from_end_user_id: str | None = None, + message_tokens: int = 1, + answer_tokens: int = 1, + total_price: Decimal = Decimal("0.01"), + provider_response_latency: float = 1.0, + created_at_offset_days: int = 0, +) -> Message: + created_at = naive_utc_now() + timedelta(days=created_at_offset_days) + message = Message( + app_id=app_id, + model_provider=None, + model_id="", + override_model_configs=None, + conversation_id=conversation_id, + inputs={}, + query="Hello", + message={"type": "text", "content": "Hello"}, + message_tokens=message_tokens, + message_unit_price=Decimal("0.001"), + message_price_unit=Decimal("0.001"), + answer="Hi there", + answer_tokens=answer_tokens, + answer_unit_price=Decimal("0.001"), + answer_price_unit=Decimal("0.001"), + parent_message_id=None, + provider_response_latency=provider_response_latency, + total_price=total_price, + currency="USD", + invoke_from=InvokeFrom.EXPLORE, + from_source=ConversationFromSource.CONSOLE, + from_end_user_id=from_end_user_id, + from_account_id=from_account_id, + created_at=created_at, + updated_at=created_at, + app_mode=AppMode.CHAT, + ) + db_session.add(message) + db_session.commit() + return message + + +def _create_like_feedback( + db_session: Session, + app_id: str, + conversation_id: str, + message_id: str, + account_id: str, +) -> None: + db_session.add( + MessageFeedback( + app_id=app_id, + conversation_id=conversation_id, + message_id=message_id, + rating=FeedbackRating.LIKE, + from_source=FeedbackFromSource.ADMIN, + from_account_id=account_id, + ) + ) + db_session.commit() + + +def test_daily_message_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/daily-messages", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["message_count"] == 1 + + +def test_daily_conversation_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = 
create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/daily-conversations", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["conversation_count"] == 1 + + +def test_daily_terminals_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message( + db_session_with_containers, + app.id, + conversation.id, + from_account_id=None, + from_end_user_id=str(uuid4()), + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/daily-end-users", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["terminal_count"] == 1 + + +def test_daily_token_cost_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message( + db_session_with_containers, + app.id, + conversation.id, + from_account_id=account.id, + message_tokens=40, + answer_tokens=60, + total_price=Decimal("0.02"), + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/token-costs", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload["data"][0]["token_count"] == 100 + assert Decimal(payload["data"][0]["total_price"]) == Decimal("0.02") + + +def test_average_session_interaction_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/average-session-interactions", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["interactions"] == 2.0 + + +def test_user_satisfaction_rate_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) 
-> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + first = _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + for _ in range(9): + _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id) + _create_like_feedback(db_session_with_containers, app.id, conversation.id, first.id, account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/user-satisfaction-rate", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["rate"] == 100.0 + + +def test_average_response_time_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.COMPLETION) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message( + db_session_with_containers, + app.id, + conversation.id, + from_account_id=account.id, + provider_response_latency=1.234, + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/average-response-time", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["latency"] == 1234.0 + + +def test_tokens_per_second_statistic( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode) + _create_message( + db_session_with_containers, + app.id, + conversation.id, + from_account_id=account.id, + answer_tokens=31, + provider_response_latency=2.0, + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/tokens-per-second", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json()["data"][0]["tps"] == 15.5 + + +def test_invalid_time_range( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + + with patch("controllers.console.app.statistic.parse_time_range", side_effect=ValueError("Invalid time")): + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/daily-messages?start=invalid&end=invalid", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "Invalid time" + + +def test_time_range_params_passed( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + import datetime + + account, tenant = 
create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT) + start = datetime.datetime.now() + end = datetime.datetime.now() + + with patch("controllers.console.app.statistic.parse_time_range", return_value=(start, end)) as mock_parse: + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/statistics/daily-messages?start=something&end=something", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + mock_parse.assert_called_once_with("something", "something", "UTC") diff --git a/api/tests/test_containers_integration_tests/controllers/console/app/test_workflow_draft_variable.py b/api/tests/test_containers_integration_tests/controllers/console/app/test_workflow_draft_variable.py new file mode 100644 index 0000000000..f037ad77c0 --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/app/test_workflow_draft_variable.py @@ -0,0 +1,415 @@ +"""Authenticated controller integration tests for workflow draft variable APIs.""" + +import uuid + +from flask.testing import FlaskClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from dify_graph.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID +from dify_graph.variables.segments import StringSegment +from factories.variable_factory import segment_to_variable +from models import Workflow +from models.model import AppMode +from models.workflow import WorkflowDraftVariable +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + create_console_account_and_tenant, + create_console_app, +) + + +def _create_draft_workflow( + db_session: Session, + app_id: str, + tenant_id: str, + account_id: str, + *, + environment_variables: list | None = None, + conversation_variables: list | None = None, +) -> Workflow: + workflow = Workflow.new( + tenant_id=tenant_id, + app_id=app_id, + type="workflow", + version=Workflow.VERSION_DRAFT, + graph='{"nodes": [], "edges": []}', + features="{}", + created_by=account_id, + environment_variables=environment_variables or [], + conversation_variables=conversation_variables or [], + rag_pipeline_variables=[], + ) + db_session.add(workflow) + db_session.commit() + return workflow + + +def _create_node_variable( + db_session: Session, + app_id: str, + user_id: str, + *, + node_id: str = "node_1", + name: str = "test_var", +) -> WorkflowDraftVariable: + variable = WorkflowDraftVariable.new_node_variable( + app_id=app_id, + user_id=user_id, + node_id=node_id, + name=name, + value=StringSegment(value="test_value"), + node_execution_id=str(uuid.uuid4()), + visible=True, + editable=True, + ) + db_session.add(variable) + db_session.commit() + return variable + + +def _create_system_variable( + db_session: Session, app_id: str, user_id: str, name: str = "query" +) -> WorkflowDraftVariable: + variable = WorkflowDraftVariable.new_sys_variable( + app_id=app_id, + user_id=user_id, + name=name, + value=StringSegment(value="system-value"), + node_execution_id=str(uuid.uuid4()), + editable=True, + ) + db_session.add(variable) + db_session.commit() + return variable + + +def _build_environment_variable(name: str, value: str): + return segment_to_variable( + segment=StringSegment(value=value), + selector=[ENVIRONMENT_VARIABLE_NODE_ID, name], + name=name, + description=f"Environment variable {name}", + ) + + +def 
_build_conversation_variable(name: str, value: str): + return segment_to_variable( + segment=StringSegment(value=value), + selector=[CONVERSATION_VARIABLE_NODE_ID, name], + name=name, + description=f"Conversation variable {name}", + ) + + +def test_workflow_variable_collection_get_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/variables?page=1&limit=20", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"items": [], "total": 0} + + +def test_workflow_variable_collection_get_not_exist( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 404 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "draft_workflow_not_exist" + + +def test_workflow_variable_collection_delete( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_node_variable(db_session_with_containers, app.id, account.id) + _create_node_variable(db_session_with_containers, app.id, account.id, node_id="node_2", name="other_var") + + response = test_client_with_containers.delete( + f"/console/api/apps/{app.id}/workflows/draft/variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 204 + remaining = db_session_with_containers.scalars( + select(WorkflowDraftVariable).where( + WorkflowDraftVariable.app_id == app.id, + WorkflowDraftVariable.user_id == account.id, + ) + ).all() + assert remaining == [] + + +def test_node_variable_collection_get_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + node_variable = _create_node_variable(db_session_with_containers, app.id, account.id, node_id="node_123") + _create_node_variable(db_session_with_containers, app.id, account.id, node_id="node_456", name="other") + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/nodes/node_123/variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert [item["id"] for item in payload["items"]] == [node_variable.id] + + +def test_node_variable_collection_get_invalid_node_id( + 
db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/nodes/sys/variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 400 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "invalid_param" + + +def test_node_variable_collection_delete( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + target = _create_node_variable(db_session_with_containers, app.id, account.id, node_id="node_123") + untouched = _create_node_variable(db_session_with_containers, app.id, account.id, node_id="node_456") + target_id = target.id + untouched_id = untouched.id + + response = test_client_with_containers.delete( + f"/console/api/apps/{app.id}/workflows/draft/nodes/node_123/variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 204 + assert ( + db_session_with_containers.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == target_id)) + is None + ) + assert ( + db_session_with_containers.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == untouched_id)) + is not None + ) + + +def test_variable_api_get_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + variable = _create_node_variable(db_session_with_containers, app.id, account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["id"] == variable.id + assert payload["name"] == "test_var" + + +def test_variable_api_get_not_found( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/variables/{uuid.uuid4()}", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 404 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "not_found" + + +def test_variable_api_patch_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = 
create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + variable = _create_node_variable(db_session_with_containers, app.id, account.id) + + response = test_client_with_containers.patch( + f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}", + headers=authenticate_console_client(test_client_with_containers, account), + json={"name": "renamed_var"}, + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["id"] == variable.id + assert payload["name"] == "renamed_var" + + refreshed = db_session_with_containers.scalar( + select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable.id) + ) + assert refreshed is not None + assert refreshed.name == "renamed_var" + + +def test_variable_api_delete_success( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + variable = _create_node_variable(db_session_with_containers, app.id, account.id) + + response = test_client_with_containers.delete( + f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 204 + assert ( + db_session_with_containers.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable.id)) + is None + ) + + +def test_variable_reset_api_put_success_returns_no_content_without_execution( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow(db_session_with_containers, app.id, tenant.id, account.id) + variable = _create_node_variable(db_session_with_containers, app.id, account.id) + + response = test_client_with_containers.put( + f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}/reset", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 204 + assert ( + db_session_with_containers.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable.id)) + is None + ) + + +def test_conversation_variable_collection_get( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow( + db_session_with_containers, + app.id, + tenant.id, + account.id, + conversation_variables=[_build_conversation_variable("session_name", "Alice")], + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/conversation-variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert [item["name"] for item in payload["items"]] == ["session_name"] + + created = 
db_session_with_containers.scalars( + select(WorkflowDraftVariable).where( + WorkflowDraftVariable.app_id == app.id, + WorkflowDraftVariable.user_id == account.id, + WorkflowDraftVariable.node_id == CONVERSATION_VARIABLE_NODE_ID, + ) + ).all() + assert len(created) == 1 + + +def test_system_variable_collection_get( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + variable = _create_system_variable(db_session_with_containers, app.id, account.id) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/system-variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert [item["id"] for item in payload["items"]] == [variable.id] + + +def test_environment_variable_collection_get( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.WORKFLOW) + _create_draft_workflow( + db_session_with_containers, + app.id, + tenant.id, + account.id, + environment_variables=[_build_environment_variable("api_key", "secret-value")], + ) + + response = test_client_with_containers.get( + f"/console/api/apps/{app.id}/workflows/draft/environment-variables", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["items"][0]["name"] == "api_key" + assert payload["items"][0]["value"] == "secret-value" diff --git a/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_bearer_auth.py b/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_bearer_auth.py new file mode 100644 index 0000000000..00309c25d6 --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_bearer_auth.py @@ -0,0 +1,131 @@ +"""Controller integration tests for API key data source auth routes.""" + +import json +from unittest.mock import patch + +from flask.testing import FlaskClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from models.source import DataSourceApiKeyAuthBinding +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + create_console_account_and_tenant, +) + + +def test_get_api_key_auth_data_source( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + binding = DataSourceApiKeyAuthBinding( + tenant_id=tenant.id, + category="api_key", + provider="custom_provider", + credentials=json.dumps({"auth_type": "api_key", "config": {"api_key": "encrypted"}}), + disabled=False, + ) + db_session_with_containers.add(binding) + db_session_with_containers.commit() + + response = test_client_with_containers.get( + "/console/api/api-key-auth/data-source", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + payload = 
response.get_json() + assert payload is not None + assert len(payload["sources"]) == 1 + assert payload["sources"][0]["provider"] == "custom_provider" + + +def test_get_api_key_auth_data_source_empty( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + + response = test_client_with_containers.get( + "/console/api/api-key-auth/data-source", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"sources": []} + + +def test_create_binding_successful( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + + with ( + patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args"), + patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth"), + ): + response = test_client_with_containers.post( + "/console/api/api-key-auth/data-source/binding", + json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"result": "success"} + + +def test_create_binding_failure( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + + with ( + patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args"), + patch( + "controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth", + side_effect=ValueError("Invalid structure"), + ), + ): + response = test_client_with_containers.post( + "/console/api/api-key-auth/data-source/binding", + json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 500 + payload = response.get_json() + assert payload is not None + assert payload["code"] == "auth_failed" + assert payload["message"] == "Invalid structure" + + +def test_delete_binding_successful( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + binding = DataSourceApiKeyAuthBinding( + tenant_id=tenant.id, + category="api_key", + provider="custom_provider", + credentials=json.dumps({"auth_type": "api_key", "config": {"api_key": "encrypted"}}), + disabled=False, + ) + db_session_with_containers.add(binding) + db_session_with_containers.commit() + + response = test_client_with_containers.delete( + f"/console/api/api-key-auth/data-source/{binding.id}", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 204 + assert ( + db_session_with_containers.scalar( + select(DataSourceApiKeyAuthBinding).where(DataSourceApiKeyAuthBinding.id == binding.id) + ) + is None + ) diff --git a/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_oauth.py b/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_oauth.py new file mode 100644 index 0000000000..81b5423261 
--- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/auth/test_data_source_oauth.py @@ -0,0 +1,120 @@ +"""Controller integration tests for console OAuth data source routes.""" + +from unittest.mock import MagicMock, patch + +from flask.testing import FlaskClient +from sqlalchemy.orm import Session + +from models.source import DataSourceOauthBinding +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + create_console_account_and_tenant, +) + + +def test_get_oauth_url_successful( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + provider = MagicMock() + provider.get_authorization_url.return_value = "http://oauth.provider/auth" + + with ( + patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}), + patch("controllers.console.auth.data_source_oauth.dify_config.NOTION_INTEGRATION_TYPE", None), + ): + response = test_client_with_containers.get( + "/console/api/oauth/data-source/notion", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert tenant.id == account.current_tenant_id + assert response.status_code == 200 + assert response.get_json() == {"data": "http://oauth.provider/auth"} + provider.get_authorization_url.assert_called_once() + + +def test_get_oauth_url_invalid_provider( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": MagicMock()}): + response = test_client_with_containers.get( + "/console/api/oauth/data-source/unknown_provider", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 400 + assert response.get_json() == {"error": "Invalid provider"} + + +def test_oauth_callback_successful(test_client_with_containers: FlaskClient) -> None: + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": MagicMock()}): + response = test_client_with_containers.get("/console/api/oauth/data-source/callback/notion?code=mock_code") + + assert response.status_code == 302 + assert "code=mock_code" in response.location + + +def test_oauth_callback_missing_code(test_client_with_containers: FlaskClient) -> None: + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": MagicMock()}): + response = test_client_with_containers.get("/console/api/oauth/data-source/callback/notion") + + assert response.status_code == 302 + assert "error=Access%20denied" in response.location + + +def test_oauth_callback_invalid_provider(test_client_with_containers: FlaskClient) -> None: + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": MagicMock()}): + response = test_client_with_containers.get("/console/api/oauth/data-source/callback/invalid?code=mock_code") + + assert response.status_code == 400 + assert response.get_json() == {"error": "Invalid provider"} + + +def test_get_binding_successful(test_client_with_containers: FlaskClient) -> None: + provider = MagicMock() + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}): + response 
= test_client_with_containers.get("/console/api/oauth/data-source/binding/notion?code=auth_code_123") + + assert response.status_code == 200 + assert response.get_json() == {"result": "success"} + provider.get_access_token.assert_called_once_with("auth_code_123") + + +def test_get_binding_missing_code(test_client_with_containers: FlaskClient) -> None: + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": MagicMock()}): + response = test_client_with_containers.get("/console/api/oauth/data-source/binding/notion?code=") + + assert response.status_code == 400 + assert response.get_json() == {"error": "Invalid code"} + + +def test_sync_successful( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, tenant = create_console_account_and_tenant(db_session_with_containers) + binding = DataSourceOauthBinding( + tenant_id=tenant.id, + access_token="test-access-token", + provider="notion", + source_info={"workspace_name": "Workspace", "workspace_icon": None, "workspace_id": tenant.id, "pages": []}, + disabled=False, + ) + db_session_with_containers.add(binding) + db_session_with_containers.commit() + + provider = MagicMock() + with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}): + response = test_client_with_containers.get( + f"/console/api/oauth/data-source/notion/{binding.id}/sync", + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"result": "success"} + provider.sync_data_source.assert_called_once_with(binding.id) diff --git a/api/tests/test_containers_integration_tests/controllers/console/auth/test_oauth_server.py b/api/tests/test_containers_integration_tests/controllers/console/auth/test_oauth_server.py new file mode 100644 index 0000000000..2ef27133d8 --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/auth/test_oauth_server.py @@ -0,0 +1,365 @@ +"""Controller integration tests for console OAuth server routes.""" + +from unittest.mock import patch + +from flask.testing import FlaskClient +from sqlalchemy.orm import Session + +from models.model import OAuthProviderApp +from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN +from tests.test_containers_integration_tests.controllers.console.helpers import ( + authenticate_console_client, + create_console_account_and_tenant, + ensure_dify_setup, +) + + +def _build_oauth_provider_app() -> OAuthProviderApp: + return OAuthProviderApp( + app_icon="icon_url", + client_id="test_client_id", + client_secret="test_secret", + app_label={"en-US": "Test App"}, + redirect_uris=["http://localhost/callback"], + scope="read,write", + ) + + +def test_oauth_provider_successful_post( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider", + json={"client_id": "test_client_id", "redirect_uri": "http://localhost/callback"}, + ) + + assert response.status_code == 200 + payload = response.get_json() + assert payload is not None + assert payload["app_icon"] == "icon_url" + assert payload["app_label"] == {"en-US": "Test App"} + assert payload["scope"] == 
"read,write" + + +def test_oauth_provider_invalid_redirect_uri( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider", + json={"client_id": "test_client_id", "redirect_uri": "http://invalid/callback"}, + ) + + assert response.status_code == 400 + payload = response.get_json() + assert payload is not None + assert "redirect_uri is invalid" in payload["message"] + + +def test_oauth_provider_invalid_client_id( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + response = test_client_with_containers.post( + "/console/api/oauth/provider", + json={"client_id": "test_invalid_client_id", "redirect_uri": "http://localhost/callback"}, + ) + + assert response.status_code == 404 + payload = response.get_json() + assert payload is not None + assert "client_id is invalid" in payload["message"] + + +def test_oauth_authorize_successful( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + + with ( + patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ), + patch( + "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_authorization_code", + return_value="auth_code_123", + ) as mock_sign, + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/authorize", + json={"client_id": "test_client_id"}, + headers=authenticate_console_client(test_client_with_containers, account), + ) + + assert response.status_code == 200 + assert response.get_json() == {"code": "auth_code_123"} + mock_sign.assert_called_once_with("test_client_id", account.id) + + +def test_oauth_token_authorization_code_grant( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with ( + patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ), + patch( + "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token", + return_value=("access_123", "refresh_123"), + ), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={ + "client_id": "test_client_id", + "grant_type": "authorization_code", + "code": "auth_code", + "client_secret": "test_secret", + "redirect_uri": "http://localhost/callback", + }, + ) + + assert response.status_code == 200 + assert response.get_json() == { + "access_token": "access_123", + "token_type": "Bearer", + "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN, + "refresh_token": "refresh_123", + } + + +def test_oauth_token_authorization_code_grant_missing_code( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + 
json={ + "client_id": "test_client_id", + "grant_type": "authorization_code", + "client_secret": "test_secret", + "redirect_uri": "http://localhost/callback", + }, + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "code is required" + + +def test_oauth_token_authorization_code_grant_invalid_secret( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={ + "client_id": "test_client_id", + "grant_type": "authorization_code", + "code": "auth_code", + "client_secret": "invalid_secret", + "redirect_uri": "http://localhost/callback", + }, + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "client_secret is invalid" + + +def test_oauth_token_authorization_code_grant_invalid_redirect_uri( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={ + "client_id": "test_client_id", + "grant_type": "authorization_code", + "code": "auth_code", + "client_secret": "test_secret", + "redirect_uri": "http://invalid/callback", + }, + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "redirect_uri is invalid" + + +def test_oauth_token_refresh_token_grant( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with ( + patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ), + patch( + "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token", + return_value=("new_access", "new_refresh"), + ), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={"client_id": "test_client_id", "grant_type": "refresh_token", "refresh_token": "refresh_123"}, + ) + + assert response.status_code == 200 + assert response.get_json() == { + "access_token": "new_access", + "token_type": "Bearer", + "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN, + "refresh_token": "new_refresh", + } + + +def test_oauth_token_refresh_token_grant_missing_token( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={"client_id": "test_client_id", "grant_type": "refresh_token"}, + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "refresh_token is required" + + +def test_oauth_token_invalid_grant_type( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + 
"controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/token", + json={"client_id": "test_client_id", "grant_type": "invalid_grant"}, + ) + + assert response.status_code == 400 + assert response.get_json()["message"] == "invalid grant_type" + + +def test_oauth_account_successful_retrieval( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + account, _tenant = create_console_account_and_tenant(db_session_with_containers) + account.avatar = "avatar_url" + db_session_with_containers.commit() + + with ( + patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ), + patch( + "controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token", + return_value=account, + ), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/account", + json={"client_id": "test_client_id"}, + headers={"Authorization": "Bearer valid_access_token"}, + ) + + assert response.status_code == 200 + assert response.get_json() == { + "name": "Test User", + "email": account.email, + "avatar": "avatar_url", + "interface_language": "en-US", + "timezone": "UTC", + } + + +def test_oauth_account_missing_authorization_header( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/account", + json={"client_id": "test_client_id"}, + ) + + assert response.status_code == 401 + assert response.get_json() == {"error": "Authorization header is required"} + + +def test_oauth_account_invalid_authorization_header_format( + db_session_with_containers: Session, + test_client_with_containers: FlaskClient, +) -> None: + ensure_dify_setup(db_session_with_containers) + + with patch( + "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app", + return_value=_build_oauth_provider_app(), + ): + response = test_client_with_containers.post( + "/console/api/oauth/provider/account", + json={"client_id": "test_client_id"}, + headers={"Authorization": "InvalidFormat"}, + ) + + assert response.status_code == 401 + assert response.get_json() == {"error": "Invalid Authorization header format"} diff --git a/api/tests/unit_tests/controllers/console/auth/test_password_reset.py b/api/tests/test_containers_integration_tests/controllers/console/auth/test_password_reset.py similarity index 81% rename from api/tests/unit_tests/controllers/console/auth/test_password_reset.py rename to api/tests/test_containers_integration_tests/controllers/console/auth/test_password_reset.py index 9488cf528e..8f9db287e3 100644 --- a/api/tests/unit_tests/controllers/console/auth/test_password_reset.py +++ b/api/tests/test_containers_integration_tests/controllers/console/auth/test_password_reset.py @@ -1,17 +1,10 @@ -""" -Test suite for password reset authentication flows. 
+"""Testcontainers integration tests for password reset authentication flows.""" -This module tests the password reset mechanism including: -- Password reset email sending -- Verification code validation -- Password reset with token -- Rate limiting and security checks -""" +from __future__ import annotations from unittest.mock import MagicMock, patch import pytest -from flask import Flask from controllers.console.auth.error import ( EmailCodeError, @@ -28,31 +21,12 @@ from controllers.console.auth.forgot_password import ( from controllers.console.error import AccountNotFound, EmailSendIpLimitError -@pytest.fixture(autouse=True) -def _mock_forgot_password_session(): - with patch("controllers.console.auth.forgot_password.Session") as mock_session_cls: - mock_session = MagicMock() - mock_session_cls.return_value.__enter__.return_value = mock_session - mock_session_cls.return_value.__exit__.return_value = None - yield mock_session - - -@pytest.fixture(autouse=True) -def _mock_forgot_password_db(): - with patch("controllers.console.auth.forgot_password.db") as mock_db: - mock_db.engine = MagicMock() - yield mock_db - - class TestForgotPasswordSendEmailApi: """Test cases for sending password reset emails.""" @pytest.fixture - def app(self): - """Create Flask test application.""" - app = Flask(__name__) - app.config["TESTING"] = True - return app + def app(self, flask_app_with_containers): + return flask_app_with_containers @pytest.fixture def mock_account(self): @@ -62,7 +36,6 @@ class TestForgotPasswordSendEmailApi: account.name = "Test User" return account - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_email_send_ip_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_account_by_email_with_case_fallback") @patch("controllers.console.auth.forgot_password.AccountService.send_reset_password_email") @@ -73,20 +46,10 @@ class TestForgotPasswordSendEmailApi: mock_send_email, mock_get_account, mock_is_ip_limit, - mock_wraps_db, app, mock_account, ): - """ - Test successful password reset email sending. - - Verifies that: - - Email is sent to valid account - - Reset token is generated and returned - - IP rate limiting is checked - """ # Arrange - mock_wraps_db.session.query.return_value.first.return_value = MagicMock() mock_is_ip_limit.return_value = False mock_get_account.return_value = mock_account mock_send_email.return_value = "reset_token_123" @@ -104,9 +67,8 @@ class TestForgotPasswordSendEmailApi: assert response["data"] == "reset_token_123" mock_send_email.assert_called_once() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_email_send_ip_limit") - def test_send_reset_email_ip_rate_limited(self, mock_is_ip_limit, mock_db, app): + def test_send_reset_email_ip_rate_limited(self, mock_is_ip_limit, app): """ Test password reset email blocked by IP rate limit. 
@@ -115,7 +77,6 @@ class TestForgotPasswordSendEmailApi: - No email is sent when rate limited """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_ip_limit.return_value = True # Act & Assert @@ -133,7 +94,6 @@ class TestForgotPasswordSendEmailApi: (None, "en-US"), # Defaults to en-US when not provided ], ) - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_email_send_ip_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_account_by_email_with_case_fallback") @patch("controllers.console.auth.forgot_password.AccountService.send_reset_password_email") @@ -144,7 +104,6 @@ class TestForgotPasswordSendEmailApi: mock_send_email, mock_get_account, mock_is_ip_limit, - mock_wraps_db, app, mock_account, language_input, @@ -158,7 +117,6 @@ class TestForgotPasswordSendEmailApi: - Unsupported languages default to en-US """ # Arrange - mock_wraps_db.session.query.return_value.first.return_value = MagicMock() mock_is_ip_limit.return_value = False mock_get_account.return_value = mock_account mock_send_email.return_value = "token" @@ -180,13 +138,9 @@ class TestForgotPasswordCheckApi: """Test cases for verifying password reset codes.""" @pytest.fixture - def app(self): - """Create Flask test application.""" - app = Flask(__name__) - app.config["TESTING"] = True - return app + def app(self, flask_app_with_containers): + return flask_app_with_containers - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") @patch("controllers.console.auth.forgot_password.AccountService.revoke_reset_password_token") @@ -199,7 +153,6 @@ class TestForgotPasswordCheckApi: mock_revoke_token, mock_get_data, mock_is_rate_limit, - mock_db, app, ): """ @@ -212,7 +165,6 @@ class TestForgotPasswordCheckApi: - Rate limit is reset on success """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = False mock_get_data.return_value = {"email": "test@example.com", "code": "123456"} mock_generate_token.return_value = (None, "new_token") @@ -236,7 +188,6 @@ class TestForgotPasswordCheckApi: ) mock_reset_rate_limit.assert_called_once_with("test@example.com") - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") @patch("controllers.console.auth.forgot_password.AccountService.revoke_reset_password_token") @@ -249,10 +200,8 @@ class TestForgotPasswordCheckApi: mock_revoke_token, mock_get_data, mock_is_rate_limit, - mock_db, app, ): - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = False mock_get_data.return_value = {"email": "User@Example.com", "code": "999888"} mock_generate_token.return_value = (None, "fresh-token") @@ -271,9 +220,8 @@ class TestForgotPasswordCheckApi: mock_revoke_token.assert_called_once_with("upper_token") mock_reset_rate_limit.assert_called_once_with("user@example.com") - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") - def test_verify_code_rate_limited(self, mock_is_rate_limit, mock_db, app): + def test_verify_code_rate_limited(self, 
mock_is_rate_limit, app): """ Test code verification blocked by rate limit. @@ -282,7 +230,6 @@ class TestForgotPasswordCheckApi: - Prevents brute force attacks on verification codes """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = True # Act & Assert @@ -295,10 +242,9 @@ class TestForgotPasswordCheckApi: with pytest.raises(EmailPasswordResetLimitError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") - def test_verify_code_invalid_token(self, mock_get_data, mock_is_rate_limit, mock_db, app): + def test_verify_code_invalid_token(self, mock_get_data, mock_is_rate_limit, app): """ Test code verification with invalid token. @@ -306,7 +252,6 @@ class TestForgotPasswordCheckApi: - InvalidTokenError is raised for invalid/expired tokens """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = False mock_get_data.return_value = None @@ -320,10 +265,9 @@ class TestForgotPasswordCheckApi: with pytest.raises(InvalidTokenError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") - def test_verify_code_email_mismatch(self, mock_get_data, mock_is_rate_limit, mock_db, app): + def test_verify_code_email_mismatch(self, mock_get_data, mock_is_rate_limit, app): """ Test code verification with mismatched email. @@ -332,7 +276,6 @@ class TestForgotPasswordCheckApi: - Prevents token abuse """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = False mock_get_data.return_value = {"email": "original@example.com", "code": "123456"} @@ -346,11 +289,10 @@ class TestForgotPasswordCheckApi: with pytest.raises(InvalidEmailError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.is_forgot_password_error_rate_limit") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") @patch("controllers.console.auth.forgot_password.AccountService.add_forgot_password_error_rate_limit") - def test_verify_code_wrong_code(self, mock_add_rate_limit, mock_get_data, mock_is_rate_limit, mock_db, app): + def test_verify_code_wrong_code(self, mock_add_rate_limit, mock_get_data, mock_is_rate_limit, app): """ Test code verification with incorrect code. 
@@ -359,7 +301,6 @@ class TestForgotPasswordCheckApi: - Rate limit counter is incremented """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_is_rate_limit.return_value = False mock_get_data.return_value = {"email": "test@example.com", "code": "123456"} @@ -380,11 +321,8 @@ class TestForgotPasswordResetApi: """Test cases for resetting password with verified token.""" @pytest.fixture - def app(self): - """Create Flask test application.""" - app = Flask(__name__) - app.config["TESTING"] = True - return app + def app(self, flask_app_with_containers): + return flask_app_with_containers @pytest.fixture def mock_account(self): @@ -394,7 +332,6 @@ class TestForgotPasswordResetApi: account.name = "Test User" return account - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") @patch("controllers.console.auth.forgot_password.AccountService.revoke_reset_password_token") @patch("controllers.console.auth.forgot_password.AccountService.get_account_by_email_with_case_fallback") @@ -405,7 +342,6 @@ class TestForgotPasswordResetApi: mock_get_account, mock_revoke_token, mock_get_data, - mock_wraps_db, app, mock_account, ): @@ -418,7 +354,6 @@ class TestForgotPasswordResetApi: - Success response is returned """ # Arrange - mock_wraps_db.session.query.return_value.first.return_value = MagicMock() mock_get_data.return_value = {"email": "test@example.com", "phase": "reset"} mock_get_account.return_value = mock_account mock_get_tenants.return_value = [MagicMock()] @@ -436,9 +371,8 @@ class TestForgotPasswordResetApi: assert response["result"] == "success" mock_revoke_token.assert_called_once_with("valid_token") - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") - def test_reset_password_mismatch(self, mock_get_data, mock_db, app): + def test_reset_password_mismatch(self, mock_get_data, app): """ Test password reset with mismatched passwords. @@ -447,7 +381,6 @@ class TestForgotPasswordResetApi: - No password update occurs """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_get_data.return_value = {"email": "test@example.com", "phase": "reset"} # Act & Assert @@ -460,9 +393,8 @@ class TestForgotPasswordResetApi: with pytest.raises(PasswordMismatchError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") - def test_reset_password_invalid_token(self, mock_get_data, mock_db, app): + def test_reset_password_invalid_token(self, mock_get_data, app): """ Test password reset with invalid token. @@ -470,7 +402,6 @@ class TestForgotPasswordResetApi: - InvalidTokenError is raised for invalid/expired tokens """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_get_data.return_value = None # Act & Assert @@ -483,9 +414,8 @@ class TestForgotPasswordResetApi: with pytest.raises(InvalidTokenError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") - def test_reset_password_wrong_phase(self, mock_get_data, mock_db, app): + def test_reset_password_wrong_phase(self, mock_get_data, app): """ Test password reset with token not in reset phase. 
@@ -494,7 +424,6 @@ class TestForgotPasswordResetApi: - Prevents use of verification-phase tokens for reset """ # Arrange - mock_db.session.query.return_value.first.return_value = MagicMock() mock_get_data.return_value = {"email": "test@example.com", "phase": "verify"} # Act & Assert @@ -507,13 +436,10 @@ class TestForgotPasswordResetApi: with pytest.raises(InvalidTokenError): api.post() - @patch("controllers.console.wraps.db") @patch("controllers.console.auth.forgot_password.AccountService.get_reset_password_data") @patch("controllers.console.auth.forgot_password.AccountService.revoke_reset_password_token") @patch("controllers.console.auth.forgot_password.AccountService.get_account_by_email_with_case_fallback") - def test_reset_password_account_not_found( - self, mock_get_account, mock_revoke_token, mock_get_data, mock_wraps_db, app - ): + def test_reset_password_account_not_found(self, mock_get_account, mock_revoke_token, mock_get_data, app): """ Test password reset for non-existent account. @@ -521,7 +447,6 @@ class TestForgotPasswordResetApi: - AccountNotFound is raised when account doesn't exist """ # Arrange - mock_wraps_db.session.query.return_value.first.return_value = MagicMock() mock_get_data.return_value = {"email": "nonexistent@example.com", "phase": "reset"} mock_get_account.return_value = None diff --git a/api/tests/test_containers_integration_tests/controllers/console/helpers.py b/api/tests/test_containers_integration_tests/controllers/console/helpers.py new file mode 100644 index 0000000000..9e2084f393 --- /dev/null +++ b/api/tests/test_containers_integration_tests/controllers/console/helpers.py @@ -0,0 +1,85 @@ +"""Shared helpers for authenticated console controller integration tests.""" + +import uuid + +from flask.testing import FlaskClient +from sqlalchemy import select +from sqlalchemy.orm import Session + +from configs import dify_config +from constants import HEADER_NAME_CSRF_TOKEN +from libs.datetime_utils import naive_utc_now +from libs.token import _real_cookie_name, generate_csrf_token +from models import Account, DifySetup, Tenant, TenantAccountJoin +from models.account import AccountStatus, TenantAccountRole +from models.model import App, AppMode +from services.account_service import AccountService + + +def ensure_dify_setup(db_session: Session) -> None: + """Create a setup marker once so setup-protected console routes can be exercised.""" + if db_session.scalar(select(DifySetup).limit(1)) is not None: + return + + db_session.add(DifySetup(version=dify_config.project.version)) + db_session.commit() + + +def create_console_account_and_tenant(db_session: Session) -> tuple[Account, Tenant]: + """Create an initialized owner account with a current tenant.""" + account = Account( + email=f"test-{uuid.uuid4()}@example.com", + name="Test User", + interface_language="en-US", + status=AccountStatus.ACTIVE, + ) + account.initialized_at = naive_utc_now() + db_session.add(account) + db_session.commit() + + tenant = Tenant(name="Test Tenant", status="normal") + db_session.add(tenant) + db_session.commit() + + db_session.add( + TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role=TenantAccountRole.OWNER, + current=True, + ) + ) + db_session.commit() + + account.set_tenant_id(tenant.id) + account.timezone = "UTC" + db_session.commit() + + ensure_dify_setup(db_session) + return account, tenant + + +def create_console_app(db_session: Session, tenant_id: str, account_id: str, mode: AppMode) -> App: + """Create a minimal app row that can be loaded by 
get_app_model.""" + app = App( + tenant_id=tenant_id, + name="Test App", + mode=mode, + enable_site=True, + enable_api=True, + created_by=account_id, + ) + db_session.add(app) + db_session.commit() + return app + + +def authenticate_console_client(test_client: FlaskClient, account: Account) -> dict[str, str]: + """Attach console auth cookies/headers for endpoints guarded by login_required.""" + access_token = AccountService.get_account_jwt_token(account) + csrf_token = generate_csrf_token(account.id) + test_client.set_cookie(_real_cookie_name("csrf_token"), csrf_token, domain="localhost") + return { + "Authorization": f"Bearer {access_token}", + HEADER_NAME_CSRF_TOKEN: csrf_token, + } diff --git a/api/tests/test_containers_integration_tests/core/rag/retrieval/test_dataset_retrieval_integration.py b/api/tests/test_containers_integration_tests/core/rag/retrieval/test_dataset_retrieval_integration.py index 781e297fa4..ea8d04502a 100644 --- a/api/tests/test_containers_integration_tests/core/rag/retrieval/test_dataset_retrieval_integration.py +++ b/api/tests/test_containers_integration_tests/core/rag/retrieval/test_dataset_retrieval_integration.py @@ -4,6 +4,7 @@ from unittest.mock import patch import pytest from faker import Faker +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.retrieval.dataset_retrieval import DatasetRetrieval from core.workflow.nodes.knowledge_retrieval.retrieval import KnowledgeRetrievalRequest from models.dataset import Dataset, Document @@ -55,7 +56,7 @@ class TestGetAvailableDatasetsIntegration: name=f"Document {i}", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -112,7 +113,7 @@ class TestGetAvailableDatasetsIntegration: created_from=DocumentCreatedFrom.WEB, name=f"Archived Document {i}", created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=True, # Archived @@ -165,7 +166,7 @@ class TestGetAvailableDatasetsIntegration: created_from=DocumentCreatedFrom.WEB, name=f"Disabled Document {i}", created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_status=IndexingStatus.COMPLETED, enabled=False, # Disabled archived=False, @@ -218,7 +219,7 @@ class TestGetAvailableDatasetsIntegration: created_from=DocumentCreatedFrom.WEB, name=f"Document {status}", created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_status=status, # Not completed enabled=True, archived=False, @@ -336,7 +337,7 @@ class TestGetAvailableDatasetsIntegration: created_from=DocumentCreatedFrom.WEB, name=f"Document for {dataset.name}", created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=False, @@ -416,7 +417,7 @@ class TestGetAvailableDatasetsIntegration: created_from=DocumentCreatedFrom.WEB, name=f"Document {i}", created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=False, @@ -476,7 +477,7 @@ class TestKnowledgeRetrievalIntegration: indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=False, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) 
db_session_with_containers.add(document) db_session_with_containers.commit() diff --git a/api/tests/test_containers_integration_tests/repositories/test_execution_extra_content_repository.py b/api/tests/test_containers_integration_tests/repositories/test_execution_extra_content_repository.py deleted file mode 100644 index c9058626d1..0000000000 --- a/api/tests/test_containers_integration_tests/repositories/test_execution_extra_content_repository.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import annotations - -from sqlalchemy.orm import sessionmaker - -from extensions.ext_database import db -from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository -from tests.test_containers_integration_tests.helpers.execution_extra_content import ( - create_human_input_message_fixture, -) - - -def test_get_by_message_ids_returns_human_input_content(db_session_with_containers): - fixture = create_human_input_message_fixture(db_session_with_containers) - repository = SQLAlchemyExecutionExtraContentRepository( - session_maker=sessionmaker(bind=db.engine, expire_on_commit=False) - ) - - results = repository.get_by_message_ids([fixture.message.id]) - - assert len(results) == 1 - assert len(results[0]) == 1 - content = results[0][0] - assert content.submitted is True - assert content.form_submission_data is not None - assert content.form_submission_data.action_id == fixture.action_id - assert content.form_submission_data.action_text == fixture.action_text - assert content.form_submission_data.rendered_content == fixture.form.rendered_content diff --git a/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py b/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py index c3ed79656f..49b370990a 100644 --- a/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py +++ b/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_api_workflow_run_repository.py @@ -27,7 +27,7 @@ from models.human_input import ( HumanInputFormRecipient, RecipientType, ) -from models.workflow import WorkflowAppLog, WorkflowPause, WorkflowPauseReason, WorkflowRun +from models.workflow import WorkflowAppLog, WorkflowAppLogCreatedFrom, WorkflowPause, WorkflowPauseReason, WorkflowRun from repositories.entities.workflow_pause import WorkflowPauseEntity from repositories.sqlalchemy_api_workflow_run_repository import ( DifyAPISQLAlchemyWorkflowRunRepository, @@ -218,7 +218,7 @@ class TestDeleteRunsWithRelated: app_id=test_scope.app_id, workflow_id=test_scope.workflow_id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=test_scope.user_id, ) @@ -278,7 +278,7 @@ class TestCountRunsWithRelated: app_id=test_scope.app_id, workflow_id=test_scope.workflow_id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=test_scope.user_id, ) diff --git a/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py b/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py new file mode 100644 index 0000000000..ed998c9ed0 --- /dev/null +++ 
b/api/tests/test_containers_integration_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py @@ -0,0 +1,407 @@ +"""Integration tests for SQLAlchemyExecutionExtraContentRepository using Testcontainers. + +Part of #32454 — replaces the mock-based unit tests with real database interactions. +""" + +from __future__ import annotations + +from collections.abc import Generator +from dataclasses import dataclass +from datetime import datetime, timedelta +from decimal import Decimal +from uuid import uuid4 + +import pytest +from sqlalchemy import Engine, delete, select +from sqlalchemy.orm import Session, sessionmaker + +from dify_graph.nodes.human_input.entities import FormDefinition, UserAction +from dify_graph.nodes.human_input.enums import HumanInputFormStatus +from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole +from models.enums import ConversationFromSource, InvokeFrom +from models.execution_extra_content import ExecutionExtraContent, HumanInputContent +from models.human_input import ( + ConsoleRecipientPayload, + HumanInputDelivery, + HumanInputForm, + HumanInputFormRecipient, + RecipientType, +) +from models.model import App, Conversation, Message +from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository + + +@dataclass +class _TestScope: + """Per-test data scope used to isolate DB rows. + + IDs are populated after flushing the base entities to the database. + """ + + tenant_id: str = "" + app_id: str = "" + user_id: str = "" + + +def _cleanup_scope_data(session: Session, scope: _TestScope) -> None: + """Remove test-created DB rows for a test scope.""" + form_ids_subquery = select(HumanInputForm.id).where( + HumanInputForm.tenant_id == scope.tenant_id, + ) + session.execute(delete(HumanInputFormRecipient).where(HumanInputFormRecipient.form_id.in_(form_ids_subquery))) + session.execute(delete(HumanInputDelivery).where(HumanInputDelivery.form_id.in_(form_ids_subquery))) + session.execute( + delete(ExecutionExtraContent).where( + ExecutionExtraContent.workflow_run_id.in_( + select(HumanInputForm.workflow_run_id).where(HumanInputForm.tenant_id == scope.tenant_id) + ) + ) + ) + session.execute(delete(HumanInputForm).where(HumanInputForm.tenant_id == scope.tenant_id)) + session.execute(delete(Message).where(Message.app_id == scope.app_id)) + session.execute(delete(Conversation).where(Conversation.app_id == scope.app_id)) + session.execute(delete(App).where(App.id == scope.app_id)) + session.execute(delete(TenantAccountJoin).where(TenantAccountJoin.tenant_id == scope.tenant_id)) + session.execute(delete(Account).where(Account.id == scope.user_id)) + session.execute(delete(Tenant).where(Tenant.id == scope.tenant_id)) + session.commit() + + +def _seed_base_entities(session: Session, scope: _TestScope) -> None: + """Create the base tenant, account, and app needed by tests.""" + tenant = Tenant(name="Test Tenant") + session.add(tenant) + session.flush() + scope.tenant_id = tenant.id + + account = Account( + name="Test Account", + email=f"test_{uuid4()}@example.com", + password="hashed-password", + password_salt="salt", + interface_language="en-US", + timezone="UTC", + ) + session.add(account) + session.flush() + scope.user_id = account.id + + tenant_join = TenantAccountJoin( + tenant_id=scope.tenant_id, + account_id=scope.user_id, + role=TenantAccountRole.OWNER, + current=True, + ) + session.add(tenant_join) + + app = App( + tenant_id=scope.tenant_id, + name="Test App", + description="", + 
mode="chat", + icon_type="emoji", + icon="bot", + icon_background="#FFFFFF", + enable_site=False, + enable_api=True, + api_rpm=100, + api_rph=100, + is_demo=False, + is_public=False, + is_universal=False, + created_by=scope.user_id, + updated_by=scope.user_id, + ) + session.add(app) + session.flush() + scope.app_id = app.id + + +def _create_conversation(session: Session, scope: _TestScope) -> Conversation: + conversation = Conversation( + app_id=scope.app_id, + mode="chat", + name="Test Conversation", + summary="", + introduction="", + system_instruction="", + status="normal", + invoke_from=InvokeFrom.EXPLORE, + from_source=ConversationFromSource.CONSOLE, + from_account_id=scope.user_id, + from_end_user_id=None, + ) + conversation.inputs = {} + session.add(conversation) + session.flush() + return conversation + + +def _create_message( + session: Session, + scope: _TestScope, + conversation_id: str, + workflow_run_id: str, +) -> Message: + message = Message( + app_id=scope.app_id, + conversation_id=conversation_id, + inputs={}, + query="test query", + message={"messages": []}, + answer="test answer", + message_tokens=50, + message_unit_price=Decimal("0.001"), + answer_tokens=80, + answer_unit_price=Decimal("0.001"), + provider_response_latency=0.5, + currency="USD", + from_source=ConversationFromSource.CONSOLE, + from_account_id=scope.user_id, + workflow_run_id=workflow_run_id, + ) + session.add(message) + session.flush() + return message + + +def _create_submitted_form( + session: Session, + scope: _TestScope, + *, + workflow_run_id: str, + action_id: str = "approve", + action_title: str = "Approve", + node_title: str = "Approval", +) -> HumanInputForm: + expiration_time = datetime.utcnow() + timedelta(days=1) + form_definition = FormDefinition( + form_content="content", + inputs=[], + user_actions=[UserAction(id=action_id, title=action_title)], + rendered_content="rendered", + expiration_time=expiration_time, + node_title=node_title, + display_in_ui=True, + ) + form = HumanInputForm( + tenant_id=scope.tenant_id, + app_id=scope.app_id, + workflow_run_id=workflow_run_id, + node_id="node-id", + form_definition=form_definition.model_dump_json(), + rendered_content=f"Rendered {action_title}", + status=HumanInputFormStatus.SUBMITTED, + expiration_time=expiration_time, + selected_action_id=action_id, + ) + session.add(form) + session.flush() + return form + + +def _create_waiting_form( + session: Session, + scope: _TestScope, + *, + workflow_run_id: str, + default_values: dict | None = None, +) -> HumanInputForm: + expiration_time = datetime.utcnow() + timedelta(days=1) + form_definition = FormDefinition( + form_content="content", + inputs=[], + user_actions=[UserAction(id="approve", title="Approve")], + rendered_content="rendered", + expiration_time=expiration_time, + default_values=default_values or {"name": "John"}, + node_title="Approval", + display_in_ui=True, + ) + form = HumanInputForm( + tenant_id=scope.tenant_id, + app_id=scope.app_id, + workflow_run_id=workflow_run_id, + node_id="node-id", + form_definition=form_definition.model_dump_json(), + rendered_content="Rendered block", + status=HumanInputFormStatus.WAITING, + expiration_time=expiration_time, + ) + session.add(form) + session.flush() + return form + + +def _create_human_input_content( + session: Session, + *, + workflow_run_id: str, + message_id: str, + form_id: str, +) -> HumanInputContent: + content = HumanInputContent.new( + workflow_run_id=workflow_run_id, + message_id=message_id, + form_id=form_id, + ) + 
session.add(content) + return content + + +def _create_recipient( + session: Session, + *, + form_id: str, + delivery_id: str, + recipient_type: RecipientType = RecipientType.CONSOLE, + access_token: str = "token-1", +) -> HumanInputFormRecipient: + payload = ConsoleRecipientPayload(account_id=None) + recipient = HumanInputFormRecipient( + form_id=form_id, + delivery_id=delivery_id, + recipient_type=recipient_type, + recipient_payload=payload.model_dump_json(), + access_token=access_token, + ) + session.add(recipient) + return recipient + + +def _create_delivery(session: Session, *, form_id: str) -> HumanInputDelivery: + from dify_graph.nodes.human_input.enums import DeliveryMethodType + from models.human_input import ConsoleDeliveryPayload + + delivery = HumanInputDelivery( + form_id=form_id, + delivery_method_type=DeliveryMethodType.WEBAPP, + channel_payload=ConsoleDeliveryPayload().model_dump_json(), + ) + session.add(delivery) + session.flush() + return delivery + + +@pytest.fixture +def repository(db_session_with_containers: Session) -> SQLAlchemyExecutionExtraContentRepository: + """Build a repository backed by the testcontainers database engine.""" + engine = db_session_with_containers.get_bind() + assert isinstance(engine, Engine) + return SQLAlchemyExecutionExtraContentRepository(sessionmaker(bind=engine, expire_on_commit=False)) + + +@pytest.fixture +def test_scope(db_session_with_containers: Session) -> Generator[_TestScope]: + """Provide an isolated scope and clean related data after each test.""" + scope = _TestScope() + _seed_base_entities(db_session_with_containers, scope) + db_session_with_containers.commit() + yield scope + _cleanup_scope_data(db_session_with_containers, scope) + + +class TestGetByMessageIds: + """Tests for SQLAlchemyExecutionExtraContentRepository.get_by_message_ids.""" + + def test_groups_contents_by_message( + self, + db_session_with_containers: Session, + repository: SQLAlchemyExecutionExtraContentRepository, + test_scope: _TestScope, + ) -> None: + """Submitted forms are correctly mapped and grouped by message ID.""" + workflow_run_id = str(uuid4()) + conversation = _create_conversation(db_session_with_containers, test_scope) + msg1 = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id) + msg2 = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id) + + form = _create_submitted_form( + db_session_with_containers, + test_scope, + workflow_run_id=workflow_run_id, + action_id="approve", + action_title="Approve", + ) + _create_human_input_content( + db_session_with_containers, + workflow_run_id=workflow_run_id, + message_id=msg1.id, + form_id=form.id, + ) + db_session_with_containers.commit() + + result = repository.get_by_message_ids([msg1.id, msg2.id]) + + assert len(result) == 2 + # msg1 has one submitted content + assert len(result[0]) == 1 + content = result[0][0] + assert content.submitted is True + assert content.workflow_run_id == workflow_run_id + assert content.form_submission_data is not None + assert content.form_submission_data.action_id == "approve" + assert content.form_submission_data.action_text == "Approve" + assert content.form_submission_data.rendered_content == "Rendered Approve" + assert content.form_submission_data.node_id == "node-id" + assert content.form_submission_data.node_title == "Approval" + # msg2 has no content + assert result[1] == [] + + def test_returns_unsubmitted_form_definition( + self, + db_session_with_containers: Session, + repository: 
SQLAlchemyExecutionExtraContentRepository, + test_scope: _TestScope, + ) -> None: + """Waiting forms return full form_definition with resolved token and defaults.""" + workflow_run_id = str(uuid4()) + conversation = _create_conversation(db_session_with_containers, test_scope) + msg = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id) + + form = _create_waiting_form( + db_session_with_containers, + test_scope, + workflow_run_id=workflow_run_id, + default_values={"name": "John"}, + ) + delivery = _create_delivery(db_session_with_containers, form_id=form.id) + _create_recipient( + db_session_with_containers, + form_id=form.id, + delivery_id=delivery.id, + access_token="token-1", + ) + _create_human_input_content( + db_session_with_containers, + workflow_run_id=workflow_run_id, + message_id=msg.id, + form_id=form.id, + ) + db_session_with_containers.commit() + + result = repository.get_by_message_ids([msg.id]) + + assert len(result) == 1 + assert len(result[0]) == 1 + domain_content = result[0][0] + assert domain_content.submitted is False + assert domain_content.workflow_run_id == workflow_run_id + assert domain_content.form_definition is not None + form_def = domain_content.form_definition + assert form_def.form_id == form.id + assert form_def.node_id == "node-id" + assert form_def.node_title == "Approval" + assert form_def.form_content == "Rendered block" + assert form_def.display_in_ui is True + assert form_def.form_token == "token-1" + assert form_def.resolved_default_values == {"name": "John"} + assert form_def.expiration_time == int(form.expiration_time.timestamp()) + + def test_empty_message_ids_returns_empty_list( + self, + repository: SQLAlchemyExecutionExtraContentRepository, + ) -> None: + """Passing no message IDs returns an empty list without hitting the DB.""" + result = repository.get_by_message_ids([]) + assert result == [] diff --git a/api/tests/test_containers_integration_tests/services/document_service_status.py b/api/tests/test_containers_integration_tests/services/document_service_status.py index f995ac7bef..42d587b7f7 100644 --- a/api/tests/test_containers_integration_tests/services/document_service_status.py +++ b/api/tests/test_containers_integration_tests/services/document_service_status.py @@ -13,6 +13,7 @@ from uuid import uuid4 import pytest +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from models import Account from models.dataset import Dataset, Document @@ -91,7 +92,7 @@ class DocumentStatusTestDataFactory: name=name, created_from=DocumentCreatedFrom.WEB, created_by=created_by, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) document.id = document_id document.indexing_status = indexing_status diff --git a/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py b/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py index 7ce7357b41..b8e022503f 100644 --- a/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py +++ b/api/tests/test_containers_integration_tests/services/test_api_based_extension_service.py @@ -525,3 +525,147 @@ class TestAPIBasedExtensionService: # Try to get extension with wrong tenant ID with pytest.raises(ValueError, match="API based extension is not found"): APIBasedExtensionService.get_with_tenant_id(tenant2.id, created_extension.id) + + def 
test_save_extension_api_key_exactly_four_chars_rejected( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """API key with exactly 4 characters should be rejected (boundary).""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant is not None + + extension_data = APIBasedExtension( + tenant_id=tenant.id, + name=fake.company(), + api_endpoint=f"https://{fake.domain_name()}/api", + api_key="1234", + ) + + with pytest.raises(ValueError, match="api_key must be at least 5 characters"): + APIBasedExtensionService.save(extension_data) + + def test_save_extension_api_key_exactly_five_chars_accepted( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """API key with exactly 5 characters should be accepted (boundary).""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant is not None + + extension_data = APIBasedExtension( + tenant_id=tenant.id, + name=fake.company(), + api_endpoint=f"https://{fake.domain_name()}/api", + api_key="12345", + ) + + saved = APIBasedExtensionService.save(extension_data) + assert saved.id is not None + + def test_save_extension_requestor_constructor_error( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Exception raised by requestor constructor is wrapped in ValueError.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant is not None + + mock_external_service_dependencies["requestor"].side_effect = RuntimeError("bad config") + + extension_data = APIBasedExtension( + tenant_id=tenant.id, + name=fake.company(), + api_endpoint=f"https://{fake.domain_name()}/api", + api_key=fake.password(length=20), + ) + + with pytest.raises(ValueError, match="connection error: bad config"): + APIBasedExtensionService.save(extension_data) + + def test_save_extension_network_exception( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Network exceptions during ping are wrapped in ValueError.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant is not None + + mock_external_service_dependencies["requestor_instance"].request.side_effect = ConnectionError( + "network failure" + ) + + extension_data = APIBasedExtension( + tenant_id=tenant.id, + name=fake.company(), + api_endpoint=f"https://{fake.domain_name()}/api", + api_key=fake.password(length=20), + ) + + with pytest.raises(ValueError, match="connection error: network failure"): + APIBasedExtensionService.save(extension_data) + + def test_save_extension_update_duplicate_name_rejected( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Updating an existing extension to use another extension's name should fail.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant is not None + + ext1 = APIBasedExtensionService.save( + APIBasedExtension( + tenant_id=tenant.id, + name="Extension Alpha", + api_endpoint=f"https://{fake.domain_name()}/api", + api_key=fake.password(length=20), + ) + ) + ext2 = APIBasedExtensionService.save( + 
APIBasedExtension( + tenant_id=tenant.id, + name="Extension Beta", + api_endpoint=f"https://{fake.domain_name()}/api", + api_key=fake.password(length=20), + ) + ) + + # Try to rename ext2 to ext1's name + ext2.name = "Extension Alpha" + with pytest.raises(ValueError, match="name must be unique, it is already existed"): + APIBasedExtensionService.save(ext2) + + def test_get_all_returns_empty_for_different_tenant( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Extensions from one tenant should not be visible to another.""" + fake = Faker() + _, tenant1 = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + _, tenant2 = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + assert tenant1 is not None + + APIBasedExtensionService.save( + APIBasedExtension( + tenant_id=tenant1.id, + name=fake.company(), + api_endpoint=f"https://{fake.domain_name()}/api", + api_key=fake.password(length=20), + ) + ) + + assert tenant2 is not None + result = APIBasedExtensionService.get_all_by_tenant_id(tenant2.id) + assert result == [] diff --git a/api/tests/test_containers_integration_tests/services/test_app_service.py b/api/tests/test_containers_integration_tests/services/test_app_service.py index d79f80c009..9ca8729b77 100644 --- a/api/tests/test_containers_integration_tests/services/test_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_app_service.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from constants.model_template import default_app_templates from models import Account -from models.model import App, Site +from models.model import App, IconType, Site from services.account_service import AccountService, TenantService from tests.test_containers_integration_tests.helpers import generate_valid_password @@ -463,6 +463,109 @@ class TestAppService: assert updated_app.tenant_id == app.tenant_id assert updated_app.created_by == app.created_by + def test_update_app_should_preserve_icon_type_when_omitted( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """ + Test update_app keeps the persisted icon_type when the update payload omits it. 
+ """ + fake = Faker() + + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=generate_valid_password(fake), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + from services.app_service import AppService + + app_service = AppService() + app = app_service.create_app( + tenant.id, + { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🎯", + "icon_background": "#45B7D1", + }, + account, + ) + + mock_current_user = create_autospec(Account, instance=True) + mock_current_user.id = account.id + mock_current_user.current_tenant_id = account.current_tenant_id + + with patch("services.app_service.current_user", mock_current_user): + updated_app = app_service.update_app( + app, + { + "name": "Updated App Name", + "description": "Updated app description", + "icon_type": None, + "icon": "🔄", + "icon_background": "#FF8C42", + "use_icon_as_answer_icon": True, + }, + ) + + assert updated_app.icon_type == IconType.EMOJI + + def test_update_app_should_reject_empty_icon_type( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """ + Test update_app rejects an explicit empty icon_type. + """ + fake = Faker() + + account = AccountService.create_account( + email=fake.email(), + name=fake.name(), + interface_language="en-US", + password=generate_valid_password(fake), + ) + TenantService.create_owner_tenant_if_not_exist(account, name=fake.company()) + tenant = account.current_tenant + + from services.app_service import AppService + + app_service = AppService() + app = app_service.create_app( + tenant.id, + { + "name": fake.company(), + "description": fake.text(max_nb_chars=100), + "mode": "chat", + "icon_type": "emoji", + "icon": "🎯", + "icon_background": "#45B7D1", + }, + account, + ) + + mock_current_user = create_autospec(Account, instance=True) + mock_current_user.id = account.id + mock_current_user.current_tenant_id = account.current_tenant_id + + with patch("services.app_service.current_user", mock_current_user): + with pytest.raises(ValueError): + app_service.update_app( + app, + { + "name": "Updated App Name", + "description": "Updated app description", + "icon_type": "", + "icon": "🔄", + "icon_background": "#FF8C42", + "use_icon_as_answer_icon": True, + }, + ) + def test_update_app_name_success(self, db_session_with_containers: Session, mock_external_service_dependencies): """ Test successful app name update. 
diff --git a/api/tests/test_containers_integration_tests/services/test_attachment_service.py b/api/tests/test_containers_integration_tests/services/test_attachment_service.py new file mode 100644 index 0000000000..768a8baee2 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_attachment_service.py @@ -0,0 +1,80 @@ +"""Testcontainers integration tests for AttachmentService.""" + +import base64 +from datetime import UTC, datetime +from unittest.mock import patch +from uuid import uuid4 + +import pytest +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker +from werkzeug.exceptions import NotFound + +import services.attachment_service as attachment_service_module +from extensions.ext_database import db +from extensions.storage.storage_type import StorageType +from models.enums import CreatorUserRole +from models.model import UploadFile +from services.attachment_service import AttachmentService + + +class TestAttachmentService: + def _create_upload_file(self, db_session_with_containers, *, tenant_id: str | None = None) -> UploadFile: + upload_file = UploadFile( + tenant_id=tenant_id or str(uuid4()), + storage_type=StorageType.OPENDAL, + key=f"upload/{uuid4()}.txt", + name="test-file.txt", + size=100, + extension="txt", + mime_type="text/plain", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=str(uuid4()), + created_at=datetime.now(UTC), + used=False, + ) + db_session_with_containers.add(upload_file) + db_session_with_containers.commit() + return upload_file + + def test_should_initialize_with_sessionmaker(self): + session_factory = sessionmaker() + + service = AttachmentService(session_factory=session_factory) + + assert service._session_maker is session_factory + + def test_should_initialize_with_engine(self): + engine = create_engine("sqlite:///:memory:") + + service = AttachmentService(session_factory=engine) + session = service._session_maker() + try: + assert session.bind == engine + finally: + session.close() + engine.dispose() + + @pytest.mark.parametrize("invalid_session_factory", [None, "not-a-session-factory", 1]) + def test_should_raise_assertion_error_for_invalid_session_factory(self, invalid_session_factory): + with pytest.raises(AssertionError, match="must be a sessionmaker or an Engine."): + AttachmentService(session_factory=invalid_session_factory) + + def test_should_return_base64_when_file_exists(self, db_session_with_containers): + upload_file = self._create_upload_file(db_session_with_containers) + service = AttachmentService(session_factory=sessionmaker(bind=db.engine)) + + with patch.object(attachment_service_module.storage, "load_once", return_value=b"binary-content") as mock_load: + result = service.get_file_base64(upload_file.id) + + assert result == base64.b64encode(b"binary-content").decode() + mock_load.assert_called_once_with(upload_file.key) + + def test_should_raise_not_found_when_file_missing(self, db_session_with_containers): + service = AttachmentService(session_factory=sessionmaker(bind=db.engine)) + + with patch.object(attachment_service_module.storage, "load_once") as mock_load: + with pytest.raises(NotFound, match="File not found"): + service.get_file_base64(str(uuid4())) + + mock_load.assert_not_called() diff --git a/api/tests/test_containers_integration_tests/services/test_conversation_variable_updater.py b/api/tests/test_containers_integration_tests/services/test_conversation_variable_updater.py new file mode 100644 index 0000000000..42a2215896 --- /dev/null +++ 
b/api/tests/test_containers_integration_tests/services/test_conversation_variable_updater.py @@ -0,0 +1,58 @@ +"""Testcontainers integration tests for ConversationVariableUpdater.""" + +from uuid import uuid4 + +import pytest +from sqlalchemy.orm import sessionmaker + +from dify_graph.variables import StringVariable +from extensions.ext_database import db +from models.workflow import ConversationVariable +from services.conversation_variable_updater import ConversationVariableNotFoundError, ConversationVariableUpdater + + +class TestConversationVariableUpdater: + def _create_conversation_variable( + self, db_session_with_containers, *, conversation_id: str, variable: StringVariable, app_id: str | None = None + ) -> ConversationVariable: + row = ConversationVariable( + id=variable.id, + conversation_id=conversation_id, + app_id=app_id or str(uuid4()), + data=variable.model_dump_json(), + ) + db_session_with_containers.add(row) + db_session_with_containers.commit() + return row + + def test_should_update_conversation_variable_data_and_commit(self, db_session_with_containers): + conversation_id = str(uuid4()) + variable = StringVariable(id=str(uuid4()), name="topic", value="old value") + self._create_conversation_variable( + db_session_with_containers, conversation_id=conversation_id, variable=variable + ) + + updated_variable = StringVariable(id=variable.id, name="topic", value="new value") + updater = ConversationVariableUpdater(sessionmaker(bind=db.engine)) + + updater.update(conversation_id=conversation_id, variable=updated_variable) + + db_session_with_containers.expire_all() + row = db_session_with_containers.get(ConversationVariable, (variable.id, conversation_id)) + assert row is not None + assert row.data == updated_variable.model_dump_json() + + def test_should_raise_not_found_when_variable_missing(self, db_session_with_containers): + conversation_id = str(uuid4()) + variable = StringVariable(id=str(uuid4()), name="topic", value="value") + updater = ConversationVariableUpdater(sessionmaker(bind=db.engine)) + + with pytest.raises(ConversationVariableNotFoundError, match="conversation variable not found in the database"): + updater.update(conversation_id=conversation_id, variable=variable) + + def test_should_do_nothing_when_flush_is_called(self, db_session_with_containers): + updater = ConversationVariableUpdater(sessionmaker(bind=db.engine)) + + result = updater.flush() + + assert result is None diff --git a/api/tests/test_containers_integration_tests/services/test_credit_pool_service.py b/api/tests/test_containers_integration_tests/services/test_credit_pool_service.py new file mode 100644 index 0000000000..0f63d98642 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_credit_pool_service.py @@ -0,0 +1,104 @@ +"""Testcontainers integration tests for CreditPoolService.""" + +from uuid import uuid4 + +import pytest + +from core.errors.error import QuotaExceededError +from models import TenantCreditPool +from models.enums import ProviderQuotaType +from services.credit_pool_service import CreditPoolService + + +class TestCreditPoolService: + def _create_tenant_id(self) -> str: + return str(uuid4()) + + def test_create_default_pool(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + + pool = CreditPoolService.create_default_pool(tenant_id) + + assert isinstance(pool, TenantCreditPool) + assert pool.tenant_id == tenant_id + assert pool.pool_type == ProviderQuotaType.TRIAL + assert pool.quota_used == 0 + assert pool.quota_limit > 0 + + def 
test_get_pool_returns_pool_when_exists(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + CreditPoolService.create_default_pool(tenant_id) + + result = CreditPoolService.get_pool(tenant_id=tenant_id, pool_type=ProviderQuotaType.TRIAL) + + assert result is not None + assert result.tenant_id == tenant_id + assert result.pool_type == ProviderQuotaType.TRIAL + + def test_get_pool_returns_none_when_not_exists(self, db_session_with_containers): + result = CreditPoolService.get_pool(tenant_id=self._create_tenant_id(), pool_type=ProviderQuotaType.TRIAL) + + assert result is None + + def test_check_credits_available_returns_false_when_no_pool(self, db_session_with_containers): + result = CreditPoolService.check_credits_available(tenant_id=self._create_tenant_id(), credits_required=10) + + assert result is False + + def test_check_credits_available_returns_true_when_sufficient(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + CreditPoolService.create_default_pool(tenant_id) + + result = CreditPoolService.check_credits_available(tenant_id=tenant_id, credits_required=10) + + assert result is True + + def test_check_credits_available_returns_false_when_insufficient(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + pool = CreditPoolService.create_default_pool(tenant_id) + # Exhaust credits + pool.quota_used = pool.quota_limit + db_session_with_containers.commit() + + result = CreditPoolService.check_credits_available(tenant_id=tenant_id, credits_required=1) + + assert result is False + + def test_check_and_deduct_credits_raises_when_no_pool(self, db_session_with_containers): + with pytest.raises(QuotaExceededError, match="Credit pool not found"): + CreditPoolService.check_and_deduct_credits(tenant_id=self._create_tenant_id(), credits_required=10) + + def test_check_and_deduct_credits_raises_when_no_remaining(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + pool = CreditPoolService.create_default_pool(tenant_id) + pool.quota_used = pool.quota_limit + db_session_with_containers.commit() + + with pytest.raises(QuotaExceededError, match="No credits remaining"): + CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=10) + + def test_check_and_deduct_credits_deducts_required_amount(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + CreditPoolService.create_default_pool(tenant_id) + credits_required = 10 + + result = CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=credits_required) + + assert result == credits_required + db_session_with_containers.expire_all() + pool = CreditPoolService.get_pool(tenant_id=tenant_id) + assert pool.quota_used == credits_required + + def test_check_and_deduct_credits_caps_at_remaining(self, db_session_with_containers): + tenant_id = self._create_tenant_id() + pool = CreditPoolService.create_default_pool(tenant_id) + remaining = 5 + pool.quota_used = pool.quota_limit - remaining + db_session_with_containers.commit() + + result = CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=200) + + assert result == remaining + db_session_with_containers.expire_all() + updated_pool = CreditPoolService.get_pool(tenant_id=tenant_id) + assert updated_pool.quota_used == pool.quota_limit diff --git a/api/tests/test_containers_integration_tests/services/test_dataset_permission_service.py b/api/tests/test_containers_integration_tests/services/test_dataset_permission_service.py index 
975af3d428..55bfb64e18 100644 --- a/api/tests/test_containers_integration_tests/services/test_dataset_permission_service.py +++ b/api/tests/test_containers_integration_tests/services/test_dataset_permission_service.py @@ -397,6 +397,68 @@ class TestDatasetPermissionServiceClearPartialMemberList: class TestDatasetServiceCheckDatasetPermission: """Verify dataset access checks against persisted partial-member permissions.""" + def test_check_dataset_permission_different_tenant_should_fail(self, db_session_with_containers): + """Test that users from different tenants cannot access dataset.""" + owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER) + other_user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, owner.id, permission=DatasetPermissionEnum.ALL_TEAM + ) + + with pytest.raises(NoPermissionError): + DatasetService.check_dataset_permission(dataset, other_user) + + def test_check_dataset_permission_owner_can_access_any_dataset(self, db_session_with_containers): + """Test that tenant owners can access any dataset regardless of permission level.""" + owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER) + creator, _ = DatasetPermissionTestDataFactory.create_account_with_tenant( + role=TenantAccountRole.NORMAL, tenant=tenant + ) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME + ) + + DatasetService.check_dataset_permission(dataset, owner) + + def test_check_dataset_permission_only_me_creator_can_access(self, db_session_with_containers): + """Test ONLY_ME permission allows only the dataset creator to access.""" + creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME + ) + + DatasetService.check_dataset_permission(dataset, creator) + + def test_check_dataset_permission_only_me_others_cannot_access(self, db_session_with_containers): + """Test ONLY_ME permission denies access to non-creators.""" + creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.NORMAL) + other, _ = DatasetPermissionTestDataFactory.create_account_with_tenant( + role=TenantAccountRole.NORMAL, tenant=tenant + ) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME + ) + + with pytest.raises(NoPermissionError): + DatasetService.check_dataset_permission(dataset, other) + + def test_check_dataset_permission_all_team_allows_access(self, db_session_with_containers): + """Test ALL_TEAM permission allows any team member to access the dataset.""" + creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.NORMAL) + member, _ = DatasetPermissionTestDataFactory.create_account_with_tenant( + role=TenantAccountRole.NORMAL, tenant=tenant + ) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, creator.id, permission=DatasetPermissionEnum.ALL_TEAM + ) + + DatasetService.check_dataset_permission(dataset, member) + def test_check_dataset_permission_partial_members_with_permission_success(self, db_session_with_containers): """ Test that user with explicit permission can 
access partial_members dataset. @@ -443,6 +505,16 @@ class TestDatasetServiceCheckDatasetPermission: with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"): DatasetService.check_dataset_permission(dataset, user) + def test_check_dataset_permission_partial_team_creator_can_access(self, db_session_with_containers): + """Test PARTIAL_TEAM permission allows creator to access without explicit permission.""" + creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR) + + dataset = DatasetPermissionTestDataFactory.create_dataset( + tenant.id, creator.id, permission=DatasetPermissionEnum.PARTIAL_TEAM + ) + + DatasetService.check_dataset_permission(dataset, creator) + class TestDatasetServiceCheckDatasetOperatorPermission: """Verify operator permission checks against persisted partial-member permissions.""" diff --git a/api/tests/test_containers_integration_tests/services/test_dataset_service.py b/api/tests/test_containers_integration_tests/services/test_dataset_service.py index ac3d9f9604..a484c7be87 100644 --- a/api/tests/test_containers_integration_tests/services/test_dataset_service.py +++ b/api/tests/test_containers_integration_tests/services/test_dataset_service.py @@ -11,6 +11,7 @@ from uuid import uuid4 import pytest from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.retrieval.retrieval_methods import RetrievalMethod from dify_graph.model_runtime.entities.model_entities import ModelType from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole @@ -106,7 +107,7 @@ class DatasetServiceIntegrationDataFactory: created_from=DocumentCreatedFrom.WEB, created_by=created_by, indexing_status=IndexingStatus.COMPLETED, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) db_session_with_containers.flush() diff --git a/api/tests/test_containers_integration_tests/services/test_dataset_service_batch_update_document_status.py b/api/tests/test_containers_integration_tests/services/test_dataset_service_batch_update_document_status.py index 7983b1cd93..c1d088755c 100644 --- a/api/tests/test_containers_integration_tests/services/test_dataset_service_batch_update_document_status.py +++ b/api/tests/test_containers_integration_tests/services/test_dataset_service_batch_update_document_status.py @@ -13,6 +13,7 @@ from uuid import uuid4 import pytest from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, Document from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus from services.dataset_service import DocumentService @@ -79,7 +80,7 @@ class DocumentBatchUpdateIntegrationDataFactory: name=name, created_from=DocumentCreatedFrom.WEB, created_by=created_by or str(uuid4()), - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) document.id = document_id or str(uuid4()) document.enabled = enabled @@ -694,3 +695,19 @@ class TestDatasetServiceBatchUpdateDocumentStatus: patched_dependencies["redis_client"].setex.assert_called_once_with(f"document_{doc1.id}_indexing", 600, 1) patched_dependencies["add_task"].delay.assert_called_once_with(doc1.id) + + def test_batch_update_invalid_action_raises_value_error( + self, db_session_with_containers: Session, patched_dependencies + ): + """Test that an invalid action raises ValueError.""" + factory = 
DocumentBatchUpdateIntegrationDataFactory + dataset = factory.create_dataset(db_session_with_containers) + doc = factory.create_document(db_session_with_containers, dataset) + user = UserDouble(id=str(uuid4())) + + patched_dependencies["redis_client"].get.return_value = None + + with pytest.raises(ValueError, match="Invalid action"): + DocumentService.batch_update_document_status( + dataset=dataset, document_ids=[doc.id], action="invalid_action", user=user + ) diff --git a/api/tests/test_containers_integration_tests/services/test_dataset_service_create_dataset.py b/api/tests/test_containers_integration_tests/services/test_dataset_service_create_dataset.py new file mode 100644 index 0000000000..c486ff5613 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_dataset_service_create_dataset.py @@ -0,0 +1,60 @@ +"""Testcontainers integration tests for DatasetService.create_empty_rag_pipeline_dataset.""" + +from __future__ import annotations + +from unittest.mock import Mock, patch +from uuid import uuid4 + +import pytest + +from models.account import Account, Tenant, TenantAccountJoin +from services.dataset_service import DatasetService +from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity + + +class TestDatasetServiceCreateRagPipelineDataset: + def _create_tenant_and_account(self, db_session_with_containers) -> tuple[Tenant, Account]: + tenant = Tenant(name=f"Tenant {uuid4()}") + db_session_with_containers.add(tenant) + db_session_with_containers.flush() + + account = Account( + name=f"Account {uuid4()}", + email=f"ds_create_{uuid4()}@example.com", + password="hashed", + password_salt="salt", + interface_language="en-US", + timezone="UTC", + ) + db_session_with_containers.add(account) + db_session_with_containers.flush() + + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role="owner", + current=True, + ) + db_session_with_containers.add(join) + db_session_with_containers.commit() + return tenant, account + + def _build_entity(self, name: str = "Test Dataset") -> RagPipelineDatasetCreateEntity: + icon_info = IconInfo(icon="\U0001f4d9", icon_background="#FFF4ED", icon_type="emoji") + return RagPipelineDatasetCreateEntity( + name=name, + description="", + icon_info=icon_info, + permission="only_me", + ) + + def test_create_rag_pipeline_dataset_raises_when_current_user_id_is_none(self, db_session_with_containers): + tenant, _ = self._create_tenant_and_account(db_session_with_containers) + + mock_user = Mock(id=None) + with patch("services.dataset_service.current_user", mock_user): + with pytest.raises(ValueError, match="Current user or current user id not found"): + DatasetService.create_empty_rag_pipeline_dataset( + tenant_id=tenant.id, + rag_pipeline_dataset_create_entity=self._build_entity(), + ) diff --git a/api/tests/test_containers_integration_tests/services/test_dataset_service_delete_dataset.py b/api/tests/test_containers_integration_tests/services/test_dataset_service_delete_dataset.py index ed070527c9..807d18322c 100644 --- a/api/tests/test_containers_integration_tests/services/test_dataset_service_delete_dataset.py +++ b/api/tests/test_containers_integration_tests/services/test_dataset_service_delete_dataset.py @@ -3,6 +3,7 @@ from unittest.mock import patch from uuid import uuid4 +from core.rag.index_processor.constant.index_type import IndexStructureType from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, 
Document from models.enums import DataSourceType, DocumentCreatedFrom @@ -78,7 +79,7 @@ class DatasetDeleteIntegrationDataFactory: tenant_id: str, dataset_id: str, created_by: str, - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, ) -> Document: """Persist a document so dataset.doc_form resolves through the real document path.""" document = Document( @@ -119,7 +120,7 @@ class TestDatasetServiceDeleteDataset: tenant_id=tenant.id, dataset_id=dataset.id, created_by=owner.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Act diff --git a/api/tests/test_containers_integration_tests/services/test_document_service_display_status.py b/api/tests/test_containers_integration_tests/services/test_document_service_display_status.py index c6aa89c733..c0047df810 100644 --- a/api/tests/test_containers_integration_tests/services/test_document_service_display_status.py +++ b/api/tests/test_containers_integration_tests/services/test_document_service_display_status.py @@ -3,6 +3,7 @@ from uuid import uuid4 from sqlalchemy import select +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, Document from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus from services.dataset_service import DocumentService @@ -42,7 +43,7 @@ def _create_document( name=f"doc-{uuid4()}", created_from=DocumentCreatedFrom.WEB, created_by=str(uuid4()), - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) document.id = str(uuid4()) document.indexing_status = indexing_status @@ -142,3 +143,11 @@ def test_apply_display_status_filter_returns_same_when_invalid(db_session_with_c rows = db_session_with_containers.scalars(filtered).all() assert {row.id for row in rows} == {doc1.id, doc2.id} + + +def test_normalize_display_status_alias_mapping(): + """Test that normalize_display_status maps aliases correctly.""" + assert DocumentService.normalize_display_status("ACTIVE") == "available" + assert DocumentService.normalize_display_status("enabled") == "available" + assert DocumentService.normalize_display_status("archived") == "archived" + assert DocumentService.normalize_display_status("unknown") is None diff --git a/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py b/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py index bffa520ce6..34532ed7f8 100644 --- a/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py +++ b/api/tests/test_containers_integration_tests/services/test_document_service_rename_document.py @@ -7,6 +7,7 @@ from uuid import uuid4 import pytest +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from models import Account from models.dataset import Dataset, Document @@ -69,7 +70,7 @@ def make_document( name=name, created_from=DocumentCreatedFrom.WEB, created_by=str(uuid4()), - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) doc.id = document_id doc.indexing_status = "completed" diff --git a/api/tests/test_containers_integration_tests/services/test_end_user_service.py b/api/tests/test_containers_integration_tests/services/test_end_user_service.py index ae811db768..cafabc939b 100644 --- a/api/tests/test_containers_integration_tests/services/test_end_user_service.py +++ 
b/api/tests/test_containers_integration_tests/services/test_end_user_service.py @@ -414,3 +414,144 @@ class TestEndUserServiceGetEndUserById: ) assert result is None + + +class TestEndUserServiceCreateBatch: + """Integration tests for EndUserService.create_end_user_batch.""" + + @pytest.fixture + def factory(self): + return TestEndUserServiceFactory() + + def _create_multiple_apps(self, db_session_with_containers, factory, count: int = 3): + """Create multiple apps under the same tenant.""" + first_app = factory.create_app_and_account(db_session_with_containers) + tenant_id = first_app.tenant_id + apps = [first_app] + for _ in range(count - 1): + app = App( + tenant_id=tenant_id, + name=f"App {uuid4()}", + description="", + mode="chat", + icon_type="emoji", + icon="bot", + icon_background="#FFFFFF", + enable_site=False, + enable_api=True, + api_rpm=100, + api_rph=100, + is_demo=False, + is_public=False, + is_universal=False, + created_by=first_app.created_by, + updated_by=first_app.updated_by, + ) + db_session_with_containers.add(app) + db_session_with_containers.commit() + all_apps = db_session_with_containers.query(App).filter(App.tenant_id == tenant_id).all() + return tenant_id, all_apps + + def test_create_batch_empty_app_ids(self, db_session_with_containers): + result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, tenant_id=str(uuid4()), app_ids=[], user_id="user-1" + ) + assert result == {} + + def test_create_batch_creates_users_for_all_apps(self, db_session_with_containers, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=3) + app_ids = [a.id for a in apps] + user_id = f"user-{uuid4()}" + + result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id + ) + + assert len(result) == 3 + for app_id in app_ids: + assert app_id in result + assert result[app_id].session_id == user_id + assert result[app_id].type == InvokeFrom.SERVICE_API + + def test_create_batch_default_session_id(self, db_session_with_containers, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=2) + app_ids = [a.id for a in apps] + + result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id="" + ) + + assert len(result) == 2 + for end_user in result.values(): + assert end_user.session_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID + assert end_user._is_anonymous is True + + def test_create_batch_deduplicate_app_ids(self, db_session_with_containers, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=2) + app_ids = [apps[0].id, apps[1].id, apps[0].id, apps[1].id] + user_id = f"user-{uuid4()}" + + result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id + ) + + assert len(result) == 2 + + def test_create_batch_returns_existing_users(self, db_session_with_containers, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=2) + app_ids = [a.id for a in apps] + user_id = f"user-{uuid4()}" + + # Create batch first time + first_result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id + ) + + # Create batch second time — should return existing users + second_result = EndUserService.create_end_user_batch( + 
type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id + ) + + assert len(second_result) == 2 + for app_id in app_ids: + assert first_result[app_id].id == second_result[app_id].id + + def test_create_batch_partial_existing_users(self, db_session_with_containers, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=3) + user_id = f"user-{uuid4()}" + + # Create for first 2 apps + first_result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, + tenant_id=tenant_id, + app_ids=[apps[0].id, apps[1].id], + user_id=user_id, + ) + + # Create for all 3 apps — should reuse first 2, create 3rd + all_result = EndUserService.create_end_user_batch( + type=InvokeFrom.SERVICE_API, + tenant_id=tenant_id, + app_ids=[a.id for a in apps], + user_id=user_id, + ) + + assert len(all_result) == 3 + assert all_result[apps[0].id].id == first_result[apps[0].id].id + assert all_result[apps[1].id].id == first_result[apps[1].id].id + assert all_result[apps[2].id].session_id == user_id + + @pytest.mark.parametrize( + "invoke_type", + [InvokeFrom.SERVICE_API, InvokeFrom.WEB_APP, InvokeFrom.EXPLORE, InvokeFrom.DEBUGGER], + ) + def test_create_batch_all_invoke_types(self, db_session_with_containers, invoke_type, factory): + tenant_id, apps = self._create_multiple_apps(db_session_with_containers, factory, count=1) + user_id = f"user-{uuid4()}" + + result = EndUserService.create_end_user_batch( + type=invoke_type, tenant_id=tenant_id, app_ids=[apps[0].id], user_id=user_id + ) + + assert len(result) == 1 + assert result[apps[0].id].type == invoke_type diff --git a/api/tests/test_containers_integration_tests/services/test_file_service_zip_and_lookup.py b/api/tests/test_containers_integration_tests/services/test_file_service_zip_and_lookup.py new file mode 100644 index 0000000000..4e0a726cc7 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_file_service_zip_and_lookup.py @@ -0,0 +1,96 @@ +""" +Testcontainers integration tests for FileService helpers. 
+ +Covers: +- ZIP tempfile building (sanitization + deduplication + content writes) +- tenant-scoped batch lookup behavior (get_upload_files_by_ids) +""" + +from __future__ import annotations + +from datetime import UTC, datetime +from types import SimpleNamespace +from typing import Any +from uuid import uuid4 +from zipfile import ZipFile + +import pytest + +import services.file_service as file_service_module +from extensions.storage.storage_type import StorageType +from models.enums import CreatorUserRole +from models.model import UploadFile +from services.file_service import FileService + + +def _create_upload_file(db_session, *, tenant_id: str, key: str, name: str) -> UploadFile: + upload_file = UploadFile( + tenant_id=tenant_id, + storage_type=StorageType.OPENDAL, + key=key, + name=name, + size=100, + extension="txt", + mime_type="text/plain", + created_by_role=CreatorUserRole.ACCOUNT, + created_by=str(uuid4()), + created_at=datetime.now(UTC), + used=False, + ) + db_session.add(upload_file) + db_session.commit() + return upload_file + + +def test_build_upload_files_zip_tempfile_sanitizes_and_dedupes_names(monkeypatch: pytest.MonkeyPatch) -> None: + """Ensure ZIP entry names are safe and unique while preserving extensions.""" + upload_files: list[Any] = [ + SimpleNamespace(name="a/b.txt", key="k1"), + SimpleNamespace(name="c/b.txt", key="k2"), + SimpleNamespace(name="../b.txt", key="k3"), + ] + + data_by_key: dict[str, list[bytes]] = {"k1": [b"one"], "k2": [b"two"], "k3": [b"three"]} + + def _load(key: str, stream: bool = True) -> list[bytes]: + assert stream is True + return data_by_key[key] + + monkeypatch.setattr(file_service_module.storage, "load", _load) + + with FileService.build_upload_files_zip_tempfile(upload_files=upload_files) as tmp: + with ZipFile(tmp, mode="r") as zf: + assert zf.namelist() == ["b.txt", "b (1).txt", "b (2).txt"] + assert zf.read("b.txt") == b"one" + assert zf.read("b (1).txt") == b"two" + assert zf.read("b (2).txt") == b"three" + + +def test_get_upload_files_by_ids_returns_empty_when_no_ids(db_session_with_containers) -> None: + """Ensure empty input returns an empty mapping without hitting the database.""" + assert FileService.get_upload_files_by_ids(str(uuid4()), []) == {} + + +def test_get_upload_files_by_ids_returns_id_keyed_mapping(db_session_with_containers) -> None: + """Ensure batch lookup returns a dict keyed by stringified UploadFile ids.""" + tenant_id = str(uuid4()) + file1 = _create_upload_file(db_session_with_containers, tenant_id=tenant_id, key="k1", name="file1.txt") + file2 = _create_upload_file(db_session_with_containers, tenant_id=tenant_id, key="k2", name="file2.txt") + + result = FileService.get_upload_files_by_ids(tenant_id, [file1.id, file1.id, file2.id]) + + assert set(result.keys()) == {file1.id, file2.id} + assert result[file1.id].id == file1.id + assert result[file2.id].id == file2.id + + +def test_get_upload_files_by_ids_filters_by_tenant(db_session_with_containers) -> None: + """Ensure files from other tenants are not returned.""" + tenant_a = str(uuid4()) + tenant_b = str(uuid4()) + file_a = _create_upload_file(db_session_with_containers, tenant_id=tenant_a, key="ka", name="a.txt") + _create_upload_file(db_session_with_containers, tenant_id=tenant_b, key="kb", name="b.txt") + + result = FileService.get_upload_files_by_ids(tenant_a, [file_a.id]) + + assert set(result.keys()) == {file_a.id} diff --git a/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py 
b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py index 8707f2e827..57bbc73b50 100644 --- a/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py +++ b/api/tests/test_containers_integration_tests/services/test_messages_clean_service.py @@ -8,6 +8,7 @@ import pytest from faker import Faker from sqlalchemy.orm import Session +from dify_graph.file.enums import FileType from enums.cloud_plan import CloudPlan from extensions.ext_redis import redis_client from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole @@ -253,7 +254,7 @@ class TestMessagesCleanServiceIntegration: # MessageFile file = MessageFile( message_id=message.id, - type="image", + type=FileType.IMAGE, transfer_method="local_file", url="http://example.com/test.jpg", belongs_to=MessageFileBelongsTo.USER, diff --git a/api/tests/test_containers_integration_tests/services/test_metadata_service.py b/api/tests/test_containers_integration_tests/services/test_metadata_service.py index e847329c5b..8b1349be9a 100644 --- a/api/tests/test_containers_integration_tests/services/test_metadata_service.py +++ b/api/tests/test_containers_integration_tests/services/test_metadata_service.py @@ -5,6 +5,7 @@ from faker import Faker from sqlalchemy.orm import Session from core.rag.index_processor.constant.built_in_field import BuiltInField +from core.rag.index_processor.constant.index_type import IndexStructureType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, DatasetMetadata, DatasetMetadataBinding, Document from models.enums import DatasetMetadataType, DataSourceType, DocumentCreatedFrom @@ -139,7 +140,7 @@ class TestMetadataService: name=fake.file_name(), created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", ) diff --git a/api/tests/test_containers_integration_tests/services/test_oauth_server_service.py b/api/tests/test_containers_integration_tests/services/test_oauth_server_service.py new file mode 100644 index 0000000000..c146a5924b --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/test_oauth_server_service.py @@ -0,0 +1,174 @@ +"""Testcontainers integration tests for OAuthServerService.""" + +from __future__ import annotations + +import uuid +from typing import cast +from unittest.mock import MagicMock, patch +from uuid import uuid4 + +import pytest +from werkzeug.exceptions import BadRequest + +from models.model import OAuthProviderApp +from services.oauth_server import ( + OAUTH_ACCESS_TOKEN_EXPIRES_IN, + OAUTH_ACCESS_TOKEN_REDIS_KEY, + OAUTH_AUTHORIZATION_CODE_REDIS_KEY, + OAUTH_REFRESH_TOKEN_EXPIRES_IN, + OAUTH_REFRESH_TOKEN_REDIS_KEY, + OAuthGrantType, + OAuthServerService, +) + + +class TestOAuthServerServiceGetProviderApp: + """DB-backed tests for get_oauth_provider_app.""" + + def _create_oauth_provider_app(self, db_session_with_containers, *, client_id: str) -> OAuthProviderApp: + app = OAuthProviderApp( + app_icon="icon.png", + client_id=client_id, + client_secret=str(uuid4()), + app_label={"en-US": "Test OAuth App"}, + redirect_uris=["https://example.com/callback"], + scope="read", + ) + db_session_with_containers.add(app) + db_session_with_containers.commit() + return app + + def test_get_oauth_provider_app_returns_app_when_exists(self, db_session_with_containers): + client_id = f"client-{uuid4()}" + created = self._create_oauth_provider_app(db_session_with_containers, 
client_id=client_id) + + result = OAuthServerService.get_oauth_provider_app(client_id) + + assert result is not None + assert result.client_id == client_id + assert result.id == created.id + + def test_get_oauth_provider_app_returns_none_when_not_exists(self, db_session_with_containers): + result = OAuthServerService.get_oauth_provider_app(f"nonexistent-{uuid4()}") + + assert result is None + + +class TestOAuthServerServiceTokenOperations: + """Redis-backed tests for token sign/validate operations.""" + + @pytest.fixture + def mock_redis(self): + with patch("services.oauth_server.redis_client") as mock: + yield mock + + def test_sign_authorization_code_stores_and_returns_code(self, mock_redis): + deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000111") + with patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid): + code = OAuthServerService.sign_oauth_authorization_code("client-1", "user-1") + + assert code == str(deterministic_uuid) + mock_redis.set.assert_called_once_with( + OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id="client-1", code=code), + "user-1", + ex=600, + ) + + def test_sign_access_token_raises_bad_request_for_invalid_code(self, mock_redis): + mock_redis.get.return_value = None + + with pytest.raises(BadRequest, match="invalid code"): + OAuthServerService.sign_oauth_access_token( + grant_type=OAuthGrantType.AUTHORIZATION_CODE, + code="bad-code", + client_id="client-1", + ) + + def test_sign_access_token_issues_tokens_for_valid_code(self, mock_redis): + token_uuids = [ + uuid.UUID("00000000-0000-0000-0000-000000000201"), + uuid.UUID("00000000-0000-0000-0000-000000000202"), + ] + with patch("services.oauth_server.uuid.uuid4", side_effect=token_uuids): + mock_redis.get.return_value = b"user-1" + + access_token, refresh_token = OAuthServerService.sign_oauth_access_token( + grant_type=OAuthGrantType.AUTHORIZATION_CODE, + code="code-1", + client_id="client-1", + ) + + assert access_token == str(token_uuids[0]) + assert refresh_token == str(token_uuids[1]) + code_key = OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id="client-1", code="code-1") + mock_redis.delete.assert_called_once_with(code_key) + mock_redis.set.assert_any_call( + OAUTH_ACCESS_TOKEN_REDIS_KEY.format(client_id="client-1", token=access_token), + b"user-1", + ex=OAUTH_ACCESS_TOKEN_EXPIRES_IN, + ) + mock_redis.set.assert_any_call( + OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id="client-1", token=refresh_token), + b"user-1", + ex=OAUTH_REFRESH_TOKEN_EXPIRES_IN, + ) + + def test_sign_access_token_raises_bad_request_for_invalid_refresh_token(self, mock_redis): + mock_redis.get.return_value = None + + with pytest.raises(BadRequest, match="invalid refresh token"): + OAuthServerService.sign_oauth_access_token( + grant_type=OAuthGrantType.REFRESH_TOKEN, + refresh_token="stale-token", + client_id="client-1", + ) + + def test_sign_access_token_issues_new_token_for_valid_refresh(self, mock_redis): + deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000301") + with patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid): + mock_redis.get.return_value = b"user-1" + + access_token, returned_refresh = OAuthServerService.sign_oauth_access_token( + grant_type=OAuthGrantType.REFRESH_TOKEN, + refresh_token="refresh-1", + client_id="client-1", + ) + + assert access_token == str(deterministic_uuid) + assert returned_refresh == "refresh-1" + + def test_sign_access_token_returns_none_for_unknown_grant_type(self, mock_redis): + grant_type = cast(OAuthGrantType, 
"invalid-grant-type") + + result = OAuthServerService.sign_oauth_access_token(grant_type=grant_type, client_id="client-1") + + assert result is None + + def test_sign_refresh_token_stores_with_expected_expiry(self, mock_redis): + deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000401") + with patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid): + refresh_token = OAuthServerService._sign_oauth_refresh_token("client-2", "user-2") + + assert refresh_token == str(deterministic_uuid) + mock_redis.set.assert_called_once_with( + OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id="client-2", token=refresh_token), + "user-2", + ex=OAUTH_REFRESH_TOKEN_EXPIRES_IN, + ) + + def test_validate_access_token_returns_none_when_not_found(self, mock_redis): + mock_redis.get.return_value = None + + result = OAuthServerService.validate_oauth_access_token("client-1", "missing-token") + + assert result is None + + def test_validate_access_token_loads_user_when_exists(self, mock_redis): + mock_redis.get.return_value = b"user-88" + expected_user = MagicMock() + + with patch("services.oauth_server.AccountService.load_user", return_value=expected_user) as mock_load: + result = OAuthServerService.validate_oauth_access_token("client-1", "access-token") + + assert result is expected_user + mock_load.assert_called_once_with("user-88") diff --git a/api/tests/test_containers_integration_tests/services/test_saved_message_service.py b/api/tests/test_containers_integration_tests/services/test_saved_message_service.py index 94a4e62560..d256c0d90b 100644 --- a/api/tests/test_containers_integration_tests/services/test_saved_message_service.py +++ b/api/tests/test_containers_integration_tests/services/test_saved_message_service.py @@ -396,11 +396,6 @@ class TestSavedMessageService: assert "User is required" in str(exc_info.value) - # Verify no database operations were performed - - saved_messages = db_session_with_containers.query(SavedMessage).all() - assert len(saved_messages) == 0 - def test_save_error_no_user(self, db_session_with_containers: Session, mock_external_service_dependencies): """ Test error handling when saving message with no user. @@ -497,124 +492,140 @@ class TestSavedMessageService: # The message should still exist, only the saved_message should be deleted assert db_session_with_containers.query(Message).where(Message.id == message.id).first() is not None - def test_pagination_by_last_id_error_no_user( - self, db_session_with_containers: Session, mock_external_service_dependencies - ): - """ - Test error handling when no user is provided. 
- - This test verifies: - - Proper error handling for missing user - - ValueError is raised when user is None - - No database operations are performed - """ - # Arrange: Create test data - fake = Faker() + def test_save_for_end_user(self, db_session_with_containers: Session, mock_external_service_dependencies): + """Test saving a message for an EndUser.""" app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + end_user = self._create_test_end_user(db_session_with_containers, app) + message = self._create_test_message(db_session_with_containers, app, end_user) - # Act & Assert: Verify proper error handling - with pytest.raises(ValueError) as exc_info: - SavedMessageService.pagination_by_last_id(app_model=app, user=None, last_id=None, limit=10) + mock_external_service_dependencies["message_service"].get_message.return_value = message - assert "User is required" in str(exc_info.value) + SavedMessageService.save(app_model=app, user=end_user, message_id=message.id) - # Verify no database operations were performed for this specific test - # Note: We don't check total count as other tests may have created data - # Instead, we verify that the error was properly raised - pass - - def test_save_error_no_user(self, db_session_with_containers: Session, mock_external_service_dependencies): - """ - Test error handling when saving message with no user. - - This test verifies: - - Method returns early when user is None - - No database operations are performed - - No exceptions are raised - """ - # Arrange: Create test data - fake = Faker() - app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) - message = self._create_test_message(db_session_with_containers, app, account) - - # Act: Execute the method under test with None user - result = SavedMessageService.save(app_model=app, user=None, message_id=message.id) - - # Assert: Verify the expected outcomes - assert result is None - - # Verify no saved message was created - - saved_message = ( + saved = ( db_session_with_containers.query(SavedMessage) - .where( - SavedMessage.app_id == app.id, - SavedMessage.message_id == message.id, - ) + .where(SavedMessage.app_id == app.id, SavedMessage.message_id == message.id) .first() ) + assert saved is not None + assert saved.created_by == end_user.id + assert saved.created_by_role == "end_user" - assert saved_message is None - - def test_delete_success_existing_message( + def test_save_duplicate_is_idempotent( self, db_session_with_containers: Session, mock_external_service_dependencies ): - """ - Test successful deletion of an existing saved message. 
- - This test verifies: - - Proper deletion of existing saved message - - Correct database state after deletion - - No errors during deletion process - """ - # Arrange: Create test data - fake = Faker() + """Test that saving an already-saved message does not create a duplicate.""" app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) message = self._create_test_message(db_session_with_containers, app, account) - # Create a saved message first - saved_message = SavedMessage( - app_id=app.id, - message_id=message.id, - created_by_role="account", - created_by=account.id, - ) + mock_external_service_dependencies["message_service"].get_message.return_value = message - db_session_with_containers.add(saved_message) + # Save once + SavedMessageService.save(app_model=app, user=account, message_id=message.id) + # Save again + SavedMessageService.save(app_model=app, user=account, message_id=message.id) + + count = ( + db_session_with_containers.query(SavedMessage) + .where(SavedMessage.app_id == app.id, SavedMessage.message_id == message.id) + .count() + ) + assert count == 1 + + def test_delete_without_user_does_nothing( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that deleting without a user is a no-op.""" + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + message = self._create_test_message(db_session_with_containers, app, account) + + # Pre-create a saved message + saved = SavedMessage(app_id=app.id, message_id=message.id, created_by_role="account", created_by=account.id) + db_session_with_containers.add(saved) db_session_with_containers.commit() - # Verify saved message exists + SavedMessageService.delete(app_model=app, user=None, message_id=message.id) + + # Should still exist + assert ( + db_session_with_containers.query(SavedMessage) + .where(SavedMessage.app_id == app.id, SavedMessage.message_id == message.id) + .first() + is not None + ) + + def test_delete_non_existent_does_nothing( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that deleting a non-existent saved message is a no-op.""" + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + + # Should not raise — use a valid UUID that doesn't exist in DB + from uuid import uuid4 + + SavedMessageService.delete(app_model=app, user=account, message_id=str(uuid4())) + + def test_delete_for_end_user(self, db_session_with_containers: Session, mock_external_service_dependencies): + """Test deleting a saved message for an EndUser.""" + app, account = self._create_test_app_and_account(db_session_with_containers, mock_external_service_dependencies) + end_user = self._create_test_end_user(db_session_with_containers, app) + message = self._create_test_message(db_session_with_containers, app, end_user) + + saved = SavedMessage(app_id=app.id, message_id=message.id, created_by_role="end_user", created_by=end_user.id) + db_session_with_containers.add(saved) + db_session_with_containers.commit() + + SavedMessageService.delete(app_model=app, user=end_user, message_id=message.id) + + assert ( + db_session_with_containers.query(SavedMessage) + .where(SavedMessage.app_id == app.id, SavedMessage.message_id == message.id) + .first() + is None + ) + + def test_delete_only_affects_own_saved_messages( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): 
+ """Test that delete only removes the requesting user's saved message.""" + app, account1 = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + end_user = self._create_test_end_user(db_session_with_containers, app) + message = self._create_test_message(db_session_with_containers, app, account1) + + # Both users save the same message + saved_account = SavedMessage( + app_id=app.id, message_id=message.id, created_by_role="account", created_by=account1.id + ) + saved_end_user = SavedMessage( + app_id=app.id, message_id=message.id, created_by_role="end_user", created_by=end_user.id + ) + db_session_with_containers.add_all([saved_account, saved_end_user]) + db_session_with_containers.commit() + + # Delete only account1's saved message + SavedMessageService.delete(app_model=app, user=account1, message_id=message.id) + + # Account's saved message should be gone assert ( db_session_with_containers.query(SavedMessage) .where( SavedMessage.app_id == app.id, SavedMessage.message_id == message.id, - SavedMessage.created_by_role == "account", - SavedMessage.created_by == account.id, + SavedMessage.created_by == account1.id, ) .first() - is not None + is None ) - - # Act: Execute the method under test - SavedMessageService.delete(app_model=app, user=account, message_id=message.id) - - # Assert: Verify the expected outcomes - # Check if saved message was deleted from database - deleted_saved_message = ( + # End user's saved message should still exist + assert ( db_session_with_containers.query(SavedMessage) .where( SavedMessage.app_id == app.id, SavedMessage.message_id == message.id, - SavedMessage.created_by_role == "account", - SavedMessage.created_by == account.id, + SavedMessage.created_by == end_user.id, ) .first() + is not None ) - - assert deleted_saved_message is None - - # Verify database state - db_session_with_containers.commit() - # The message should still exist, only the saved_message should be deleted - assert db_session_with_containers.query(Message).where(Message.id == message.id).first() is not None diff --git a/api/tests/test_containers_integration_tests/services/test_tag_service.py b/api/tests/test_containers_integration_tests/services/test_tag_service.py index fa6e651529..1a72e3b6c2 100644 --- a/api/tests/test_containers_integration_tests/services/test_tag_service.py +++ b/api/tests/test_containers_integration_tests/services/test_tag_service.py @@ -9,7 +9,7 @@ from werkzeug.exceptions import NotFound from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset -from models.enums import DataSourceType +from models.enums import DataSourceType, TagType from models.model import App, Tag, TagBinding from services.tag_service import TagService @@ -547,7 +547,7 @@ class TestTagService: assert result is not None assert len(result) == 1 assert result[0].name == "python_tag" - assert result[0].type == "app" + assert result[0].type == TagType.APP assert result[0].tenant_id == tenant.id def test_get_tag_by_tag_name_no_matches( @@ -638,7 +638,7 @@ class TestTagService: # Verify all tags are returned for tag in result: - assert tag.type == "app" + assert tag.type == TagType.APP assert tag.tenant_id == tenant.id assert tag.id in [t.id for t in tags] diff --git a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py index 8ab8df2a5a..84ce6364df 100644 --- 
a/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py +++ b/api/tests/test_containers_integration_tests/services/test_workflow_app_service.py @@ -10,6 +10,7 @@ from sqlalchemy.orm import Session from dify_graph.entities.workflow_execution import WorkflowExecutionStatus from models import EndUser, Workflow, WorkflowAppLog, WorkflowRun from models.enums import CreatorUserRole +from models.workflow import WorkflowAppLogCreatedFrom from services.account_service import AccountService, TenantService # Delay import of AppService to avoid circular dependency @@ -221,7 +222,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -357,7 +358,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run_1.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -399,7 +400,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run_2.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -441,7 +442,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run_4.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -521,7 +522,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -627,7 +628,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -732,7 +733,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -860,7 +861,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -902,7 +903,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="web-app", + created_from=WorkflowAppLogCreatedFrom.WEB_APP, created_by_role=CreatorUserRole.END_USER, created_by=end_user.id, ) @@ -1037,7 +1038,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -1125,7 +1126,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, 
created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -1279,7 +1280,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -1379,7 +1380,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) @@ -1481,7 +1482,7 @@ class TestWorkflowAppService: app_id=app.id, workflow_id=workflow.id, workflow_run_id=workflow_run.id, - created_from="service-api", + created_from=WorkflowAppLogCreatedFrom.SERVICE_API, created_by_role=CreatorUserRole.ACCOUNT, created_by=account.id, ) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py index bffdca623a..d3e765055a 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_api_tools_manage_service.py @@ -536,3 +536,151 @@ class TestApiToolManageService: # Verify mock interactions mock_external_service_dependencies["encrypter"].assert_called_once() mock_external_service_dependencies["provider_controller"].from_db.assert_called_once() + + def test_delete_api_tool_provider_success( + self, flask_req_ctx_with_containers, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test successful deletion of an API tool provider.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + schema = self._create_test_openapi_schema() + provider_name = fake.unique.word() + + ApiToolManageService.create_api_tool_provider( + user_id=account.id, + tenant_id=tenant.id, + provider_name=provider_name, + icon={"content": "🔧", "background": "#FFF"}, + credentials={"auth_type": "none"}, + schema_type=ApiProviderSchemaType.OPENAPI, + schema=schema, + privacy_policy="", + custom_disclaimer="", + labels=[], + ) + + provider = ( + db_session_with_containers.query(ApiToolProvider) + .filter(ApiToolProvider.tenant_id == tenant.id, ApiToolProvider.name == provider_name) + .first() + ) + assert provider is not None + + result = ApiToolManageService.delete_api_tool_provider(account.id, tenant.id, provider_name) + + assert result == {"result": "success"} + deleted = ( + db_session_with_containers.query(ApiToolProvider) + .filter(ApiToolProvider.tenant_id == tenant.id, ApiToolProvider.name == provider_name) + .first() + ) + assert deleted is None + + def test_delete_api_tool_provider_not_found( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test deletion raises ValueError when provider not found.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="you have not added provider"): + ApiToolManageService.delete_api_tool_provider(account.id, tenant.id, "nonexistent") + + def test_update_api_tool_provider_not_found( + self, flask_req_ctx_with_containers, db_session_with_containers: Session, mock_external_service_dependencies + ): + 
"""Test update raises ValueError when original provider not found.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="does not exists"): + ApiToolManageService.update_api_tool_provider( + user_id=account.id, + tenant_id=tenant.id, + provider_name="new-name", + original_provider="nonexistent", + icon={}, + credentials={"auth_type": "none"}, + _schema_type=ApiProviderSchemaType.OPENAPI, + schema=self._create_test_openapi_schema(), + privacy_policy=None, + custom_disclaimer="", + labels=[], + ) + + def test_update_api_tool_provider_missing_auth_type( + self, flask_req_ctx_with_containers, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test update raises ValueError when auth_type is missing from credentials.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + schema = self._create_test_openapi_schema() + provider_name = fake.unique.word() + + ApiToolManageService.create_api_tool_provider( + user_id=account.id, + tenant_id=tenant.id, + provider_name=provider_name, + icon={"content": "🔧", "background": "#FFF"}, + credentials={"auth_type": "none"}, + schema_type=ApiProviderSchemaType.OPENAPI, + schema=schema, + privacy_policy="", + custom_disclaimer="", + labels=[], + ) + + with pytest.raises(ValueError, match="auth_type is required"): + ApiToolManageService.update_api_tool_provider( + user_id=account.id, + tenant_id=tenant.id, + provider_name=provider_name, + original_provider=provider_name, + icon={}, + credentials={}, + _schema_type=ApiProviderSchemaType.OPENAPI, + schema=schema, + privacy_policy=None, + custom_disclaimer="", + labels=[], + ) + + def test_list_api_tool_provider_tools_not_found( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test listing tools raises ValueError when provider not found.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="you have not added provider"): + ApiToolManageService.list_api_tool_provider_tools(account.id, tenant.id, "nonexistent") + + def test_test_api_tool_preview_invalid_schema_type( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test preview raises ValueError for invalid schema type.""" + fake = Faker() + account, tenant = self._create_test_account_and_tenant( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="invalid schema type"): + ApiToolManageService.test_api_tool_preview( + tenant_id=tenant.id, + provider_name="provider-a", + tool_name="tool-a", + credentials={"auth_type": "none"}, + parameters={}, + schema_type="bad-schema-type", + schema="schema", + ) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py index 0f38218c51..7ab059bb75 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_tools_transform_service.py @@ -6,7 +6,7 @@ from sqlalchemy.orm import Session from core.tools.entities.api_entities import ToolProviderApiEntity from 
core.tools.entities.common_entities import I18nObject -from core.tools.entities.tool_entities import ToolProviderType +from core.tools.entities.tool_entities import ApiProviderSchemaType, ToolProviderType from models.tools import ApiToolProvider, BuiltinToolProvider, MCPToolProvider, WorkflowToolProvider from services.plugin.plugin_service import PluginService from services.tools.tools_transform_service import ToolTransformService @@ -52,7 +52,7 @@ class TestToolTransformService: user_id="test_user_id", credentials_str='{"auth_type": "api_key_header", "api_key": "test_key"}', schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str="[]", ) elif provider_type == "builtin": @@ -659,7 +659,7 @@ class TestToolTransformService: user_id=fake.uuid4(), credentials_str='{"auth_type": "api_key_header", "api_key": "test_key"}', schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str="[]", ) @@ -695,7 +695,7 @@ class TestToolTransformService: user_id=fake.uuid4(), credentials_str='{"auth_type": "api_key_query", "api_key": "test_key"}', schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str="[]", ) @@ -731,7 +731,7 @@ class TestToolTransformService: user_id=fake.uuid4(), credentials_str='{"auth_type": "api_key", "api_key": "test_key"}', schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, tools_str="[]", ) diff --git a/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py b/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py index 34906a4e54..e3c0749494 100644 --- a/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py +++ b/api/tests/test_containers_integration_tests/services/tools/test_workflow_tools_manage_service.py @@ -1043,3 +1043,112 @@ class TestWorkflowToolManageService: # After the fix, this should always be 0 # For now, we document that the record may exist, demonstrating the bug # assert tool_count == 0 # Expected after fix + + def test_delete_workflow_tool_success( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test successful deletion of a workflow tool.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + tool_name = fake.unique.word() + + WorkflowToolManageService.create_workflow_tool( + user_id=account.id, + tenant_id=account.current_tenant.id, + workflow_app_id=app.id, + name=tool_name, + label=fake.word(), + icon={"type": "emoji", "emoji": "🔧"}, + description=fake.text(max_nb_chars=200), + parameters=self._create_test_workflow_tool_parameters(), + ) + + tool = ( + db_session_with_containers.query(WorkflowToolProvider) + .where(WorkflowToolProvider.tenant_id == account.current_tenant.id, WorkflowToolProvider.name == tool_name) + .first() + ) + assert tool is not None + + result = WorkflowToolManageService.delete_workflow_tool(account.id, account.current_tenant.id, tool.id) + + assert result == {"result": "success"} + deleted = ( + db_session_with_containers.query(WorkflowToolProvider).where(WorkflowToolProvider.id == tool.id).first() + ) + assert deleted is None + + def test_list_tenant_workflow_tools_empty( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test listing workflow tools when none exist 
returns empty list.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + result = WorkflowToolManageService.list_tenant_workflow_tools(account.id, account.current_tenant.id) + + assert result == [] + + def test_get_workflow_tool_by_tool_id_not_found( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that get_workflow_tool_by_tool_id raises ValueError when tool not found.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="Tool not found"): + WorkflowToolManageService.get_workflow_tool_by_tool_id(account.id, account.current_tenant.id, fake.uuid4()) + + def test_get_workflow_tool_by_app_id_not_found( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that get_workflow_tool_by_app_id raises ValueError when tool not found.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="Tool not found"): + WorkflowToolManageService.get_workflow_tool_by_app_id(account.id, account.current_tenant.id, fake.uuid4()) + + def test_list_single_workflow_tools_not_found( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that list_single_workflow_tools raises ValueError when tool not found.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + with pytest.raises(ValueError, match="not found"): + WorkflowToolManageService.list_single_workflow_tools(account.id, account.current_tenant.id, fake.uuid4()) + + def test_create_workflow_tool_with_labels( + self, db_session_with_containers: Session, mock_external_service_dependencies + ): + """Test that labels are forwarded to ToolLabelManager when provided.""" + fake = Faker() + app, account, workflow = self._create_test_app_and_account( + db_session_with_containers, mock_external_service_dependencies + ) + + result = WorkflowToolManageService.create_workflow_tool( + user_id=account.id, + tenant_id=account.current_tenant.id, + workflow_app_id=app.id, + name=fake.unique.word(), + label=fake.word(), + icon={"type": "emoji", "emoji": "🔧"}, + description=fake.text(max_nb_chars=200), + parameters=self._create_test_workflow_tool_parameters(), + labels=["label-1", "label-2"], + ) + + assert result == {"result": "success"} + mock_external_service_dependencies["tool_label_manager"].update_tool_labels.assert_called_once() diff --git a/api/tests/test_containers_integration_tests/services/workflow/test_workflow_deletion.py b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_deletion.py new file mode 100644 index 0000000000..29e1e240b4 --- /dev/null +++ b/api/tests/test_containers_integration_tests/services/workflow/test_workflow_deletion.py @@ -0,0 +1,158 @@ +"""Testcontainers integration tests for WorkflowService.delete_workflow.""" + +import json +from uuid import uuid4 + +import pytest +from sqlalchemy.orm import Session, sessionmaker + +from extensions.ext_database import db +from models.account import Account, Tenant, TenantAccountJoin +from models.model import App +from models.tools import WorkflowToolProvider +from models.workflow import Workflow 
+from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService + + +class TestWorkflowDeletion: + def _create_tenant_and_account(self, session: Session) -> tuple[Tenant, Account]: + tenant = Tenant(name=f"Tenant {uuid4()}") + session.add(tenant) + session.flush() + + account = Account( + name=f"Account {uuid4()}", + email=f"wf_del_{uuid4()}@example.com", + password="hashed", + password_salt="salt", + interface_language="en-US", + timezone="UTC", + ) + session.add(account) + session.flush() + + join = TenantAccountJoin( + tenant_id=tenant.id, + account_id=account.id, + role="owner", + current=True, + ) + session.add(join) + session.flush() + return tenant, account + + def _create_app(self, session: Session, *, tenant: Tenant, account: Account, workflow_id: str | None = None) -> App: + app = App( + tenant_id=tenant.id, + name=f"App {uuid4()}", + description="", + mode="workflow", + icon_type="emoji", + icon="bot", + icon_background="#FFFFFF", + enable_site=False, + enable_api=True, + api_rpm=100, + api_rph=100, + is_demo=False, + is_public=False, + is_universal=False, + created_by=account.id, + updated_by=account.id, + workflow_id=workflow_id, + ) + session.add(app) + session.flush() + return app + + def _create_workflow( + self, session: Session, *, tenant: Tenant, app: App, account: Account, version: str = "1.0" + ) -> Workflow: + workflow = Workflow( + id=str(uuid4()), + tenant_id=tenant.id, + app_id=app.id, + type="workflow", + version=version, + graph=json.dumps({"nodes": [], "edges": []}), + _features=json.dumps({}), + created_by=account.id, + updated_by=account.id, + ) + session.add(workflow) + session.flush() + return workflow + + def _create_tool_provider( + self, session: Session, *, tenant: Tenant, app: App, account: Account, version: str + ) -> WorkflowToolProvider: + provider = WorkflowToolProvider( + name=f"tool-{uuid4()}", + label=f"Tool {uuid4()}", + icon="wrench", + app_id=app.id, + version=version, + user_id=account.id, + tenant_id=tenant.id, + description="test tool provider", + ) + session.add(provider) + session.flush() + return provider + + def test_delete_workflow_success(self, db_session_with_containers): + tenant, account = self._create_tenant_and_account(db_session_with_containers) + app = self._create_app(db_session_with_containers, tenant=tenant, account=account) + workflow = self._create_workflow( + db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0" + ) + db_session_with_containers.commit() + workflow_id = workflow.id + + service = WorkflowService(sessionmaker(bind=db.engine)) + result = service.delete_workflow( + session=db_session_with_containers, workflow_id=workflow_id, tenant_id=tenant.id + ) + + assert result is True + db_session_with_containers.expire_all() + assert db_session_with_containers.get(Workflow, workflow_id) is None + + def test_delete_draft_workflow_raises_error(self, db_session_with_containers): + tenant, account = self._create_tenant_and_account(db_session_with_containers) + app = self._create_app(db_session_with_containers, tenant=tenant, account=account) + workflow = self._create_workflow( + db_session_with_containers, tenant=tenant, app=app, account=account, version="draft" + ) + db_session_with_containers.commit() + + service = WorkflowService(sessionmaker(bind=db.engine)) + with pytest.raises(DraftWorkflowDeletionError): + service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id) + + def 
test_delete_workflow_in_use_by_app_raises_error(self, db_session_with_containers): + tenant, account = self._create_tenant_and_account(db_session_with_containers) + app = self._create_app(db_session_with_containers, tenant=tenant, account=account) + workflow = self._create_workflow( + db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0" + ) + # Point app to this workflow + app.workflow_id = workflow.id + db_session_with_containers.commit() + + service = WorkflowService(sessionmaker(bind=db.engine)) + with pytest.raises(WorkflowInUseError, match="currently in use by app"): + service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id) + + def test_delete_workflow_published_as_tool_raises_error(self, db_session_with_containers): + tenant, account = self._create_tenant_and_account(db_session_with_containers) + app = self._create_app(db_session_with_containers, tenant=tenant, account=account) + workflow = self._create_workflow( + db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0" + ) + self._create_tool_provider(db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0") + db_session_with_containers.commit() + + service = WorkflowService(sessionmaker(bind=db.engine)) + with pytest.raises(WorkflowInUseError, match="published as a tool"): + service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id) diff --git a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py index 210d9eb39e..6cbbe43137 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_batch_clean_document_task.py @@ -13,6 +13,7 @@ import pytest from faker import Faker from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from libs.datetime_utils import naive_utc_now from models import Account, Tenant, TenantAccountJoin, TenantAccountRole @@ -152,7 +153,7 @@ class TestBatchCleanDocumentTask: created_from=DocumentCreatedFrom.WEB, created_by=account.id, indexing_status=IndexingStatus.COMPLETED, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) @@ -392,7 +393,12 @@ class TestBatchCleanDocumentTask: db_session_with_containers.commit() # Execute the task with non-existent dataset - batch_clean_document_task(document_ids=[document_id], dataset_id=dataset_id, doc_form="text_model", file_ids=[]) + batch_clean_document_task( + document_ids=[document_id], + dataset_id=dataset_id, + doc_form=IndexStructureType.PARAGRAPH_INDEX, + file_ids=[], + ) # Verify that no index processing occurred mock_external_service_dependencies["index_processor"].clean.assert_not_called() @@ -525,7 +531,11 @@ class TestBatchCleanDocumentTask: account = self._create_test_account(db_session_with_containers) # Test different doc_form types - doc_forms = ["text_model", "qa_model", "hierarchical_model"] + doc_forms = [ + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.QA_INDEX, + IndexStructureType.PARENT_CHILD_INDEX, + ] for doc_form in doc_forms: dataset = self._create_test_dataset(db_session_with_containers, account) diff --git 
a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py index 202ccb0098..5ebf141828 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_batch_create_segment_to_index_task.py @@ -19,6 +19,7 @@ import pytest from faker import Faker from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment @@ -179,7 +180,7 @@ class TestBatchCreateSegmentToIndexTask: indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=False, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, word_count=0, ) @@ -221,17 +222,17 @@ class TestBatchCreateSegmentToIndexTask: return upload_file - def _create_test_csv_content(self, content_type="text_model"): + def _create_test_csv_content(self, content_type=IndexStructureType.PARAGRAPH_INDEX): """ Helper method to create test CSV content. Args: - content_type: Type of content to create ("text_model" or "qa_model") + content_type: Type of content to create (IndexStructureType.PARAGRAPH_INDEX or IndexStructureType.QA_INDEX) Returns: str: CSV content as string """ - if content_type == "qa_model": + if content_type == IndexStructureType.QA_INDEX: csv_content = "content,answer\n" csv_content += "This is the first segment content,This is the first answer\n" csv_content += "This is the second segment content,This is the second answer\n" @@ -264,7 +265,7 @@ class TestBatchCreateSegmentToIndexTask: upload_file = self._create_test_upload_file(db_session_with_containers, account, tenant) # Create CSV content - csv_content = self._create_test_csv_content("text_model") + csv_content = self._create_test_csv_content(IndexStructureType.PARAGRAPH_INDEX) # Mock storage to return our CSV content mock_storage = mock_external_service_dependencies["storage"] @@ -451,7 +452,7 @@ class TestBatchCreateSegmentToIndexTask: indexing_status=IndexingStatus.COMPLETED, enabled=False, # Document is disabled archived=False, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, word_count=0, ), # Archived document @@ -467,7 +468,7 @@ class TestBatchCreateSegmentToIndexTask: indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=True, # Document is archived - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, word_count=0, ), # Document with incomplete indexing @@ -483,7 +484,7 @@ class TestBatchCreateSegmentToIndexTask: indexing_status=IndexingStatus.INDEXING, # Not completed enabled=True, archived=False, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, word_count=0, ), ] @@ -655,7 +656,7 @@ class TestBatchCreateSegmentToIndexTask: db_session_with_containers.commit() # Create CSV content - csv_content = self._create_test_csv_content("text_model") + csv_content = self._create_test_csv_content(IndexStructureType.PARAGRAPH_INDEX) # Mock storage to return our CSV content mock_storage = mock_external_service_dependencies["storage"] diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py index 1cd698b870..9449fee0af 100644 
--- a/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_dataset_task.py @@ -18,6 +18,7 @@ import pytest from faker import Faker from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import ( @@ -192,7 +193,7 @@ class TestCleanDatasetTask: indexing_status=IndexingStatus.COMPLETED, enabled=True, archived=False, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, word_count=100, created_at=datetime.now(), updated_at=datetime.now(), diff --git a/api/tests/test_containers_integration_tests/tasks/test_clean_notion_document_task.py b/api/tests/test_containers_integration_tests/tasks/test_clean_notion_document_task.py index a2a190fd69..926c839c8b 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_clean_notion_document_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_clean_notion_document_task.py @@ -12,6 +12,7 @@ from unittest.mock import Mock, patch import pytest from faker import Faker +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, Document, DocumentSegment from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus, SegmentStatus from services.account_service import AccountService, TenantService @@ -114,7 +115,7 @@ class TestCleanNotionDocumentTask: name=f"Notion Page {i}", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", # Set doc_form to ensure dataset.doc_form works + doc_form=IndexStructureType.PARAGRAPH_INDEX, # Set doc_form to ensure dataset.doc_form works doc_language="en", indexing_status=IndexingStatus.COMPLETED, ) @@ -261,7 +262,7 @@ class TestCleanNotionDocumentTask: # Test different index types # Note: Only testing text_model to avoid dependency on external services - index_types = ["text_model"] + index_types = [IndexStructureType.PARAGRAPH_INDEX] for index_type in index_types: # Create dataset (doc_form will be set via document creation) diff --git a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py index 132f43c320..979435282b 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_create_segment_to_index_task.py @@ -12,6 +12,7 @@ from uuid import uuid4 import pytest from faker import Faker +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.ext_redis import redis_client from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment @@ -141,7 +142,7 @@ class TestCreateSegmentToIndexTask: enabled=True, archived=False, indexing_status=IndexingStatus.COMPLETED, - doc_form="qa_model", + doc_form=IndexStructureType.QA_INDEX, ) db_session_with_containers.add(document) db_session_with_containers.commit() @@ -301,7 +302,7 @@ class TestCreateSegmentToIndexTask: enabled=True, archived=False, indexing_status=IndexingStatus.COMPLETED, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) 
db_session_with_containers.commit() @@ -552,7 +553,11 @@ class TestCreateSegmentToIndexTask: - Processing completes successfully for different forms """ # Arrange: Test different doc_forms - doc_forms = ["qa_model", "text_model", "web_model"] + doc_forms = [ + IndexStructureType.QA_INDEX, + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.PARAGRAPH_INDEX, + ] for doc_form in doc_forms: # Create fresh test data for each form diff --git a/api/tests/test_containers_integration_tests/tasks/test_deal_dataset_vector_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_deal_dataset_vector_index_task.py index e80b37ac1b..d457b59d58 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_deal_dataset_vector_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_deal_dataset_vector_index_task.py @@ -12,6 +12,7 @@ from unittest.mock import ANY, Mock, patch import pytest from faker import Faker +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, Document, DocumentSegment from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus, SegmentStatus from services.account_service import AccountService, TenantService @@ -107,7 +108,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -167,7 +168,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -187,7 +188,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -268,7 +269,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="parent_child_index", + doc_form=IndexStructureType.PARENT_CHILD_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -288,7 +289,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="parent_child_index", + doc_form=IndexStructureType.PARENT_CHILD_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -416,7 +417,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -505,7 +506,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -525,7 +526,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", 
indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -601,7 +602,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="qa_index", + doc_form=IndexStructureType.QA_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -638,7 +639,7 @@ class TestDealDatasetVectorIndexTask: assert updated_document.indexing_status == IndexingStatus.COMPLETED # Verify index processor was initialized with custom index type - mock_index_processor_factory.assert_called_once_with("qa_index") + mock_index_processor_factory.assert_called_once_with(IndexStructureType.QA_INDEX) mock_factory = mock_index_processor_factory.return_value mock_processor = mock_factory.init_index_processor.return_value mock_processor.load.assert_called_once() @@ -677,7 +678,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -714,7 +715,7 @@ class TestDealDatasetVectorIndexTask: assert updated_document.indexing_status == IndexingStatus.COMPLETED # Verify index processor was initialized with the document's index type - mock_index_processor_factory.assert_called_once_with("text_model") + mock_index_processor_factory.assert_called_once_with(IndexStructureType.PARAGRAPH_INDEX) mock_factory = mock_index_processor_factory.return_value mock_processor = mock_factory.init_index_processor.return_value mock_processor.load.assert_called_once() @@ -753,7 +754,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -775,7 +776,7 @@ class TestDealDatasetVectorIndexTask: name=f"Test Document {i}", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -856,7 +857,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -876,7 +877,7 @@ class TestDealDatasetVectorIndexTask: name="Test Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -953,7 +954,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -973,7 +974,7 @@ class TestDealDatasetVectorIndexTask: name="Enabled Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -992,7 +993,7 @@ class TestDealDatasetVectorIndexTask: name="Disabled Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - 
doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=False, # This document should be skipped @@ -1074,7 +1075,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -1094,7 +1095,7 @@ class TestDealDatasetVectorIndexTask: name="Active Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -1113,7 +1114,7 @@ class TestDealDatasetVectorIndexTask: name="Archived Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -1195,7 +1196,7 @@ class TestDealDatasetVectorIndexTask: name="Document for doc_form", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -1215,7 +1216,7 @@ class TestDealDatasetVectorIndexTask: name="Completed Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.COMPLETED, enabled=True, @@ -1234,7 +1235,7 @@ class TestDealDatasetVectorIndexTask: name="Incomplete Document", created_from=DocumentCreatedFrom.WEB, created_by=account.id, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", indexing_status=IndexingStatus.INDEXING, # This document should be skipped enabled=True, diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py index da42fc7167..d21f1daf23 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segment_from_index_task.py @@ -15,6 +15,7 @@ import pytest from faker import Faker from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.ext_redis import redis_client from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment @@ -113,7 +114,7 @@ class TestDisableSegmentFromIndexTask: dataset: Dataset, tenant: Tenant, account: Account, - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, ) -> Document: """ Helper method to create a test document. 
@@ -476,7 +477,11 @@ class TestDisableSegmentFromIndexTask: - Index processor clean method is called correctly """ # Test different document forms - doc_forms = ["text_model", "qa_model", "table_model"] + doc_forms = [ + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.QA_INDEX, + IndexStructureType.PARENT_CHILD_INDEX, + ] for doc_form in doc_forms: # Arrange: Create test data for each form diff --git a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py index 4bc9bb4749..fbcb7b5264 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_disable_segments_from_index_task.py @@ -11,6 +11,7 @@ from unittest.mock import MagicMock, patch from faker import Faker from sqlalchemy.orm import Session +from core.rag.index_processor.constant.index_type import IndexStructureType from models import Account, Dataset, DocumentSegment from models import Document as DatasetDocument from models.dataset import DatasetProcessRule @@ -153,7 +154,7 @@ class TestDisableSegmentsFromIndexTask: document.indexing_status = "completed" document.enabled = True document.archived = False - document.doc_form = "text_model" # Use text_model form for testing + document.doc_form = IndexStructureType.PARAGRAPH_INDEX # Use text_model form for testing document.doc_language = "en" db_session_with_containers.add(document) db_session_with_containers.commit() @@ -500,7 +501,11 @@ class TestDisableSegmentsFromIndexTask: segment_ids = [segment.id for segment in segments] # Test different document forms - doc_forms = ["text_model", "qa_model", "hierarchical_model"] + doc_forms = [ + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.QA_INDEX, + IndexStructureType.PARENT_CHILD_INDEX, + ] for doc_form in doc_forms: # Update document form diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_sync_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_sync_task.py index 6a17a19a54..10d97919fb 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_sync_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_sync_task.py @@ -14,6 +14,7 @@ from uuid import uuid4 import pytest from core.indexing_runner import DocumentIsPausedError, IndexingRunner +from core.rag.index_processor.constant.index_type import IndexStructureType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus, SegmentStatus @@ -85,7 +86,7 @@ class DocumentIndexingSyncTaskTestDataFactory: created_by=created_by, indexing_status=indexing_status, enabled=True, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, doc_language="en", ) db_session_with_containers.add(document) diff --git a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py index 2fbea1388c..c650d56091 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_document_indexing_update_task.py @@ -3,6 +3,7 @@ from unittest.mock import MagicMock, patch 
import pytest from faker import Faker +from core.rag.index_processor.constant.index_type import IndexStructureType from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment from models.enums import DataSourceType, DocumentCreatedFrom, IndexingStatus, SegmentStatus @@ -80,7 +81,7 @@ class TestDocumentIndexingUpdateTask: created_by=account.id, indexing_status=IndexingStatus.WAITING, enabled=True, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) db_session_with_containers.commit() diff --git a/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py b/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py index f1f5a4b105..76b6a8ae73 100644 --- a/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py +++ b/api/tests/test_containers_integration_tests/tasks/test_duplicate_document_indexing_task.py @@ -4,6 +4,7 @@ import pytest from faker import Faker from core.indexing_runner import DocumentIsPausedError +from core.rag.index_processor.constant.index_type import IndexStructureType from enums.cloud_plan import CloudPlan from models import Account, Tenant, TenantAccountJoin, TenantAccountRole from models.dataset import Dataset, Document, DocumentSegment @@ -130,7 +131,7 @@ class TestDuplicateDocumentIndexingTasks: created_by=account.id, indexing_status=IndexingStatus.WAITING, enabled=True, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) documents.append(document) @@ -265,7 +266,7 @@ class TestDuplicateDocumentIndexingTasks: created_by=account.id, indexing_status=IndexingStatus.WAITING, enabled=True, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) documents.append(document) @@ -524,7 +525,7 @@ class TestDuplicateDocumentIndexingTasks: created_by=dataset.created_by, indexing_status=IndexingStatus.WAITING, enabled=True, - doc_form="text_model", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) db_session_with_containers.add(document) extra_documents.append(document) diff --git a/api/tests/unit_tests/controllers/console/app/test_app_apis.py b/api/tests/unit_tests/controllers/console/app/test_app_apis.py index 60b8ee96fe..1d1e119fd6 100644 --- a/api/tests/unit_tests/controllers/console/app/test_app_apis.py +++ b/api/tests/unit_tests/controllers/console/app/test_app_apis.py @@ -7,14 +7,19 @@ from __future__ import annotations import uuid from types import SimpleNamespace -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest +from pydantic import ValidationError from werkzeug.exceptions import BadRequest, NotFound +from controllers.console import console_ns from controllers.console.app import ( annotation as annotation_module, ) +from controllers.console.app import ( + app as app_module, +) from controllers.console.app import ( completion as completion_module, ) @@ -203,6 +208,48 @@ class TestCompletionEndpoints: method(app_model=MagicMock(id="app-1")) +class TestAppEndpoints: + """Tests for app endpoints.""" + + def test_app_put_should_preserve_icon_type_when_payload_omits_it(self, app, monkeypatch): + api = app_module.AppApi() + method = _unwrap(api.put) + payload = { + "name": "Updated App", + "description": "Updated description", + "icon": "🤖", + "icon_background": "#FFFFFF", + } + 
app_service = MagicMock() + app_service.update_app.return_value = SimpleNamespace() + response_model = MagicMock() + response_model.model_dump.return_value = {"id": "app-1"} + + monkeypatch.setattr(app_module, "AppService", lambda: app_service) + monkeypatch.setattr(app_module.AppDetailWithSite, "model_validate", MagicMock(return_value=response_model)) + + with ( + app.test_request_context("/console/api/apps/app-1", method="PUT", json=payload), + patch.object(type(console_ns), "payload", payload), + ): + response = method(app_model=SimpleNamespace(icon_type=app_module.IconType.EMOJI)) + + assert response == {"id": "app-1"} + assert app_service.update_app.call_args.args[1]["icon_type"] is None + + def test_update_app_payload_should_reject_empty_icon_type(self): + with pytest.raises(ValidationError): + app_module.UpdateAppPayload.model_validate( + { + "name": "Updated App", + "description": "Updated description", + "icon_type": "", + "icon": "🤖", + "icon_background": "#FFFFFF", + } + ) + + # ========== OpsTrace Tests ========== class TestOpsTraceEndpoints: """Tests for ops_trace endpoint.""" @@ -281,12 +328,10 @@ class TestSiteEndpoints: method = _unwrap(api.post) site = MagicMock() - query = MagicMock() - query.where.return_value.first.return_value = site monkeypatch.setattr( site_module.db, "session", - MagicMock(query=lambda *_args, **_kwargs: query, commit=lambda: None), + MagicMock(scalar=lambda *_args, **_kwargs: site, commit=lambda: None), ) monkeypatch.setattr( site_module, @@ -305,12 +350,10 @@ class TestSiteEndpoints: method = _unwrap(api.post) site = MagicMock() - query = MagicMock() - query.where.return_value.first.return_value = site monkeypatch.setattr( site_module.db, "session", - MagicMock(query=lambda *_args, **_kwargs: query, commit=lambda: None), + MagicMock(scalar=lambda *_args, **_kwargs: site, commit=lambda: None), ) monkeypatch.setattr(site_module.Site, "generate_code", lambda *_args, **_kwargs: "code") monkeypatch.setattr( diff --git a/api/tests/unit_tests/controllers/console/app/test_conversation_api.py b/api/tests/unit_tests/controllers/console/app/test_conversation_api.py index 5db8e5c332..11b3b3470d 100644 --- a/api/tests/unit_tests/controllers/console/app/test_conversation_api.py +++ b/api/tests/unit_tests/controllers/console/app/test_conversation_api.py @@ -82,12 +82,8 @@ def test_chat_conversation_list_advanced_chat_calls_paginate(app, monkeypatch: p def test_get_conversation_updates_read_at(monkeypatch: pytest.MonkeyPatch) -> None: conversation = SimpleNamespace(id="c1", app_id="app-1") - query = MagicMock() - query.where.return_value = query - query.first.return_value = conversation - session = MagicMock() - session.query.return_value = query + session.scalar.return_value = conversation monkeypatch.setattr(conversation_module, "current_account_with_tenant", lambda: (_make_account(), "t1")) monkeypatch.setattr(conversation_module.db, "session", session) @@ -101,12 +97,8 @@ def test_get_conversation_updates_read_at(monkeypatch: pytest.MonkeyPatch) -> No def test_get_conversation_missing_raises_not_found(monkeypatch: pytest.MonkeyPatch) -> None: - query = MagicMock() - query.where.return_value = query - query.first.return_value = None - session = MagicMock() - session.query.return_value = query + session.scalar.return_value = None monkeypatch.setattr(conversation_module, "current_account_with_tenant", lambda: (_make_account(), "t1")) monkeypatch.setattr(conversation_module.db, "session", session) diff --git 
a/api/tests/unit_tests/controllers/console/app/test_conversation_read_timestamp.py b/api/tests/unit_tests/controllers/console/app/test_conversation_read_timestamp.py index 460da06ecc..f588ab261d 100644 --- a/api/tests/unit_tests/controllers/console/app/test_conversation_read_timestamp.py +++ b/api/tests/unit_tests/controllers/console/app/test_conversation_read_timestamp.py @@ -24,7 +24,7 @@ def test_get_conversation_mark_read_keeps_updated_at_unchanged(): ), patch("controllers.console.app.conversation.db.session", autospec=True) as mock_session, ): - mock_session.query.return_value.where.return_value.first.return_value = conversation + mock_session.scalar.return_value = conversation _get_conversation(app_model, "conversation-id") diff --git a/api/tests/unit_tests/controllers/console/app/test_generator_api.py b/api/tests/unit_tests/controllers/console/app/test_generator_api.py index f83bc18da3..e64c508b82 100644 --- a/api/tests/unit_tests/controllers/console/app/test_generator_api.py +++ b/api/tests/unit_tests/controllers/console/app/test_generator_api.py @@ -73,8 +73,7 @@ def test_instruction_generate_app_not_found(app, monkeypatch: pytest.MonkeyPatch monkeypatch.setattr(generator_module, "current_account_with_tenant", lambda: (None, "t1")) - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: None) - monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(get=lambda *_args, **_kwargs: None)) with app.test_request_context( "/console/api/instruction-generate", @@ -99,8 +98,7 @@ def test_instruction_generate_workflow_not_found(app, monkeypatch: pytest.Monkey monkeypatch.setattr(generator_module, "current_account_with_tenant", lambda: (None, "t1")) app_model = SimpleNamespace(id="app-1") - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: app_model) - monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(get=lambda *_args, **_kwargs: app_model)) _install_workflow_service(monkeypatch, workflow=None) with app.test_request_context( @@ -126,8 +124,7 @@ def test_instruction_generate_node_missing(app, monkeypatch: pytest.MonkeyPatch) monkeypatch.setattr(generator_module, "current_account_with_tenant", lambda: (None, "t1")) app_model = SimpleNamespace(id="app-1") - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: app_model) - monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(get=lambda *_args, **_kwargs: app_model)) workflow = SimpleNamespace(graph_dict={"nodes": []}) _install_workflow_service(monkeypatch, workflow=workflow) @@ -155,8 +152,7 @@ def test_instruction_generate_code_node(app, monkeypatch: pytest.MonkeyPatch) -> monkeypatch.setattr(generator_module, "current_account_with_tenant", lambda: (None, "t1")) app_model = SimpleNamespace(id="app-1") - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: app_model) - monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(generator_module.db, "session", SimpleNamespace(get=lambda *_args, **_kwargs: app_model)) workflow = SimpleNamespace( graph_dict={ diff --git 
a/api/tests/unit_tests/controllers/console/app/test_message.py b/api/tests/unit_tests/controllers/console/app/test_message.py deleted file mode 100644 index 3ffa53b6db..0000000000 --- a/api/tests/unit_tests/controllers/console/app/test_message.py +++ /dev/null @@ -1,320 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask, request -from werkzeug.exceptions import InternalServerError, NotFound -from werkzeug.local import LocalProxy - -from controllers.console.app.error import ( - ProviderModelCurrentlyNotSupportError, - ProviderNotInitializeError, - ProviderQuotaExceededError, -) -from controllers.console.app.message import ( - ChatMessageListApi, - ChatMessagesQuery, - FeedbackExportQuery, - MessageAnnotationCountApi, - MessageApi, - MessageFeedbackApi, - MessageFeedbackExportApi, - MessageFeedbackPayload, - MessageSuggestedQuestionApi, -) -from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDisabledError -from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError -from models import App, AppMode -from services.errors.conversation import ConversationNotExistsError -from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError - - -@pytest.fixture -def app(): - flask_app = Flask(__name__) - flask_app.config["TESTING"] = True - flask_app.config["RESTX_MASK_HEADER"] = "X-Fields" - return flask_app - - -@pytest.fixture -def mock_account(): - from models.account import Account, AccountStatus - - account = MagicMock(spec=Account) - account.id = "user_123" - account.timezone = "UTC" - account.status = AccountStatus.ACTIVE - account.is_admin_or_owner = True - account.current_tenant.current_role = "owner" - account.has_edit_permission = True - return account - - -@pytest.fixture -def mock_app_model(): - app_model = MagicMock(spec=App) - app_model.id = "app_123" - app_model.mode = AppMode.CHAT - app_model.tenant_id = "tenant_123" - return app_model - - -@pytest.fixture(autouse=True) -def mock_csrf(): - with patch("libs.login.check_csrf_token") as mock: - yield mock - - -import contextlib - - -@contextlib.contextmanager -def setup_test_context( - test_app, endpoint_class, route_path, method, mock_account, mock_app_model, payload=None, qs=None -): - with ( - patch("extensions.ext_database.db") as mock_db, - patch("controllers.console.app.wraps.db", mock_db), - patch("controllers.console.wraps.db", mock_db), - patch("controllers.console.app.message.db", mock_db), - patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch("controllers.console.app.message.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - ): - # Set up a generic query mock that usually returns mock_app_model when getting app - app_query_mock = MagicMock() - app_query_mock.filter.return_value.first.return_value = mock_app_model - app_query_mock.filter.return_value.filter.return_value.first.return_value = mock_app_model - app_query_mock.where.return_value.first.return_value = mock_app_model - app_query_mock.where.return_value.where.return_value.first.return_value = mock_app_model - - data_query_mock = MagicMock() - - def query_side_effect(*args, **kwargs): - if args and hasattr(args[0], "__name__") and args[0].__name__ == "App": - return app_query_mock - return data_query_mock - - 
mock_db.session.query.side_effect = query_side_effect - mock_db.data_query = data_query_mock - - # Let the caller override the stat db query logic - proxy_mock = LocalProxy(lambda: mock_account) - - query_string = "&".join([f"{k}={v}" for k, v in (qs or {}).items()]) - full_path = f"{route_path}?{query_string}" if qs else route_path - - with ( - patch("libs.login.current_user", proxy_mock), - patch("flask_login.current_user", proxy_mock), - patch("controllers.console.app.message.attach_message_extra_contents", return_value=None), - ): - with test_app.test_request_context(full_path, method=method, json=payload): - request.view_args = {"app_id": "app_123"} - - if "suggested-questions" in route_path: - # simplistic extraction for message_id - parts = route_path.split("chat-messages/") - if len(parts) > 1: - request.view_args["message_id"] = parts[1].split("/")[0] - elif "messages/" in route_path and "chat-messages" not in route_path: - parts = route_path.split("messages/") - if len(parts) > 1: - request.view_args["message_id"] = parts[1].split("/")[0] - - api_instance = endpoint_class() - - # Check if it has a dispatch_request or method - if hasattr(api_instance, method.lower()): - yield api_instance, mock_db, request.view_args - - -class TestMessageValidators: - def test_chat_messages_query_validators(self): - # Test empty_to_none - assert ChatMessagesQuery.empty_to_none("") is None - assert ChatMessagesQuery.empty_to_none("val") == "val" - - # Test validate_uuid - assert ChatMessagesQuery.validate_uuid(None) is None - assert ( - ChatMessagesQuery.validate_uuid("123e4567-e89b-12d3-a456-426614174000") - == "123e4567-e89b-12d3-a456-426614174000" - ) - - def test_message_feedback_validators(self): - assert ( - MessageFeedbackPayload.validate_message_id("123e4567-e89b-12d3-a456-426614174000") - == "123e4567-e89b-12d3-a456-426614174000" - ) - - def test_feedback_export_validators(self): - assert FeedbackExportQuery.parse_bool(None) is None - assert FeedbackExportQuery.parse_bool(True) is True - assert FeedbackExportQuery.parse_bool("1") is True - assert FeedbackExportQuery.parse_bool("0") is False - assert FeedbackExportQuery.parse_bool("off") is False - - with pytest.raises(ValueError): - FeedbackExportQuery.parse_bool("invalid") - - -class TestMessageEndpoints: - def test_chat_message_list_not_found(self, app, mock_account, mock_app_model): - with setup_test_context( - app, - ChatMessageListApi, - "/apps/app_123/chat-messages", - "GET", - mock_account, - mock_app_model, - qs={"conversation_id": "123e4567-e89b-12d3-a456-426614174000"}, - ) as (api, mock_db, v_args): - mock_db.data_query.where.return_value.first.return_value = None - - with pytest.raises(NotFound): - api.get(**v_args) - - def test_chat_message_list_success(self, app, mock_account, mock_app_model): - with setup_test_context( - app, - ChatMessageListApi, - "/apps/app_123/chat-messages", - "GET", - mock_account, - mock_app_model, - qs={"conversation_id": "123e4567-e89b-12d3-a456-426614174000", "limit": 1}, - ) as (api, mock_db, v_args): - mock_conv = MagicMock() - mock_conv.id = "123e4567-e89b-12d3-a456-426614174000" - mock_msg = MagicMock() - mock_msg.id = "msg_123" - mock_msg.feedbacks = [] - mock_msg.annotation = None - mock_msg.annotation_hit_history = None - mock_msg.agent_thoughts = [] - mock_msg.message_files = [] - mock_msg.extra_contents = [] - mock_msg.message = {} - mock_msg.message_metadata_dict = {} - - # mock returns - q_mock = mock_db.data_query - q_mock.where.return_value.first.side_effect = [mock_conv] - 
q_mock.where.return_value.order_by.return_value.limit.return_value.all.return_value = [mock_msg] - mock_db.session.scalar.return_value = False - - resp = api.get(**v_args) - assert resp["limit"] == 1 - assert resp["has_more"] is False - assert len(resp["data"]) == 1 - - def test_message_feedback_not_found(self, app, mock_account, mock_app_model): - with setup_test_context( - app, - MessageFeedbackApi, - "/apps/app_123/feedbacks", - "POST", - mock_account, - mock_app_model, - payload={"message_id": "123e4567-e89b-12d3-a456-426614174000"}, - ) as (api, mock_db, v_args): - mock_db.data_query.where.return_value.first.return_value = None - - with pytest.raises(NotFound): - api.post(**v_args) - - def test_message_feedback_success(self, app, mock_account, mock_app_model): - payload = {"message_id": "123e4567-e89b-12d3-a456-426614174000", "rating": "like"} - with setup_test_context( - app, MessageFeedbackApi, "/apps/app_123/feedbacks", "POST", mock_account, mock_app_model, payload=payload - ) as (api, mock_db, v_args): - mock_msg = MagicMock() - mock_msg.admin_feedback = None - mock_db.data_query.where.return_value.first.return_value = mock_msg - - resp = api.post(**v_args) - assert resp == {"result": "success"} - - def test_message_annotation_count(self, app, mock_account, mock_app_model): - with setup_test_context( - app, MessageAnnotationCountApi, "/apps/app_123/annotations/count", "GET", mock_account, mock_app_model - ) as (api, mock_db, v_args): - mock_db.data_query.where.return_value.count.return_value = 5 - - resp = api.get(**v_args) - assert resp == {"count": 5} - - @patch("controllers.console.app.message.MessageService") - def test_message_suggested_questions_success(self, mock_msg_srv, app, mock_account, mock_app_model): - mock_msg_srv.get_suggested_questions_after_answer.return_value = ["q1", "q2"] - - with setup_test_context( - app, - MessageSuggestedQuestionApi, - "/apps/app_123/chat-messages/msg_123/suggested-questions", - "GET", - mock_account, - mock_app_model, - ) as (api, mock_db, v_args): - resp = api.get(**v_args) - assert resp == {"data": ["q1", "q2"]} - - @pytest.mark.parametrize( - ("exc", "expected_exc"), - [ - (MessageNotExistsError, NotFound), - (ConversationNotExistsError, NotFound), - (ProviderTokenNotInitError, ProviderNotInitializeError), - (QuotaExceededError, ProviderQuotaExceededError), - (ModelCurrentlyNotSupportError, ProviderModelCurrentlyNotSupportError), - (SuggestedQuestionsAfterAnswerDisabledError, AppSuggestedQuestionsAfterAnswerDisabledError), - (Exception, InternalServerError), - ], - ) - @patch("controllers.console.app.message.MessageService") - def test_message_suggested_questions_errors( - self, mock_msg_srv, exc, expected_exc, app, mock_account, mock_app_model - ): - mock_msg_srv.get_suggested_questions_after_answer.side_effect = exc() - - with setup_test_context( - app, - MessageSuggestedQuestionApi, - "/apps/app_123/chat-messages/msg_123/suggested-questions", - "GET", - mock_account, - mock_app_model, - ) as (api, mock_db, v_args): - with pytest.raises(expected_exc): - api.get(**v_args) - - @patch("services.feedback_service.FeedbackService.export_feedbacks") - def test_message_feedback_export_success(self, mock_export, app, mock_account, mock_app_model): - mock_export.return_value = {"exported": True} - - with setup_test_context( - app, MessageFeedbackExportApi, "/apps/app_123/feedbacks/export", "GET", mock_account, mock_app_model - ) as (api, mock_db, v_args): - resp = api.get(**v_args) - assert resp == {"exported": True} - - def 
test_message_api_get_success(self, app, mock_account, mock_app_model): - with setup_test_context( - app, MessageApi, "/apps/app_123/messages/msg_123", "GET", mock_account, mock_app_model - ) as (api, mock_db, v_args): - mock_msg = MagicMock() - mock_msg.id = "msg_123" - mock_msg.feedbacks = [] - mock_msg.annotation = None - mock_msg.annotation_hit_history = None - mock_msg.agent_thoughts = [] - mock_msg.message_files = [] - mock_msg.extra_contents = [] - mock_msg.message = {} - mock_msg.message_metadata_dict = {} - - mock_db.data_query.where.return_value.first.return_value = mock_msg - - resp = api.get(**v_args) - assert resp["id"] == "msg_123" diff --git a/api/tests/unit_tests/controllers/console/app/test_model_config_api.py b/api/tests/unit_tests/controllers/console/app/test_model_config_api.py index 61d92bb5c7..a0e2edb8cf 100644 --- a/api/tests/unit_tests/controllers/console/app/test_model_config_api.py +++ b/api/tests/unit_tests/controllers/console/app/test_model_config_api.py @@ -92,10 +92,7 @@ def test_post_encrypts_agent_tool_parameters(app, monkeypatch: pytest.MonkeyPatc ) session = MagicMock() - query = MagicMock() - query.where.return_value = query - query.first.return_value = original_config - session.query.return_value = query + session.get.return_value = original_config monkeypatch.setattr(model_config_module.db, "session", session) monkeypatch.setattr( diff --git a/api/tests/unit_tests/controllers/console/app/test_statistic.py b/api/tests/unit_tests/controllers/console/app/test_statistic.py deleted file mode 100644 index beba23385d..0000000000 --- a/api/tests/unit_tests/controllers/console/app/test_statistic.py +++ /dev/null @@ -1,275 +0,0 @@ -from decimal import Decimal -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask, request -from werkzeug.local import LocalProxy - -from controllers.console.app.statistic import ( - AverageResponseTimeStatistic, - AverageSessionInteractionStatistic, - DailyConversationStatistic, - DailyMessageStatistic, - DailyTerminalsStatistic, - DailyTokenCostStatistic, - TokensPerSecondStatistic, - UserSatisfactionRateStatistic, -) -from models import App, AppMode - - -@pytest.fixture -def app(): - flask_app = Flask(__name__) - flask_app.config["TESTING"] = True - return flask_app - - -@pytest.fixture -def mock_account(): - from models.account import Account, AccountStatus - - account = MagicMock(spec=Account) - account.id = "user_123" - account.timezone = "UTC" - account.status = AccountStatus.ACTIVE - account.is_admin_or_owner = True - account.current_tenant.current_role = "owner" - account.has_edit_permission = True - return account - - -@pytest.fixture -def mock_app_model(): - app_model = MagicMock(spec=App) - app_model.id = "app_123" - app_model.mode = AppMode.CHAT - app_model.tenant_id = "tenant_123" - return app_model - - -@pytest.fixture(autouse=True) -def mock_csrf(): - with patch("libs.login.check_csrf_token") as mock: - yield mock - - -def setup_test_context( - test_app, endpoint_class, route_path, mock_account, mock_app_model, mock_rs, mock_parse_ret=(None, None) -): - with ( - patch("controllers.console.app.statistic.db") as mock_db_stat, - patch("controllers.console.app.wraps.db") as mock_db_wraps, - patch("controllers.console.wraps.db", mock_db_wraps), - patch( - "controllers.console.app.statistic.current_account_with_tenant", return_value=(mock_account, "tenant_123") - ), - patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - 
patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - ): - mock_conn = MagicMock() - mock_conn.execute.return_value = mock_rs - - mock_begin = MagicMock() - mock_begin.__enter__.return_value = mock_conn - mock_db_stat.engine.begin.return_value = mock_begin - - mock_query = MagicMock() - mock_query.filter.return_value.first.return_value = mock_app_model - mock_query.filter.return_value.filter.return_value.first.return_value = mock_app_model - mock_query.where.return_value.first.return_value = mock_app_model - mock_query.where.return_value.where.return_value.first.return_value = mock_app_model - mock_db_wraps.session.query.return_value = mock_query - - proxy_mock = LocalProxy(lambda: mock_account) - - with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock): - with test_app.test_request_context(route_path, method="GET"): - request.view_args = {"app_id": "app_123"} - api_instance = endpoint_class() - response = api_instance.get(app_id="app_123") - return response - - -class TestStatisticEndpoints: - def test_daily_message_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.message_count = 10 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - DailyMessageStatistic, - "/apps/app_123/statistics/daily-messages?start=2023-01-01 00:00&end=2023-01-02 00:00", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["message_count"] == 10 - - def test_daily_conversation_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.conversation_count = 5 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - DailyConversationStatistic, - "/apps/app_123/statistics/daily-conversations", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["conversation_count"] == 5 - - def test_daily_terminals_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.terminal_count = 2 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - DailyTerminalsStatistic, - "/apps/app_123/statistics/daily-end-users", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["terminal_count"] == 2 - - def test_daily_token_cost_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.token_count = 100 - mock_row.total_price = Decimal("0.02") - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - DailyTokenCostStatistic, - "/apps/app_123/statistics/token-costs", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["token_count"] == 100 - assert response.json["data"][0]["total_price"] == "0.02" - - def test_average_session_interaction_statistic(self, 
app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.interactions = Decimal("3.523") - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - AverageSessionInteractionStatistic, - "/apps/app_123/statistics/average-session-interactions", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["interactions"] == 3.52 - - def test_user_satisfaction_rate_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.message_count = 100 - mock_row.feedback_count = 10 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - UserSatisfactionRateStatistic, - "/apps/app_123/statistics/user-satisfaction-rate", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["rate"] == 100.0 - - def test_average_response_time_statistic(self, app, mock_account, mock_app_model): - mock_app_model.mode = AppMode.COMPLETION - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.latency = 1.234 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - AverageResponseTimeStatistic, - "/apps/app_123/statistics/average-response-time", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["latency"] == 1234.0 - - def test_tokens_per_second_statistic(self, app, mock_account, mock_app_model): - mock_row = MagicMock() - mock_row.date = "2023-01-01" - mock_row.tokens_per_second = 15.5 - mock_row.interactions = Decimal(0) - - with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)): - response = setup_test_context( - app, - TokensPerSecondStatistic, - "/apps/app_123/statistics/tokens-per-second", - mock_account, - mock_app_model, - [mock_row], - ) - assert response.status_code == 200 - assert response.json["data"][0]["tps"] == 15.5 - - @patch("controllers.console.app.statistic.parse_time_range") - def test_invalid_time_range(self, mock_parse, app, mock_account, mock_app_model): - mock_parse.side_effect = ValueError("Invalid time") - - from werkzeug.exceptions import BadRequest - - with pytest.raises(BadRequest): - setup_test_context( - app, - DailyMessageStatistic, - "/apps/app_123/statistics/daily-messages?start=invalid&end=invalid", - mock_account, - mock_app_model, - [], - ) - - @patch("controllers.console.app.statistic.parse_time_range") - def test_time_range_params_passed(self, mock_parse, app, mock_account, mock_app_model): - import datetime - - start = datetime.datetime.now() - end = datetime.datetime.now() - mock_parse.return_value = (start, end) - - response = setup_test_context( - app, - DailyMessageStatistic, - "/apps/app_123/statistics/daily-messages?start=something&end=something", - mock_account, - mock_app_model, - [], - ) - assert response.status_code == 200 - mock_parse.assert_called_once() diff --git a/api/tests/unit_tests/controllers/console/app/test_workflow_draft_variable.py b/api/tests/unit_tests/controllers/console/app/test_workflow_draft_variable.py deleted file mode 100644 index 9b5d47c208..0000000000 --- 
a/api/tests/unit_tests/controllers/console/app/test_workflow_draft_variable.py +++ /dev/null @@ -1,313 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask, request -from werkzeug.local import LocalProxy - -from controllers.console.app.error import DraftWorkflowNotExist -from controllers.console.app.workflow_draft_variable import ( - ConversationVariableCollectionApi, - EnvironmentVariableCollectionApi, - NodeVariableCollectionApi, - SystemVariableCollectionApi, - VariableApi, - VariableResetApi, - WorkflowVariableCollectionApi, -) -from controllers.web.error import InvalidArgumentError, NotFoundError -from models import App, AppMode -from models.enums import DraftVariableType - - -@pytest.fixture -def app(): - flask_app = Flask(__name__) - flask_app.config["TESTING"] = True - flask_app.config["RESTX_MASK_HEADER"] = "X-Fields" - return flask_app - - -@pytest.fixture -def mock_account(): - from models.account import Account, AccountStatus - - account = MagicMock(spec=Account) - account.id = "user_123" - account.timezone = "UTC" - account.status = AccountStatus.ACTIVE - account.is_admin_or_owner = True - account.current_tenant.current_role = "owner" - account.has_edit_permission = True - return account - - -@pytest.fixture -def mock_app_model(): - app_model = MagicMock(spec=App) - app_model.id = "app_123" - app_model.mode = AppMode.WORKFLOW - app_model.tenant_id = "tenant_123" - return app_model - - -@pytest.fixture(autouse=True) -def mock_csrf(): - with patch("libs.login.check_csrf_token") as mock: - yield mock - - -def setup_test_context(test_app, endpoint_class, route_path, method, mock_account, mock_app_model, payload=None): - with ( - patch("controllers.console.app.wraps.db") as mock_db_wraps, - patch("controllers.console.wraps.db", mock_db_wraps), - patch("controllers.console.app.workflow_draft_variable.db"), - patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - ): - mock_query = MagicMock() - mock_query.filter.return_value.first.return_value = mock_app_model - mock_query.filter.return_value.filter.return_value.first.return_value = mock_app_model - mock_query.where.return_value.first.return_value = mock_app_model - mock_query.where.return_value.where.return_value.first.return_value = mock_app_model - mock_db_wraps.session.query.return_value = mock_query - - proxy_mock = LocalProxy(lambda: mock_account) - - with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock): - with test_app.test_request_context(route_path, method=method, json=payload): - request.view_args = {"app_id": "app_123"} - # extract node_id or variable_id from path manually since view_args overrides - if "nodes/" in route_path: - request.view_args["node_id"] = route_path.split("nodes/")[1].split("/")[0] - if "variables/" in route_path: - # simplistic extraction - parts = route_path.split("variables/") - if len(parts) > 1 and parts[1] and parts[1] != "reset": - request.view_args["variable_id"] = parts[1].split("/")[0] - - api_instance = endpoint_class() - # we just call dispatch_request to avoid manual argument passing - if hasattr(api_instance, method.lower()): - func = getattr(api_instance, method.lower()) - return func(**request.view_args) - - -class TestWorkflowDraftVariableEndpoints: - @staticmethod - def _mock_workflow_variable(variable_type: DraftVariableType = 
DraftVariableType.NODE) -> MagicMock: - class DummyValueType: - def exposed_type(self): - return DraftVariableType.NODE - - mock_var = MagicMock() - mock_var.app_id = "app_123" - mock_var.id = "var_123" - mock_var.name = "test_var" - mock_var.description = "" - mock_var.get_variable_type.return_value = variable_type - mock_var.get_selector.return_value = [] - mock_var.value_type = DummyValueType() - mock_var.edited = False - mock_var.visible = True - mock_var.file_id = None - mock_var.variable_file = None - mock_var.is_truncated.return_value = False - mock_var.get_value.return_value.model_copy.return_value.value = "test_value" - return mock_var - - @patch("controllers.console.app.workflow_draft_variable.WorkflowService") - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_workflow_variable_collection_get_success( - self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model - ): - mock_wf_srv.return_value.is_workflow_exist.return_value = True - from services.workflow_draft_variable_service import WorkflowDraftVariableList - - mock_draft_srv.return_value.list_variables_without_values.return_value = WorkflowDraftVariableList( - variables=[], total=0 - ) - - resp = setup_test_context( - app, - WorkflowVariableCollectionApi, - "/apps/app_123/workflows/draft/variables?page=1&limit=20", - "GET", - mock_account, - mock_app_model, - ) - assert resp == {"items": [], "total": 0} - - @patch("controllers.console.app.workflow_draft_variable.WorkflowService") - def test_workflow_variable_collection_get_not_exist(self, mock_wf_srv, app, mock_account, mock_app_model): - mock_wf_srv.return_value.is_workflow_exist.return_value = False - - with pytest.raises(DraftWorkflowNotExist): - setup_test_context( - app, - WorkflowVariableCollectionApi, - "/apps/app_123/workflows/draft/variables", - "GET", - mock_account, - mock_app_model, - ) - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_workflow_variable_collection_delete(self, mock_draft_srv, app, mock_account, mock_app_model): - resp = setup_test_context( - app, - WorkflowVariableCollectionApi, - "/apps/app_123/workflows/draft/variables", - "DELETE", - mock_account, - mock_app_model, - ) - assert resp.status_code == 204 - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_node_variable_collection_get_success(self, mock_draft_srv, app, mock_account, mock_app_model): - from services.workflow_draft_variable_service import WorkflowDraftVariableList - - mock_draft_srv.return_value.list_node_variables.return_value = WorkflowDraftVariableList(variables=[]) - resp = setup_test_context( - app, - NodeVariableCollectionApi, - "/apps/app_123/workflows/draft/nodes/node_123/variables", - "GET", - mock_account, - mock_app_model, - ) - assert resp == {"items": []} - - def test_node_variable_collection_get_invalid_node_id(self, app, mock_account, mock_app_model): - with pytest.raises(InvalidArgumentError): - setup_test_context( - app, - NodeVariableCollectionApi, - "/apps/app_123/workflows/draft/nodes/sys/variables", - "GET", - mock_account, - mock_app_model, - ) - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_node_variable_collection_delete(self, mock_draft_srv, app, mock_account, mock_app_model): - resp = setup_test_context( - app, - NodeVariableCollectionApi, - "/apps/app_123/workflows/draft/nodes/node_123/variables", - "DELETE", - mock_account, - mock_app_model, - ) 
- assert resp.status_code == 204 - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_variable_api_get_success(self, mock_draft_srv, app, mock_account, mock_app_model): - mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable() - - resp = setup_test_context( - app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "GET", mock_account, mock_app_model - ) - assert resp["id"] == "var_123" - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_variable_api_get_not_found(self, mock_draft_srv, app, mock_account, mock_app_model): - mock_draft_srv.return_value.get_variable.return_value = None - - with pytest.raises(NotFoundError): - setup_test_context( - app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "GET", mock_account, mock_app_model - ) - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_variable_api_patch_success(self, mock_draft_srv, app, mock_account, mock_app_model): - mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable() - - resp = setup_test_context( - app, - VariableApi, - "/apps/app_123/workflows/draft/variables/var_123", - "PATCH", - mock_account, - mock_app_model, - payload={"name": "new_name"}, - ) - assert resp["id"] == "var_123" - mock_draft_srv.return_value.update_variable.assert_called_once() - - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_variable_api_delete_success(self, mock_draft_srv, app, mock_account, mock_app_model): - mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable() - - resp = setup_test_context( - app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "DELETE", mock_account, mock_app_model - ) - assert resp.status_code == 204 - mock_draft_srv.return_value.delete_variable.assert_called_once() - - @patch("controllers.console.app.workflow_draft_variable.WorkflowService") - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_variable_reset_api_put_success(self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model): - mock_wf_srv.return_value.get_draft_workflow.return_value = MagicMock() - mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable() - mock_draft_srv.return_value.reset_variable.return_value = None # means no content - - resp = setup_test_context( - app, - VariableResetApi, - "/apps/app_123/workflows/draft/variables/var_123/reset", - "PUT", - mock_account, - mock_app_model, - ) - assert resp.status_code == 204 - - @patch("controllers.console.app.workflow_draft_variable.WorkflowService") - @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_conversation_variable_collection_get(self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model): - mock_wf_srv.return_value.get_draft_workflow.return_value = MagicMock() - from services.workflow_draft_variable_service import WorkflowDraftVariableList - - mock_draft_srv.return_value.list_conversation_variables.return_value = WorkflowDraftVariableList(variables=[]) - - resp = setup_test_context( - app, - ConversationVariableCollectionApi, - "/apps/app_123/workflows/draft/conversation-variables", - "GET", - mock_account, - mock_app_model, - ) - assert resp == {"items": []} - - 
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService") - def test_system_variable_collection_get(self, mock_draft_srv, app, mock_account, mock_app_model): - from services.workflow_draft_variable_service import WorkflowDraftVariableList - - mock_draft_srv.return_value.list_system_variables.return_value = WorkflowDraftVariableList(variables=[]) - - resp = setup_test_context( - app, - SystemVariableCollectionApi, - "/apps/app_123/workflows/draft/system-variables", - "GET", - mock_account, - mock_app_model, - ) - assert resp == {"items": []} - - @patch("controllers.console.app.workflow_draft_variable.WorkflowService") - def test_environment_variable_collection_get(self, mock_wf_srv, app, mock_account, mock_app_model): - mock_wf = MagicMock() - mock_wf.environment_variables = [] - mock_wf_srv.return_value.get_draft_workflow.return_value = mock_wf - - resp = setup_test_context( - app, - EnvironmentVariableCollectionApi, - "/apps/app_123/workflows/draft/environment-variables", - "GET", - mock_account, - mock_app_model, - ) - assert resp == {"items": []} diff --git a/api/tests/unit_tests/controllers/console/app/test_wraps.py b/api/tests/unit_tests/controllers/console/app/test_wraps.py index 7664e492da..b5f751f5a5 100644 --- a/api/tests/unit_tests/controllers/console/app/test_wraps.py +++ b/api/tests/unit_tests/controllers/console/app/test_wraps.py @@ -11,10 +11,8 @@ from models.model import AppMode def test_get_app_model_injects_model(monkeypatch: pytest.MonkeyPatch) -> None: app_model = SimpleNamespace(id="app-1", mode=AppMode.CHAT.value, status="normal", tenant_id="t1") - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: app_model) - monkeypatch.setattr(wraps_module, "current_account_with_tenant", lambda: (None, "t1")) - monkeypatch.setattr(wraps_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(wraps_module.db, "session", SimpleNamespace(scalar=lambda *_args, **_kwargs: app_model)) @wraps_module.get_app_model def handler(app_model): @@ -25,10 +23,8 @@ def test_get_app_model_injects_model(monkeypatch: pytest.MonkeyPatch) -> None: def test_get_app_model_rejects_wrong_mode(monkeypatch: pytest.MonkeyPatch) -> None: app_model = SimpleNamespace(id="app-1", mode=AppMode.CHAT.value, status="normal", tenant_id="t1") - query = SimpleNamespace(where=lambda *_args, **_kwargs: query, first=lambda: app_model) - monkeypatch.setattr(wraps_module, "current_account_with_tenant", lambda: (None, "t1")) - monkeypatch.setattr(wraps_module.db, "session", SimpleNamespace(query=lambda *_args, **_kwargs: query)) + monkeypatch.setattr(wraps_module.db, "session", SimpleNamespace(scalar=lambda *_args, **_kwargs: app_model)) @wraps_module.get_app_model(mode=[AppMode.COMPLETION]) def handler(app_model): diff --git a/api/tests/unit_tests/controllers/console/auth/test_data_source_bearer_auth.py b/api/tests/unit_tests/controllers/console/auth/test_data_source_bearer_auth.py deleted file mode 100644 index bc4c7e0993..0000000000 --- a/api/tests/unit_tests/controllers/console/auth/test_data_source_bearer_auth.py +++ /dev/null @@ -1,209 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask - -from controllers.console.auth.data_source_bearer_auth import ( - ApiKeyAuthDataSource, - ApiKeyAuthDataSourceBinding, - ApiKeyAuthDataSourceBindingDelete, -) -from controllers.console.auth.error import ApiKeyAuthFailedError - - -class TestApiKeyAuthDataSource: - @pytest.fixture - def app(self): - 
app = Flask(__name__) - app.config["TESTING"] = True - app.config["WTF_CSRF_ENABLED"] = False - return app - - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.get_provider_auth_list") - def test_get_api_key_auth_data_source(self, mock_get_list, mock_db, mock_csrf, app): - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - mock_binding = MagicMock() - mock_binding.id = "bind_123" - mock_binding.category = "api_key" - mock_binding.provider = "custom_provider" - mock_binding.disabled = False - mock_binding.created_at.timestamp.return_value = 1620000000 - mock_binding.updated_at.timestamp.return_value = 1620000001 - - mock_get_list.return_value = [mock_binding] - - with ( - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch( - "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant", - return_value=(mock_account, "tenant_123"), - ), - ): - with app.test_request_context("/console/api/api-key-auth/data-source", method="GET"): - proxy_mock = MagicMock() - proxy_mock._get_current_object.return_value = mock_account - with patch("libs.login.current_user", proxy_mock): - api_instance = ApiKeyAuthDataSource() - response = api_instance.get() - - assert "sources" in response - assert len(response["sources"]) == 1 - assert response["sources"][0]["provider"] == "custom_provider" - - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.get_provider_auth_list") - def test_get_api_key_auth_data_source_empty(self, mock_get_list, mock_db, mock_csrf, app): - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - mock_get_list.return_value = None - - with ( - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch( - "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant", - return_value=(mock_account, "tenant_123"), - ), - ): - with app.test_request_context("/console/api/api-key-auth/data-source", method="GET"): - proxy_mock = MagicMock() - proxy_mock._get_current_object.return_value = mock_account - with patch("libs.login.current_user", proxy_mock): - api_instance = ApiKeyAuthDataSource() - response = api_instance.get() - - assert "sources" in response - assert len(response["sources"]) == 0 - - -class TestApiKeyAuthDataSourceBinding: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - app.config["WTF_CSRF_ENABLED"] = False - return app - - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args") - def test_create_binding_successful(self, mock_validate, mock_create, mock_db, mock_csrf, app): - from models.account import Account, AccountStatus - - mock_account 
= MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - with ( - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch( - "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant", - return_value=(mock_account, "tenant_123"), - ), - ): - with app.test_request_context( - "/console/api/api-key-auth/data-source/binding", - method="POST", - json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}}, - ): - proxy_mock = MagicMock() - proxy_mock._get_current_object.return_value = mock_account - with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock): - api_instance = ApiKeyAuthDataSourceBinding() - response = api_instance.post() - - assert response[0]["result"] == "success" - assert response[1] == 200 - mock_validate.assert_called_once() - mock_create.assert_called_once() - - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args") - def test_create_binding_failure(self, mock_validate, mock_create, mock_db, mock_csrf, app): - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - mock_create.side_effect = ValueError("Invalid structure") - - with ( - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch( - "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant", - return_value=(mock_account, "tenant_123"), - ), - ): - with app.test_request_context( - "/console/api/api-key-auth/data-source/binding", - method="POST", - json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}}, - ): - proxy_mock = MagicMock() - proxy_mock._get_current_object.return_value = mock_account - with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock): - api_instance = ApiKeyAuthDataSourceBinding() - with pytest.raises(ApiKeyAuthFailedError, match="Invalid structure"): - api_instance.post() - - -class TestApiKeyAuthDataSourceBindingDelete: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - app.config["WTF_CSRF_ENABLED"] = False - return app - - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.delete_provider_auth") - def test_delete_binding_successful(self, mock_delete, mock_db, mock_csrf, app): - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - with ( - patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")), - patch( - "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant", - return_value=(mock_account, "tenant_123"), - ), - ): 
- with app.test_request_context("/console/api/api-key-auth/data-source/binding_123", method="DELETE"): - proxy_mock = MagicMock() - proxy_mock._get_current_object.return_value = mock_account - with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock): - api_instance = ApiKeyAuthDataSourceBindingDelete() - response = api_instance.delete("binding_123") - - assert response[0]["result"] == "success" - assert response[1] == 204 - mock_delete.assert_called_once_with("tenant_123", "binding_123") diff --git a/api/tests/unit_tests/controllers/console/auth/test_data_source_oauth.py b/api/tests/unit_tests/controllers/console/auth/test_data_source_oauth.py deleted file mode 100644 index f369565946..0000000000 --- a/api/tests/unit_tests/controllers/console/auth/test_data_source_oauth.py +++ /dev/null @@ -1,192 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask -from werkzeug.local import LocalProxy - -from controllers.console.auth.data_source_oauth import ( - OAuthDataSource, - OAuthDataSourceBinding, - OAuthDataSourceCallback, - OAuthDataSourceSync, -) - - -class TestOAuthDataSource: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - @patch("flask_login.current_user") - @patch("libs.login.current_user") - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.data_source_oauth.dify_config.NOTION_INTEGRATION_TYPE", None) - def test_get_oauth_url_successful( - self, mock_db, mock_csrf, mock_libs_user, mock_flask_user, mock_get_providers, app - ): - mock_oauth_provider = MagicMock() - mock_oauth_provider.get_authorization_url.return_value = "http://oauth.provider/auth" - mock_get_providers.return_value = {"notion": mock_oauth_provider} - - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - mock_libs_user.return_value = mock_account - mock_flask_user.return_value = mock_account - - # also patch current_account_with_tenant - with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())): - with app.test_request_context("/console/api/oauth/data-source/notion", method="GET"): - proxy_mock = LocalProxy(lambda: mock_account) - with patch("libs.login.current_user", proxy_mock): - api_instance = OAuthDataSource() - response = api_instance.get("notion") - - assert response[0]["data"] == "http://oauth.provider/auth" - assert response[1] == 200 - mock_oauth_provider.get_authorization_url.assert_called_once() - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - @patch("flask_login.current_user") - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - def test_get_oauth_url_invalid_provider(self, mock_db, mock_csrf, mock_flask_user, mock_get_providers, app): - mock_get_providers.return_value = {"notion": MagicMock()} - - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - with 
patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())): - with app.test_request_context("/console/api/oauth/data-source/unknown_provider", method="GET"): - proxy_mock = LocalProxy(lambda: mock_account) - with patch("libs.login.current_user", proxy_mock): - api_instance = OAuthDataSource() - response = api_instance.get("unknown_provider") - - assert response[0]["error"] == "Invalid provider" - assert response[1] == 400 - - -class TestOAuthDataSourceCallback: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - def test_oauth_callback_successful(self, mock_get_providers, app): - provider_mock = MagicMock() - mock_get_providers.return_value = {"notion": provider_mock} - - with app.test_request_context("/console/api/oauth/data-source/notion/callback?code=mock_code", method="GET"): - api_instance = OAuthDataSourceCallback() - response = api_instance.get("notion") - - assert response.status_code == 302 - assert "code=mock_code" in response.location - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - def test_oauth_callback_missing_code(self, mock_get_providers, app): - provider_mock = MagicMock() - mock_get_providers.return_value = {"notion": provider_mock} - - with app.test_request_context("/console/api/oauth/data-source/notion/callback", method="GET"): - api_instance = OAuthDataSourceCallback() - response = api_instance.get("notion") - - assert response.status_code == 302 - assert "error=Access denied" in response.location - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - def test_oauth_callback_invalid_provider(self, mock_get_providers, app): - mock_get_providers.return_value = {"notion": MagicMock()} - - with app.test_request_context("/console/api/oauth/data-source/invalid/callback?code=mock_code", method="GET"): - api_instance = OAuthDataSourceCallback() - response = api_instance.get("invalid") - - assert response[0]["error"] == "Invalid provider" - assert response[1] == 400 - - -class TestOAuthDataSourceBinding: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - def test_get_binding_successful(self, mock_get_providers, app): - mock_provider = MagicMock() - mock_provider.get_access_token.return_value = None - mock_get_providers.return_value = {"notion": mock_provider} - - with app.test_request_context("/console/api/oauth/data-source/notion/binding?code=auth_code_123", method="GET"): - api_instance = OAuthDataSourceBinding() - response = api_instance.get("notion") - - assert response[0]["result"] == "success" - assert response[1] == 200 - mock_provider.get_access_token.assert_called_once_with("auth_code_123") - - @patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - def test_get_binding_missing_code(self, mock_get_providers, app): - mock_get_providers.return_value = {"notion": MagicMock()} - - with app.test_request_context("/console/api/oauth/data-source/notion/binding?code=", method="GET"): - api_instance = OAuthDataSourceBinding() - response = api_instance.get("notion") - - assert response[0]["error"] == "Invalid code" - assert response[1] == 400 - - -class TestOAuthDataSourceSync: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - 
@patch("controllers.console.auth.data_source_oauth.get_oauth_providers") - @patch("libs.login.check_csrf_token") - @patch("controllers.console.wraps.db") - def test_sync_successful(self, mock_db, mock_csrf, mock_get_providers, app): - mock_provider = MagicMock() - mock_provider.sync_data_source.return_value = None - mock_get_providers.return_value = {"notion": mock_provider} - - from models.account import Account, AccountStatus - - mock_account = MagicMock(spec=Account) - mock_account.id = "user_123" - mock_account.status = AccountStatus.ACTIVE - mock_account.is_admin_or_owner = True - mock_account.current_tenant.current_role = "owner" - - with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())): - with app.test_request_context("/console/api/oauth/data-source/notion/binding_123/sync", method="GET"): - proxy_mock = LocalProxy(lambda: mock_account) - with patch("libs.login.current_user", proxy_mock): - api_instance = OAuthDataSourceSync() - # The route pattern uses , so we just pass a string for unit testing - response = api_instance.get("notion", "binding_123") - - assert response[0]["result"] == "success" - assert response[1] == 200 - mock_provider.sync_data_source.assert_called_once_with("binding_123") diff --git a/api/tests/unit_tests/controllers/console/auth/test_oauth_server.py b/api/tests/unit_tests/controllers/console/auth/test_oauth_server.py deleted file mode 100644 index fc5663e72d..0000000000 --- a/api/tests/unit_tests/controllers/console/auth/test_oauth_server.py +++ /dev/null @@ -1,417 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest -from flask import Flask -from werkzeug.exceptions import BadRequest, NotFound - -from controllers.console.auth.oauth_server import ( - OAuthServerAppApi, - OAuthServerUserAccountApi, - OAuthServerUserAuthorizeApi, - OAuthServerUserTokenApi, -) - - -class TestOAuthServerAppApi: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @pytest.fixture - def mock_oauth_provider_app(self): - from models.model import OAuthProviderApp - - oauth_app = MagicMock(spec=OAuthProviderApp) - oauth_app.client_id = "test_client_id" - oauth_app.redirect_uris = ["http://localhost/callback"] - oauth_app.app_icon = "icon_url" - oauth_app.app_label = "Test App" - oauth_app.scope = "read,write" - return oauth_app - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_successful_post(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider", - method="POST", - json={"client_id": "test_client_id", "redirect_uri": "http://localhost/callback"}, - ): - api_instance = OAuthServerAppApi() - response = api_instance.post() - - assert response["app_icon"] == "icon_url" - assert response["app_label"] == "Test App" - assert response["scope"] == "read,write" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_invalid_redirect_uri(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider", - method="POST", - json={"client_id": 
"test_client_id", "redirect_uri": "http://invalid/callback"}, - ): - api_instance = OAuthServerAppApi() - with pytest.raises(BadRequest, match="redirect_uri is invalid"): - api_instance.post() - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_invalid_client_id(self, mock_get_app, mock_db, app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = None - - with app.test_request_context( - "/oauth/provider", - method="POST", - json={"client_id": "test_invalid_client_id", "redirect_uri": "http://localhost/callback"}, - ): - api_instance = OAuthServerAppApi() - with pytest.raises(NotFound, match="client_id is invalid"): - api_instance.post() - - -class TestOAuthServerUserAuthorizeApi: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @pytest.fixture - def mock_oauth_provider_app(self): - oauth_app = MagicMock() - oauth_app.client_id = "test_client_id" - return oauth_app - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - @patch("controllers.console.auth.oauth_server.current_account_with_tenant") - @patch("controllers.console.wraps.current_account_with_tenant") - @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_authorization_code") - @patch("libs.login.check_csrf_token") - def test_successful_authorize( - self, mock_csrf, mock_sign, mock_wrap_current, mock_current, mock_get_app, mock_db, app, mock_oauth_provider_app - ): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - mock_account = MagicMock() - mock_account.id = "user_123" - from models.account import AccountStatus - - mock_account.status = AccountStatus.ACTIVE - - mock_current.return_value = (mock_account, MagicMock()) - mock_wrap_current.return_value = (mock_account, MagicMock()) - - mock_sign.return_value = "auth_code_123" - - with app.test_request_context("/oauth/provider/authorize", method="POST", json={"client_id": "test_client_id"}): - with patch("libs.login.current_user", mock_account): - api_instance = OAuthServerUserAuthorizeApi() - response = api_instance.post() - - assert response["code"] == "auth_code_123" - mock_sign.assert_called_once_with("test_client_id", "user_123") - - -class TestOAuthServerUserTokenApi: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @pytest.fixture - def mock_oauth_provider_app(self): - from models.model import OAuthProviderApp - - oauth_app = MagicMock(spec=OAuthProviderApp) - oauth_app.client_id = "test_client_id" - oauth_app.client_secret = "test_secret" - oauth_app.redirect_uris = ["http://localhost/callback"] - return oauth_app - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token") - def test_authorization_code_grant(self, mock_sign, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - mock_sign.return_value = ("access_123", "refresh_123") - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - 
"grant_type": "authorization_code", - "code": "auth_code", - "client_secret": "test_secret", - "redirect_uri": "http://localhost/callback", - }, - ): - api_instance = OAuthServerUserTokenApi() - response = api_instance.post() - - assert response["access_token"] == "access_123" - assert response["refresh_token"] == "refresh_123" - assert response["token_type"] == "Bearer" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_authorization_code_grant_missing_code(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - "grant_type": "authorization_code", - "client_secret": "test_secret", - "redirect_uri": "http://localhost/callback", - }, - ): - api_instance = OAuthServerUserTokenApi() - with pytest.raises(BadRequest, match="code is required"): - api_instance.post() - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_authorization_code_grant_invalid_secret(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - "grant_type": "authorization_code", - "code": "auth_code", - "client_secret": "invalid_secret", - "redirect_uri": "http://localhost/callback", - }, - ): - api_instance = OAuthServerUserTokenApi() - with pytest.raises(BadRequest, match="client_secret is invalid"): - api_instance.post() - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_authorization_code_grant_invalid_redirect_uri(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - "grant_type": "authorization_code", - "code": "auth_code", - "client_secret": "test_secret", - "redirect_uri": "http://invalid/callback", - }, - ): - api_instance = OAuthServerUserTokenApi() - with pytest.raises(BadRequest, match="redirect_uri is invalid"): - api_instance.post() - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token") - def test_refresh_token_grant(self, mock_sign, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - mock_sign.return_value = ("new_access", "new_refresh") - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={"client_id": "test_client_id", "grant_type": "refresh_token", "refresh_token": "refresh_123"}, - ): - api_instance = OAuthServerUserTokenApi() - response = api_instance.post() - - assert response["access_token"] == "new_access" - assert response["refresh_token"] == "new_refresh" - 
- @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_refresh_token_grant_missing_token(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - "grant_type": "refresh_token", - }, - ): - api_instance = OAuthServerUserTokenApi() - with pytest.raises(BadRequest, match="refresh_token is required"): - api_instance.post() - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_invalid_grant_type(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/token", - method="POST", - json={ - "client_id": "test_client_id", - "grant_type": "invalid_grant", - }, - ): - api_instance = OAuthServerUserTokenApi() - with pytest.raises(BadRequest, match="invalid grant_type"): - api_instance.post() - - -class TestOAuthServerUserAccountApi: - @pytest.fixture - def app(self): - app = Flask(__name__) - app.config["TESTING"] = True - return app - - @pytest.fixture - def mock_oauth_provider_app(self): - from models.model import OAuthProviderApp - - oauth_app = MagicMock(spec=OAuthProviderApp) - oauth_app.client_id = "test_client_id" - return oauth_app - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - @patch("controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token") - def test_successful_account_retrieval(self, mock_validate, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - mock_account = MagicMock() - mock_account.name = "Test User" - mock_account.email = "test@example.com" - mock_account.avatar = "avatar_url" - mock_account.interface_language = "en-US" - mock_account.timezone = "UTC" - mock_validate.return_value = mock_account - - with app.test_request_context( - "/oauth/provider/account", - method="POST", - json={"client_id": "test_client_id"}, - headers={"Authorization": "Bearer valid_access_token"}, - ): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response["name"] == "Test User" - assert response["email"] == "test@example.com" - assert response["avatar"] == "avatar_url" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_missing_authorization_header(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context("/oauth/provider/account", method="POST", json={"client_id": "test_client_id"}): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response.status_code == 401 - assert response.json["error"] == "Authorization header is required" - - @patch("controllers.console.wraps.db") - 
@patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_invalid_authorization_header_format(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/account", - method="POST", - json={"client_id": "test_client_id"}, - headers={"Authorization": "InvalidFormat"}, - ): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response.status_code == 401 - assert response.json["error"] == "Invalid Authorization header format" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_invalid_token_type(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/account", - method="POST", - json={"client_id": "test_client_id"}, - headers={"Authorization": "Basic something"}, - ): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response.status_code == 401 - assert response.json["error"] == "token_type is invalid" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - def test_missing_access_token(self, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - - with app.test_request_context( - "/oauth/provider/account", - method="POST", - json={"client_id": "test_client_id"}, - headers={"Authorization": "Bearer "}, - ): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response.status_code == 401 - assert response.json["error"] == "Invalid Authorization header format" - - @patch("controllers.console.wraps.db") - @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app") - @patch("controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token") - def test_invalid_access_token(self, mock_validate, mock_get_app, mock_db, app, mock_oauth_provider_app): - mock_db.session.query.return_value.first.return_value = MagicMock() - mock_get_app.return_value = mock_oauth_provider_app - mock_validate.return_value = None - - with app.test_request_context( - "/oauth/provider/account", - method="POST", - json={"client_id": "test_client_id"}, - headers={"Authorization": "Bearer invalid_token"}, - ): - api_instance = OAuthServerUserAccountApi() - response = api_instance.post() - - assert response.status_code == 401 - assert response.json["error"] == "access_token or client_id is invalid" diff --git a/api/tests/unit_tests/controllers/console/datasets/test_data_source.py b/api/tests/unit_tests/controllers/console/datasets/test_data_source.py index 3060062adf..d841f67f9b 100644 --- a/api/tests/unit_tests/controllers/console/datasets/test_data_source.py +++ b/api/tests/unit_tests/controllers/console/datasets/test_data_source.py @@ -11,6 +11,7 @@ from controllers.console.datasets.data_source import ( DataSourceNotionDocumentSyncApi, DataSourceNotionListApi, ) +from core.rag.index_processor.constant.index_type import IndexStructureType def unwrap(func): @@ -343,7 +344,7 @@ class 
TestDataSourceNotionApi: } ], "process_rule": {"rules": {}}, - "doc_form": "text_model", + "doc_form": IndexStructureType.PARAGRAPH_INDEX, "doc_language": "English", } diff --git a/api/tests/unit_tests/controllers/console/datasets/test_datasets.py b/api/tests/unit_tests/controllers/console/datasets/test_datasets.py index 0ee76e504b..68a7b30b9e 100644 --- a/api/tests/unit_tests/controllers/console/datasets/test_datasets.py +++ b/api/tests/unit_tests/controllers/console/datasets/test_datasets.py @@ -28,6 +28,7 @@ from controllers.console.datasets.datasets import ( from controllers.console.datasets.error import DatasetInUseError, DatasetNameDuplicateError, IndexingEstimateError from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError from core.provider_manager import ProviderManager +from core.rag.index_processor.constant.index_type import IndexStructureType from extensions.storage.storage_type import StorageType from models.enums import CreatorUserRole from models.model import ApiToken, UploadFile @@ -1146,7 +1147,7 @@ class TestDatasetIndexingEstimateApi: }, "process_rule": {"chunk_size": 100}, "indexing_technique": "high_quality", - "doc_form": "text_model", + "doc_form": IndexStructureType.PARAGRAPH_INDEX, "doc_language": "English", "dataset_id": None, } diff --git a/api/tests/unit_tests/controllers/console/datasets/test_datasets_document.py b/api/tests/unit_tests/controllers/console/datasets/test_datasets_document.py index f23dd5b44a..f08f21ee14 100644 --- a/api/tests/unit_tests/controllers/console/datasets/test_datasets_document.py +++ b/api/tests/unit_tests/controllers/console/datasets/test_datasets_document.py @@ -30,6 +30,7 @@ from controllers.console.datasets.error import ( InvalidActionError, InvalidMetadataError, ) +from core.rag.index_processor.constant.index_type import IndexStructureType from models.enums import DataSourceType, IndexingStatus @@ -66,7 +67,7 @@ def document(): indexing_status=IndexingStatus.INDEXING, data_source_type=DataSourceType.UPLOAD_FILE, data_source_info_dict={"upload_file_id": "file-1"}, - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, archived=False, is_paused=False, dataset_process_rule=None, @@ -765,8 +766,8 @@ class TestDocumentGenerateSummaryApi: summary_index_setting={"enable": True}, ) - doc1 = MagicMock(id="doc-1", doc_form="qa_model") - doc2 = MagicMock(id="doc-2", doc_form="text") + doc1 = MagicMock(id="doc-1", doc_form=IndexStructureType.QA_INDEX) + doc2 = MagicMock(id="doc-2", doc_form=IndexStructureType.PARAGRAPH_INDEX) payload = {"document_list": ["doc-1", "doc-2"]} @@ -822,7 +823,7 @@ class TestDocumentIndexingEstimateApi: data_source_type=DataSourceType.UPLOAD_FILE, data_source_info_dict={"upload_file_id": "file-1"}, tenant_id="tenant-1", - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, dataset_process_rule=None, ) @@ -849,7 +850,7 @@ class TestDocumentIndexingEstimateApi: data_source_type=DataSourceType.UPLOAD_FILE, data_source_info_dict={"upload_file_id": "file-1"}, tenant_id="tenant-1", - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, dataset_process_rule=None, ) @@ -973,7 +974,7 @@ class TestDocumentBatchIndexingEstimateApi: "mode": "single", "only_main_content": True, }, - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) with ( @@ -1001,7 +1002,7 @@ class TestDocumentBatchIndexingEstimateApi: "notion_page_id": "p1", "type": "page", }, - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) with ( @@ -1024,7 +1025,7 @@ class 
TestDocumentBatchIndexingEstimateApi: indexing_status=IndexingStatus.INDEXING, data_source_type="unknown", data_source_info_dict={}, - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) with app.test_request_context("/"), patch.object(api, "get_batch_documents", return_value=[document]): @@ -1353,7 +1354,7 @@ class TestDocumentIndexingEdgeCases: data_source_type=DataSourceType.UPLOAD_FILE, data_source_info_dict={"upload_file_id": "file-1"}, tenant_id="tenant-1", - doc_form="text", + doc_form=IndexStructureType.PARAGRAPH_INDEX, dataset_process_rule=None, ) diff --git a/api/tests/unit_tests/controllers/console/datasets/test_datasets_segments.py b/api/tests/unit_tests/controllers/console/datasets/test_datasets_segments.py index e67e4daad9..1482499c41 100644 --- a/api/tests/unit_tests/controllers/console/datasets/test_datasets_segments.py +++ b/api/tests/unit_tests/controllers/console/datasets/test_datasets_segments.py @@ -24,6 +24,7 @@ from controllers.console.datasets.error import ( InvalidActionError, ) from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import ChildChunk, DocumentSegment from models.model import UploadFile @@ -366,7 +367,7 @@ class TestDatasetDocumentSegmentAddApi: dataset.indexing_technique = "economy" document = MagicMock() - document.doc_form = "text" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX segment = MagicMock() segment.id = "seg-1" @@ -505,7 +506,7 @@ class TestDatasetDocumentSegmentUpdateApi: dataset.indexing_technique = "economy" document = MagicMock() - document.doc_form = "text" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX segment = MagicMock() diff --git a/api/tests/unit_tests/controllers/console/tag/test_tags.py b/api/tests/unit_tests/controllers/console/tag/test_tags.py index 769edc8d1c..e89b89c8b1 100644 --- a/api/tests/unit_tests/controllers/console/tag/test_tags.py +++ b/api/tests/unit_tests/controllers/console/tag/test_tags.py @@ -11,6 +11,7 @@ from controllers.console.tag.tags import ( TagListApi, TagUpdateDeleteApi, ) +from models.enums import TagType def unwrap(func): @@ -52,7 +53,7 @@ def tag(): tag = MagicMock() tag.id = "tag-1" tag.name = "test-tag" - tag.type = "knowledge" + tag.type = TagType.KNOWLEDGE return tag diff --git a/api/tests/unit_tests/controllers/console/test_apikey.py b/api/tests/unit_tests/controllers/console/test_apikey.py index c18dd044a7..2dff9c4037 100644 --- a/api/tests/unit_tests/controllers/console/test_apikey.py +++ b/api/tests/unit_tests/controllers/console/test_apikey.py @@ -8,6 +8,7 @@ from controllers.console.apikey import ( BaseApiKeyResource, _get_resource, ) +from models.enums import ApiTokenType @pytest.fixture @@ -45,14 +46,14 @@ def bypass_permissions(): class DummyApiKeyListResource(BaseApiKeyListResource): - resource_type = "app" + resource_type = ApiTokenType.APP resource_model = MagicMock() resource_id_field = "app_id" token_prefix = "app-" class DummyApiKeyResource(BaseApiKeyResource): - resource_type = "app" + resource_type = ApiTokenType.APP resource_model = MagicMock() resource_id_field = "app_id" diff --git a/api/tests/unit_tests/controllers/service_api/conftest.py b/api/tests/unit_tests/controllers/service_api/conftest.py index 4337a0c8c0..01d2d1e7c0 100644 --- a/api/tests/unit_tests/controllers/service_api/conftest.py +++ b/api/tests/unit_tests/controllers/service_api/conftest.py @@ -12,6 +12,7 @@ from unittest.mock import Mock import pytest from 
flask import Flask +from core.rag.index_processor.constant.index_type import IndexStructureType from models.account import TenantStatus from models.model import App, AppMode, EndUser from tests.unit_tests.conftest import setup_mock_tenant_account_query @@ -175,7 +176,7 @@ def mock_document(): document.name = "test_document.txt" document.indexing_status = "completed" document.enabled = True - document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX return document diff --git a/api/tests/unit_tests/controllers/service_api/dataset/test_dataset.py b/api/tests/unit_tests/controllers/service_api/dataset/test_dataset.py index 7cb2f1050c..8fe41cd19f 100644 --- a/api/tests/unit_tests/controllers/service_api/dataset/test_dataset.py +++ b/api/tests/unit_tests/controllers/service_api/dataset/test_dataset.py @@ -35,6 +35,7 @@ from controllers.service_api.dataset.dataset import ( from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError from models.account import Account from models.dataset import DatasetPermissionEnum +from models.enums import TagType from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService from services.tag_service import TagService @@ -277,7 +278,7 @@ class TestDatasetTagsApi: mock_tag = Mock() mock_tag.id = "tag_1" mock_tag.name = "Test Tag" - mock_tag.type = "knowledge" + mock_tag.type = TagType.KNOWLEDGE mock_tag.binding_count = "0" # Required for Pydantic validation - must be string mock_tag_service.get_tags.return_value = [mock_tag] @@ -316,7 +317,7 @@ class TestDatasetTagsApi: mock_tag = Mock() mock_tag.id = "new_tag_1" mock_tag.name = "New Tag" - mock_tag.type = "knowledge" + mock_tag.type = TagType.KNOWLEDGE mock_tag_service.save_tags.return_value = mock_tag mock_service_api_ns.payload = {"name": "New Tag"} @@ -378,7 +379,7 @@ class TestDatasetTagsApi: mock_tag = Mock() mock_tag.id = "tag_1" mock_tag.name = "Updated Tag" - mock_tag.type = "knowledge" + mock_tag.type = TagType.KNOWLEDGE mock_tag.binding_count = "5" mock_tag_service.update_tags.return_value = mock_tag mock_tag_service.get_tag_binding_count.return_value = 5 @@ -866,7 +867,7 @@ class TestTagService: mock_tag = Mock() mock_tag.id = str(uuid.uuid4()) mock_tag.name = "New Tag" - mock_tag.type = "knowledge" + mock_tag.type = TagType.KNOWLEDGE mock_save.return_value = mock_tag result = TagService.save_tags({"name": "New Tag", "type": "knowledge"}) diff --git a/api/tests/unit_tests/controllers/service_api/dataset/test_dataset_segment.py b/api/tests/unit_tests/controllers/service_api/dataset/test_dataset_segment.py index 5c48ef1804..73a87761d5 100644 --- a/api/tests/unit_tests/controllers/service_api/dataset/test_dataset_segment.py +++ b/api/tests/unit_tests/controllers/service_api/dataset/test_dataset_segment.py @@ -31,6 +31,7 @@ from controllers.service_api.dataset.segment import ( SegmentCreatePayload, SegmentListQuery, ) +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import ChildChunk, Dataset, Document, DocumentSegment from models.enums import IndexingStatus from services.dataset_service import DocumentService, SegmentService @@ -788,7 +789,7 @@ class TestSegmentApiGet: # Arrange mock_account_fn.return_value = (Mock(), mock_tenant.id) mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset - mock_doc_svc.get_document.return_value = Mock(doc_form="text_model") + mock_doc_svc.get_document.return_value = 
Mock(doc_form=IndexStructureType.PARAGRAPH_INDEX) mock_seg_svc.get_segments.return_value = ([mock_segment], 1) mock_marshal.return_value = [{"id": mock_segment.id}] @@ -903,7 +904,7 @@ class TestSegmentApiPost: mock_doc = Mock() mock_doc.indexing_status = "completed" mock_doc.enabled = True - mock_doc.doc_form = "text_model" + mock_doc.doc_form = IndexStructureType.PARAGRAPH_INDEX mock_doc_svc.get_document.return_value = mock_doc mock_seg_svc.segment_create_args_validate.return_value = None @@ -1091,7 +1092,7 @@ class TestDatasetSegmentApiDelete: mock_doc = Mock() mock_doc.indexing_status = "completed" mock_doc.enabled = True - mock_doc.doc_form = "text_model" + mock_doc.doc_form = IndexStructureType.PARAGRAPH_INDEX mock_doc_svc.get_document.return_value = mock_doc mock_seg_svc.get_segment_by_id.return_value = None # Segment not found @@ -1371,7 +1372,7 @@ class TestDatasetSegmentApiGetSingle: mock_account_fn.return_value = (Mock(), mock_tenant.id) mock_db.session.query.return_value.where.return_value.first.return_value = mock_dataset mock_dataset_svc.check_dataset_model_setting.return_value = None - mock_doc = Mock(doc_form="text_model") + mock_doc = Mock(doc_form=IndexStructureType.PARAGRAPH_INDEX) mock_doc_svc.get_document.return_value = mock_doc mock_seg_svc.get_segment_by_id.return_value = mock_segment mock_marshal.return_value = {"id": mock_segment.id} @@ -1390,7 +1391,7 @@ class TestDatasetSegmentApiGetSingle: assert status == 200 assert "data" in response - assert response["doc_form"] == "text_model" + assert response["doc_form"] == IndexStructureType.PARAGRAPH_INDEX @patch("controllers.service_api.dataset.segment.current_account_with_tenant") @patch("controllers.service_api.dataset.segment.db") diff --git a/api/tests/unit_tests/controllers/service_api/dataset/test_document.py b/api/tests/unit_tests/controllers/service_api/dataset/test_document.py index e6e841be19..7f77e61ee4 100644 --- a/api/tests/unit_tests/controllers/service_api/dataset/test_document.py +++ b/api/tests/unit_tests/controllers/service_api/dataset/test_document.py @@ -35,6 +35,7 @@ from controllers.service_api.dataset.document import ( InvalidMetadataError, ) from controllers.service_api.dataset.error import ArchivedDocumentImmutableError +from core.rag.index_processor.constant.index_type import IndexStructureType from models.enums import IndexingStatus from services.dataset_service import DocumentService from services.entities.knowledge_entities.knowledge_entities import ProcessRule, RetrievalModel @@ -52,7 +53,7 @@ class TestDocumentTextCreatePayload: def test_payload_with_defaults(self): """Test payload default values.""" payload = DocumentTextCreatePayload(name="Doc", text="Content") - assert payload.doc_form == "text_model" + assert payload.doc_form == IndexStructureType.PARAGRAPH_INDEX assert payload.doc_language == "English" assert payload.process_rule is None assert payload.indexing_technique is None @@ -62,14 +63,14 @@ class TestDocumentTextCreatePayload: payload = DocumentTextCreatePayload( name="Full Document", text="Complete document content here", - doc_form="qa_model", + doc_form=IndexStructureType.QA_INDEX, doc_language="Chinese", indexing_technique="high_quality", embedding_model="text-embedding-ada-002", embedding_model_provider="openai", ) assert payload.name == "Full Document" - assert payload.doc_form == "qa_model" + assert payload.doc_form == IndexStructureType.QA_INDEX assert payload.doc_language == "Chinese" assert payload.indexing_technique == "high_quality" assert payload.embedding_model 
== "text-embedding-ada-002" @@ -147,8 +148,8 @@ class TestDocumentTextUpdate: def test_payload_with_doc_form_update(self): """Test payload with doc_form update.""" - payload = DocumentTextUpdate(doc_form="qa_model") - assert payload.doc_form == "qa_model" + payload = DocumentTextUpdate(doc_form=IndexStructureType.QA_INDEX) + assert payload.doc_form == IndexStructureType.QA_INDEX def test_payload_with_language_update(self): """Test payload with doc_language update.""" @@ -158,7 +159,7 @@ class TestDocumentTextUpdate: def test_payload_default_values(self): """Test payload default values.""" payload = DocumentTextUpdate() - assert payload.doc_form == "text_model" + assert payload.doc_form == IndexStructureType.PARAGRAPH_INDEX assert payload.doc_language == "English" @@ -272,14 +273,24 @@ class TestDocumentDocForm: def test_text_model_form(self): """Test text_model form.""" - doc_form = "text_model" - valid_forms = ["text_model", "qa_model", "hierarchical_model", "parent_child_model"] + doc_form = IndexStructureType.PARAGRAPH_INDEX + valid_forms = [ + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.QA_INDEX, + IndexStructureType.PARENT_CHILD_INDEX, + "parent_child_model", + ] assert doc_form in valid_forms def test_qa_model_form(self): """Test qa_model form.""" - doc_form = "qa_model" - valid_forms = ["text_model", "qa_model", "hierarchical_model", "parent_child_model"] + doc_form = IndexStructureType.QA_INDEX + valid_forms = [ + IndexStructureType.PARAGRAPH_INDEX, + IndexStructureType.QA_INDEX, + IndexStructureType.PARENT_CHILD_INDEX, + "parent_child_model", + ] assert doc_form in valid_forms @@ -504,7 +515,7 @@ class TestDocumentApiGet: doc.name = "test_document.txt" doc.indexing_status = "completed" doc.enabled = True - doc.doc_form = "text_model" + doc.doc_form = IndexStructureType.PARAGRAPH_INDEX doc.doc_language = "English" doc.doc_type = "book" doc.doc_metadata_details = {"source": "upload"} diff --git a/api/tests/unit_tests/core/app/features/rate_limiting/test_rate_limit.py b/api/tests/unit_tests/core/app/features/rate_limiting/test_rate_limit.py index 3db10c1c72..538b130cac 100644 --- a/api/tests/unit_tests/core/app/features/rate_limiting/test_rate_limit.py +++ b/api/tests/unit_tests/core/app/features/rate_limiting/test_rate_limit.py @@ -68,8 +68,8 @@ class TestRateLimit: assert rate_limit.disabled() assert not hasattr(rate_limit, "initialized") - def test_should_skip_reinitialization_of_existing_instance(self, redis_patch): - """Test that existing instance doesn't reinitialize.""" + def test_should_flush_cache_when_reinitializing_existing_instance(self, redis_patch): + """Test existing instance refreshes Redis cache on reinitialization.""" redis_patch.configure_mock( **{ "exists.return_value": False, @@ -82,7 +82,37 @@ class TestRateLimit: RateLimit("client1", 10) + redis_patch.setex.assert_called_once_with( + "dify:rate_limit:client1:max_active_requests", + timedelta(days=1), + 10, + ) + + def test_should_reinitialize_after_being_disabled(self, redis_patch): + """Test disabled instance can be reinitialized and writes max_active_requests to Redis.""" + redis_patch.configure_mock( + **{ + "exists.return_value": False, + "setex.return_value": True, + } + ) + + # First construct with max_active_requests = 0 (disabled), which should skip initialization. + RateLimit("client1", 0) + + # Redis should not have been written to during disabled initialization. 
redis_patch.setex.assert_not_called() + redis_patch.reset_mock() + + # Reinitialize with a positive max_active_requests value; this should not raise + # and must write the max_active_requests key to Redis. + RateLimit("client1", 10) + + redis_patch.setex.assert_called_once_with( + "dify:rate_limit:client1:max_active_requests", + timedelta(days=1), + 10, + ) def test_should_be_disabled_when_max_requests_is_zero_or_negative(self): """Test disabled state for zero or negative limits.""" diff --git a/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_message_end_files.py b/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_message_end_files.py index 582990c88a..37dd116470 100644 --- a/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_message_end_files.py +++ b/api/tests/unit_tests/core/app/task_pipeline/test_easy_ui_message_end_files.py @@ -21,7 +21,7 @@ from sqlalchemy.orm import Session from core.app.entities.task_entities import MessageEndStreamResponse from core.app.task_pipeline.easy_ui_based_generate_task_pipeline import EasyUIBasedGenerateTaskPipeline -from dify_graph.file.enums import FileTransferMethod +from dify_graph.file.enums import FileTransferMethod, FileType from models.model import MessageFile, UploadFile @@ -51,7 +51,7 @@ class TestMessageEndStreamResponseFiles: message_file.transfer_method = FileTransferMethod.LOCAL_FILE message_file.upload_file_id = str(uuid.uuid4()) message_file.url = None - message_file.type = "image" + message_file.type = FileType.IMAGE return message_file @pytest.fixture @@ -63,7 +63,7 @@ class TestMessageEndStreamResponseFiles: message_file.transfer_method = FileTransferMethod.REMOTE_URL message_file.upload_file_id = None message_file.url = "https://example.com/image.jpg" - message_file.type = "image" + message_file.type = FileType.IMAGE return message_file @pytest.fixture @@ -75,7 +75,7 @@ class TestMessageEndStreamResponseFiles: message_file.transfer_method = FileTransferMethod.TOOL_FILE message_file.upload_file_id = None message_file.url = "tool_file_123.png" - message_file.type = "image" + message_file.type = FileType.IMAGE return message_file @pytest.fixture diff --git a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py index 2add12fd09..db49221583 100644 --- a/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py +++ b/api/tests/unit_tests/core/rag/extractor/firecrawl/test_firecrawl.py @@ -164,6 +164,13 @@ class TestFirecrawlApp: with pytest.raises(Exception, match="No page found"): app.check_crawl_status("job-1") + def test_check_crawl_status_completed_with_null_total_raises(self, mocker: MockerFixture): + app = FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") + mocker.patch("httpx.get", return_value=_response(200, {"status": "completed", "total": None, "data": []})) + + with pytest.raises(Exception, match="No page found"): + app.check_crawl_status("job-1") + def test_check_crawl_status_non_completed(self, mocker: MockerFixture): app = FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") payload = {"status": "processing", "total": 5, "completed": 1, "data": []} @@ -203,6 +210,77 @@ class TestFirecrawlApp: with pytest.raises(Exception, match="Error saving crawl data"): app.check_crawl_status("job-err") + def test_check_crawl_status_follows_pagination(self, mocker: MockerFixture): + """When status is completed and next is present, follow pagination to collect all pages.""" + app = 
FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") + page1 = { + "status": "completed", + "total": 3, + "completed": 3, + "next": "https://custom.firecrawl.dev/v2/crawl/job-42?skip=1", + "data": [{"metadata": {"title": "p1", "description": "", "sourceURL": "https://p1"}, "markdown": "m1"}], + } + page2 = { + "status": "completed", + "total": 3, + "completed": 3, + "next": "https://custom.firecrawl.dev/v2/crawl/job-42?skip=2", + "data": [{"metadata": {"title": "p2", "description": "", "sourceURL": "https://p2"}, "markdown": "m2"}], + } + page3 = { + "status": "completed", + "total": 3, + "completed": 3, + "data": [{"metadata": {"title": "p3", "description": "", "sourceURL": "https://p3"}, "markdown": "m3"}], + } + mocker.patch("httpx.get", side_effect=[_response(200, page1), _response(200, page2), _response(200, page3)]) + mock_storage = MagicMock() + mock_storage.exists.return_value = False + mocker.patch.object(firecrawl_module, "storage", mock_storage) + + result = app.check_crawl_status("job-42") + + assert result["status"] == "completed" + assert result["total"] == 3 + assert len(result["data"]) == 3 + assert [d["title"] for d in result["data"]] == ["p1", "p2", "p3"] + + def test_check_crawl_status_pagination_error_raises(self, mocker: MockerFixture): + """An error while fetching a paginated page raises an exception; no partial data is returned.""" + app = FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") + page1 = { + "status": "completed", + "total": 2, + "completed": 2, + "next": "https://custom.firecrawl.dev/v2/crawl/job-99?skip=1", + "data": [{"metadata": {"title": "p1", "description": "", "sourceURL": "https://p1"}, "markdown": "m1"}], + } + mocker.patch("httpx.get", side_effect=[_response(200, page1), _response(500, {"error": "server error"})]) + + with pytest.raises(Exception, match="fetch next crawl page"): + app.check_crawl_status("job-99") + + def test_check_crawl_status_pagination_capped_at_total(self, mocker: MockerFixture): + """Pagination stops once pages_processed reaches total, even if next is present.""" + app = FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") + # total=1: only the first page should be processed; next must not be followed + page1 = { + "status": "completed", + "total": 1, + "completed": 1, + "next": "https://custom.firecrawl.dev/v2/crawl/job-cap?skip=1", + "data": [{"metadata": {"title": "p1", "description": "", "sourceURL": "https://p1"}, "markdown": "m1"}], + } + mock_get = mocker.patch("httpx.get", return_value=_response(200, page1)) + mock_storage = MagicMock() + mock_storage.exists.return_value = False + mocker.patch.object(firecrawl_module, "storage", mock_storage) + + result = app.check_crawl_status("job-cap") + + assert len(result["data"]) == 1 + mock_get.assert_called_once() # initial fetch only; next URL is not followed due to cap + def test_extract_common_fields_and_status_formatter(self): app = FirecrawlApp(api_key="fc-key", base_url="https://custom.firecrawl.dev") diff --git a/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py index 665e98bd9c..a34ca330ca 100644 --- a/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py +++ b/api/tests/unit_tests/core/rag/retrieval/test_dataset_retrieval.py @@ -4800,8 +4800,8 @@ class TestInternalHooksCoverage: dataset_docs = [ SimpleNamespace(id="doc-a", doc_form=IndexStructureType.PARENT_CHILD_INDEX), SimpleNamespace(id="doc-b", 
doc_form=IndexStructureType.PARENT_CHILD_INDEX), - SimpleNamespace(id="doc-c", doc_form="qa_model"), - SimpleNamespace(id="doc-d", doc_form="qa_model"), + SimpleNamespace(id="doc-c", doc_form=IndexStructureType.QA_INDEX), + SimpleNamespace(id="doc-d", doc_form=IndexStructureType.QA_INDEX), ] child_chunks = [SimpleNamespace(index_node_id="idx-a", segment_id="seg-a")] segments = [SimpleNamespace(index_node_id="idx-c", id="seg-c")] diff --git a/api/tests/unit_tests/core/workflow/nodes/llm/test_llm_utils.py b/api/tests/unit_tests/core/workflow/nodes/llm/test_llm_utils.py index 618a498659..acecbf4944 100644 --- a/api/tests/unit_tests/core/workflow/nodes/llm/test_llm_utils.py +++ b/api/tests/unit_tests/core/workflow/nodes/llm/test_llm_utils.py @@ -3,7 +3,11 @@ from unittest import mock import pytest from core.model_manager import ModelInstance -from dify_graph.model_runtime.entities import ImagePromptMessageContent, PromptMessageRole, TextPromptMessageContent +from dify_graph.model_runtime.entities import ( + ImagePromptMessageContent, + PromptMessageRole, + TextPromptMessageContent, +) from dify_graph.model_runtime.entities.message_entities import SystemPromptMessage from dify_graph.nodes.llm import llm_utils from dify_graph.nodes.llm.entities import LLMNodeChatModelMessage @@ -11,6 +15,15 @@ from dify_graph.nodes.llm.exc import NoPromptFoundError from dify_graph.runtime import VariablePool +@pytest.fixture +def variable_pool() -> VariablePool: + pool = VariablePool.empty() + pool.add(["node1", "output"], "resolved_value") + pool.add(["node2", "text"], "hello world") + pool.add(["start", "user_input"], "dynamic_param") + return pool + + def _fetch_prompt_messages_with_mocked_content(content): variable_pool = VariablePool.empty() model_instance = mock.MagicMock(spec=ModelInstance) @@ -53,6 +66,159 @@ def _fetch_prompt_messages_with_mocked_content(content): ) +class TestTypeCoercionViaResolve: + """Type coercion is tested through the public resolve_completion_params_variables API.""" + + def test_numeric_string_coerced_to_float(self): + pool = VariablePool.empty() + pool.add(["n", "v"], "0.7") + result = llm_utils.resolve_completion_params_variables({"p": "{{#n.v#}}"}, pool) + assert result["p"] == 0.7 + + def test_integer_string_coerced_to_int(self): + pool = VariablePool.empty() + pool.add(["n", "v"], "1024") + result = llm_utils.resolve_completion_params_variables({"p": "{{#n.v#}}"}, pool) + assert result["p"] == 1024 + + def test_boolean_string_coerced_to_bool(self): + pool = VariablePool.empty() + pool.add(["n", "v"], "true") + result = llm_utils.resolve_completion_params_variables({"p": "{{#n.v#}}"}, pool) + assert result["p"] is True + + def test_plain_string_stays_string(self): + pool = VariablePool.empty() + pool.add(["n", "v"], "json_object") + result = llm_utils.resolve_completion_params_variables({"p": "{{#n.v#}}"}, pool) + assert result["p"] == "json_object" + + def test_json_object_string_stays_string(self): + pool = VariablePool.empty() + pool.add(["n", "v"], '{"key": "val"}') + result = llm_utils.resolve_completion_params_variables({"p": "{{#n.v#}}"}, pool) + assert result["p"] == '{"key": "val"}' + + def test_mixed_text_and_variable_stays_string(self): + pool = VariablePool.empty() + pool.add(["n", "v"], "0.7") + result = llm_utils.resolve_completion_params_variables({"p": "val={{#n.v#}}"}, pool) + assert result["p"] == "val=0.7" + + +class TestResolveCompletionParamsVariables: + def test_plain_string_values_unchanged(self, variable_pool: VariablePool): + params = 
{"response_format": "json", "custom_param": "static_value"} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"response_format": "json", "custom_param": "static_value"} + + def test_numeric_values_unchanged(self, variable_pool: VariablePool): + params = {"temperature": 0.7, "top_p": 0.9, "max_tokens": 1024} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"temperature": 0.7, "top_p": 0.9, "max_tokens": 1024} + + def test_boolean_values_unchanged(self, variable_pool: VariablePool): + params = {"stream": True, "echo": False} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"stream": True, "echo": False} + + def test_list_values_unchanged(self, variable_pool: VariablePool): + params = {"stop": ["Human:", "Assistant:"]} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"stop": ["Human:", "Assistant:"]} + + def test_single_variable_reference_resolved(self, variable_pool: VariablePool): + params = {"response_format": "{{#node1.output#}}"} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"response_format": "resolved_value"} + + def test_multiple_variable_references_resolved(self, variable_pool: VariablePool): + params = { + "param_a": "{{#node1.output#}}", + "param_b": "{{#node2.text#}}", + } + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"param_a": "resolved_value", "param_b": "hello world"} + + def test_mixed_text_and_variable_resolved(self, variable_pool: VariablePool): + params = {"prompt_prefix": "prefix_{{#node1.output#}}_suffix"} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"prompt_prefix": "prefix_resolved_value_suffix"} + + def test_mixed_params_types(self, variable_pool: VariablePool): + """Non-string params pass through; string params with variables get resolved.""" + params = { + "temperature": 0.7, + "response_format": "{{#node1.output#}}", + "custom_string": "no_vars_here", + "max_tokens": 512, + "stop": ["\n"], + } + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == { + "temperature": 0.7, + "response_format": "resolved_value", + "custom_string": "no_vars_here", + "max_tokens": 512, + "stop": ["\n"], + } + + def test_empty_params(self, variable_pool: VariablePool): + result = llm_utils.resolve_completion_params_variables({}, variable_pool) + + assert result == {} + + def test_unresolvable_variable_keeps_selector_text(self): + """When a referenced variable doesn't exist in the pool, convert_template + falls back to the raw selector path (e.g. 
'nonexistent.var').""" + pool = VariablePool.empty() + params = {"format": "{{#nonexistent.var#}}"} + + result = llm_utils.resolve_completion_params_variables(params, pool) + + assert result["format"] == "nonexistent.var" + + def test_multiple_variables_in_single_value(self, variable_pool: VariablePool): + params = {"combined": "{{#node1.output#}} and {{#node2.text#}}"} + + result = llm_utils.resolve_completion_params_variables(params, variable_pool) + + assert result == {"combined": "resolved_value and hello world"} + + def test_original_params_not_mutated(self, variable_pool: VariablePool): + original = {"response_format": "{{#node1.output#}}", "temperature": 0.5} + original_copy = dict(original) + + _ = llm_utils.resolve_completion_params_variables(original, variable_pool) + + assert original == original_copy + + def test_long_value_truncated(self): + pool = VariablePool.empty() + pool.add(["node1", "big"], "x" * 2000) + params = {"param": "{{#node1.big#}}"} + + result = llm_utils.resolve_completion_params_variables(params, pool) + + assert len(result["param"]) == llm_utils.MAX_RESOLVED_VALUE_LENGTH + + def test_fetch_prompt_messages_skips_messages_when_all_contents_are_filtered_out(): with pytest.raises(NoPromptFoundError): _fetch_prompt_messages_with_mocked_content( diff --git a/api/tests/unit_tests/libs/test_login.py b/api/tests/unit_tests/libs/test_login.py index a94ba0c00b..8613d89215 100644 --- a/api/tests/unit_tests/libs/test_login.py +++ b/api/tests/unit_tests/libs/test_login.py @@ -130,6 +130,25 @@ class TestLoginRequired: assert result == "Synced content" setup_app.ensure_sync.assert_called_once() + @patch("libs.login.check_csrf_token", mock_csrf_check) + def test_patched_current_user_without_login_manager(self, app: Flask): + """Test that patched current_user bypasses login manager bootstrapping.""" + + @login_required + def protected_view(): + return "Protected content" + + mock_user = MockUser("test_user", is_authenticated=True) + mock_proxy = MagicMock() + mock_proxy._get_current_object.return_value = mock_user + + with app.test_request_context(): + app.ensure_sync = lambda func: func + with patch("libs.login.current_user", mock_proxy): + result = protected_view() + assert result == "Protected content" + assert g._login_user == mock_user + @patch("libs.login.check_csrf_token", mock_csrf_check) def test_flask_1_compatibility(self, setup_app: Flask): """Test Flask 1.x compatibility without ensure_sync.""" diff --git a/api/tests/unit_tests/libs/test_oauth_clients.py b/api/tests/unit_tests/libs/test_oauth_clients.py index bc7880ccc8..3918e8ee4b 100644 --- a/api/tests/unit_tests/libs/test_oauth_clients.py +++ b/api/tests/unit_tests/libs/test_oauth_clients.py @@ -95,13 +95,11 @@ class TestGitHubOAuth(BaseOAuthTest): ], "primary@example.com", ), - # User with no emails - fallback to noreply - ({"id": 12345, "login": "testuser", "name": "Test User"}, [], "12345+testuser@users.noreply.github.com"), - # User with only secondary email - fallback to noreply + # User with private email (null email and name from API) ( - {"id": 12345, "login": "testuser", "name": "Test User"}, - [{"email": "secondary@example.com", "primary": False}], - "12345+testuser@users.noreply.github.com", + {"id": 12345, "login": "testuser", "name": None, "email": None}, + [{"email": "primary@example.com", "primary": True}], + "primary@example.com", ), ], ) @@ -118,9 +116,54 @@ class TestGitHubOAuth(BaseOAuthTest): user_info = oauth.get_user_info("test_token") assert user_info.id == str(user_data["id"]) - assert 
user_info.name == user_data["name"] + assert user_info.name == (user_data["name"] or "") assert user_info.email == expected_email + @pytest.mark.parametrize( + ("user_data", "email_data"), + [ + # User with no emails + ({"id": 12345, "login": "testuser", "name": "Test User"}, []), + # User with only secondary email + ( + {"id": 12345, "login": "testuser", "name": "Test User"}, + [{"email": "secondary@example.com", "primary": False}], + ), + # User with private email and no primary in emails endpoint + ( + {"id": 12345, "login": "testuser", "name": None, "email": None}, + [], + ), + ], + ) + @patch("httpx.get", autospec=True) + def test_should_raise_error_when_no_primary_email(self, mock_get, oauth, user_data, email_data): + user_response = MagicMock() + user_response.json.return_value = user_data + + email_response = MagicMock() + email_response.json.return_value = email_data + + mock_get.side_effect = [user_response, email_response] + + with pytest.raises(ValueError, match="Keep my email addresses private"): + oauth.get_user_info("test_token") + + @patch("httpx.get", autospec=True) + def test_should_raise_error_when_email_endpoint_fails(self, mock_get, oauth): + user_response = MagicMock() + user_response.json.return_value = {"id": 12345, "login": "testuser", "name": "Test User"} + + email_response = MagicMock() + email_response.raise_for_status.side_effect = httpx.HTTPStatusError( + "Forbidden", request=MagicMock(), response=MagicMock() + ) + + mock_get.side_effect = [user_response, email_response] + + with pytest.raises(ValueError, match="Keep my email addresses private"): + oauth.get_user_info("test_token") + @patch("httpx.get", autospec=True) def test_should_handle_network_errors(self, mock_get, oauth): mock_get.side_effect = httpx.RequestError("Network error") diff --git a/api/tests/unit_tests/models/test_tool_models.py b/api/tests/unit_tests/models/test_tool_models.py index 1a75eb9a01..8e3c4da904 100644 --- a/api/tests/unit_tests/models/test_tool_models.py +++ b/api/tests/unit_tests/models/test_tool_models.py @@ -12,7 +12,7 @@ This test suite covers: import json from uuid import uuid4 -from core.tools.entities.tool_entities import ApiProviderSchemaType +from core.tools.entities.tool_entities import ApiProviderSchemaType, ToolProviderType from models.tools import ( ApiToolProvider, BuiltinToolProvider, @@ -238,7 +238,7 @@ class TestApiToolProviderValidation: name=provider_name, icon='{"type": "emoji", "value": "🔧"}', schema=schema, - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Custom API for testing", tools_str=json.dumps(tools), credentials_str=json.dumps(credentials), @@ -249,7 +249,7 @@ class TestApiToolProviderValidation: assert api_provider.user_id == user_id assert api_provider.name == provider_name assert api_provider.schema == schema - assert api_provider.schema_type_str == "openapi" + assert api_provider.schema_type_str == ApiProviderSchemaType.OPENAPI assert api_provider.description == "Custom API for testing" def test_api_tool_provider_schema_type_property(self): @@ -261,7 +261,7 @@ class TestApiToolProviderValidation: name="Test API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Test", tools_str="[]", credentials_str="{}", @@ -314,7 +314,7 @@ class TestApiToolProviderValidation: name="Weather API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Weather API", tools_str=json.dumps(tools_data), 
credentials_str="{}", @@ -343,7 +343,7 @@ class TestApiToolProviderValidation: name="Secure API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Secure API", tools_str="[]", credentials_str=json.dumps(credentials_data), @@ -369,7 +369,7 @@ class TestApiToolProviderValidation: name="Privacy API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="API with privacy policy", tools_str="[]", credentials_str="{}", @@ -391,7 +391,7 @@ class TestApiToolProviderValidation: name="Disclaimer API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="API with disclaimer", tools_str="[]", credentials_str="{}", @@ -410,7 +410,7 @@ class TestApiToolProviderValidation: name="Default API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="API", tools_str="[]", credentials_str="{}", @@ -432,7 +432,7 @@ class TestApiToolProviderValidation: name=provider_name, icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Unique API", tools_str="[]", credentials_str="{}", @@ -454,7 +454,7 @@ class TestApiToolProviderValidation: name="Public API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Public API with no auth", tools_str="[]", credentials_str=json.dumps(credentials), @@ -479,7 +479,7 @@ class TestApiToolProviderValidation: name="Query Auth API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="API with query auth", tools_str="[]", credentials_str=json.dumps(credentials), @@ -631,7 +631,7 @@ class TestToolLabelBinding: """Test creating a tool label binding.""" # Arrange tool_id = "google.search" - tool_type = "builtin" + tool_type = ToolProviderType.BUILT_IN label_name = "search" # Act @@ -655,7 +655,7 @@ class TestToolLabelBinding: # Act label_binding = ToolLabelBinding( tool_id=tool_id, - tool_type="builtin", + tool_type=ToolProviderType.BUILT_IN, label_name=label_name, ) @@ -667,7 +667,7 @@ class TestToolLabelBinding: """Test multiple labels can be bound to the same tool.""" # Arrange tool_id = "google.search" - tool_type = "builtin" + tool_type = ToolProviderType.BUILT_IN # Act binding1 = ToolLabelBinding( @@ -688,7 +688,7 @@ class TestToolLabelBinding: def test_tool_label_binding_different_tool_types(self): """Test label bindings for different tool types.""" # Arrange - tool_types = ["builtin", "api", "workflow"] + tool_types = [ToolProviderType.BUILT_IN, ToolProviderType.API, ToolProviderType.WORKFLOW] # Act & Assert for tool_type in tool_types: @@ -741,7 +741,7 @@ class TestCredentialStorage: name="Test API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Test", tools_str="[]", credentials_str=json.dumps(credentials), @@ -788,7 +788,7 @@ class TestCredentialStorage: name="Update Test", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Test", tools_str="[]", credentials_str=json.dumps(original_credentials), @@ -897,7 +897,7 @@ class TestToolProviderRelationships: name="User API", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Test", tools_str="[]", 
credentials_str="{}", @@ -931,7 +931,7 @@ class TestToolProviderRelationships: name="Custom API 1", icon="{}", schema="{}", - schema_type_str="openapi", + schema_type_str=ApiProviderSchemaType.OPENAPI, description="Test", tools_str="[]", credentials_str="{}", @@ -951,12 +951,12 @@ class TestToolProviderRelationships: # Act binding1 = ToolLabelBinding( tool_id=tool_id, - tool_type="builtin", + tool_type=ToolProviderType.BUILT_IN, label_name="search", ) binding2 = ToolLabelBinding( tool_id=tool_id, - tool_type="builtin", + tool_type=ToolProviderType.BUILT_IN, label_name="web", ) diff --git a/api/tests/unit_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py b/api/tests/unit_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py deleted file mode 100644 index 8daf91c538..0000000000 --- a/api/tests/unit_tests/repositories/test_sqlalchemy_execution_extra_content_repository.py +++ /dev/null @@ -1,180 +0,0 @@ -from __future__ import annotations - -from collections.abc import Sequence -from dataclasses import dataclass -from datetime import UTC, datetime, timedelta - -from core.entities.execution_extra_content import HumanInputContent as HumanInputContentDomain -from core.entities.execution_extra_content import HumanInputFormSubmissionData -from dify_graph.nodes.human_input.entities import ( - FormDefinition, - UserAction, -) -from dify_graph.nodes.human_input.enums import HumanInputFormStatus -from models.execution_extra_content import HumanInputContent as HumanInputContentModel -from models.human_input import ConsoleRecipientPayload, HumanInputForm, HumanInputFormRecipient, RecipientType -from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository - - -class _FakeScalarResult: - def __init__(self, values: Sequence[HumanInputContentModel]): - self._values = list(values) - - def all(self) -> list[HumanInputContentModel]: - return list(self._values) - - -class _FakeSession: - def __init__(self, values: Sequence[Sequence[object]]): - self._values = list(values) - - def scalars(self, _stmt): - if not self._values: - return _FakeScalarResult([]) - return _FakeScalarResult(self._values.pop(0)) - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc, tb): - return False - - -@dataclass -class _FakeSessionMaker: - session: _FakeSession - - def __call__(self) -> _FakeSession: - return self.session - - -def _build_form(action_id: str, action_title: str, rendered_content: str) -> HumanInputForm: - expiration_time = datetime.now(UTC) + timedelta(days=1) - definition = FormDefinition( - form_content="content", - inputs=[], - user_actions=[UserAction(id=action_id, title=action_title)], - rendered_content="rendered", - expiration_time=expiration_time, - node_title="Approval", - display_in_ui=True, - ) - form = HumanInputForm( - id=f"form-{action_id}", - tenant_id="tenant-id", - app_id="app-id", - workflow_run_id="workflow-run", - node_id="node-id", - form_definition=definition.model_dump_json(), - rendered_content=rendered_content, - status=HumanInputFormStatus.SUBMITTED, - expiration_time=expiration_time, - ) - form.selected_action_id = action_id - return form - - -def _build_content(message_id: str, action_id: str, action_title: str) -> HumanInputContentModel: - form = _build_form( - action_id=action_id, - action_title=action_title, - rendered_content=f"Rendered {action_title}", - ) - content = HumanInputContentModel( - id=f"content-{message_id}", - form_id=form.id, - message_id=message_id, - 
workflow_run_id=form.workflow_run_id, - ) - content.form = form - return content - - -def test_get_by_message_ids_groups_contents_by_message() -> None: - message_ids = ["msg-1", "msg-2"] - contents = [_build_content("msg-1", "approve", "Approve")] - repository = SQLAlchemyExecutionExtraContentRepository( - session_maker=_FakeSessionMaker(session=_FakeSession(values=[contents, []])) - ) - - result = repository.get_by_message_ids(message_ids) - - assert len(result) == 2 - assert [content.model_dump(mode="json", exclude_none=True) for content in result[0]] == [ - HumanInputContentDomain( - workflow_run_id="workflow-run", - submitted=True, - form_submission_data=HumanInputFormSubmissionData( - node_id="node-id", - node_title="Approval", - rendered_content="Rendered Approve", - action_id="approve", - action_text="Approve", - ), - ).model_dump(mode="json", exclude_none=True) - ] - assert result[1] == [] - - -def test_get_by_message_ids_returns_unsubmitted_form_definition() -> None: - expiration_time = datetime.now(UTC) + timedelta(days=1) - definition = FormDefinition( - form_content="content", - inputs=[], - user_actions=[UserAction(id="approve", title="Approve")], - rendered_content="rendered", - expiration_time=expiration_time, - default_values={"name": "John"}, - node_title="Approval", - display_in_ui=True, - ) - form = HumanInputForm( - id="form-1", - tenant_id="tenant-id", - app_id="app-id", - workflow_run_id="workflow-run", - node_id="node-id", - form_definition=definition.model_dump_json(), - rendered_content="Rendered block", - status=HumanInputFormStatus.WAITING, - expiration_time=expiration_time, - ) - content = HumanInputContentModel( - id="content-msg-1", - form_id=form.id, - message_id="msg-1", - workflow_run_id=form.workflow_run_id, - ) - content.form = form - - recipient = HumanInputFormRecipient( - form_id=form.id, - delivery_id="delivery-1", - recipient_type=RecipientType.CONSOLE, - recipient_payload=ConsoleRecipientPayload(account_id=None).model_dump_json(), - access_token="token-1", - ) - - repository = SQLAlchemyExecutionExtraContentRepository( - session_maker=_FakeSessionMaker(session=_FakeSession(values=[[content], [recipient]])) - ) - - result = repository.get_by_message_ids(["msg-1"]) - - assert len(result) == 1 - assert len(result[0]) == 1 - domain_content = result[0][0] - assert domain_content.submitted is False - assert domain_content.workflow_run_id == "workflow-run" - assert domain_content.form_definition is not None - assert domain_content.form_definition.expiration_time == int(form.expiration_time.timestamp()) - assert domain_content.form_definition is not None - form_definition = domain_content.form_definition - assert form_definition.form_id == "form-1" - assert form_definition.node_id == "node-id" - assert form_definition.node_title == "Approval" - assert form_definition.form_content == "Rendered block" - assert form_definition.display_in_ui is True - assert form_definition.form_token == "token-1" - assert form_definition.resolved_default_values == {"name": "John"} - assert form_definition.expiration_time == int(form.expiration_time.timestamp()) diff --git a/api/tests/unit_tests/services/auth/test_api_key_auth_base.py b/api/tests/unit_tests/services/auth/test_api_key_auth_base.py index b5d91ef3fb..388504c07f 100644 --- a/api/tests/unit_tests/services/auth/test_api_key_auth_base.py +++ b/api/tests/unit_tests/services/auth/test_api_key_auth_base.py @@ -13,13 +13,13 @@ class ConcreteApiKeyAuth(ApiKeyAuthBase): class TestApiKeyAuthBase: def 
test_should_store_credentials_on_init(self): """Test that credentials are properly stored during initialization""" - credentials = {"api_key": "test_key", "auth_type": "bearer"} + credentials = {"auth_type": "bearer", "config": {"api_key": "test_key"}} auth = ConcreteApiKeyAuth(credentials) assert auth.credentials == credentials def test_should_not_instantiate_abstract_class(self): """Test that ApiKeyAuthBase cannot be instantiated directly""" - credentials = {"api_key": "test_key"} + credentials = {"auth_type": "bearer", "config": {"api_key": "test_key"}} with pytest.raises(TypeError) as exc_info: ApiKeyAuthBase(credentials) @@ -29,7 +29,7 @@ class TestApiKeyAuthBase: def test_should_allow_subclass_implementation(self): """Test that subclasses can properly implement the abstract method""" - credentials = {"api_key": "test_key", "auth_type": "bearer"} + credentials = {"auth_type": "bearer", "config": {"api_key": "test_key"}} auth = ConcreteApiKeyAuth(credentials) # Should not raise any exception diff --git a/api/tests/unit_tests/services/auth/test_api_key_auth_factory.py b/api/tests/unit_tests/services/auth/test_api_key_auth_factory.py index 60af6e20c2..b1f7cf24f3 100644 --- a/api/tests/unit_tests/services/auth/test_api_key_auth_factory.py +++ b/api/tests/unit_tests/services/auth/test_api_key_auth_factory.py @@ -58,7 +58,7 @@ class TestApiKeyAuthFactory: mock_get_factory.return_value = mock_auth_class # Act - factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"api_key": "test_key"}) + factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"auth_type": "bearer", "config": {"api_key": "test_key"}}) result = factory.validate_credentials() # Assert @@ -75,7 +75,7 @@ class TestApiKeyAuthFactory: mock_get_factory.return_value = mock_auth_class # Act & Assert - factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"api_key": "test_key"}) + factory = ApiKeyAuthFactory(AuthType.FIRECRAWL, {"auth_type": "bearer", "config": {"api_key": "test_key"}}) with pytest.raises(Exception) as exc_info: factory.validate_credentials() assert str(exc_info.value) == "Authentication error" diff --git a/api/tests/unit_tests/services/document_service_validation.py b/api/tests/unit_tests/services/document_service_validation.py index 6829691507..1f68ff6b3d 100644 --- a/api/tests/unit_tests/services/document_service_validation.py +++ b/api/tests/unit_tests/services/document_service_validation.py @@ -111,6 +111,7 @@ from unittest.mock import Mock, patch import pytest from core.errors.error import LLMBadRequestError, ProviderTokenNotInitError +from core.rag.index_processor.constant.index_type import IndexStructureType from dify_graph.model_runtime.entities.model_entities import ModelType from models.dataset import Dataset, DatasetProcessRule, Document from services.dataset_service import DatasetService, DocumentService @@ -188,7 +189,7 @@ class DocumentValidationTestDataFactory: def create_knowledge_config_mock( data_source: DataSource | None = None, process_rule: ProcessRule | None = None, - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, indexing_technique: str = "high_quality", **kwargs, ) -> Mock: @@ -326,8 +327,8 @@ class TestDatasetServiceCheckDocForm: - Validation logic works correctly """ # Arrange - dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form="text_model") - doc_form = "text_model" + dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form=IndexStructureType.PARAGRAPH_INDEX) + doc_form = IndexStructureType.PARAGRAPH_INDEX # Act (should not raise) 
DatasetService.check_doc_form(dataset, doc_form) @@ -349,7 +350,7 @@ class TestDatasetServiceCheckDocForm: """ # Arrange dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form=None) - doc_form = "text_model" + doc_form = IndexStructureType.PARAGRAPH_INDEX # Act (should not raise) DatasetService.check_doc_form(dataset, doc_form) @@ -370,8 +371,8 @@ class TestDatasetServiceCheckDocForm: - Error type is correct """ # Arrange - dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form="text_model") - doc_form = "table_model" # Different form + dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form=IndexStructureType.PARAGRAPH_INDEX) + doc_form = IndexStructureType.PARENT_CHILD_INDEX # Different form # Act & Assert with pytest.raises(ValueError, match="doc_form is different from the dataset doc_form"): @@ -390,7 +391,7 @@ class TestDatasetServiceCheckDocForm: """ # Arrange dataset = DocumentValidationTestDataFactory.create_dataset_mock(doc_form="knowledge_card") - doc_form = "text_model" # Different form + doc_form = IndexStructureType.PARAGRAPH_INDEX # Different form # Act & Assert with pytest.raises(ValueError, match="doc_form is different from the dataset doc_form"): diff --git a/api/tests/unit_tests/services/segment_service.py b/api/tests/unit_tests/services/segment_service.py index affbc8d0b5..5e625fa0cd 100644 --- a/api/tests/unit_tests/services/segment_service.py +++ b/api/tests/unit_tests/services/segment_service.py @@ -2,8 +2,10 @@ from unittest.mock import MagicMock, Mock, patch import pytest +from core.rag.index_processor.constant.index_type import IndexStructureType from models.account import Account from models.dataset import ChildChunk, Dataset, Document, DocumentSegment +from models.enums import SegmentType from services.dataset_service import SegmentService from services.entities.knowledge_entities.knowledge_entities import SegmentUpdateArgs from services.errors.chunk import ChildChunkDeleteIndexError, ChildChunkIndexingError @@ -77,7 +79,7 @@ class SegmentTestDataFactory: chunk.word_count = word_count chunk.index_node_id = f"node-{chunk_id}" chunk.index_node_hash = "hash-123" - chunk.type = "automatic" + chunk.type = SegmentType.AUTOMATIC chunk.created_by = "user-123" chunk.updated_by = None chunk.updated_at = None @@ -90,7 +92,7 @@ class SegmentTestDataFactory: document_id: str = "doc-123", dataset_id: str = "dataset-123", tenant_id: str = "tenant-123", - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, word_count: int = 100, **kwargs, ) -> Mock: @@ -209,7 +211,7 @@ class TestSegmentServiceCreateSegment: def test_create_segment_with_qa_model(self, mock_db_session, mock_current_user): """Test creation of segment with QA model (requires answer).""" # Arrange - document = SegmentTestDataFactory.create_document_mock(doc_form="qa_model", word_count=100) + document = SegmentTestDataFactory.create_document_mock(doc_form=IndexStructureType.QA_INDEX, word_count=100) dataset = SegmentTestDataFactory.create_dataset_mock(indexing_technique="economy") args = {"content": "What is AI?", "answer": "AI is Artificial Intelligence", "keywords": ["ai"]} @@ -428,7 +430,7 @@ class TestSegmentServiceUpdateSegment: """Test update segment with QA model (includes answer).""" # Arrange segment = SegmentTestDataFactory.create_segment_mock(enabled=True, word_count=10) - document = SegmentTestDataFactory.create_document_mock(doc_form="qa_model", word_count=100) + document = 
SegmentTestDataFactory.create_document_mock(doc_form=IndexStructureType.QA_INDEX, word_count=100) dataset = SegmentTestDataFactory.create_dataset_mock(indexing_technique="economy") args = SegmentUpdateArgs(content="Updated question", answer="Updated answer", keywords=["qa"]) diff --git a/api/tests/unit_tests/services/test_api_based_extension_service.py b/api/tests/unit_tests/services/test_api_based_extension_service.py deleted file mode 100644 index 7f4b5fdaa3..0000000000 --- a/api/tests/unit_tests/services/test_api_based_extension_service.py +++ /dev/null @@ -1,421 +0,0 @@ -""" -Comprehensive unit tests for services/api_based_extension_service.py - -Covers: -- APIBasedExtensionService.get_all_by_tenant_id -- APIBasedExtensionService.save -- APIBasedExtensionService.delete -- APIBasedExtensionService.get_with_tenant_id -- APIBasedExtensionService._validation (new record & existing record branches) -- APIBasedExtensionService._ping_connection (pong success, wrong response, exception) -""" - -from unittest.mock import MagicMock, patch - -import pytest - -from services.api_based_extension_service import APIBasedExtensionService - -# --------------------------------------------------------------------------- -# Helpers -# --------------------------------------------------------------------------- - - -def _make_extension( - *, - id_: str | None = None, - tenant_id: str = "tenant-001", - name: str = "my-ext", - api_endpoint: str = "https://example.com/hook", - api_key: str = "secret-key-123", -) -> MagicMock: - """Return a lightweight mock that mimics APIBasedExtension.""" - ext = MagicMock() - ext.id = id_ - ext.tenant_id = tenant_id - ext.name = name - ext.api_endpoint = api_endpoint - ext.api_key = api_key - return ext - - -# --------------------------------------------------------------------------- -# Tests: get_all_by_tenant_id -# --------------------------------------------------------------------------- - - -class TestGetAllByTenantId: - """Tests for APIBasedExtensionService.get_all_by_tenant_id.""" - - @patch("services.api_based_extension_service.decrypt_token", return_value="decrypted-key") - @patch("services.api_based_extension_service.db") - def test_returns_extensions_with_decrypted_keys(self, mock_db, mock_decrypt): - """Each api_key is decrypted and the list is returned.""" - ext1 = _make_extension(id_="id-1", api_key="enc-key-1") - ext2 = _make_extension(id_="id-2", api_key="enc-key-2") - - mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.all.return_value = [ - ext1, - ext2, - ] - - result = APIBasedExtensionService.get_all_by_tenant_id("tenant-001") - - assert result == [ext1, ext2] - assert ext1.api_key == "decrypted-key" - assert ext2.api_key == "decrypted-key" - assert mock_decrypt.call_count == 2 - - @patch("services.api_based_extension_service.decrypt_token", return_value="decrypted-key") - @patch("services.api_based_extension_service.db") - def test_returns_empty_list_when_no_extensions(self, mock_db, mock_decrypt): - """Returns an empty list gracefully when no records exist.""" - mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.all.return_value = [] - - result = APIBasedExtensionService.get_all_by_tenant_id("tenant-001") - - assert result == [] - mock_decrypt.assert_not_called() - - @patch("services.api_based_extension_service.decrypt_token", return_value="decrypted-key") - @patch("services.api_based_extension_service.db") - def test_calls_query_with_correct_tenant_id(self, mock_db, mock_decrypt): - 
"""Verifies the DB is queried with the supplied tenant_id.""" - mock_db.session.query.return_value.filter_by.return_value.order_by.return_value.all.return_value = [] - - APIBasedExtensionService.get_all_by_tenant_id("tenant-xyz") - - mock_db.session.query.return_value.filter_by.assert_called_once_with(tenant_id="tenant-xyz") - - -# --------------------------------------------------------------------------- -# Tests: save -# --------------------------------------------------------------------------- - - -class TestSave: - """Tests for APIBasedExtensionService.save.""" - - @patch("services.api_based_extension_service.encrypt_token", return_value="encrypted-key") - @patch("services.api_based_extension_service.db") - @patch.object(APIBasedExtensionService, "_validation") - def test_save_new_record_encrypts_key_and_commits(self, mock_validation, mock_db, mock_encrypt): - """Happy path: validation passes, key is encrypted, record is added and committed.""" - ext = _make_extension(id_=None, api_key="plain-key-123") - - result = APIBasedExtensionService.save(ext) - - mock_validation.assert_called_once_with(ext) - mock_encrypt.assert_called_once_with(ext.tenant_id, "plain-key-123") - assert ext.api_key == "encrypted-key" - mock_db.session.add.assert_called_once_with(ext) - mock_db.session.commit.assert_called_once() - assert result is ext - - @patch("services.api_based_extension_service.encrypt_token", return_value="encrypted-key") - @patch("services.api_based_extension_service.db") - @patch.object(APIBasedExtensionService, "_validation", side_effect=ValueError("name must not be empty")) - def test_save_raises_when_validation_fails(self, mock_validation, mock_db, mock_encrypt): - """If _validation raises, save should propagate the error without touching the DB.""" - ext = _make_extension(name="") - - with pytest.raises(ValueError, match="name must not be empty"): - APIBasedExtensionService.save(ext) - - mock_db.session.add.assert_not_called() - mock_db.session.commit.assert_not_called() - - -# --------------------------------------------------------------------------- -# Tests: delete -# --------------------------------------------------------------------------- - - -class TestDelete: - """Tests for APIBasedExtensionService.delete.""" - - @patch("services.api_based_extension_service.db") - def test_delete_removes_record_and_commits(self, mock_db): - """delete() must call session.delete with the extension and then commit.""" - ext = _make_extension(id_="delete-me") - - APIBasedExtensionService.delete(ext) - - mock_db.session.delete.assert_called_once_with(ext) - mock_db.session.commit.assert_called_once() - - -# --------------------------------------------------------------------------- -# Tests: get_with_tenant_id -# --------------------------------------------------------------------------- - - -class TestGetWithTenantId: - """Tests for APIBasedExtensionService.get_with_tenant_id.""" - - @patch("services.api_based_extension_service.decrypt_token", return_value="decrypted-key") - @patch("services.api_based_extension_service.db") - def test_returns_extension_with_decrypted_key(self, mock_db, mock_decrypt): - """Found extension has its api_key decrypted before being returned.""" - ext = _make_extension(id_="ext-123", api_key="enc-key") - - (mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value) = ext - - result = APIBasedExtensionService.get_with_tenant_id("tenant-001", "ext-123") - - assert result is ext - assert ext.api_key == "decrypted-key" - 
mock_decrypt.assert_called_once_with(ext.tenant_id, "enc-key") - - @patch("services.api_based_extension_service.db") - def test_raises_value_error_when_not_found(self, mock_db): - """Raises ValueError when no matching extension exists.""" - (mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value) = None - - with pytest.raises(ValueError, match="API based extension is not found"): - APIBasedExtensionService.get_with_tenant_id("tenant-001", "non-existent") - - @patch("services.api_based_extension_service.decrypt_token", return_value="decrypted-key") - @patch("services.api_based_extension_service.db") - def test_queries_with_correct_tenant_and_extension_id(self, mock_db, mock_decrypt): - """Verifies both tenant_id and extension id are used in the query.""" - ext = _make_extension(id_="ext-abc") - chain = mock_db.session.query.return_value - chain.filter_by.return_value.filter_by.return_value.first.return_value = ext - - APIBasedExtensionService.get_with_tenant_id("tenant-002", "ext-abc") - - # First filter_by call uses tenant_id - chain.filter_by.assert_called_once_with(tenant_id="tenant-002") - # Second filter_by call uses id - chain.filter_by.return_value.filter_by.assert_called_once_with(id="ext-abc") - - -# --------------------------------------------------------------------------- -# Tests: _validation (new record — id is falsy) -# --------------------------------------------------------------------------- - - -class TestValidationNewRecord: - """Tests for _validation() with a brand-new record (no id).""" - - def _build_mock_db(self, name_exists: bool = False): - mock_db = MagicMock() - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = ( - MagicMock() if name_exists else None - ) - return mock_db - - @patch.object(APIBasedExtensionService, "_ping_connection") - @patch("services.api_based_extension_service.db") - def test_valid_new_extension_passes(self, mock_db, mock_ping): - """A new record with all valid fields should pass without exceptions.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, name="valid-ext", api_key="longenoughkey") - - # Should not raise - APIBasedExtensionService._validation(ext) - mock_ping.assert_called_once_with(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_name_is_empty(self, mock_db): - """Empty name raises ValueError.""" - ext = _make_extension(id_=None, name="") - with pytest.raises(ValueError, match="name must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_name_is_none(self, mock_db): - """None name raises ValueError.""" - ext = _make_extension(id_=None, name=None) - with pytest.raises(ValueError, match="name must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_name_already_exists_for_new_record(self, mock_db): - """A new record whose name already exists raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = ( - MagicMock() - ) - ext = _make_extension(id_=None, name="duplicate-name") - - with pytest.raises(ValueError, match="name must be unique, it is already existed"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def 
test_raises_if_api_endpoint_is_empty(self, mock_db): - """Empty api_endpoint raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_endpoint="") - - with pytest.raises(ValueError, match="api_endpoint must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_api_endpoint_is_none(self, mock_db): - """None api_endpoint raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_endpoint=None) - - with pytest.raises(ValueError, match="api_endpoint must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_api_key_is_empty(self, mock_db): - """Empty api_key raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_key="") - - with pytest.raises(ValueError, match="api_key must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_api_key_is_none(self, mock_db): - """None api_key raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_key=None) - - with pytest.raises(ValueError, match="api_key must not be empty"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_api_key_too_short(self, mock_db): - """api_key shorter than 5 characters raises ValueError.""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_key="abc") - - with pytest.raises(ValueError, match="api_key must be at least 5 characters"): - APIBasedExtensionService._validation(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_api_key_exactly_four_chars(self, mock_db): - """api_key with exactly 4 characters raises ValueError (boundary condition).""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_key="1234") - - with pytest.raises(ValueError, match="api_key must be at least 5 characters"): - APIBasedExtensionService._validation(ext) - - @patch.object(APIBasedExtensionService, "_ping_connection") - @patch("services.api_based_extension_service.db") - def test_api_key_exactly_five_chars_is_accepted(self, mock_db, mock_ping): - """api_key with exactly 5 characters should pass (boundary condition).""" - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.first.return_value = None - ext = _make_extension(id_=None, api_key="12345") - - # Should not raise - APIBasedExtensionService._validation(ext) - - -# --------------------------------------------------------------------------- -# Tests: _validation (existing record — id is truthy) -# --------------------------------------------------------------------------- - - -class TestValidationExistingRecord: - """Tests for _validation() with an existing record (id is set).""" - - @patch.object(APIBasedExtensionService, "_ping_connection") - @patch("services.api_based_extension_service.db") - def 
test_valid_existing_extension_passes(self, mock_db, mock_ping): - """An existing record whose name is unique (excluding self) should pass.""" - # .where(...).first() → None means no *other* record has that name - ( - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.where.return_value.first.return_value - ) = None - ext = _make_extension(id_="existing-id", name="unique-name", api_key="longenoughkey") - - # Should not raise - APIBasedExtensionService._validation(ext) - mock_ping.assert_called_once_with(ext) - - @patch("services.api_based_extension_service.db") - def test_raises_if_existing_record_name_conflicts_with_another(self, mock_db): - """Existing record cannot use a name already owned by a different record.""" - ( - mock_db.session.query.return_value.filter_by.return_value.filter_by.return_value.where.return_value.first.return_value - ) = MagicMock() - ext = _make_extension(id_="existing-id", name="taken-name") - - with pytest.raises(ValueError, match="name must be unique, it is already existed"): - APIBasedExtensionService._validation(ext) - - -# --------------------------------------------------------------------------- -# Tests: _ping_connection -# --------------------------------------------------------------------------- - - -class TestPingConnection: - """Tests for APIBasedExtensionService._ping_connection.""" - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def test_successful_ping_returns_pong(self, mock_requestor_class): - """When the endpoint returns {"result": "pong"}, no exception is raised.""" - mock_client = MagicMock() - mock_client.request.return_value = {"result": "pong"} - mock_requestor_class.return_value = mock_client - - ext = _make_extension(api_endpoint="https://ok.example.com", api_key="secret-key") - # Should not raise - APIBasedExtensionService._ping_connection(ext) - - mock_requestor_class.assert_called_once_with(ext.api_endpoint, ext.api_key) - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def test_wrong_ping_response_raises_value_error(self, mock_requestor_class): - """When the response is not {"result": "pong"}, a ValueError is raised.""" - mock_client = MagicMock() - mock_client.request.return_value = {"result": "error"} - mock_requestor_class.return_value = mock_client - - ext = _make_extension() - with pytest.raises(ValueError, match="connection error"): - APIBasedExtensionService._ping_connection(ext) - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def test_network_exception_wraps_in_value_error(self, mock_requestor_class): - """Any exception raised during request is wrapped in a ValueError.""" - mock_client = MagicMock() - mock_client.request.side_effect = ConnectionError("network failure") - mock_requestor_class.return_value = mock_client - - ext = _make_extension() - with pytest.raises(ValueError, match="connection error: network failure"): - APIBasedExtensionService._ping_connection(ext) - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def test_requestor_constructor_exception_wraps_in_value_error(self, mock_requestor_class): - """Exception raised by the requestor constructor itself is wrapped.""" - mock_requestor_class.side_effect = RuntimeError("bad config") - - ext = _make_extension() - with pytest.raises(ValueError, match="connection error: bad config"): - APIBasedExtensionService._ping_connection(ext) - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def 
test_missing_result_key_raises_value_error(self, mock_requestor_class): - """A response dict without a 'result' key does not equal 'pong' → raises.""" - mock_client = MagicMock() - mock_client.request.return_value = {} # no 'result' key - mock_requestor_class.return_value = mock_client - - ext = _make_extension() - with pytest.raises(ValueError, match="connection error"): - APIBasedExtensionService._ping_connection(ext) - - @patch("services.api_based_extension_service.APIBasedExtensionRequestor") - def test_uses_ping_extension_point(self, mock_requestor_class): - """The PING extension point is passed to the client.request call.""" - from models.api_based_extension import APIBasedExtensionPoint - - mock_client = MagicMock() - mock_client.request.return_value = {"result": "pong"} - mock_requestor_class.return_value = mock_client - - ext = _make_extension() - APIBasedExtensionService._ping_connection(ext) - - call_kwargs = mock_client.request.call_args - assert call_kwargs.kwargs["point"] == APIBasedExtensionPoint.PING - assert call_kwargs.kwargs["params"] == {} diff --git a/api/tests/unit_tests/services/test_app_service.py b/api/tests/unit_tests/services/test_app_service.py index bff8dc92c6..95fc28b1e7 100644 --- a/api/tests/unit_tests/services/test_app_service.py +++ b/api/tests/unit_tests/services/test_app_service.py @@ -9,7 +9,7 @@ import pytest from core.errors.error import ProviderTokenNotInitError from models import Account, Tenant -from models.model import App, AppMode +from models.model import App, AppMode, IconType from services.app_service import AppService @@ -411,6 +411,7 @@ class TestAppServiceGetAndUpdate: # Assert assert updated is app + assert updated.icon_type == IconType.IMAGE assert renamed is app assert iconed is app assert site_same is app @@ -419,6 +420,79 @@ class TestAppServiceGetAndUpdate: assert api_changed is app assert mock_db.session.commit.call_count >= 5 + def test_update_app_should_preserve_icon_type_when_not_provided(self, service: AppService) -> None: + """Test update_app keeps the existing icon_type when the payload omits it.""" + # Arrange + app = cast( + App, + SimpleNamespace( + name="old", + description="old", + icon_type=IconType.EMOJI, + icon="a", + icon_background="#111", + use_icon_as_answer_icon=False, + max_active_requests=1, + ), + ) + args = { + "name": "new", + "description": "new-desc", + "icon_type": None, + "icon": "new-icon", + "icon_background": "#222", + "use_icon_as_answer_icon": True, + "max_active_requests": 5, + } + user = SimpleNamespace(id="user-1") + + with ( + patch("services.app_service.current_user", user), + patch("services.app_service.db") as mock_db, + patch("services.app_service.naive_utc_now", return_value="now"), + ): + # Act + updated = service.update_app(app, args) + + # Assert + assert updated is app + assert updated.icon_type == IconType.EMOJI + mock_db.session.commit.assert_called_once() + + def test_update_app_should_reject_empty_icon_type(self, service: AppService) -> None: + """Test update_app rejects an explicit empty icon_type.""" + app = cast( + App, + SimpleNamespace( + name="old", + description="old", + icon_type=IconType.EMOJI, + icon="a", + icon_background="#111", + use_icon_as_answer_icon=False, + max_active_requests=1, + ), + ) + args = { + "name": "new", + "description": "new-desc", + "icon_type": "", + "icon": "new-icon", + "icon_background": "#222", + "use_icon_as_answer_icon": True, + "max_active_requests": 5, + } + user = SimpleNamespace(id="user-1") + + with ( + 
patch("services.app_service.current_user", user), + patch("services.app_service.db") as mock_db, + ): + with pytest.raises(ValueError): + service.update_app(app, args) + + mock_db.session.commit.assert_not_called() + class TestAppServiceDeleteAndMeta: """Test suite for delete and metadata methods.""" diff --git a/api/tests/unit_tests/services/test_attachment_service.py b/api/tests/unit_tests/services/test_attachment_service.py deleted file mode 100644 index 88be20bc41..0000000000 --- a/api/tests/unit_tests/services/test_attachment_service.py +++ /dev/null @@ -1,73 +0,0 @@ -import base64 -from unittest.mock import MagicMock, patch - -import pytest -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker -from werkzeug.exceptions import NotFound - -import services.attachment_service as attachment_service_module -from models.model import UploadFile -from services.attachment_service import AttachmentService - - -class TestAttachmentService: - def test_should_initialize_with_sessionmaker_when_sessionmaker_is_provided(self): - """Test that AttachmentService keeps the provided sessionmaker instance.""" - session_factory = sessionmaker() - - service = AttachmentService(session_factory=session_factory) - - assert service._session_maker is session_factory - - def test_should_initialize_with_bound_sessionmaker_when_engine_is_provided(self): - """Test that AttachmentService builds a sessionmaker bound to the provided engine.""" - engine = create_engine("sqlite:///:memory:") - - service = AttachmentService(session_factory=engine) - session = service._session_maker() - try: - assert session.bind == engine - finally: - session.close() - engine.dispose() - - @pytest.mark.parametrize("invalid_session_factory", [None, "not-a-session-factory", 1]) - def test_should_raise_assertion_error_when_session_factory_type_is_invalid(self, invalid_session_factory): - """Test that invalid session_factory types are rejected.""" - with pytest.raises(AssertionError, match="must be a sessionmaker or an Engine."): - AttachmentService(session_factory=invalid_session_factory) - - def test_should_return_base64_encoded_blob_when_file_exists(self): - """Test that existing files are loaded from storage and returned as base64.""" - service = AttachmentService(session_factory=sessionmaker()) - upload_file = MagicMock(spec=UploadFile) - upload_file.key = "upload-file-key" - - session = MagicMock() - session.query.return_value.where.return_value.first.return_value = upload_file - service._session_maker = MagicMock(return_value=session) - - with patch.object(attachment_service_module.storage, "load_once", return_value=b"binary-content") as mock_load: - result = service.get_file_base64("file-123") - - assert result == base64.b64encode(b"binary-content").decode() - service._session_maker.assert_called_once_with(expire_on_commit=False) - session.query.assert_called_once_with(UploadFile) - mock_load.assert_called_once_with("upload-file-key") - - def test_should_raise_not_found_when_file_does_not_exist(self): - """Test that missing files raise NotFound and never call storage.""" - service = AttachmentService(session_factory=sessionmaker()) - - session = MagicMock() - session.query.return_value.where.return_value.first.return_value = None - service._session_maker = MagicMock(return_value=session) - - with patch.object(attachment_service_module.storage, "load_once") as mock_load: - with pytest.raises(NotFound, match="File not found"): - service.get_file_base64("missing-file") - - 
service._session_maker.assert_called_once_with(expire_on_commit=False) - session.query.assert_called_once_with(UploadFile) - mock_load.assert_not_called() diff --git a/api/tests/unit_tests/services/test_conversation_variable_updater.py b/api/tests/unit_tests/services/test_conversation_variable_updater.py deleted file mode 100644 index 20f7caa78e..0000000000 --- a/api/tests/unit_tests/services/test_conversation_variable_updater.py +++ /dev/null @@ -1,75 +0,0 @@ -from types import SimpleNamespace -from unittest.mock import MagicMock - -import pytest - -from dify_graph.variables import StringVariable -from services.conversation_variable_updater import ConversationVariableNotFoundError, ConversationVariableUpdater - - -class TestConversationVariableUpdater: - def test_should_update_conversation_variable_data_and_commit(self): - """Test update persists serialized variable data when the row exists.""" - conversation_id = "conv-123" - variable = StringVariable( - id="var-123", - name="topic", - value="new value", - ) - expected_json = variable.model_dump_json() - - row = SimpleNamespace(data="old value") - session = MagicMock() - session.scalar.return_value = row - - session_context = MagicMock() - session_context.__enter__.return_value = session - session_context.__exit__.return_value = None - - session_maker = MagicMock(return_value=session_context) - updater = ConversationVariableUpdater(session_maker) - - updater.update(conversation_id=conversation_id, variable=variable) - - session_maker.assert_called_once_with() - session.scalar.assert_called_once() - stmt = session.scalar.call_args.args[0] - compiled_params = stmt.compile().params - assert variable.id in compiled_params.values() - assert conversation_id in compiled_params.values() - assert row.data == expected_json - session.commit.assert_called_once() - - def test_should_raise_not_found_error_when_conversation_variable_missing(self): - """Test update raises ConversationVariableNotFoundError when no matching row exists.""" - conversation_id = "conv-404" - variable = StringVariable( - id="var-404", - name="topic", - value="value", - ) - - session = MagicMock() - session.scalar.return_value = None - - session_context = MagicMock() - session_context.__enter__.return_value = session - session_context.__exit__.return_value = None - - session_maker = MagicMock(return_value=session_context) - updater = ConversationVariableUpdater(session_maker) - - with pytest.raises(ConversationVariableNotFoundError, match="conversation variable not found in the database"): - updater.update(conversation_id=conversation_id, variable=variable) - - session.commit.assert_not_called() - - def test_should_do_nothing_when_flush_is_called(self): - """Test flush currently behaves as a no-op and returns None.""" - session_maker = MagicMock() - updater = ConversationVariableUpdater(session_maker) - - result = updater.flush() - - assert result is None - session_maker.assert_not_called() diff --git a/api/tests/unit_tests/services/test_credit_pool_service.py b/api/tests/unit_tests/services/test_credit_pool_service.py deleted file mode 100644 index 9ef314cb9e..0000000000 --- a/api/tests/unit_tests/services/test_credit_pool_service.py +++ /dev/null @@ -1,157 +0,0 @@ -from types import SimpleNamespace -from unittest.mock import MagicMock, patch - -import pytest - -import services.credit_pool_service as credit_pool_service_module -from core.errors.error import QuotaExceededError -from models import TenantCreditPool -from services.credit_pool_service import CreditPoolService - - 
-@pytest.fixture -def mock_credit_deduction_setup(): - """Fixture providing common setup for credit deduction tests.""" - pool = SimpleNamespace(remaining_credits=50) - fake_engine = MagicMock() - session = MagicMock() - session_context = MagicMock() - session_context.__enter__.return_value = session - session_context.__exit__.return_value = None - - mock_get_pool = patch.object(CreditPoolService, "get_pool", return_value=pool) - mock_db = patch.object(credit_pool_service_module, "db", new=SimpleNamespace(engine=fake_engine)) - mock_session = patch.object(credit_pool_service_module, "Session", return_value=session_context) - - return { - "pool": pool, - "fake_engine": fake_engine, - "session": session, - "session_context": session_context, - "patches": (mock_get_pool, mock_db, mock_session), - } - - -class TestCreditPoolService: - def test_should_create_default_pool_with_trial_type_and_configured_quota(self): - """Test create_default_pool persists a trial pool using configured hosted credits.""" - tenant_id = "tenant-123" - hosted_pool_credits = 5000 - - with ( - patch.object(credit_pool_service_module.dify_config, "HOSTED_POOL_CREDITS", hosted_pool_credits), - patch.object(credit_pool_service_module, "db") as mock_db, - ): - pool = CreditPoolService.create_default_pool(tenant_id) - - assert isinstance(pool, TenantCreditPool) - assert pool.tenant_id == tenant_id - assert pool.pool_type == "trial" - assert pool.quota_limit == hosted_pool_credits - assert pool.quota_used == 0 - mock_db.session.add.assert_called_once_with(pool) - mock_db.session.commit.assert_called_once() - - def test_should_return_first_pool_from_query_when_get_pool_called(self): - """Test get_pool queries by tenant and pool_type and returns first result.""" - tenant_id = "tenant-123" - pool_type = "enterprise" - expected_pool = MagicMock(spec=TenantCreditPool) - - with patch.object(credit_pool_service_module, "db") as mock_db: - query = mock_db.session.query.return_value - filtered_query = query.filter_by.return_value - filtered_query.first.return_value = expected_pool - - result = CreditPoolService.get_pool(tenant_id=tenant_id, pool_type=pool_type) - - assert result == expected_pool - mock_db.session.query.assert_called_once_with(TenantCreditPool) - query.filter_by.assert_called_once_with(tenant_id=tenant_id, pool_type=pool_type) - filtered_query.first.assert_called_once() - - def test_should_return_false_when_pool_not_found_in_check_credits_available(self): - """Test check_credits_available returns False when tenant has no pool.""" - with patch.object(CreditPoolService, "get_pool", return_value=None) as mock_get_pool: - result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=10) - - assert result is False - mock_get_pool.assert_called_once_with("tenant-123", "trial") - - def test_should_return_true_when_remaining_credits_cover_required_amount(self): - """Test check_credits_available returns True when remaining credits are sufficient.""" - pool = SimpleNamespace(remaining_credits=100) - - with patch.object(CreditPoolService, "get_pool", return_value=pool) as mock_get_pool: - result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=60) - - assert result is True - mock_get_pool.assert_called_once_with("tenant-123", "trial") - - def test_should_return_false_when_remaining_credits_are_insufficient(self): - """Test check_credits_available returns False when required credits exceed remaining credits.""" - pool = SimpleNamespace(remaining_credits=30) - - with 
patch.object(CreditPoolService, "get_pool", return_value=pool): - result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=60) - - assert result is False - - def test_should_raise_quota_exceeded_when_pool_not_found_in_check_and_deduct(self): - """Test check_and_deduct_credits raises when tenant credit pool does not exist.""" - with patch.object(CreditPoolService, "get_pool", return_value=None): - with pytest.raises(QuotaExceededError, match="Credit pool not found"): - CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10) - - def test_should_raise_quota_exceeded_when_pool_has_no_remaining_credits(self): - """Test check_and_deduct_credits raises when remaining credits are zero or negative.""" - pool = SimpleNamespace(remaining_credits=0) - - with patch.object(CreditPoolService, "get_pool", return_value=pool): - with pytest.raises(QuotaExceededError, match="No credits remaining"): - CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10) - - def test_should_deduct_minimum_of_required_and_remaining_credits(self, mock_credit_deduction_setup): - """Test check_and_deduct_credits updates quota_used by the actual deducted amount.""" - tenant_id = "tenant-123" - pool_type = "trial" - credits_required = 200 - remaining_credits = 120 - expected_deducted_credits = 120 - - mock_credit_deduction_setup["pool"].remaining_credits = remaining_credits - patches = mock_credit_deduction_setup["patches"] - session = mock_credit_deduction_setup["session"] - - with patches[0], patches[1], patches[2]: - result = CreditPoolService.check_and_deduct_credits( - tenant_id=tenant_id, - credits_required=credits_required, - pool_type=pool_type, - ) - - assert result == expected_deducted_credits - session.execute.assert_called_once() - session.commit.assert_called_once() - - stmt = session.execute.call_args.args[0] - compiled_params = stmt.compile().params - assert tenant_id in compiled_params.values() - assert pool_type in compiled_params.values() - assert expected_deducted_credits in compiled_params.values() - - def test_should_raise_quota_exceeded_when_deduction_update_fails(self, mock_credit_deduction_setup): - """Test check_and_deduct_credits translates DB update failures to QuotaExceededError.""" - mock_credit_deduction_setup["pool"].remaining_credits = 50 - mock_credit_deduction_setup["session"].execute.side_effect = Exception("db failure") - session = mock_credit_deduction_setup["session"] - - patches = mock_credit_deduction_setup["patches"] - mock_logger = patch.object(credit_pool_service_module, "logger") - - with patches[0], patches[1], patches[2], mock_logger as mock_logger_obj: - with pytest.raises(QuotaExceededError, match="Failed to deduct credits"): - CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10) - - session.commit.assert_not_called() - mock_logger_obj.exception.assert_called_once() diff --git a/api/tests/unit_tests/services/test_dataset_permission.py b/api/tests/unit_tests/services/test_dataset_permission.py deleted file mode 100644 index 4974d6c1ef..0000000000 --- a/api/tests/unit_tests/services/test_dataset_permission.py +++ /dev/null @@ -1,305 +0,0 @@ -from unittest.mock import Mock, patch - -import pytest - -from models.account import Account, TenantAccountRole -from models.dataset import Dataset, DatasetPermission, DatasetPermissionEnum -from services.dataset_service import DatasetService -from services.errors.account import NoPermissionError - - -class 
DatasetPermissionTestDataFactory: - """Factory class for creating test data and mock objects for dataset permission tests.""" - - @staticmethod - def create_dataset_mock( - dataset_id: str = "dataset-123", - tenant_id: str = "test-tenant-123", - created_by: str = "creator-456", - permission: DatasetPermissionEnum = DatasetPermissionEnum.ONLY_ME, - **kwargs, - ) -> Mock: - """Create a mock dataset with specified attributes.""" - dataset = Mock(spec=Dataset) - dataset.id = dataset_id - dataset.tenant_id = tenant_id - dataset.created_by = created_by - dataset.permission = permission - for key, value in kwargs.items(): - setattr(dataset, key, value) - return dataset - - @staticmethod - def create_user_mock( - user_id: str = "user-789", - tenant_id: str = "test-tenant-123", - role: TenantAccountRole = TenantAccountRole.NORMAL, - **kwargs, - ) -> Mock: - """Create a mock user with specified attributes.""" - user = Mock(spec=Account) - user.id = user_id - user.current_tenant_id = tenant_id - user.current_role = role - for key, value in kwargs.items(): - setattr(user, key, value) - return user - - @staticmethod - def create_dataset_permission_mock( - dataset_id: str = "dataset-123", - account_id: str = "user-789", - **kwargs, - ) -> Mock: - """Create a mock dataset permission record.""" - permission = Mock(spec=DatasetPermission) - permission.dataset_id = dataset_id - permission.account_id = account_id - for key, value in kwargs.items(): - setattr(permission, key, value) - return permission - - -class TestDatasetPermissionService: - """ - Comprehensive unit tests for DatasetService.check_dataset_permission method. - - This test suite covers all permission scenarios including: - - Cross-tenant access restrictions - - Owner privilege checks - - Different permission levels (ONLY_ME, ALL_TEAM, PARTIAL_TEAM) - - Explicit permission checks for PARTIAL_TEAM - - Error conditions and logging - """ - - @pytest.fixture - def mock_dataset_service_dependencies(self): - """Common mock setup for dataset service dependencies.""" - with patch("services.dataset_service.db.session") as mock_session: - yield { - "db_session": mock_session, - } - - @pytest.fixture - def mock_logging_dependencies(self): - """Mock setup for logging tests.""" - with patch("services.dataset_service.logger") as mock_logging: - yield { - "logging": mock_logging, - } - - def _assert_permission_check_passes(self, dataset: Mock, user: Mock): - """Helper method to verify that permission check passes without raising exceptions.""" - # Should not raise any exception - DatasetService.check_dataset_permission(dataset, user) - - def _assert_permission_check_fails( - self, dataset: Mock, user: Mock, expected_message: str = "You do not have permission to access this dataset." 
- ): - """Helper method to verify that permission check fails with expected error.""" - with pytest.raises(NoPermissionError, match=expected_message): - DatasetService.check_dataset_permission(dataset, user) - - def _assert_database_query_called(self, mock_session: Mock, dataset_id: str, account_id: str): - """Helper method to verify database query calls for permission checks.""" - mock_session.query().filter_by.assert_called_with(dataset_id=dataset_id, account_id=account_id) - - def _assert_database_query_not_called(self, mock_session: Mock): - """Helper method to verify that database query was not called.""" - mock_session.query.assert_not_called() - - # ==================== Cross-Tenant Access Tests ==================== - - def test_permission_check_different_tenant_should_fail(self): - """Test that users from different tenants cannot access dataset regardless of other permissions.""" - # Create dataset and user from different tenants - dataset = DatasetPermissionTestDataFactory.create_dataset_mock( - tenant_id="tenant-123", permission=DatasetPermissionEnum.ALL_TEAM - ) - user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="user-789", tenant_id="different-tenant-456", role=TenantAccountRole.EDITOR - ) - - # Should fail due to different tenant - self._assert_permission_check_fails(dataset, user) - - # ==================== Owner Privilege Tests ==================== - - def test_owner_can_access_any_dataset(self): - """Test that tenant owners can access any dataset regardless of permission level.""" - # Create dataset with restrictive permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ONLY_ME) - - # Create owner user - owner_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="owner-999", role=TenantAccountRole.OWNER - ) - - # Owner should have access regardless of dataset permission - self._assert_permission_check_passes(dataset, owner_user) - - # ==================== ONLY_ME Permission Tests ==================== - - def test_only_me_permission_creator_can_access(self): - """Test ONLY_ME permission allows only the dataset creator to access.""" - # Create dataset with ONLY_ME permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock( - created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME - ) - - # Create creator user - creator_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="creator-456", role=TenantAccountRole.EDITOR - ) - - # Creator should be able to access - self._assert_permission_check_passes(dataset, creator_user) - - def test_only_me_permission_others_cannot_access(self): - """Test ONLY_ME permission denies access to non-creators.""" - # Create dataset with ONLY_ME permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock( - created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME - ) - - # Create normal user (not the creator) - normal_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="normal-789", role=TenantAccountRole.NORMAL - ) - - # Non-creator should be denied access - self._assert_permission_check_fails(dataset, normal_user) - - # ==================== ALL_TEAM Permission Tests ==================== - - def test_all_team_permission_allows_access(self): - """Test ALL_TEAM permission allows any team member to access the dataset.""" - # Create dataset with ALL_TEAM permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ALL_TEAM) - - # Create 
different types of team members - normal_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="normal-789", role=TenantAccountRole.NORMAL - ) - editor_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="editor-456", role=TenantAccountRole.EDITOR - ) - - # All team members should have access - self._assert_permission_check_passes(dataset, normal_user) - self._assert_permission_check_passes(dataset, editor_user) - - # ==================== PARTIAL_TEAM Permission Tests ==================== - - def test_partial_team_permission_creator_can_access(self, mock_dataset_service_dependencies): - """Test PARTIAL_TEAM permission allows creator to access without database query.""" - # Create dataset with PARTIAL_TEAM permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock( - created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM - ) - - # Create creator user - creator_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="creator-456", role=TenantAccountRole.EDITOR - ) - - # Creator should have access without database query - self._assert_permission_check_passes(dataset, creator_user) - self._assert_database_query_not_called(mock_dataset_service_dependencies["db_session"]) - - def test_partial_team_permission_with_explicit_permission(self, mock_dataset_service_dependencies): - """Test PARTIAL_TEAM permission allows users with explicit permission records.""" - # Create dataset with PARTIAL_TEAM permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) - - # Create normal user (not the creator) - normal_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="normal-789", role=TenantAccountRole.NORMAL - ) - - # Mock database query to return a permission record - mock_permission = DatasetPermissionTestDataFactory.create_dataset_permission_mock( - dataset_id=dataset.id, account_id=normal_user.id - ) - mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = mock_permission - - # User with explicit permission should have access - self._assert_permission_check_passes(dataset, normal_user) - self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id) - - def test_partial_team_permission_without_explicit_permission(self, mock_dataset_service_dependencies): - """Test PARTIAL_TEAM permission denies users without explicit permission records.""" - # Create dataset with PARTIAL_TEAM permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) - - # Create normal user (not the creator) - normal_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="normal-789", role=TenantAccountRole.NORMAL - ) - - # Mock database query to return None (no permission record) - mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None - - # User without explicit permission should be denied access - self._assert_permission_check_fails(dataset, normal_user) - self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id) - - def test_partial_team_permission_non_creator_without_permission_fails(self, mock_dataset_service_dependencies): - """Test that non-creators without explicit permission are denied access to PARTIAL_TEAM datasets.""" - # Create dataset with PARTIAL_TEAM permission - dataset = 
DatasetPermissionTestDataFactory.create_dataset_mock( - created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM - ) - - # Create a different user (not the creator) - other_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="other-user-123", role=TenantAccountRole.NORMAL - ) - - # Mock database query to return None (no permission record) - mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None - - # Non-creator without explicit permission should be denied access - self._assert_permission_check_fails(dataset, other_user) - self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, other_user.id) - - # ==================== Enum Usage Tests ==================== - - def test_partial_team_permission_uses_correct_enum(self): - """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM instead of string literals.""" - # Create dataset with PARTIAL_TEAM permission using enum - dataset = DatasetPermissionTestDataFactory.create_dataset_mock( - created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM - ) - - # Create creator user - creator_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="creator-456", role=TenantAccountRole.EDITOR - ) - - # Creator should always have access regardless of permission level - self._assert_permission_check_passes(dataset, creator_user) - - # ==================== Logging Tests ==================== - - def test_permission_denied_logs_debug_message(self, mock_dataset_service_dependencies, mock_logging_dependencies): - """Test that permission denied events are properly logged for debugging purposes.""" - # Create dataset with PARTIAL_TEAM permission - dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM) - - # Create normal user (not the creator) - normal_user = DatasetPermissionTestDataFactory.create_user_mock( - user_id="normal-789", role=TenantAccountRole.NORMAL - ) - - # Mock database query to return None (no permission record) - mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None - - # Attempt permission check (should fail) - with pytest.raises(NoPermissionError): - DatasetService.check_dataset_permission(dataset, normal_user) - - # Verify debug message was logged with correct user and dataset information - mock_logging_dependencies["logging"].debug.assert_called_with( - "User %s does not have permission to access dataset %s", normal_user.id, dataset.id - ) diff --git a/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py b/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py deleted file mode 100644 index caf73470a2..0000000000 --- a/api/tests/unit_tests/services/test_dataset_service_batch_update_document_status.py +++ /dev/null @@ -1,102 +0,0 @@ -import datetime -from unittest.mock import Mock, patch - -import pytest - -from models.dataset import Dataset, Document -from services.dataset_service import DocumentService -from tests.unit_tests.conftest import redis_mock - - -class DocumentBatchUpdateTestDataFactory: - """Factory class for creating test data and mock objects for document batch update tests.""" - - @staticmethod - def create_dataset_mock(dataset_id: str = "dataset-123", tenant_id: str = "tenant-456") -> Mock: - """Create a mock dataset with specified attributes.""" - dataset = Mock(spec=Dataset) - dataset.id = dataset_id - dataset.tenant_id = 
tenant_id - return dataset - - @staticmethod - def create_user_mock(user_id: str = "user-789") -> Mock: - """Create a mock user.""" - user = Mock() - user.id = user_id - return user - - @staticmethod - def create_document_mock( - document_id: str = "doc-1", - name: str = "test_document.pdf", - enabled: bool = True, - archived: bool = False, - indexing_status: str = "completed", - completed_at: datetime.datetime | None = None, - **kwargs, - ) -> Mock: - """Create a mock document with specified attributes.""" - document = Mock(spec=Document) - document.id = document_id - document.name = name - document.enabled = enabled - document.archived = archived - document.indexing_status = indexing_status - document.completed_at = completed_at or datetime.datetime.now() - - document.disabled_at = None - document.disabled_by = None - document.archived_at = None - document.archived_by = None - document.updated_at = None - - for key, value in kwargs.items(): - setattr(document, key, value) - return document - - -class TestDatasetServiceBatchUpdateDocumentStatus: - """Unit tests for non-SQL path in DocumentService.batch_update_document_status.""" - - @pytest.fixture - def mock_document_service_dependencies(self): - """Common mock setup for document service dependencies.""" - with ( - patch("services.dataset_service.DocumentService.get_document") as mock_get_doc, - patch("extensions.ext_database.db.session") as mock_db, - patch("services.dataset_service.naive_utc_now") as mock_naive_utc_now, - ): - current_time = datetime.datetime(2023, 1, 1, 12, 0, 0) - mock_naive_utc_now.return_value = current_time - - yield { - "get_document": mock_get_doc, - "db_session": mock_db, - "naive_utc_now": mock_naive_utc_now, - "current_time": current_time, - } - - def test_batch_update_invalid_action_error(self, mock_document_service_dependencies): - """Test that ValueError is raised when an invalid action is provided.""" - dataset = DocumentBatchUpdateTestDataFactory.create_dataset_mock() - user = DocumentBatchUpdateTestDataFactory.create_user_mock() - - doc = DocumentBatchUpdateTestDataFactory.create_document_mock(enabled=True) - mock_document_service_dependencies["get_document"].return_value = doc - - redis_mock.reset_mock() - redis_mock.get.reset_mock() - redis_mock.setex.reset_mock() - redis_mock.get.return_value = None - - invalid_action = "invalid_action" - with pytest.raises(ValueError) as exc_info: - DocumentService.batch_update_document_status( - dataset=dataset, document_ids=["doc-1"], action=invalid_action, user=user - ) - - assert invalid_action in str(exc_info.value) - assert "Invalid action" in str(exc_info.value) - - redis_mock.setex.assert_not_called() diff --git a/api/tests/unit_tests/services/test_dataset_service_create_dataset.py b/api/tests/unit_tests/services/test_dataset_service_create_dataset.py deleted file mode 100644 index f8c5270656..0000000000 --- a/api/tests/unit_tests/services/test_dataset_service_create_dataset.py +++ /dev/null @@ -1,50 +0,0 @@ -"""Unit tests for non-SQL validation paths in DatasetService dataset creation.""" - -from unittest.mock import Mock, patch -from uuid import uuid4 - -import pytest - -from services.dataset_service import DatasetService -from services.entities.knowledge_entities.rag_pipeline_entities import IconInfo, RagPipelineDatasetCreateEntity - - -class TestDatasetServiceCreateRagPipelineDatasetNonSQL: - """Unit coverage for non-SQL validation in create_empty_rag_pipeline_dataset.""" - - @pytest.fixture - def mock_rag_pipeline_dependencies(self): - """Patch database 
session and current_user for validation-only unit coverage.""" - with ( - patch("services.dataset_service.db.session") as mock_db, - patch("services.dataset_service.current_user") as mock_current_user, - ): - yield { - "db_session": mock_db, - "current_user_mock": mock_current_user, - } - - def test_create_rag_pipeline_dataset_missing_current_user_error(self, mock_rag_pipeline_dependencies): - """Raise ValueError when current_user.id is unavailable before SQL persistence.""" - # Arrange - tenant_id = str(uuid4()) - mock_rag_pipeline_dependencies["current_user_mock"].id = None - - mock_query = Mock() - mock_query.filter_by.return_value.first.return_value = None - mock_rag_pipeline_dependencies["db_session"].query.return_value = mock_query - - icon_info = IconInfo(icon="📙", icon_background="#FFF4ED", icon_type="emoji") - entity = RagPipelineDatasetCreateEntity( - name="Test Dataset", - description="", - icon_info=icon_info, - permission="only_me", - ) - - # Act / Assert - with pytest.raises(ValueError, match="Current user or current user id not found"): - DatasetService.create_empty_rag_pipeline_dataset( - tenant_id=tenant_id, - rag_pipeline_dataset_create_entity=entity, - ) diff --git a/api/tests/unit_tests/services/test_dataset_service_lock_not_owned.py b/api/tests/unit_tests/services/test_dataset_service_lock_not_owned.py index bd226f7536..d2287e8982 100644 --- a/api/tests/unit_tests/services/test_dataset_service_lock_not_owned.py +++ b/api/tests/unit_tests/services/test_dataset_service_lock_not_owned.py @@ -4,6 +4,7 @@ from unittest.mock import Mock, create_autospec import pytest from redis.exceptions import LockNotOwnedError +from core.rag.index_processor.constant.index_type import IndexStructureType from models.account import Account from models.dataset import Dataset, Document from services.dataset_service import DocumentService, SegmentService @@ -76,7 +77,7 @@ def test_save_document_with_dataset_id_ignores_lock_not_owned( info_list = types.SimpleNamespace(data_source_type="upload_file") data_source = types.SimpleNamespace(info_list=info_list) knowledge_config = types.SimpleNamespace( - doc_form="qa_model", + doc_form=IndexStructureType.QA_INDEX, original_document_id=None, # go into "new document" branch data_source=data_source, indexing_technique="high_quality", @@ -131,7 +132,7 @@ def test_add_segment_ignores_lock_not_owned( document.id = "doc-1" document.dataset_id = dataset.id document.word_count = 0 - document.doc_form = "qa_model" + document.doc_form = IndexStructureType.QA_INDEX # Minimal args required by add_segment args = { @@ -174,4 +175,4 @@ def test_multi_create_segment_ignores_lock_not_owned( document.id = "doc-1" document.dataset_id = dataset.id document.word_count = 0 - document.doc_form = "qa_model" + document.doc_form = IndexStructureType.QA_INDEX diff --git a/api/tests/unit_tests/services/test_delete_archived_workflow_run.py b/api/tests/unit_tests/services/test_delete_archived_workflow_run.py deleted file mode 100644 index a7e1a011f6..0000000000 --- a/api/tests/unit_tests/services/test_delete_archived_workflow_run.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -Unit tests for archived workflow run deletion service. 
-""" - -from unittest.mock import MagicMock, patch - - -class TestArchivedWorkflowRunDeletion: - def test_delete_by_run_id_calls_delete_run(self): - from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion - - deleter = ArchivedWorkflowRunDeletion() - repo = MagicMock() - repo.get_archived_run_ids.return_value = {"run-1"} - run = MagicMock() - run.id = "run-1" - run.tenant_id = "tenant-1" - - session = MagicMock() - session.get.return_value = run - - session_maker = MagicMock() - session_maker.return_value.__enter__.return_value = session - session_maker.return_value.__exit__.return_value = None - mock_db = MagicMock() - mock_db.engine = MagicMock() - - with ( - patch("services.retention.workflow_run.delete_archived_workflow_run.db", mock_db), - patch( - "services.retention.workflow_run.delete_archived_workflow_run.sessionmaker", - return_value=session_maker, - autospec=True, - ), - patch.object(deleter, "_get_workflow_run_repo", return_value=repo, autospec=True), - patch.object( - deleter, "_delete_run", return_value=MagicMock(success=True), autospec=True - ) as mock_delete_run, - ): - result = deleter.delete_by_run_id("run-1") - - assert result.success is True - mock_delete_run.assert_called_once_with(run) - - def test_delete_run_dry_run(self): - from services.retention.workflow_run.delete_archived_workflow_run import ArchivedWorkflowRunDeletion - - deleter = ArchivedWorkflowRunDeletion(dry_run=True) - run = MagicMock() - run.id = "run-1" - run.tenant_id = "tenant-1" - - with patch.object(deleter, "_get_workflow_run_repo", autospec=True) as mock_get_repo: - result = deleter._delete_run(run) - - assert result.success is True - mock_get_repo.assert_not_called() diff --git a/api/tests/unit_tests/services/test_document_service_display_status.py b/api/tests/unit_tests/services/test_document_service_display_status.py deleted file mode 100644 index cb2e2940c8..0000000000 --- a/api/tests/unit_tests/services/test_document_service_display_status.py +++ /dev/null @@ -1,8 +0,0 @@ -from services.dataset_service import DocumentService - - -def test_normalize_display_status_alias_mapping(): - assert DocumentService.normalize_display_status("ACTIVE") == "available" - assert DocumentService.normalize_display_status("enabled") == "available" - assert DocumentService.normalize_display_status("archived") == "archived" - assert DocumentService.normalize_display_status("unknown") is None diff --git a/api/tests/unit_tests/services/test_end_user_service.py b/api/tests/unit_tests/services/test_end_user_service.py deleted file mode 100644 index a3b1f46436..0000000000 --- a/api/tests/unit_tests/services/test_end_user_service.py +++ /dev/null @@ -1,841 +0,0 @@ -from unittest.mock import MagicMock, patch - -import pytest - -from core.app.entities.app_invoke_entities import InvokeFrom -from models.model import App, DefaultEndUserSessionID, EndUser -from services.end_user_service import EndUserService - - -class TestEndUserServiceFactory: - """Factory class for creating test data and mock objects for end user service tests.""" - - @staticmethod - def create_app_mock( - app_id: str = "app-123", - tenant_id: str = "tenant-456", - name: str = "Test App", - ) -> MagicMock: - """Create a mock App object.""" - app = MagicMock(spec=App) - app.id = app_id - app.tenant_id = tenant_id - app.name = name - return app - - @staticmethod - def create_end_user_mock( - user_id: str = "user-789", - tenant_id: str = "tenant-456", - app_id: str = "app-123", - session_id: str = "session-001", - 
type: InvokeFrom = InvokeFrom.SERVICE_API, - is_anonymous: bool = False, - ) -> MagicMock: - """Create a mock EndUser object.""" - end_user = MagicMock(spec=EndUser) - end_user.id = user_id - end_user.tenant_id = tenant_id - end_user.app_id = app_id - end_user.session_id = session_id - end_user.type = type - end_user.is_anonymous = is_anonymous - end_user.external_user_id = session_id - return end_user - - -class TestEndUserServiceGetEndUserById: - """Unit tests for EndUserService.get_end_user_by_id method.""" - - @pytest.fixture - def factory(self): - """Provide test data factory.""" - return TestEndUserServiceFactory() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_get_end_user_by_id_success(self, mock_db, mock_session_class, factory): - """Test successful retrieval of end user by ID.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - end_user_id = "user-789" - - mock_end_user = factory.create_end_user_mock(user_id=end_user_id, tenant_id=tenant_id, app_id=app_id) - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = mock_end_user - - # Act - result = EndUserService.get_end_user_by_id(tenant_id=tenant_id, app_id=app_id, end_user_id=end_user_id) - - # Assert - assert result == mock_end_user - mock_session.query.assert_called_once_with(EndUser) - mock_query.where.assert_called_once() - mock_query.first.assert_called_once() - mock_context.__enter__.assert_called_once() - mock_context.__exit__.assert_called_once() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_get_end_user_by_id_not_found(self, mock_db, mock_session_class): - """Test retrieval of non-existent end user returns None.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - end_user_id = "user-789" - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Act - result = EndUserService.get_end_user_by_id(tenant_id=tenant_id, app_id=app_id, end_user_id=end_user_id) - - # Assert - assert result is None - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_get_end_user_by_id_query_parameters(self, mock_db, mock_session_class): - """Test that query parameters are correctly applied.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - end_user_id = "user-789" - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Act - EndUserService.get_end_user_by_id(tenant_id=tenant_id, app_id=app_id, end_user_id=end_user_id) - - # Assert - # Verify the where clause was called with the correct conditions - call_args = mock_query.where.call_args[0] - assert len(call_args) == 3 - # Check that the conditions match the expected filters - # (We can't easily test the exact 
conditions without importing SQLAlchemy) - - -class TestEndUserServiceGetOrCreateEndUser: - """Unit tests for EndUserService.get_or_create_end_user method.""" - - @pytest.fixture - def factory(self): - """Provide test data factory.""" - return TestEndUserServiceFactory() - - @patch("services.end_user_service.EndUserService.get_or_create_end_user_by_type") - def test_get_or_create_end_user_with_user_id(self, mock_get_or_create_by_type, factory): - """Test get_or_create_end_user with specific user_id.""" - # Arrange - app_mock = factory.create_app_mock() - user_id = "user-123" - expected_end_user = factory.create_end_user_mock() - mock_get_or_create_by_type.return_value = expected_end_user - - # Act - result = EndUserService.get_or_create_end_user(app_mock, user_id) - - # Assert - assert result == expected_end_user - mock_get_or_create_by_type.assert_called_once_with( - InvokeFrom.SERVICE_API, app_mock.tenant_id, app_mock.id, user_id - ) - - @patch("services.end_user_service.EndUserService.get_or_create_end_user_by_type") - def test_get_or_create_end_user_without_user_id(self, mock_get_or_create_by_type, factory): - """Test get_or_create_end_user without user_id (None).""" - # Arrange - app_mock = factory.create_app_mock() - expected_end_user = factory.create_end_user_mock() - mock_get_or_create_by_type.return_value = expected_end_user - - # Act - result = EndUserService.get_or_create_end_user(app_mock, None) - - # Assert - assert result == expected_end_user - mock_get_or_create_by_type.assert_called_once_with( - InvokeFrom.SERVICE_API, app_mock.tenant_id, app_mock.id, None - ) - - -class TestEndUserServiceGetOrCreateEndUserByType: - """ - Unit tests for EndUserService.get_or_create_end_user_by_type method. - - This test suite covers: - - Creating end users with different InvokeFrom types - - Type migration for legacy users - - Query ordering and prioritization - - Session management - """ - - @pytest.fixture - def factory(self): - """Provide test data factory.""" - return TestEndUserServiceFactory() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_new_end_user_with_user_id(self, mock_db, mock_session_class, factory): - """Test creating a new end user with specific user_id.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = None # No existing user - - # Act - result = EndUserService.get_or_create_end_user_by_type( - type=type_enum, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - # Verify new EndUser was created with correct parameters - mock_session.add.assert_called_once() - mock_session.commit.assert_called_once() - added_user = mock_session.add.call_args[0][0] - assert added_user.tenant_id == tenant_id - assert added_user.app_id == app_id - assert added_user.type == type_enum - assert added_user.session_id == user_id - assert added_user.external_user_id == user_id - assert added_user._is_anonymous is False - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_new_end_user_default_session(self, mock_db, mock_session_class, 
factory): - """Test creating a new end user with default session ID.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = None - type_enum = InvokeFrom.WEB_APP - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = None # No existing user - - # Act - result = EndUserService.get_or_create_end_user_by_type( - type=type_enum, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - added_user = mock_session.add.call_args[0][0] - assert added_user.session_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID - assert added_user.external_user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID - assert added_user._is_anonymous is True - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - @patch("services.end_user_service.logger") - def test_existing_user_same_type(self, mock_logger, mock_db, mock_session_class, factory): - """Test retrieving existing user with same type.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - existing_user = factory.create_end_user_mock( - tenant_id=tenant_id, app_id=app_id, session_id=user_id, type=type_enum - ) - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = existing_user - - # Act - result = EndUserService.get_or_create_end_user_by_type( - type=type_enum, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - assert result == existing_user - mock_session.add.assert_not_called() - mock_session.commit.assert_not_called() - mock_logger.info.assert_not_called() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - @patch("services.end_user_service.logger") - def test_existing_user_different_type_upgrade(self, mock_logger, mock_db, mock_session_class, factory): - """Test upgrading existing user with different type.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - old_type = InvokeFrom.WEB_APP - new_type = InvokeFrom.SERVICE_API - - existing_user = factory.create_end_user_mock( - tenant_id=tenant_id, app_id=app_id, session_id=user_id, type=old_type - ) - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = existing_user - - # Act - result = EndUserService.get_or_create_end_user_by_type( - type=new_type, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - assert result == existing_user - assert existing_user.type == new_type - mock_session.commit.assert_called_once() - mock_logger.info.assert_called_once() - logger_call_args = mock_logger.info.call_args[0] - assert "Upgrading legacy EndUser" in 
logger_call_args[0] - # The old and new types are passed as separate arguments - assert mock_logger.info.call_args[0][1] == existing_user.id - assert mock_logger.info.call_args[0][2] == old_type - assert mock_logger.info.call_args[0][3] == new_type - assert mock_logger.info.call_args[0][4] == user_id - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_query_ordering_prioritizes_exact_type_match(self, mock_db, mock_session_class, factory): - """Test that query ordering prioritizes exact type matches.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - target_type = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = None - - # Act - EndUserService.get_or_create_end_user_by_type( - type=target_type, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - mock_query.order_by.assert_called_once() - # Verify that case statement is used for ordering - order_by_call = mock_query.order_by.call_args[0][0] - # The exact structure depends on SQLAlchemy's case implementation - # but we can verify it was called - - # Test 10: Session context manager properly closes - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_session_context_manager_closes(self, mock_db, mock_session_class, factory): - """Test that Session context manager is properly used.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = None - - # Act - EndUserService.get_or_create_end_user_by_type( - type=InvokeFrom.SERVICE_API, - tenant_id=tenant_id, - app_id=app_id, - user_id=user_id, - ) - - # Assert - # Verify context manager was entered and exited - mock_context.__enter__.assert_called_once() - mock_context.__exit__.assert_called_once() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_all_invokefrom_types_supported(self, mock_db, mock_session_class): - """Test that all InvokeFrom enum values are supported.""" - # Arrange - tenant_id = "tenant-123" - app_id = "app-456" - user_id = "user-789" - - for invoke_type in InvokeFrom: - with patch("services.end_user_service.Session") as mock_session_class: - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.first.return_value = None - - # Act - result = EndUserService.get_or_create_end_user_by_type( - type=invoke_type, tenant_id=tenant_id, app_id=app_id, user_id=user_id - ) - - # Assert - added_user = mock_session.add.call_args[0][0] - assert added_user.type == invoke_type - - -class 
TestEndUserServiceCreateEndUserBatch: - """Unit tests for EndUserService.create_end_user_batch method.""" - - @pytest.fixture - def factory(self): - """Provide test data factory.""" - return TestEndUserServiceFactory() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_empty_app_ids(self, mock_db, mock_session_class): - """Test batch creation with empty app_ids list.""" - # Arrange - tenant_id = "tenant-123" - app_ids: list[str] = [] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - assert result == {} - mock_session_class.assert_not_called() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_default_session_id(self, mock_db, mock_session_class): - """Test batch creation with empty user_id (uses default session).""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789"] - user_id = "" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - assert len(result) == 2 - for app_id, end_user in result.items(): - assert end_user.session_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID - assert end_user.external_user_id == DefaultEndUserSessionID.DEFAULT_SESSION_ID - assert end_user._is_anonymous is True - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_deduplicate_app_ids(self, mock_db, mock_session_class): - """Test that duplicate app_ids are deduplicated while preserving order.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789", "app-456", "app-123", "app-789"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - # Should have 3 unique app_ids in original order - assert len(result) == 3 - assert "app-456" in result - assert "app-789" in result - assert "app-123" in result - - # Verify the order is preserved - added_users = mock_session.add_all.call_args[0][0] - assert len(added_users) == 3 - assert added_users[0].app_id == "app-456" - assert added_users[1].app_id == "app-789" - assert added_users[2].app_id == "app-123" - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_all_existing_users(self, mock_db, mock_session_class, factory): - """Test batch creation when all users already exist.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789"] - user_id = "user-789" - type_enum = 
InvokeFrom.SERVICE_API - - existing_user1 = factory.create_end_user_mock( - tenant_id=tenant_id, app_id="app-456", session_id=user_id, type=type_enum - ) - existing_user2 = factory.create_end_user_mock( - tenant_id=tenant_id, app_id="app-789", session_id=user_id, type=type_enum - ) - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [existing_user1, existing_user2] - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - assert len(result) == 2 - assert result["app-456"] == existing_user1 - assert result["app-789"] == existing_user2 - mock_session.add_all.assert_not_called() - mock_session.commit.assert_not_called() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_partial_existing_users(self, mock_db, mock_session_class, factory): - """Test batch creation with some existing and some new users.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789", "app-123"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - existing_user1 = factory.create_end_user_mock( - tenant_id=tenant_id, app_id="app-456", session_id=user_id, type=type_enum - ) - # app-789 and app-123 don't exist - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [existing_user1] - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - assert len(result) == 3 - assert result["app-456"] == existing_user1 - assert "app-789" in result - assert "app-123" in result - - # Should create 2 new users - mock_session.add_all.assert_called_once() - added_users = mock_session.add_all.call_args[0][0] - assert len(added_users) == 2 - - mock_session.commit.assert_called_once() - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_handles_duplicates_in_existing(self, mock_db, mock_session_class, factory): - """Test batch creation handles duplicates in existing users gracefully.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - # Simulate duplicate records in database - existing_user1 = factory.create_end_user_mock( - user_id="user-1", tenant_id=tenant_id, app_id="app-456", session_id=user_id, type=type_enum - ) - existing_user2 = factory.create_end_user_mock( - user_id="user-2", tenant_id=tenant_id, app_id="app-456", session_id=user_id, type=type_enum - ) - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [existing_user1, existing_user2] - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, 
user_id=user_id - ) - - # Assert - assert len(result) == 1 - # Should prefer the first one found - assert result["app-456"] == existing_user1 - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_all_invokefrom_types(self, mock_db, mock_session_class): - """Test batch creation with all InvokeFrom types.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456"] - user_id = "user-789" - - for invoke_type in InvokeFrom: - with patch("services.end_user_service.Session") as mock_session_class: - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - result = EndUserService.create_end_user_batch( - type=invoke_type, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - added_user = mock_session.add_all.call_args[0][0][0] - assert added_user.type == invoke_type - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_single_app_id(self, mock_db, mock_session_class, factory): - """Test batch creation with single app_id.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - result = EndUserService.create_end_user_batch( - type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id - ) - - # Assert - assert len(result) == 1 - assert "app-456" in result - mock_session.add_all.assert_called_once() - added_users = mock_session.add_all.call_args[0][0] - assert len(added_users) == 1 - assert added_users[0].app_id == "app-456" - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_anonymous_vs_authenticated(self, mock_db, mock_session_class): - """Test batch creation correctly sets anonymous flag.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789"] - - # Test with regular user ID - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - authenticated user - result = EndUserService.create_end_user_batch( - type=InvokeFrom.SERVICE_API, tenant_id=tenant_id, app_ids=app_ids, user_id="user-789" - ) - - # Assert - added_users = mock_session.add_all.call_args[0][0] - for user in added_users: - assert user._is_anonymous is False - - # Test with default session ID - mock_session.reset_mock() - mock_query.reset_mock() - mock_query.all.return_value = [] - - # Act - anonymous user - result = EndUserService.create_end_user_batch( - type=InvokeFrom.SERVICE_API, - tenant_id=tenant_id, - app_ids=app_ids, - user_id=DefaultEndUserSessionID.DEFAULT_SESSION_ID, - ) - - # Assert - added_users = 
mock_session.add_all.call_args[0][0] - for user in added_users: - assert user._is_anonymous is True - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_efficient_single_query(self, mock_db, mock_session_class): - """Test that batch creation uses efficient single query for existing users.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456", "app-789", "app-123"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - EndUserService.create_end_user_batch(type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id) - - # Assert - # Should make exactly one query to check for existing users - mock_session.query.assert_called_once_with(EndUser) - mock_query.where.assert_called_once() - mock_query.all.assert_called_once() - - # Verify the where clause uses .in_() for app_ids - where_call = mock_query.where.call_args[0] - # The exact structure depends on SQLAlchemy implementation - # but we can verify it was called with the right parameters - - @patch("services.end_user_service.Session") - @patch("services.end_user_service.db") - def test_create_batch_session_context_manager(self, mock_db, mock_session_class): - """Test that batch creation properly uses session context manager.""" - # Arrange - tenant_id = "tenant-123" - app_ids = ["app-456"] - user_id = "user-789" - type_enum = InvokeFrom.SERVICE_API - - mock_session = MagicMock() - mock_context = MagicMock() - mock_context.__enter__.return_value = mock_session - mock_session_class.return_value = mock_context - - mock_query = MagicMock() - mock_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.all.return_value = [] # No existing users - - # Act - EndUserService.create_end_user_batch(type=type_enum, tenant_id=tenant_id, app_ids=app_ids, user_id=user_id) - - # Assert - mock_context.__enter__.assert_called_once() - mock_context.__exit__.assert_called_once() - mock_session.commit.assert_called_once() diff --git a/api/tests/unit_tests/services/test_file_service_zip_and_lookup.py b/api/tests/unit_tests/services/test_file_service_zip_and_lookup.py deleted file mode 100644 index 7b4d349e33..0000000000 --- a/api/tests/unit_tests/services/test_file_service_zip_and_lookup.py +++ /dev/null @@ -1,99 +0,0 @@ -""" -Unit tests for `services.file_service.FileService` helpers. - -We keep these tests focused on: -- ZIP tempfile building (sanitization + deduplication + content writes) -- tenant-scoped batch lookup behavior (`get_upload_files_by_ids`) -""" - -from __future__ import annotations - -from types import SimpleNamespace -from typing import Any -from zipfile import ZipFile - -import pytest - -import services.file_service as file_service_module -from services.file_service import FileService - - -def test_build_upload_files_zip_tempfile_sanitizes_and_dedupes_names(monkeypatch: pytest.MonkeyPatch) -> None: - """Ensure ZIP entry names are safe and unique while preserving extensions.""" - - # Arrange: three upload files that all sanitize down to the same basename ("b.txt"). 
- upload_files: list[Any] = [ - SimpleNamespace(name="a/b.txt", key="k1"), - SimpleNamespace(name="c/b.txt", key="k2"), - SimpleNamespace(name="../b.txt", key="k3"), - ] - - # Stream distinct bytes per key so we can verify content is written to the right entry. - data_by_key: dict[str, list[bytes]] = {"k1": [b"one"], "k2": [b"two"], "k3": [b"three"]} - - def _load(key: str, stream: bool = True) -> list[bytes]: - # Return the corresponding chunks for this key (the production code iterates chunks). - assert stream is True - return data_by_key[key] - - monkeypatch.setattr(file_service_module.storage, "load", _load) - - # Act: build zip in a tempfile. - with FileService.build_upload_files_zip_tempfile(upload_files=upload_files) as tmp: - with ZipFile(tmp, mode="r") as zf: - # Assert: names are sanitized (no directory components) and deduped with suffixes. - assert zf.namelist() == ["b.txt", "b (1).txt", "b (2).txt"] - - # Assert: each entry contains the correct bytes from storage. - assert zf.read("b.txt") == b"one" - assert zf.read("b (1).txt") == b"two" - assert zf.read("b (2).txt") == b"three" - - -def test_get_upload_files_by_ids_returns_empty_when_no_ids(monkeypatch: pytest.MonkeyPatch) -> None: - """Ensure empty input returns an empty mapping without hitting the database.""" - - class _Session: - def scalars(self, _stmt): # type: ignore[no-untyped-def] - raise AssertionError("db.session.scalars should not be called for empty id lists") - - monkeypatch.setattr(file_service_module, "db", SimpleNamespace(session=_Session())) - - assert FileService.get_upload_files_by_ids("tenant-1", []) == {} - - -def test_get_upload_files_by_ids_returns_id_keyed_mapping(monkeypatch: pytest.MonkeyPatch) -> None: - """Ensure batch lookup returns a dict keyed by stringified UploadFile ids.""" - - upload_files: list[Any] = [ - SimpleNamespace(id="file-1", tenant_id="tenant-1"), - SimpleNamespace(id="file-2", tenant_id="tenant-1"), - ] - - class _ScalarResult: - def __init__(self, items: list[Any]) -> None: - self._items = items - - def all(self) -> list[Any]: - return self._items - - class _Session: - def __init__(self, items: list[Any]) -> None: - self._items = items - self.calls: list[object] = [] - - def scalars(self, stmt): # type: ignore[no-untyped-def] - # Capture the statement so we can at least assert the query path is taken. - self.calls.append(stmt) - return _ScalarResult(self._items) - - session = _Session(upload_files) - monkeypatch.setattr(file_service_module, "db", SimpleNamespace(session=session)) - - # Provide duplicates to ensure callers can safely pass repeated ids. 
- result = FileService.get_upload_files_by_ids("tenant-1", ["file-1", "file-1", "file-2"]) - - assert set(result.keys()) == {"file-1", "file-2"} - assert result["file-1"].id == "file-1" - assert result["file-2"].id == "file-2" - assert len(session.calls) == 1 diff --git a/api/tests/unit_tests/services/test_oauth_server_service.py b/api/tests/unit_tests/services/test_oauth_server_service.py deleted file mode 100644 index 231ceb74dc..0000000000 --- a/api/tests/unit_tests/services/test_oauth_server_service.py +++ /dev/null @@ -1,224 +0,0 @@ -from __future__ import annotations - -import uuid -from types import SimpleNamespace -from typing import cast -from unittest.mock import MagicMock - -import pytest -from pytest_mock import MockerFixture -from werkzeug.exceptions import BadRequest - -from services.oauth_server import ( - OAUTH_ACCESS_TOKEN_EXPIRES_IN, - OAUTH_ACCESS_TOKEN_REDIS_KEY, - OAUTH_AUTHORIZATION_CODE_REDIS_KEY, - OAUTH_REFRESH_TOKEN_EXPIRES_IN, - OAUTH_REFRESH_TOKEN_REDIS_KEY, - OAuthGrantType, - OAuthServerService, -) - - -@pytest.fixture -def mock_redis_client(mocker: MockerFixture) -> MagicMock: - return mocker.patch("services.oauth_server.redis_client") - - -@pytest.fixture -def mock_session(mocker: MockerFixture) -> MagicMock: - """Mock the OAuth server Session context manager.""" - mocker.patch("services.oauth_server.db", SimpleNamespace(engine=object())) - session = MagicMock() - session_cm = MagicMock() - session_cm.__enter__.return_value = session - mocker.patch("services.oauth_server.Session", return_value=session_cm) - return session - - -def test_get_oauth_provider_app_should_return_app_when_record_exists(mock_session: MagicMock) -> None: - # Arrange - mock_execute_result = MagicMock() - expected_app = MagicMock() - mock_execute_result.scalar_one_or_none.return_value = expected_app - mock_session.execute.return_value = mock_execute_result - - # Act - result = OAuthServerService.get_oauth_provider_app("client-1") - - # Assert - assert result is expected_app - mock_session.execute.assert_called_once() - mock_execute_result.scalar_one_or_none.assert_called_once() - - -def test_sign_oauth_authorization_code_should_store_code_and_return_value( - mocker: MockerFixture, mock_redis_client: MagicMock -) -> None: - # Arrange - deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000111") - mocker.patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid) - - # Act - code = OAuthServerService.sign_oauth_authorization_code("client-1", "user-1") - - # Assert - expected_code = str(deterministic_uuid) - assert code == expected_code - mock_redis_client.set.assert_called_once_with( - OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id="client-1", code=expected_code), - "user-1", - ex=600, - ) - - -def test_sign_oauth_access_token_should_raise_bad_request_when_authorization_code_is_invalid( - mock_redis_client: MagicMock, -) -> None: - # Arrange - mock_redis_client.get.return_value = None - - # Act + Assert - with pytest.raises(BadRequest, match="invalid code"): - OAuthServerService.sign_oauth_access_token( - grant_type=OAuthGrantType.AUTHORIZATION_CODE, - code="bad-code", - client_id="client-1", - ) - - -def test_sign_oauth_access_token_should_issue_access_and_refresh_token_when_authorization_code_is_valid( - mocker: MockerFixture, mock_redis_client: MagicMock -) -> None: - # Arrange - token_uuids = [ - uuid.UUID("00000000-0000-0000-0000-000000000201"), - uuid.UUID("00000000-0000-0000-0000-000000000202"), - ] - mocker.patch("services.oauth_server.uuid.uuid4", 
side_effect=token_uuids) - mock_redis_client.get.return_value = b"user-1" - code_key = OAUTH_AUTHORIZATION_CODE_REDIS_KEY.format(client_id="client-1", code="code-1") - - # Act - access_token, refresh_token = OAuthServerService.sign_oauth_access_token( - grant_type=OAuthGrantType.AUTHORIZATION_CODE, - code="code-1", - client_id="client-1", - ) - - # Assert - assert access_token == str(token_uuids[0]) - assert refresh_token == str(token_uuids[1]) - mock_redis_client.delete.assert_called_once_with(code_key) - mock_redis_client.set.assert_any_call( - OAUTH_ACCESS_TOKEN_REDIS_KEY.format(client_id="client-1", token=access_token), - b"user-1", - ex=OAUTH_ACCESS_TOKEN_EXPIRES_IN, - ) - mock_redis_client.set.assert_any_call( - OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id="client-1", token=refresh_token), - b"user-1", - ex=OAUTH_REFRESH_TOKEN_EXPIRES_IN, - ) - - -def test_sign_oauth_access_token_should_raise_bad_request_when_refresh_token_is_invalid( - mock_redis_client: MagicMock, -) -> None: - # Arrange - mock_redis_client.get.return_value = None - - # Act + Assert - with pytest.raises(BadRequest, match="invalid refresh token"): - OAuthServerService.sign_oauth_access_token( - grant_type=OAuthGrantType.REFRESH_TOKEN, - refresh_token="stale-token", - client_id="client-1", - ) - - -def test_sign_oauth_access_token_should_issue_new_access_token_when_refresh_token_is_valid( - mocker: MockerFixture, mock_redis_client: MagicMock -) -> None: - # Arrange - deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000301") - mocker.patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid) - mock_redis_client.get.return_value = b"user-1" - - # Act - access_token, returned_refresh_token = OAuthServerService.sign_oauth_access_token( - grant_type=OAuthGrantType.REFRESH_TOKEN, - refresh_token="refresh-1", - client_id="client-1", - ) - - # Assert - assert access_token == str(deterministic_uuid) - assert returned_refresh_token == "refresh-1" - mock_redis_client.set.assert_called_once_with( - OAUTH_ACCESS_TOKEN_REDIS_KEY.format(client_id="client-1", token=access_token), - b"user-1", - ex=OAUTH_ACCESS_TOKEN_EXPIRES_IN, - ) - - -def test_sign_oauth_access_token_with_unknown_grant_type_should_return_none() -> None: - # Arrange - grant_type = cast(OAuthGrantType, "invalid-grant-type") - - # Act - result = OAuthServerService.sign_oauth_access_token( - grant_type=grant_type, - client_id="client-1", - ) - - # Assert - assert result is None - - -def test_sign_oauth_refresh_token_should_store_token_with_expected_expiry( - mocker: MockerFixture, mock_redis_client: MagicMock -) -> None: - # Arrange - deterministic_uuid = uuid.UUID("00000000-0000-0000-0000-000000000401") - mocker.patch("services.oauth_server.uuid.uuid4", return_value=deterministic_uuid) - - # Act - refresh_token = OAuthServerService._sign_oauth_refresh_token("client-2", "user-2") - - # Assert - assert refresh_token == str(deterministic_uuid) - mock_redis_client.set.assert_called_once_with( - OAUTH_REFRESH_TOKEN_REDIS_KEY.format(client_id="client-2", token=refresh_token), - "user-2", - ex=OAUTH_REFRESH_TOKEN_EXPIRES_IN, - ) - - -def test_validate_oauth_access_token_should_return_none_when_token_not_found( - mock_redis_client: MagicMock, -) -> None: - # Arrange - mock_redis_client.get.return_value = None - - # Act - result = OAuthServerService.validate_oauth_access_token("client-1", "missing-token") - - # Assert - assert result is None - - -def test_validate_oauth_access_token_should_load_user_when_token_exists( - mocker: MockerFixture, 
mock_redis_client: MagicMock -) -> None: - # Arrange - mock_redis_client.get.return_value = b"user-88" - expected_user = MagicMock() - mock_load_user = mocker.patch("services.oauth_server.AccountService.load_user", return_value=expected_user) - - # Act - result = OAuthServerService.validate_oauth_access_token("client-1", "access-token") - - # Assert - assert result is expected_user - mock_load_user.assert_called_once_with("user-88") diff --git a/api/tests/unit_tests/services/test_saved_message_service.py b/api/tests/unit_tests/services/test_saved_message_service.py deleted file mode 100644 index 5b86930e20..0000000000 --- a/api/tests/unit_tests/services/test_saved_message_service.py +++ /dev/null @@ -1,626 +0,0 @@ -""" -Comprehensive unit tests for SavedMessageService. - -This test suite provides complete coverage of saved message operations in Dify, -following TDD principles with the Arrange-Act-Assert pattern. - -## Test Coverage - -### 1. Pagination (TestSavedMessageServicePagination) -Tests saved message listing and pagination: -- Pagination with valid user (Account and EndUser) -- Pagination without user raises ValueError -- Pagination with last_id parameter -- Empty results when no saved messages exist -- Integration with MessageService pagination - -### 2. Save Operations (TestSavedMessageServiceSave) -Tests saving messages: -- Save message for Account user -- Save message for EndUser -- Save without user (no-op) -- Prevent duplicate saves (idempotent) -- Message validation through MessageService - -### 3. Delete Operations (TestSavedMessageServiceDelete) -Tests deleting saved messages: -- Delete saved message for Account user -- Delete saved message for EndUser -- Delete without user (no-op) -- Delete non-existent saved message (no-op) -- Proper database cleanup - -## Testing Approach - -- **Mocking Strategy**: All external dependencies (database, MessageService) are mocked - for fast, isolated unit tests -- **Factory Pattern**: SavedMessageServiceTestDataFactory provides consistent test data -- **Fixtures**: Mock objects are configured per test method -- **Assertions**: Each test verifies return values and side effects - (database operations, method calls) - -## Key Concepts - -**User Types:** -- Account: Workspace members (console users) -- EndUser: API users (end users) - -**Saved Messages:** -- Users can save messages for later reference -- Each user has their own saved message list -- Saving is idempotent (duplicate saves ignored) -- Deletion is safe (non-existent deletes ignored) -""" - -from datetime import UTC, datetime -from unittest.mock import MagicMock, Mock, create_autospec, patch - -import pytest - -from libs.infinite_scroll_pagination import InfiniteScrollPagination -from models import Account -from models.model import App, EndUser, Message -from models.web import SavedMessage -from services.saved_message_service import SavedMessageService - - -class SavedMessageServiceTestDataFactory: - """ - Factory for creating test data and mock objects. - - Provides reusable methods to create consistent mock objects for testing - saved message operations. - """ - - @staticmethod - def create_account_mock(account_id: str = "account-123", **kwargs) -> Mock: - """ - Create a mock Account object. 
- - Args: - account_id: Unique identifier for the account - **kwargs: Additional attributes to set on the mock - - Returns: - Mock Account object with specified attributes - """ - account = create_autospec(Account, instance=True) - account.id = account_id - for key, value in kwargs.items(): - setattr(account, key, value) - return account - - @staticmethod - def create_end_user_mock(user_id: str = "user-123", **kwargs) -> Mock: - """ - Create a mock EndUser object. - - Args: - user_id: Unique identifier for the end user - **kwargs: Additional attributes to set on the mock - - Returns: - Mock EndUser object with specified attributes - """ - user = create_autospec(EndUser, instance=True) - user.id = user_id - for key, value in kwargs.items(): - setattr(user, key, value) - return user - - @staticmethod - def create_app_mock(app_id: str = "app-123", tenant_id: str = "tenant-123", **kwargs) -> Mock: - """ - Create a mock App object. - - Args: - app_id: Unique identifier for the app - tenant_id: Tenant/workspace identifier - **kwargs: Additional attributes to set on the mock - - Returns: - Mock App object with specified attributes - """ - app = create_autospec(App, instance=True) - app.id = app_id - app.tenant_id = tenant_id - app.name = kwargs.get("name", "Test App") - app.mode = kwargs.get("mode", "chat") - for key, value in kwargs.items(): - setattr(app, key, value) - return app - - @staticmethod - def create_message_mock( - message_id: str = "msg-123", - app_id: str = "app-123", - **kwargs, - ) -> Mock: - """ - Create a mock Message object. - - Args: - message_id: Unique identifier for the message - app_id: Associated app identifier - **kwargs: Additional attributes to set on the mock - - Returns: - Mock Message object with specified attributes - """ - message = create_autospec(Message, instance=True) - message.id = message_id - message.app_id = app_id - message.query = kwargs.get("query", "Test query") - message.answer = kwargs.get("answer", "Test answer") - message.created_at = kwargs.get("created_at", datetime.now(UTC)) - for key, value in kwargs.items(): - setattr(message, key, value) - return message - - @staticmethod - def create_saved_message_mock( - saved_message_id: str = "saved-123", - app_id: str = "app-123", - message_id: str = "msg-123", - created_by: str = "user-123", - created_by_role: str = "account", - **kwargs, - ) -> Mock: - """ - Create a mock SavedMessage object. 
- - Args: - saved_message_id: Unique identifier for the saved message - app_id: Associated app identifier - message_id: Associated message identifier - created_by: User who saved the message - created_by_role: Role of the user ('account' or 'end_user') - **kwargs: Additional attributes to set on the mock - - Returns: - Mock SavedMessage object with specified attributes - """ - saved_message = create_autospec(SavedMessage, instance=True) - saved_message.id = saved_message_id - saved_message.app_id = app_id - saved_message.message_id = message_id - saved_message.created_by = created_by - saved_message.created_by_role = created_by_role - saved_message.created_at = kwargs.get("created_at", datetime.now(UTC)) - for key, value in kwargs.items(): - setattr(saved_message, key, value) - return saved_message - - -@pytest.fixture -def factory(): - """Provide the test data factory to all tests.""" - return SavedMessageServiceTestDataFactory - - -class TestSavedMessageServicePagination: - """Test saved message pagination operations.""" - - @patch("services.saved_message_service.MessageService.pagination_by_last_id", autospec=True) - @patch("services.saved_message_service.db.session") - def test_pagination_with_account_user(self, mock_db_session, mock_message_pagination, factory): - """Test pagination with an Account user.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - - # Create saved messages for this user - saved_messages = [ - factory.create_saved_message_mock( - saved_message_id=f"saved-{i}", - app_id=app.id, - message_id=f"msg-{i}", - created_by=user.id, - created_by_role="account", - ) - for i in range(3) - ] - - # Mock database query - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.all.return_value = saved_messages - - # Mock MessageService pagination response - expected_pagination = InfiniteScrollPagination(data=[], limit=20, has_more=False) - mock_message_pagination.return_value = expected_pagination - - # Act - result = SavedMessageService.pagination_by_last_id(app_model=app, user=user, last_id=None, limit=20) - - # Assert - assert result == expected_pagination - mock_db_session.query.assert_called_once_with(SavedMessage) - # Verify MessageService was called with correct message IDs - mock_message_pagination.assert_called_once_with( - app_model=app, - user=user, - last_id=None, - limit=20, - include_ids=["msg-0", "msg-1", "msg-2"], - ) - - @patch("services.saved_message_service.MessageService.pagination_by_last_id", autospec=True) - @patch("services.saved_message_service.db.session") - def test_pagination_with_end_user(self, mock_db_session, mock_message_pagination, factory): - """Test pagination with an EndUser.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_end_user_mock() - - # Create saved messages for this end user - saved_messages = [ - factory.create_saved_message_mock( - saved_message_id=f"saved-{i}", - app_id=app.id, - message_id=f"msg-{i}", - created_by=user.id, - created_by_role="end_user", - ) - for i in range(2) - ] - - # Mock database query - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.all.return_value = saved_messages - - # Mock MessageService pagination response - expected_pagination = InfiniteScrollPagination(data=[], limit=10, 
has_more=False) - mock_message_pagination.return_value = expected_pagination - - # Act - result = SavedMessageService.pagination_by_last_id(app_model=app, user=user, last_id=None, limit=10) - - # Assert - assert result == expected_pagination - # Verify correct role was used in query - mock_message_pagination.assert_called_once_with( - app_model=app, - user=user, - last_id=None, - limit=10, - include_ids=["msg-0", "msg-1"], - ) - - def test_pagination_without_user_raises_error(self, factory): - """Test that pagination without user raises ValueError.""" - # Arrange - app = factory.create_app_mock() - - # Act & Assert - with pytest.raises(ValueError, match="User is required"): - SavedMessageService.pagination_by_last_id(app_model=app, user=None, last_id=None, limit=20) - - @patch("services.saved_message_service.MessageService.pagination_by_last_id", autospec=True) - @patch("services.saved_message_service.db.session") - def test_pagination_with_last_id(self, mock_db_session, mock_message_pagination, factory): - """Test pagination with last_id parameter.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - last_id = "msg-last" - - saved_messages = [ - factory.create_saved_message_mock( - message_id=f"msg-{i}", - app_id=app.id, - created_by=user.id, - ) - for i in range(5) - ] - - # Mock database query - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.all.return_value = saved_messages - - # Mock MessageService pagination response - expected_pagination = InfiniteScrollPagination(data=[], limit=10, has_more=True) - mock_message_pagination.return_value = expected_pagination - - # Act - result = SavedMessageService.pagination_by_last_id(app_model=app, user=user, last_id=last_id, limit=10) - - # Assert - assert result == expected_pagination - # Verify last_id was passed to MessageService - mock_message_pagination.assert_called_once() - call_args = mock_message_pagination.call_args - assert call_args.kwargs["last_id"] == last_id - - @patch("services.saved_message_service.MessageService.pagination_by_last_id", autospec=True) - @patch("services.saved_message_service.db.session") - def test_pagination_with_empty_saved_messages(self, mock_db_session, mock_message_pagination, factory): - """Test pagination when user has no saved messages.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - - # Mock database query returning empty list - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.order_by.return_value = mock_query - mock_query.all.return_value = [] - - # Mock MessageService pagination response - expected_pagination = InfiniteScrollPagination(data=[], limit=20, has_more=False) - mock_message_pagination.return_value = expected_pagination - - # Act - result = SavedMessageService.pagination_by_last_id(app_model=app, user=user, last_id=None, limit=20) - - # Assert - assert result == expected_pagination - # Verify MessageService was called with empty include_ids - mock_message_pagination.assert_called_once_with( - app_model=app, - user=user, - last_id=None, - limit=20, - include_ids=[], - ) - - -class TestSavedMessageServiceSave: - """Test save message operations.""" - - @patch("services.saved_message_service.MessageService.get_message", autospec=True) - @patch("services.saved_message_service.db.session") - def 
test_save_message_for_account(self, mock_db_session, mock_get_message, factory): - """Test saving a message for an Account user.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - message = factory.create_message_mock(message_id="msg-123", app_id=app.id) - - # Mock database query - no existing saved message - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Mock MessageService.get_message - mock_get_message.return_value = message - - # Act - SavedMessageService.save(app_model=app, user=user, message_id=message.id) - - # Assert - mock_db_session.add.assert_called_once() - saved_message = mock_db_session.add.call_args[0][0] - assert saved_message.app_id == app.id - assert saved_message.message_id == message.id - assert saved_message.created_by == user.id - assert saved_message.created_by_role == "account" - mock_db_session.commit.assert_called_once() - - @patch("services.saved_message_service.MessageService.get_message", autospec=True) - @patch("services.saved_message_service.db.session") - def test_save_message_for_end_user(self, mock_db_session, mock_get_message, factory): - """Test saving a message for an EndUser.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_end_user_mock() - message = factory.create_message_mock(message_id="msg-456", app_id=app.id) - - # Mock database query - no existing saved message - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Mock MessageService.get_message - mock_get_message.return_value = message - - # Act - SavedMessageService.save(app_model=app, user=user, message_id=message.id) - - # Assert - mock_db_session.add.assert_called_once() - saved_message = mock_db_session.add.call_args[0][0] - assert saved_message.app_id == app.id - assert saved_message.message_id == message.id - assert saved_message.created_by == user.id - assert saved_message.created_by_role == "end_user" - mock_db_session.commit.assert_called_once() - - @patch("services.saved_message_service.db.session") - def test_save_without_user_does_nothing(self, mock_db_session, factory): - """Test that saving without user is a no-op.""" - # Arrange - app = factory.create_app_mock() - - # Act - SavedMessageService.save(app_model=app, user=None, message_id="msg-123") - - # Assert - mock_db_session.query.assert_not_called() - mock_db_session.add.assert_not_called() - mock_db_session.commit.assert_not_called() - - @patch("services.saved_message_service.MessageService.get_message", autospec=True) - @patch("services.saved_message_service.db.session") - def test_save_duplicate_message_is_idempotent(self, mock_db_session, mock_get_message, factory): - """Test that saving an already saved message is idempotent.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - message_id = "msg-789" - - # Mock database query - existing saved message found - existing_saved = factory.create_saved_message_mock( - app_id=app.id, - message_id=message_id, - created_by=user.id, - created_by_role="account", - ) - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = existing_saved - - # Act - SavedMessageService.save(app_model=app, user=user, message_id=message_id) - - # Assert - no new saved message 
created - mock_db_session.add.assert_not_called() - mock_db_session.commit.assert_not_called() - mock_get_message.assert_not_called() - - @patch("services.saved_message_service.MessageService.get_message", autospec=True) - @patch("services.saved_message_service.db.session") - def test_save_validates_message_exists(self, mock_db_session, mock_get_message, factory): - """Test that save validates message exists through MessageService.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - message = factory.create_message_mock() - - # Mock database query - no existing saved message - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Mock MessageService.get_message - mock_get_message.return_value = message - - # Act - SavedMessageService.save(app_model=app, user=user, message_id=message.id) - - # Assert - MessageService.get_message was called for validation - mock_get_message.assert_called_once_with(app_model=app, user=user, message_id=message.id) - - -class TestSavedMessageServiceDelete: - """Test delete saved message operations.""" - - @patch("services.saved_message_service.db.session") - def test_delete_saved_message_for_account(self, mock_db_session, factory): - """Test deleting a saved message for an Account user.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - message_id = "msg-123" - - # Mock database query - existing saved message found - saved_message = factory.create_saved_message_mock( - app_id=app.id, - message_id=message_id, - created_by=user.id, - created_by_role="account", - ) - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = saved_message - - # Act - SavedMessageService.delete(app_model=app, user=user, message_id=message_id) - - # Assert - mock_db_session.delete.assert_called_once_with(saved_message) - mock_db_session.commit.assert_called_once() - - @patch("services.saved_message_service.db.session") - def test_delete_saved_message_for_end_user(self, mock_db_session, factory): - """Test deleting a saved message for an EndUser.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_end_user_mock() - message_id = "msg-456" - - # Mock database query - existing saved message found - saved_message = factory.create_saved_message_mock( - app_id=app.id, - message_id=message_id, - created_by=user.id, - created_by_role="end_user", - ) - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = saved_message - - # Act - SavedMessageService.delete(app_model=app, user=user, message_id=message_id) - - # Assert - mock_db_session.delete.assert_called_once_with(saved_message) - mock_db_session.commit.assert_called_once() - - @patch("services.saved_message_service.db.session") - def test_delete_without_user_does_nothing(self, mock_db_session, factory): - """Test that deleting without user is a no-op.""" - # Arrange - app = factory.create_app_mock() - - # Act - SavedMessageService.delete(app_model=app, user=None, message_id="msg-123") - - # Assert - mock_db_session.query.assert_not_called() - mock_db_session.delete.assert_not_called() - mock_db_session.commit.assert_not_called() - - @patch("services.saved_message_service.db.session") - def 
test_delete_non_existent_saved_message_does_nothing(self, mock_db_session, factory): - """Test that deleting a non-existent saved message is a no-op.""" - # Arrange - app = factory.create_app_mock() - user = factory.create_account_mock() - message_id = "msg-nonexistent" - - # Mock database query - no saved message found - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = None - - # Act - SavedMessageService.delete(app_model=app, user=user, message_id=message_id) - - # Assert - no deletion occurred - mock_db_session.delete.assert_not_called() - mock_db_session.commit.assert_not_called() - - @patch("services.saved_message_service.db.session") - def test_delete_only_affects_user_own_saved_messages(self, mock_db_session, factory): - """Test that delete only removes the user's own saved message.""" - # Arrange - app = factory.create_app_mock() - user1 = factory.create_account_mock(account_id="user-1") - message_id = "msg-shared" - - # Mock database query - finds user1's saved message - saved_message = factory.create_saved_message_mock( - app_id=app.id, - message_id=message_id, - created_by=user1.id, - created_by_role="account", - ) - mock_query = MagicMock() - mock_db_session.query.return_value = mock_query - mock_query.where.return_value = mock_query - mock_query.first.return_value = saved_message - - # Act - SavedMessageService.delete(app_model=app, user=user1, message_id=message_id) - - # Assert - only user1's saved message is deleted - mock_db_session.delete.assert_called_once_with(saved_message) - # Verify the query filters by user - assert mock_query.where.called diff --git a/api/tests/unit_tests/services/test_summary_index_service.py b/api/tests/unit_tests/services/test_summary_index_service.py index be64e431ba..c4285c73a0 100644 --- a/api/tests/unit_tests/services/test_summary_index_service.py +++ b/api/tests/unit_tests/services/test_summary_index_service.py @@ -11,6 +11,7 @@ from unittest.mock import MagicMock import pytest import services.summary_index_service as summary_module +from core.rag.index_processor.constant.index_type import IndexStructureType from models.enums import SegmentStatus, SummaryStatus from services.summary_index_service import SummaryIndexService @@ -48,7 +49,7 @@ def _segment(*, has_document: bool = True) -> MagicMock: if has_document: doc = MagicMock(name="document") doc.doc_language = "en" - doc.doc_form = "text_model" + doc.doc_form = IndexStructureType.PARAGRAPH_INDEX segment.document = doc else: segment.document = None @@ -623,13 +624,13 @@ def test_generate_summaries_for_document_skip_conditions(monkeypatch: pytest.Mon dataset = _dataset(indexing_technique="economy") document = MagicMock(spec=summary_module.DatasetDocument) document.id = "doc-1" - document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX assert SummaryIndexService.generate_summaries_for_document(dataset, document, {"enable": True}) == [] dataset = _dataset() assert SummaryIndexService.generate_summaries_for_document(dataset, document, {"enable": False}) == [] - document.doc_form = "qa_model" + document.doc_form = IndexStructureType.QA_INDEX assert SummaryIndexService.generate_summaries_for_document(dataset, document, {"enable": True}) == [] @@ -637,7 +638,7 @@ def test_generate_summaries_for_document_runs_and_handles_errors(monkeypatch: py dataset = _dataset() document = MagicMock(spec=summary_module.DatasetDocument) document.id = "doc-1" - 
document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX seg1 = _segment() seg2 = _segment() @@ -673,7 +674,7 @@ def test_generate_summaries_for_document_no_segments_returns_empty(monkeypatch: dataset = _dataset() document = MagicMock(spec=summary_module.DatasetDocument) document.id = "doc-1" - document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX session = MagicMock() query = MagicMock() @@ -696,7 +697,7 @@ def test_generate_summaries_for_document_applies_segment_ids_and_only_parent_chu dataset = _dataset() document = MagicMock(spec=summary_module.DatasetDocument) document.id = "doc-1" - document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX seg = _segment() session = MagicMock() @@ -935,7 +936,7 @@ def test_update_summary_for_segment_skip_conditions() -> None: SummaryIndexService.update_summary_for_segment(_segment(), _dataset(indexing_technique="economy"), "x") is None ) seg = _segment(has_document=True) - seg.document.doc_form = "qa_model" + seg.document.doc_form = IndexStructureType.QA_INDEX assert SummaryIndexService.update_summary_for_segment(seg, _dataset(), "x") is None diff --git a/api/tests/unit_tests/services/test_tag_service.py b/api/tests/unit_tests/services/test_tag_service.py index a94842ddcf..b09463b1bc 100644 --- a/api/tests/unit_tests/services/test_tag_service.py +++ b/api/tests/unit_tests/services/test_tag_service.py @@ -75,6 +75,7 @@ import pytest from werkzeug.exceptions import NotFound from models.dataset import Dataset +from models.enums import TagType from models.model import App, Tag, TagBinding from services.tag_service import TagService @@ -102,7 +103,7 @@ class TagServiceTestDataFactory: def create_tag_mock( tag_id: str = "tag-123", name: str = "Test Tag", - tag_type: str = "app", + tag_type: TagType = TagType.APP, tenant_id: str = "tenant-123", **kwargs, ) -> Mock: @@ -705,7 +706,7 @@ class TestTagServiceCRUD: # Verify tag attributes added_tag = mock_db_session.add.call_args[0][0] assert added_tag.name == "New Tag", "Tag name should match" - assert added_tag.type == "app", "Tag type should match" + assert added_tag.type == TagType.APP, "Tag type should match" assert added_tag.created_by == "user-123", "Created by should match current user" assert added_tag.tenant_id == "tenant-123", "Tenant ID should match current tenant" diff --git a/api/tests/unit_tests/services/test_vector_service.py b/api/tests/unit_tests/services/test_vector_service.py index 7b0103a2a1..d3a98dd4bb 100644 --- a/api/tests/unit_tests/services/test_vector_service.py +++ b/api/tests/unit_tests/services/test_vector_service.py @@ -9,6 +9,7 @@ from unittest.mock import MagicMock import pytest import services.vector_service as vector_service_module +from core.rag.index_processor.constant.index_type import IndexStructureType from services.vector_service import VectorService @@ -32,7 +33,7 @@ class _ParentDocStub: def _make_dataset( *, indexing_technique: str = "high_quality", - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, tenant_id: str = "tenant-1", dataset_id: str = "dataset-1", is_multimodal: bool = False, @@ -106,7 +107,7 @@ def test_create_segments_vector_regular_indexing_loads_documents_and_keywords(mo factory_instance.init_index_processor.return_value = index_processor monkeypatch.setattr(vector_service_module, "IndexProcessorFactory", MagicMock(return_value=factory_instance)) - VectorService.create_segments_vector([["k1"]], [segment], dataset, 
"text_model") + VectorService.create_segments_vector([["k1"]], [segment], dataset, IndexStructureType.PARAGRAPH_INDEX) index_processor.load.assert_called_once() args, kwargs = index_processor.load.call_args @@ -131,7 +132,7 @@ def test_create_segments_vector_regular_indexing_loads_multimodal_documents(monk factory_instance.init_index_processor.return_value = index_processor monkeypatch.setattr(vector_service_module, "IndexProcessorFactory", MagicMock(return_value=factory_instance)) - VectorService.create_segments_vector([["k1"]], [segment], dataset, "text_model") + VectorService.create_segments_vector([["k1"]], [segment], dataset, IndexStructureType.PARAGRAPH_INDEX) assert index_processor.load.call_count == 2 first_args, first_kwargs = index_processor.load.call_args_list[0] @@ -153,7 +154,7 @@ def test_create_segments_vector_with_no_segments_does_not_load(monkeypatch: pyte factory_instance.init_index_processor.return_value = index_processor monkeypatch.setattr(vector_service_module, "IndexProcessorFactory", MagicMock(return_value=factory_instance)) - VectorService.create_segments_vector(None, [], dataset, "text_model") + VectorService.create_segments_vector(None, [], dataset, IndexStructureType.PARAGRAPH_INDEX) index_processor.load.assert_not_called() @@ -392,7 +393,7 @@ def test_update_segment_vector_economy_uses_keyword_without_keywords_list(monkey def test_generate_child_chunks_regenerate_cleans_then_saves_children(monkeypatch: pytest.MonkeyPatch) -> None: - dataset = _make_dataset(doc_form="text_model", tenant_id="tenant-1", dataset_id="dataset-1") + dataset = _make_dataset(doc_form=IndexStructureType.PARAGRAPH_INDEX, tenant_id="tenant-1", dataset_id="dataset-1") segment = _make_segment(segment_id="seg-1") dataset_document = MagicMock() @@ -439,7 +440,7 @@ def test_generate_child_chunks_regenerate_cleans_then_saves_children(monkeypatch def test_generate_child_chunks_commits_even_when_no_children(monkeypatch: pytest.MonkeyPatch) -> None: - dataset = _make_dataset(doc_form="text_model") + dataset = _make_dataset(doc_form=IndexStructureType.PARAGRAPH_INDEX) segment = _make_segment() dataset_document = MagicMock() dataset_document.doc_language = "en" diff --git a/api/tests/unit_tests/services/test_web_conversation_service.py b/api/tests/unit_tests/services/test_web_conversation_service.py deleted file mode 100644 index 7687d355e9..0000000000 --- a/api/tests/unit_tests/services/test_web_conversation_service.py +++ /dev/null @@ -1,259 +0,0 @@ -from __future__ import annotations - -from types import SimpleNamespace -from typing import Any, cast -from unittest.mock import MagicMock - -import pytest -from pytest_mock import MockerFixture - -from core.app.entities.app_invoke_entities import InvokeFrom -from models import Account -from models.model import App, EndUser -from services.web_conversation_service import WebConversationService - - -@pytest.fixture -def app_model() -> App: - return cast(App, SimpleNamespace(id="app-1")) - - -def _account(**kwargs: Any) -> Account: - return cast(Account, SimpleNamespace(**kwargs)) - - -def _end_user(**kwargs: Any) -> EndUser: - return cast(EndUser, SimpleNamespace(**kwargs)) - - -def test_pagination_by_last_id_should_raise_error_when_user_is_none( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - session = MagicMock() - mocker.patch("services.web_conversation_service.ConversationService.pagination_by_last_id") - - # Act + Assert - with pytest.raises(ValueError, match="User is required"): - WebConversationService.pagination_by_last_id( - 
session=session, - app_model=app_model, - user=None, - last_id=None, - limit=20, - invoke_from=InvokeFrom.WEB_APP, - ) - - -def test_pagination_by_last_id_should_forward_without_pin_filter_when_pinned_is_none( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - session = MagicMock() - fake_user = _account(id="user-1") - mock_pagination = mocker.patch("services.web_conversation_service.ConversationService.pagination_by_last_id") - mock_pagination.return_value = MagicMock() - - # Act - WebConversationService.pagination_by_last_id( - session=session, - app_model=app_model, - user=fake_user, - last_id="conv-9", - limit=10, - invoke_from=InvokeFrom.WEB_APP, - pinned=None, - ) - - # Assert - call_kwargs = mock_pagination.call_args.kwargs - assert call_kwargs["include_ids"] is None - assert call_kwargs["exclude_ids"] is None - assert call_kwargs["last_id"] == "conv-9" - assert call_kwargs["sort_by"] == "-updated_at" - - -def test_pagination_by_last_id_should_include_only_pinned_ids_when_pinned_true( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - session = MagicMock() - fake_account_cls = type("FakeAccount", (), {}) - fake_user = cast(Account, fake_account_cls()) - fake_user.id = "account-1" - mocker.patch("services.web_conversation_service.Account", fake_account_cls) - mocker.patch("services.web_conversation_service.EndUser", type("FakeEndUser", (), {})) - session.scalars.return_value.all.return_value = ["conv-1", "conv-2"] - mock_pagination = mocker.patch("services.web_conversation_service.ConversationService.pagination_by_last_id") - mock_pagination.return_value = MagicMock() - - # Act - WebConversationService.pagination_by_last_id( - session=session, - app_model=app_model, - user=fake_user, - last_id=None, - limit=20, - invoke_from=InvokeFrom.WEB_APP, - pinned=True, - ) - - # Assert - call_kwargs = mock_pagination.call_args.kwargs - assert call_kwargs["include_ids"] == ["conv-1", "conv-2"] - assert call_kwargs["exclude_ids"] is None - - -def test_pagination_by_last_id_should_exclude_pinned_ids_when_pinned_false( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - session = MagicMock() - fake_end_user_cls = type("FakeEndUser", (), {}) - fake_user = cast(EndUser, fake_end_user_cls()) - fake_user.id = "end-user-1" - mocker.patch("services.web_conversation_service.Account", type("FakeAccount", (), {})) - mocker.patch("services.web_conversation_service.EndUser", fake_end_user_cls) - session.scalars.return_value.all.return_value = ["conv-3"] - mock_pagination = mocker.patch("services.web_conversation_service.ConversationService.pagination_by_last_id") - mock_pagination.return_value = MagicMock() - - # Act - WebConversationService.pagination_by_last_id( - session=session, - app_model=app_model, - user=fake_user, - last_id=None, - limit=20, - invoke_from=InvokeFrom.WEB_APP, - pinned=False, - ) - - # Assert - call_kwargs = mock_pagination.call_args.kwargs - assert call_kwargs["include_ids"] is None - assert call_kwargs["exclude_ids"] == ["conv-3"] - - -def test_pin_should_return_early_when_user_is_none(app_model: App, mocker: MockerFixture) -> None: - # Arrange - mock_db = mocker.patch("services.web_conversation_service.db") - mocker.patch("services.web_conversation_service.ConversationService.get_conversation") - - # Act - WebConversationService.pin(app_model, "conv-1", None) - - # Assert - mock_db.session.add.assert_not_called() - mock_db.session.commit.assert_not_called() - - -def 
test_pin_should_return_early_when_conversation_is_already_pinned( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - fake_account_cls = type("FakeAccount", (), {}) - fake_user = cast(Account, fake_account_cls()) - fake_user.id = "account-1" - mocker.patch("services.web_conversation_service.Account", fake_account_cls) - mock_db = mocker.patch("services.web_conversation_service.db") - mock_db.session.query.return_value.where.return_value.first.return_value = object() - mock_get_conversation = mocker.patch("services.web_conversation_service.ConversationService.get_conversation") - - # Act - WebConversationService.pin(app_model, "conv-1", fake_user) - - # Assert - mock_get_conversation.assert_not_called() - mock_db.session.add.assert_not_called() - mock_db.session.commit.assert_not_called() - - -def test_pin_should_create_pinned_conversation_when_not_already_pinned( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - fake_account_cls = type("FakeAccount", (), {}) - fake_user = cast(Account, fake_account_cls()) - fake_user.id = "account-2" - mocker.patch("services.web_conversation_service.Account", fake_account_cls) - mock_db = mocker.patch("services.web_conversation_service.db") - mock_db.session.query.return_value.where.return_value.first.return_value = None - mock_conversation = SimpleNamespace(id="conv-2") - mock_get_conversation = mocker.patch( - "services.web_conversation_service.ConversationService.get_conversation", - return_value=mock_conversation, - ) - - # Act - WebConversationService.pin(app_model, "conv-2", fake_user) - - # Assert - mock_get_conversation.assert_called_once_with(app_model=app_model, conversation_id="conv-2", user=fake_user) - added_obj = mock_db.session.add.call_args.args[0] - assert added_obj.app_id == "app-1" - assert added_obj.conversation_id == "conv-2" - assert added_obj.created_by_role == "account" - assert added_obj.created_by == "account-2" - mock_db.session.commit.assert_called_once() - - -def test_unpin_should_return_early_when_user_is_none(app_model: App, mocker: MockerFixture) -> None: - # Arrange - mock_db = mocker.patch("services.web_conversation_service.db") - - # Act - WebConversationService.unpin(app_model, "conv-1", None) - - # Assert - mock_db.session.delete.assert_not_called() - mock_db.session.commit.assert_not_called() - - -def test_unpin_should_return_early_when_conversation_is_not_pinned( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - fake_end_user_cls = type("FakeEndUser", (), {}) - fake_user = cast(EndUser, fake_end_user_cls()) - fake_user.id = "end-user-3" - mocker.patch("services.web_conversation_service.Account", type("FakeAccount", (), {})) - mocker.patch("services.web_conversation_service.EndUser", fake_end_user_cls) - mock_db = mocker.patch("services.web_conversation_service.db") - mock_db.session.query.return_value.where.return_value.first.return_value = None - - # Act - WebConversationService.unpin(app_model, "conv-7", fake_user) - - # Assert - mock_db.session.delete.assert_not_called() - mock_db.session.commit.assert_not_called() - - -def test_unpin_should_delete_pinned_conversation_when_exists( - app_model: App, - mocker: MockerFixture, -) -> None: - # Arrange - fake_end_user_cls = type("FakeEndUser", (), {}) - fake_user = cast(EndUser, fake_end_user_cls()) - fake_user.id = "end-user-4" - mocker.patch("services.web_conversation_service.Account", type("FakeAccount", (), {})) - mocker.patch("services.web_conversation_service.EndUser", fake_end_user_cls) - mock_db = 
mocker.patch("services.web_conversation_service.db") - pinned_obj = SimpleNamespace(id="pin-1") - mock_db.session.query.return_value.where.return_value.first.return_value = pinned_obj - - # Act - WebConversationService.unpin(app_model, "conv-8", fake_user) - - # Assert - mock_db.session.delete.assert_called_once_with(pinned_obj) - mock_db.session.commit.assert_called_once() diff --git a/api/tests/unit_tests/services/tools/test_api_tools_manage_service.py b/api/tests/unit_tests/services/tools/test_api_tools_manage_service.py deleted file mode 100644 index ce44818886..0000000000 --- a/api/tests/unit_tests/services/tools/test_api_tools_manage_service.py +++ /dev/null @@ -1,643 +0,0 @@ -from __future__ import annotations - -from types import SimpleNamespace -from unittest.mock import MagicMock - -import pytest -from pytest_mock import MockerFixture - -from core.tools.entities.tool_entities import ApiProviderSchemaType -from services.tools.api_tools_manage_service import ApiToolManageService - - -@pytest.fixture -def mock_db(mocker: MockerFixture) -> MagicMock: - # Arrange - mocked_db = mocker.patch("services.tools.api_tools_manage_service.db") - mocked_db.session = MagicMock() - return mocked_db - - -def _tool_bundle(operation_id: str = "tool-1") -> SimpleNamespace: - return SimpleNamespace(operation_id=operation_id) - - -def test_parser_api_schema_should_return_schema_payload_when_schema_is_valid(mocker: MockerFixture) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=([_tool_bundle()], ApiProviderSchemaType.OPENAPI.value), - ) - - # Act - result = ApiToolManageService.parser_api_schema("valid-schema") - - # Assert - assert result["schema_type"] == ApiProviderSchemaType.OPENAPI.value - assert len(result["credentials_schema"]) == 3 - assert "warning" in result - - -def test_parser_api_schema_should_raise_value_error_when_parser_raises(mocker: MockerFixture) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - side_effect=RuntimeError("bad schema"), - ) - - # Act + Assert - with pytest.raises(ValueError, match="invalid schema: invalid schema: bad schema"): - ApiToolManageService.parser_api_schema("invalid") - - -def test_convert_schema_to_tool_bundles_should_return_tool_bundles_when_valid(mocker: MockerFixture) -> None: - # Arrange - expected = ([_tool_bundle("a"), _tool_bundle("b")], ApiProviderSchemaType.SWAGGER) - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=expected, - ) - extra_info: dict[str, str] = {} - - # Act - result = ApiToolManageService.convert_schema_to_tool_bundles("schema", extra_info=extra_info) - - # Assert - assert result == expected - - -def test_convert_schema_to_tool_bundles_should_raise_value_error_when_parser_fails(mocker: MockerFixture) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - side_effect=ValueError("parse failed"), - ) - - # Act + Assert - with pytest.raises(ValueError, match="invalid schema: parse failed"): - ApiToolManageService.convert_schema_to_tool_bundles("schema") - - -def test_create_api_tool_provider_should_raise_error_when_provider_already_exists( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = object() - - # Act 
+ Assert - with pytest.raises(ValueError, match="provider provider-a already exists"): - ApiToolManageService.create_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name=" provider-a ", - icon={"emoji": "X"}, - credentials={"auth_type": "none"}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy="privacy", - custom_disclaimer="custom", - labels=[], - ) - - -def test_create_api_tool_provider_should_raise_error_when_tool_count_exceeds_limit( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - many_tools = [_tool_bundle(str(i)) for i in range(101)] - mocker.patch.object( - ApiToolManageService, - "convert_schema_to_tool_bundles", - return_value=(many_tools, ApiProviderSchemaType.OPENAPI), - ) - - # Act + Assert - with pytest.raises(ValueError, match="the number of apis should be less than 100"): - ApiToolManageService.create_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-a", - icon={"emoji": "X"}, - credentials={"auth_type": "none"}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy="privacy", - custom_disclaimer="custom", - labels=[], - ) - - -def test_create_api_tool_provider_should_raise_error_when_auth_type_is_missing( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - mocker.patch.object( - ApiToolManageService, - "convert_schema_to_tool_bundles", - return_value=([_tool_bundle()], ApiProviderSchemaType.OPENAPI), - ) - - # Act + Assert - with pytest.raises(ValueError, match="auth_type is required"): - ApiToolManageService.create_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-a", - icon={"emoji": "X"}, - credentials={}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy="privacy", - custom_disclaimer="custom", - labels=[], - ) - - -def test_create_api_tool_provider_should_create_provider_when_input_is_valid( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - mocker.patch.object( - ApiToolManageService, - "convert_schema_to_tool_bundles", - return_value=([_tool_bundle()], ApiProviderSchemaType.OPENAPI), - ) - mock_controller = MagicMock() - mocker.patch( - "services.tools.api_tools_manage_service.ApiToolProviderController.from_db", - return_value=mock_controller, - ) - mock_encrypter = MagicMock() - mock_encrypter.encrypt.return_value = {"auth_type": "none"} - mocker.patch( - "services.tools.api_tools_manage_service.create_tool_provider_encrypter", - return_value=(mock_encrypter, MagicMock()), - ) - mocker.patch("services.tools.api_tools_manage_service.ToolLabelManager.update_tool_labels") - - # Act - result = ApiToolManageService.create_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-a", - icon={"emoji": "X"}, - credentials={"auth_type": "none"}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy="privacy", - custom_disclaimer="custom", - labels=["news"], - ) - - # Assert - assert result == {"result": "success"} - mock_controller.load_bundled_tools.assert_called_once() - mock_db.session.add.assert_called_once() - mock_db.session.commit.assert_called_once() - - -def 
test_get_api_tool_provider_remote_schema_should_return_schema_when_response_is_valid( - mocker: MockerFixture, -) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.get", - return_value=SimpleNamespace(status_code=200, text="schema-content"), - ) - mocker.patch.object(ApiToolManageService, "parser_api_schema", return_value={"ok": True}) - - # Act - result = ApiToolManageService.get_api_tool_provider_remote_schema("user-1", "tenant-1", "https://schema") - - # Assert - assert result == {"schema": "schema-content"} - - -@pytest.mark.parametrize("status_code", [400, 404, 500]) -def test_get_api_tool_provider_remote_schema_should_raise_error_when_remote_fetch_is_invalid( - status_code: int, - mocker: MockerFixture, -) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.get", - return_value=SimpleNamespace(status_code=status_code, text="schema-content"), - ) - mock_logger = mocker.patch("services.tools.api_tools_manage_service.logger") - - # Act + Assert - with pytest.raises(ValueError, match="invalid schema, please check the url you provided"): - ApiToolManageService.get_api_tool_provider_remote_schema("user-1", "tenant-1", "https://schema") - mock_logger.exception.assert_called_once() - - -def test_list_api_tool_provider_tools_should_raise_error_when_provider_not_found( - mock_db: MagicMock, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - - # Act + Assert - with pytest.raises(ValueError, match="you have not added provider provider-a"): - ApiToolManageService.list_api_tool_provider_tools("user-1", "tenant-1", "provider-a") - - -def test_list_api_tool_provider_tools_should_return_converted_tools_when_provider_exists( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - provider = SimpleNamespace(tools=[_tool_bundle("tool-a"), _tool_bundle("tool-b")]) - mock_db.session.query.return_value.where.return_value.first.return_value = provider - controller = MagicMock() - mocker.patch( - "services.tools.api_tools_manage_service.ToolTransformService.api_provider_to_controller", - return_value=controller, - ) - mocker.patch("services.tools.api_tools_manage_service.ToolLabelManager.get_tool_labels", return_value=["search"]) - mock_convert = mocker.patch( - "services.tools.api_tools_manage_service.ToolTransformService.convert_tool_entity_to_api_entity", - side_effect=[{"name": "tool-a"}, {"name": "tool-b"}], - ) - - # Act - result = ApiToolManageService.list_api_tool_provider_tools("user-1", "tenant-1", "provider-a") - - # Assert - assert result == [{"name": "tool-a"}, {"name": "tool-b"}] - assert mock_convert.call_count == 2 - - -def test_update_api_tool_provider_should_raise_error_when_original_provider_not_found( - mock_db: MagicMock, -) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - - # Act + Assert - with pytest.raises(ValueError, match="api provider provider-a does not exists"): - ApiToolManageService.update_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-a", - original_provider="provider-a", - icon={}, - credentials={"auth_type": "none"}, - _schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy=None, - custom_disclaimer="custom", - labels=[], - ) - - -def test_update_api_tool_provider_should_raise_error_when_auth_type_missing( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - provider = SimpleNamespace(credentials={}, 
name="old") - mock_db.session.query.return_value.where.return_value.first.return_value = provider - mocker.patch.object( - ApiToolManageService, - "convert_schema_to_tool_bundles", - return_value=([_tool_bundle()], ApiProviderSchemaType.OPENAPI), - ) - - # Act + Assert - with pytest.raises(ValueError, match="auth_type is required"): - ApiToolManageService.update_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-a", - original_provider="provider-a", - icon={}, - credentials={}, - _schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy=None, - custom_disclaimer="custom", - labels=[], - ) - - -def test_update_api_tool_provider_should_update_provider_and_preserve_masked_credentials( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - provider = SimpleNamespace( - credentials={"auth_type": "none", "api_key_value": "encrypted-old"}, - name="old", - icon="", - schema="", - description="", - schema_type_str="", - tools_str="", - privacy_policy="", - custom_disclaimer="", - credentials_str="", - ) - mock_db.session.query.return_value.where.return_value.first.return_value = provider - mocker.patch.object( - ApiToolManageService, - "convert_schema_to_tool_bundles", - return_value=([_tool_bundle()], ApiProviderSchemaType.OPENAPI), - ) - controller = MagicMock() - mocker.patch( - "services.tools.api_tools_manage_service.ApiToolProviderController.from_db", - return_value=controller, - ) - cache = MagicMock() - encrypter = MagicMock() - encrypter.decrypt.return_value = {"auth_type": "none", "api_key_value": "plain-old"} - encrypter.mask_plugin_credentials.return_value = {"api_key_value": "***"} - encrypter.encrypt.return_value = {"auth_type": "none", "api_key_value": "encrypted-new"} - mocker.patch( - "services.tools.api_tools_manage_service.create_tool_provider_encrypter", - return_value=(encrypter, cache), - ) - mocker.patch("services.tools.api_tools_manage_service.ToolLabelManager.update_tool_labels") - - # Act - result = ApiToolManageService.update_api_tool_provider( - user_id="user-1", - tenant_id="tenant-1", - provider_name="provider-new", - original_provider="provider-old", - icon={"emoji": "E"}, - credentials={"auth_type": "none", "api_key_value": "***"}, - _schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - privacy_policy="privacy", - custom_disclaimer="custom", - labels=["news"], - ) - - # Assert - assert result == {"result": "success"} - assert provider.name == "provider-new" - assert provider.privacy_policy == "privacy" - assert provider.credentials_str != "" - cache.delete.assert_called_once() - mock_db.session.commit.assert_called_once() - - -def test_delete_api_tool_provider_should_raise_error_when_provider_missing(mock_db: MagicMock) -> None: - # Arrange - mock_db.session.query.return_value.where.return_value.first.return_value = None - - # Act + Assert - with pytest.raises(ValueError, match="you have not added provider provider-a"): - ApiToolManageService.delete_api_tool_provider("user-1", "tenant-1", "provider-a") - - -def test_delete_api_tool_provider_should_delete_provider_when_exists(mock_db: MagicMock) -> None: - # Arrange - provider = object() - mock_db.session.query.return_value.where.return_value.first.return_value = provider - - # Act - result = ApiToolManageService.delete_api_tool_provider("user-1", "tenant-1", "provider-a") - - # Assert - assert result == {"result": "success"} - mock_db.session.delete.assert_called_once_with(provider) - mock_db.session.commit.assert_called_once() - 
- -def test_get_api_tool_provider_should_delegate_to_tool_manager(mocker: MockerFixture) -> None: - # Arrange - expected = {"provider": "value"} - mock_get = mocker.patch( - "services.tools.api_tools_manage_service.ToolManager.user_get_api_provider", - return_value=expected, - ) - - # Act - result = ApiToolManageService.get_api_tool_provider("user-1", "tenant-1", "provider-a") - - # Assert - assert result == expected - mock_get.assert_called_once_with(provider="provider-a", tenant_id="tenant-1") - - -def test_test_api_tool_preview_should_raise_error_for_invalid_schema_type() -> None: - # Arrange - schema_type = "bad-schema-type" - - # Act + Assert - with pytest.raises(ValueError, match="invalid schema type"): - ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-a", - credentials={"auth_type": "none"}, - parameters={}, - schema_type=schema_type, # type: ignore[arg-type] - schema="schema", - ) - - -def test_test_api_tool_preview_should_raise_error_when_schema_parser_fails(mocker: MockerFixture) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - side_effect=RuntimeError("invalid"), - ) - - # Act + Assert - with pytest.raises(ValueError, match="invalid schema"): - ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-a", - credentials={"auth_type": "none"}, - parameters={}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - ) - - -def test_test_api_tool_preview_should_raise_error_when_tool_name_is_invalid( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=([_tool_bundle("tool-a")], ApiProviderSchemaType.OPENAPI), - ) - mock_db.session.query.return_value.where.return_value.first.return_value = SimpleNamespace(id="provider-id") - - # Act + Assert - with pytest.raises(ValueError, match="invalid tool name tool-b"): - ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-b", - credentials={"auth_type": "none"}, - parameters={}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - ) - - -def test_test_api_tool_preview_should_raise_error_when_auth_type_missing( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=([_tool_bundle("tool-a")], ApiProviderSchemaType.OPENAPI), - ) - mock_db.session.query.return_value.where.return_value.first.return_value = SimpleNamespace(id="provider-id") - - # Act + Assert - with pytest.raises(ValueError, match="auth_type is required"): - ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-a", - credentials={}, - parameters={}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - ) - - -def test_test_api_tool_preview_should_return_error_payload_when_tool_validation_raises( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - db_provider = SimpleNamespace(id="provider-id", credentials={"auth_type": "none"}) - mock_db.session.query.return_value.where.return_value.first.return_value = db_provider - mocker.patch( - 
"services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=([_tool_bundle("tool-a")], ApiProviderSchemaType.OPENAPI), - ) - provider_controller = MagicMock() - tool_obj = MagicMock() - tool_obj.fork_tool_runtime.return_value = tool_obj - tool_obj.validate_credentials.side_effect = ValueError("validation failed") - provider_controller.get_tool.return_value = tool_obj - mocker.patch( - "services.tools.api_tools_manage_service.ApiToolProviderController.from_db", - return_value=provider_controller, - ) - mock_encrypter = MagicMock() - mock_encrypter.decrypt.return_value = {"auth_type": "none"} - mock_encrypter.mask_plugin_credentials.return_value = {} - mocker.patch( - "services.tools.api_tools_manage_service.create_tool_provider_encrypter", - return_value=(mock_encrypter, MagicMock()), - ) - - # Act - result = ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-a", - credentials={"auth_type": "none"}, - parameters={}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - ) - - # Assert - assert result == {"error": "validation failed"} - - -def test_test_api_tool_preview_should_return_result_payload_when_validation_succeeds( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - db_provider = SimpleNamespace(id="provider-id", credentials={"auth_type": "none"}) - mock_db.session.query.return_value.where.return_value.first.return_value = db_provider - mocker.patch( - "services.tools.api_tools_manage_service.ApiBasedToolSchemaParser.auto_parse_to_tool_bundle", - return_value=([_tool_bundle("tool-a")], ApiProviderSchemaType.OPENAPI), - ) - provider_controller = MagicMock() - tool_obj = MagicMock() - tool_obj.fork_tool_runtime.return_value = tool_obj - tool_obj.validate_credentials.return_value = {"ok": True} - provider_controller.get_tool.return_value = tool_obj - mocker.patch( - "services.tools.api_tools_manage_service.ApiToolProviderController.from_db", - return_value=provider_controller, - ) - mock_encrypter = MagicMock() - mock_encrypter.decrypt.return_value = {"auth_type": "none"} - mock_encrypter.mask_plugin_credentials.return_value = {} - mocker.patch( - "services.tools.api_tools_manage_service.create_tool_provider_encrypter", - return_value=(mock_encrypter, MagicMock()), - ) - - # Act - result = ApiToolManageService.test_api_tool_preview( - tenant_id="tenant-1", - provider_name="provider-a", - tool_name="tool-a", - credentials={"auth_type": "none"}, - parameters={"x": "1"}, - schema_type=ApiProviderSchemaType.OPENAPI, - schema="schema", - ) - - # Assert - assert result == {"result": {"ok": True}} - - -def test_list_api_tools_should_return_all_user_providers_with_converted_tools( - mock_db: MagicMock, - mocker: MockerFixture, -) -> None: - # Arrange - provider_one = SimpleNamespace(name="p1") - provider_two = SimpleNamespace(name="p2") - mock_db.session.scalars.return_value.all.return_value = [provider_one, provider_two] - - controller_one = MagicMock() - controller_one.get_tools.return_value = ["tool-a"] - controller_two = MagicMock() - controller_two.get_tools.return_value = ["tool-b", "tool-c"] - - user_provider_one = SimpleNamespace(labels=[], tools=[]) - user_provider_two = SimpleNamespace(labels=[], tools=[]) - - mocker.patch( - "services.tools.api_tools_manage_service.ToolTransformService.api_provider_to_controller", - side_effect=[controller_one, controller_two], - ) - 
mocker.patch("services.tools.api_tools_manage_service.ToolLabelManager.get_tool_labels", return_value=["news"]) - mocker.patch( - "services.tools.api_tools_manage_service.ToolTransformService.api_provider_to_user_provider", - side_effect=[user_provider_one, user_provider_two], - ) - mocker.patch("services.tools.api_tools_manage_service.ToolTransformService.repack_provider") - mock_convert = mocker.patch( - "services.tools.api_tools_manage_service.ToolTransformService.convert_tool_entity_to_api_entity", - side_effect=[{"name": "tool-a"}, {"name": "tool-b"}, {"name": "tool-c"}], - ) - - # Act - result = ApiToolManageService.list_api_tools("tenant-1") - - # Assert - assert len(result) == 2 - assert user_provider_one.tools == [{"name": "tool-a"}] - assert user_provider_two.tools == [{"name": "tool-b"}, {"name": "tool-c"}] - assert mock_convert.call_count == 3 diff --git a/api/tests/unit_tests/services/tools/test_workflow_tools_manage_service.py b/api/tests/unit_tests/services/tools/test_workflow_tools_manage_service.py deleted file mode 100644 index e9bcc89445..0000000000 --- a/api/tests/unit_tests/services/tools/test_workflow_tools_manage_service.py +++ /dev/null @@ -1,955 +0,0 @@ -""" -Unit tests for services.tools.workflow_tools_manage_service - -Covers WorkflowToolManageService: create, update, list, delete, get, list_single. -""" - -import json -from types import SimpleNamespace -from unittest.mock import MagicMock - -import pytest - -from core.tools.entities.tool_entities import ToolParameter, WorkflowToolParameterConfiguration -from core.tools.errors import WorkflowToolHumanInputNotSupportedError -from models.model import App -from models.tools import WorkflowToolProvider -from services.tools import workflow_tools_manage_service -from services.tools.workflow_tools_manage_service import WorkflowToolManageService - -# --------------------------------------------------------------------------- -# Shared helpers / fake infrastructure -# --------------------------------------------------------------------------- - - -class DummyWorkflow: - """Minimal in-memory Workflow substitute.""" - - def __init__(self, graph_dict: dict, version: str = "1.0.0") -> None: - self._graph_dict = graph_dict - self.version = version - - @property - def graph_dict(self) -> dict: - return self._graph_dict - - -class FakeQuery: - """Chainable query object that always returns a fixed result.""" - - def __init__(self, result: object) -> None: - self._result = result - - def where(self, *args: object, **kwargs: object) -> "FakeQuery": - return self - - def first(self) -> object: - return self._result - - def delete(self) -> int: - return 1 - - -class DummySession: - """Minimal SQLAlchemy session substitute.""" - - def __init__(self) -> None: - self.added: list[WorkflowToolProvider] = [] - self.committed: bool = False - - def __enter__(self) -> "DummySession": - return self - - def __exit__(self, exc_type: object, exc: object, tb: object) -> bool: - return False - - def add(self, obj: WorkflowToolProvider) -> None: - self.added.append(obj) - - def begin(self) -> "DummySession": - return self - - def commit(self) -> None: - self.committed = True - - -def _build_parameters() -> list[WorkflowToolParameterConfiguration]: - return [ - WorkflowToolParameterConfiguration(name="input", description="input", form=ToolParameter.ToolParameterForm.LLM), - ] - - -def _build_fake_db( - *, - existing_tool: WorkflowToolProvider | None = None, - app: object | None = None, - tool_by_id: WorkflowToolProvider | None = None, -) -> 
tuple[MagicMock, DummySession]: - """ - Build a fake db object plus a DummySession for Session context-manager. - - query(WorkflowToolProvider) returns existing_tool on first call, - then tool_by_id on subsequent calls (or None if not provided). - query(App) returns app. - """ - call_counts: dict[str, int] = {"wftp": 0} - - def query(model: type) -> FakeQuery: - if model is WorkflowToolProvider: - call_counts["wftp"] += 1 - if call_counts["wftp"] == 1: - return FakeQuery(existing_tool) - return FakeQuery(tool_by_id) - if model is App: - return FakeQuery(app) - return FakeQuery(None) - - fake_db = MagicMock() - fake_db.session = SimpleNamespace(query=query, commit=MagicMock()) - dummy_session = DummySession() - return fake_db, dummy_session - - -# --------------------------------------------------------------------------- -# TestCreateWorkflowTool -# --------------------------------------------------------------------------- - - -class TestCreateWorkflowTool: - """Tests for WorkflowToolManageService.create_workflow_tool.""" - - def test_should_raise_when_human_input_nodes_present(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Human-input nodes must be rejected before any provider is created.""" - # Arrange - workflow = DummyWorkflow(graph_dict={"nodes": [{"id": "n1", "data": {"type": "human-input"}}]}) - app = SimpleNamespace(workflow=workflow) - fake_session = SimpleNamespace(query=lambda m: FakeQuery(None) if m is WorkflowToolProvider else FakeQuery(app)) - monkeypatch.setattr(workflow_tools_manage_service.db, "session", fake_session) - mock_from_db = MagicMock() - monkeypatch.setattr(workflow_tools_manage_service.WorkflowToolProviderController, "from_db", mock_from_db) - - # Act + Assert - with pytest.raises(WorkflowToolHumanInputNotSupportedError) as exc_info: - WorkflowToolManageService.create_workflow_tool( - user_id="user-id", - tenant_id="tenant-id", - workflow_app_id="app-id", - name="tool_name", - label="Tool", - icon={"type": "emoji", "emoji": "🔧"}, - description="desc", - parameters=_build_parameters(), - ) - - assert exc_info.value.error_code == "workflow_tool_human_input_not_supported" - mock_from_db.assert_not_called() - - def test_should_raise_when_duplicate_name_or_app_id(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Existing provider with same name or app_id raises ValueError.""" - # Arrange - existing = MagicMock(spec=WorkflowToolProvider) - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(existing)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="already exists"): - WorkflowToolManageService.create_workflow_tool( - user_id="u", - tenant_id="t", - workflow_app_id="app-1", - name="dup", - label="Dup", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_app_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the referenced App does not exist.""" - # Arrange - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - return FakeQuery(None) - return FakeQuery(None) # App returns None - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService.create_workflow_tool( - user_id="u", - tenant_id="t", - workflow_app_id="missing-app", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def 
test_should_raise_when_workflow_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the App has no attached Workflow.""" - # Arrange - app_no_workflow = SimpleNamespace(workflow=None) - - def query(m: type) -> FakeQuery: - if m is WorkflowToolProvider: - return FakeQuery(None) - return FakeQuery(app_no_workflow) - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="Workflow not found"): - WorkflowToolManageService.create_workflow_tool( - user_id="u", - tenant_id="t", - workflow_app_id="app-id", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_from_db_fails(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Exceptions from WorkflowToolProviderController.from_db are wrapped as ValueError.""" - # Arrange - workflow = DummyWorkflow(graph_dict={"nodes": []}) - app = SimpleNamespace(workflow=workflow) - - def query(m: type) -> FakeQuery: - if m is WorkflowToolProvider: - return FakeQuery(None) - return FakeQuery(app) - - fake_db = MagicMock() - fake_db.session = SimpleNamespace(query=query) - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - dummy_session = DummySession() - monkeypatch.setattr(workflow_tools_manage_service, "Session", lambda *_, **__: dummy_session) - monkeypatch.setattr( - workflow_tools_manage_service.WorkflowToolProviderController, - "from_db", - MagicMock(side_effect=RuntimeError("bad config")), - ) - - # Act + Assert - with pytest.raises(ValueError, match="bad config"): - WorkflowToolManageService.create_workflow_tool( - user_id="u", - tenant_id="t", - workflow_app_id="app-id", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_succeed_and_persist_provider(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Happy path: provider is added to session and success dict is returned.""" - # Arrange - workflow = DummyWorkflow(graph_dict={"nodes": []}, version="2.0.0") - app = SimpleNamespace(workflow=workflow) - - def query(m: type) -> FakeQuery: - if m is WorkflowToolProvider: - return FakeQuery(None) - return FakeQuery(app) - - fake_db = MagicMock() - fake_db.session = SimpleNamespace(query=query) - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - dummy_session = DummySession() - monkeypatch.setattr(workflow_tools_manage_service, "Session", lambda *_, **__: dummy_session) - monkeypatch.setattr(workflow_tools_manage_service.WorkflowToolProviderController, "from_db", MagicMock()) - - icon = {"type": "emoji", "emoji": "🔧"} - - # Act - result = WorkflowToolManageService.create_workflow_tool( - user_id="user-id", - tenant_id="tenant-id", - workflow_app_id="app-id", - name="tool_name", - label="Tool", - icon=icon, - description="desc", - parameters=_build_parameters(), - ) - - # Assert - assert result == {"result": "success"} - assert len(dummy_session.added) == 1 - created: WorkflowToolProvider = dummy_session.added[0] - assert created.name == "tool_name" - assert created.label == "Tool" - assert created.icon == json.dumps(icon) - assert created.version == "2.0.0" - - def test_should_call_label_manager_when_labels_provided(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Labels are forwarded to ToolLabelManager when provided.""" - # Arrange - workflow = DummyWorkflow(graph_dict={"nodes": []}) - app = SimpleNamespace(workflow=workflow) - - def query(m: type) -> FakeQuery: - if m is WorkflowToolProvider: - return 
FakeQuery(None) - return FakeQuery(app) - - fake_db = MagicMock() - fake_db.session = SimpleNamespace(query=query) - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - dummy_session = DummySession() - monkeypatch.setattr(workflow_tools_manage_service, "Session", lambda *_, **__: dummy_session) - monkeypatch.setattr(workflow_tools_manage_service.WorkflowToolProviderController, "from_db", MagicMock()) - mock_label_mgr = MagicMock() - monkeypatch.setattr(workflow_tools_manage_service.ToolLabelManager, "update_tool_labels", mock_label_mgr) - mock_to_ctrl = MagicMock() - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "workflow_provider_to_controller", mock_to_ctrl - ) - - # Act - WorkflowToolManageService.create_workflow_tool( - user_id="u", - tenant_id="t", - workflow_app_id="app-id", - name="n", - label="L", - icon={}, - description="", - parameters=[], - labels=["tag1", "tag2"], - ) - - # Assert - mock_label_mgr.assert_called_once() - - -# --------------------------------------------------------------------------- -# TestUpdateWorkflowTool -# --------------------------------------------------------------------------- - - -class TestUpdateWorkflowTool: - """Tests for WorkflowToolManageService.update_workflow_tool.""" - - def _make_provider(self) -> WorkflowToolProvider: - p = MagicMock(spec=WorkflowToolProvider) - p.app_id = "app-id" - p.tenant_id = "tenant-id" - return p - - def test_should_raise_when_name_duplicated(self, monkeypatch: pytest.MonkeyPatch) -> None: - """If another tool with the given name already exists, raise ValueError.""" - # Arrange - existing = MagicMock(spec=WorkflowToolProvider) - - def query(m: type) -> FakeQuery: - return FakeQuery(existing) - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="already exists"): - WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="dup", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_tool_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the workflow tool to update does not exist.""" - # Arrange - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - # 1st call: name uniqueness check → None (no duplicate) - # 2nd call: fetch tool by id → None (not found) - return FakeQuery(None) - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="missing", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_app_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the tool's referenced App has been removed.""" - # Arrange - provider = self._make_provider() - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - # 1st: duplicate name check (None), 2nd: fetch provider - return FakeQuery(None) if call_count["n"] == 1 else FakeQuery(provider) - return FakeQuery(None) # App not found - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - 
WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_workflow_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the App exists but has no Workflow.""" - # Arrange - provider = self._make_provider() - app_no_wf = SimpleNamespace(workflow=None) - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - return FakeQuery(None) if call_count["n"] == 1 else FakeQuery(provider) - return FakeQuery(app_no_wf) - - monkeypatch.setattr(workflow_tools_manage_service.db, "session", SimpleNamespace(query=query)) - - # Act + Assert - with pytest.raises(ValueError, match="Workflow not found"): - WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_raise_when_from_db_fails(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Exceptions from from_db are re-raised as ValueError.""" - # Arrange - provider = self._make_provider() - workflow = DummyWorkflow(graph_dict={"nodes": []}) - app = SimpleNamespace(workflow=workflow) - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - return FakeQuery(None) if call_count["n"] == 1 else FakeQuery(provider) - return FakeQuery(app) - - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=query, commit=MagicMock()), - ) - monkeypatch.setattr( - workflow_tools_manage_service.WorkflowToolProviderController, - "from_db", - MagicMock(side_effect=RuntimeError("from_db error")), - ) - - # Act + Assert - with pytest.raises(ValueError, match="from_db error"): - WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="n", - label="L", - icon={}, - description="", - parameters=[], - ) - - def test_should_succeed_and_call_commit(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Happy path: provider fields are updated and session committed.""" - # Arrange - provider = self._make_provider() - workflow = DummyWorkflow(graph_dict={"nodes": []}, version="3.0.0") - app = SimpleNamespace(workflow=workflow) - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - return FakeQuery(None) if call_count["n"] == 1 else FakeQuery(provider) - return FakeQuery(app) - - mock_commit = MagicMock() - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=query, commit=mock_commit), - ) - monkeypatch.setattr(workflow_tools_manage_service.WorkflowToolProviderController, "from_db", MagicMock()) - - icon = {"type": "emoji", "emoji": "🛠"} - - # Act - result = WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="new_name", - label="New Label", - icon=icon, - description="new desc", - parameters=_build_parameters(), - ) - - # Assert - assert result == {"result": "success"} - mock_commit.assert_called_once() - assert provider.name == "new_name" - assert provider.label == "New Label" - assert provider.icon == json.dumps(icon) - assert provider.version == "3.0.0" - - def test_should_call_label_manager_when_labels_provided(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Labels are forwarded 
to ToolLabelManager during update.""" - # Arrange - provider = self._make_provider() - workflow = DummyWorkflow(graph_dict={"nodes": []}) - app = SimpleNamespace(workflow=workflow) - call_count = {"n": 0} - - def query(m: type) -> FakeQuery: - call_count["n"] += 1 - if m is WorkflowToolProvider: - return FakeQuery(None) if call_count["n"] == 1 else FakeQuery(provider) - return FakeQuery(app) - - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=query, commit=MagicMock()), - ) - monkeypatch.setattr(workflow_tools_manage_service.WorkflowToolProviderController, "from_db", MagicMock()) - mock_label_mgr = MagicMock() - monkeypatch.setattr(workflow_tools_manage_service.ToolLabelManager, "update_tool_labels", mock_label_mgr) - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "workflow_provider_to_controller", MagicMock() - ) - - # Act - WorkflowToolManageService.update_workflow_tool( - user_id="u", - tenant_id="t", - workflow_tool_id="tool-1", - name="n", - label="L", - icon={}, - description="", - parameters=[], - labels=["a"], - ) - - # Assert - mock_label_mgr.assert_called_once() - - -# --------------------------------------------------------------------------- -# TestListTenantWorkflowTools -# --------------------------------------------------------------------------- - - -class TestListTenantWorkflowTools: - """Tests for WorkflowToolManageService.list_tenant_workflow_tools.""" - - def test_should_return_empty_list_when_no_tools(self, monkeypatch: pytest.MonkeyPatch) -> None: - """An empty database yields an empty result list.""" - # Arrange - fake_scalars = MagicMock() - fake_scalars.all.return_value = [] - fake_db = MagicMock() - fake_db.session.scalars.return_value = fake_scalars - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - - # Act - result = WorkflowToolManageService.list_tenant_workflow_tools("u", "t") - - # Assert - assert result == [] - - def test_should_skip_broken_providers_and_log(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Providers that fail to load are logged and skipped.""" - # Arrange - good_provider = MagicMock(spec=WorkflowToolProvider) - good_provider.id = "good-id" - good_provider.app_id = "app-good" - bad_provider = MagicMock(spec=WorkflowToolProvider) - bad_provider.id = "bad-id" - bad_provider.app_id = "app-bad" - - fake_scalars = MagicMock() - fake_scalars.all.return_value = [good_provider, bad_provider] - fake_db = MagicMock() - fake_db.session.scalars.return_value = fake_scalars - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - - good_ctrl = MagicMock() - good_ctrl.provider_id = "good-id" - - def to_controller(provider: WorkflowToolProvider) -> MagicMock: - if provider is bad_provider: - raise RuntimeError("broken provider") - return good_ctrl - - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "workflow_provider_to_controller", to_controller - ) - mock_get_labels = MagicMock(return_value={}) - monkeypatch.setattr(workflow_tools_manage_service.ToolLabelManager, "get_tools_labels", mock_get_labels) - mock_to_user = MagicMock() - mock_to_user.return_value.tools = [] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "workflow_provider_to_user_provider", mock_to_user - ) - monkeypatch.setattr(workflow_tools_manage_service.ToolTransformService, "repack_provider", MagicMock()) - mock_get_tools = MagicMock(return_value=[MagicMock()]) - good_ctrl.get_tools = mock_get_tools - monkeypatch.setattr( - 
workflow_tools_manage_service.ToolTransformService, "convert_tool_entity_to_api_entity", MagicMock() - ) - - # Act - result = WorkflowToolManageService.list_tenant_workflow_tools("u", "t") - - # Assert - only good provider contributed - assert len(result) == 1 - - def test_should_return_tools_for_all_providers(self, monkeypatch: pytest.MonkeyPatch) -> None: - """All successfully loaded providers appear in the result.""" - # Arrange - provider = MagicMock(spec=WorkflowToolProvider) - provider.id = "p-1" - provider.app_id = "app-1" - - fake_scalars = MagicMock() - fake_scalars.all.return_value = [provider] - fake_db = MagicMock() - fake_db.session.scalars.return_value = fake_scalars - monkeypatch.setattr(workflow_tools_manage_service, "db", fake_db) - - ctrl = MagicMock() - ctrl.provider_id = "p-1" - ctrl.get_tools.return_value = [MagicMock()] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_controller", - MagicMock(return_value=ctrl), - ) - monkeypatch.setattr( - workflow_tools_manage_service.ToolLabelManager, "get_tools_labels", MagicMock(return_value={"p-1": []}) - ) - user_provider = MagicMock() - user_provider.tools = [] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_user_provider", - MagicMock(return_value=user_provider), - ) - monkeypatch.setattr(workflow_tools_manage_service.ToolTransformService, "repack_provider", MagicMock()) - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "convert_tool_entity_to_api_entity", MagicMock() - ) - - # Act - result = WorkflowToolManageService.list_tenant_workflow_tools("u", "t") - - # Assert - assert len(result) == 1 - assert result[0] is user_provider - - -# --------------------------------------------------------------------------- -# TestDeleteWorkflowTool -# --------------------------------------------------------------------------- - - -class TestDeleteWorkflowTool: - """Tests for WorkflowToolManageService.delete_workflow_tool.""" - - def test_should_delete_and_commit(self, monkeypatch: pytest.MonkeyPatch) -> None: - """delete_workflow_tool queries, deletes, commits, and returns success.""" - # Arrange - mock_query = MagicMock() - mock_query.where.return_value.delete.return_value = 1 - mock_commit = MagicMock() - fake_session = SimpleNamespace(query=lambda m: mock_query, commit=mock_commit) - monkeypatch.setattr(workflow_tools_manage_service.db, "session", fake_session) - - # Act - result = WorkflowToolManageService.delete_workflow_tool("u", "t", "tool-1") - - # Assert - assert result == {"result": "success"} - mock_commit.assert_called_once() - - -# --------------------------------------------------------------------------- -# TestGetWorkflowToolByToolId / ByAppId -# --------------------------------------------------------------------------- - - -class TestGetWorkflowToolByToolIdAndAppId: - """Tests for get_workflow_tool_by_tool_id and get_workflow_tool_by_app_id.""" - - def test_get_by_tool_id_should_raise_when_db_tool_is_none(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Raises ValueError when no WorkflowToolProvider found by tool id.""" - # Arrange - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(None)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="Tool not found"): - WorkflowToolManageService.get_workflow_tool_by_tool_id("u", "t", "missing") - - def test_get_by_app_id_should_raise_when_db_tool_is_none(self, monkeypatch: 
pytest.MonkeyPatch) -> None: - """Raises ValueError when no WorkflowToolProvider found by app id.""" - # Arrange - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(None)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="Tool not found"): - WorkflowToolManageService.get_workflow_tool_by_app_id("u", "t", "missing-app") - - -# --------------------------------------------------------------------------- -# TestGetWorkflowTool (private _get_workflow_tool) -# --------------------------------------------------------------------------- - - -class TestGetWorkflowTool: - """Tests for the internal _get_workflow_tool helper.""" - - def test_should_raise_when_db_tool_none(self) -> None: - """_get_workflow_tool raises ValueError when db_tool is None.""" - with pytest.raises(ValueError, match="Tool not found"): - WorkflowToolManageService._get_workflow_tool("t", None) - - def test_should_raise_when_app_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the corresponding App row is missing.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.app_id = "app-1" - db_tool.tenant_id = "t" - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(None)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService._get_workflow_tool("t", db_tool) - - def test_should_raise_when_workflow_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when App has no attached Workflow.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.app_id = "app-1" - db_tool.tenant_id = "t" - app = SimpleNamespace(workflow=None) - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(app)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="Workflow not found"): - WorkflowToolManageService._get_workflow_tool("t", db_tool) - - def test_should_raise_when_no_workflow_tools(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the controller returns no WorkflowTool instances.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.app_id = "app-1" - db_tool.tenant_id = "t" - db_tool.id = "tool-1" - workflow = DummyWorkflow(graph_dict={"nodes": []}) - app = SimpleNamespace(workflow=workflow) - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(app)), - ) - ctrl = MagicMock() - ctrl.get_tools.return_value = [] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_controller", - MagicMock(return_value=ctrl), - ) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService._get_workflow_tool("t", db_tool) - - def test_should_return_dict_on_success(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Happy path: returns a dict with name, label, icon, synced, etc.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.app_id = "app-1" - db_tool.tenant_id = "t" - db_tool.id = "tool-1" - db_tool.name = "my_tool" - db_tool.label = "My Tool" - db_tool.icon = json.dumps({"emoji": "🔧"}) - db_tool.description = "some desc" - db_tool.privacy_policy = "" - db_tool.version = "1.0" - db_tool.parameter_configurations = [] - workflow = DummyWorkflow(graph_dict={"nodes": []}, version="1.0") - app = 
SimpleNamespace(workflow=workflow) - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(app)), - ) - - workflow_tool = MagicMock() - workflow_tool.entity.output_schema = {"type": "object"} - ctrl = MagicMock() - ctrl.get_tools.return_value = [workflow_tool] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_controller", - MagicMock(return_value=ctrl), - ) - mock_convert = MagicMock(return_value={"tool": "api_entity"}) - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, "convert_tool_entity_to_api_entity", mock_convert - ) - monkeypatch.setattr( - workflow_tools_manage_service.ToolLabelManager, "get_tool_labels", MagicMock(return_value=[]) - ) - - # Act - result = WorkflowToolManageService._get_workflow_tool("t", db_tool) - - # Assert - assert result["name"] == "my_tool" - assert result["label"] == "My Tool" - assert result["synced"] is True - assert "icon" in result - assert "output_schema" in result - - -# --------------------------------------------------------------------------- -# TestListSingleWorkflowTools -# --------------------------------------------------------------------------- - - -class TestListSingleWorkflowTools: - """Tests for WorkflowToolManageService.list_single_workflow_tools.""" - - def test_should_raise_when_tool_not_found(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the specified tool does not exist in DB.""" - # Arrange - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(None)), - ) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService.list_single_workflow_tools("u", "t", "tool-1") - - def test_should_raise_when_no_workflow_tools(self, monkeypatch: pytest.MonkeyPatch) -> None: - """ValueError when the controller yields no tools for the provider.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.id = "tool-1" - db_tool.tenant_id = "t" - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(db_tool)), - ) - ctrl = MagicMock() - ctrl.get_tools.return_value = [] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_controller", - MagicMock(return_value=ctrl), - ) - - # Act + Assert - with pytest.raises(ValueError, match="not found"): - WorkflowToolManageService.list_single_workflow_tools("u", "t", "tool-1") - - def test_should_return_api_entity_list(self, monkeypatch: pytest.MonkeyPatch) -> None: - """Happy path: returns list with one ToolApiEntity.""" - # Arrange - db_tool = MagicMock(spec=WorkflowToolProvider) - db_tool.id = "tool-1" - db_tool.tenant_id = "t" - monkeypatch.setattr( - workflow_tools_manage_service.db, - "session", - SimpleNamespace(query=lambda m: FakeQuery(db_tool)), - ) - workflow_tool = MagicMock() - ctrl = MagicMock() - ctrl.get_tools.return_value = [workflow_tool] - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "workflow_provider_to_controller", - MagicMock(return_value=ctrl), - ) - api_entity = MagicMock() - monkeypatch.setattr( - workflow_tools_manage_service.ToolTransformService, - "convert_tool_entity_to_api_entity", - MagicMock(return_value=api_entity), - ) - monkeypatch.setattr( - workflow_tools_manage_service.ToolLabelManager, "get_tool_labels", MagicMock(return_value=[]) - ) - - # Act - result = 
WorkflowToolManageService.list_single_workflow_tools("u", "t", "tool-1") - - # Assert - assert result == [api_entity] diff --git a/api/tests/unit_tests/services/vector_service.py b/api/tests/unit_tests/services/vector_service.py index c99275c6b2..e180063041 100644 --- a/api/tests/unit_tests/services/vector_service.py +++ b/api/tests/unit_tests/services/vector_service.py @@ -121,6 +121,7 @@ import pytest from core.rag.datasource.vdb.vector_base import BaseVector from core.rag.datasource.vdb.vector_factory import Vector from core.rag.datasource.vdb.vector_type import VectorType +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.models.document import Document from models.dataset import ChildChunk, Dataset, DatasetDocument, DatasetProcessRule, DocumentSegment from services.vector_service import VectorService @@ -151,7 +152,7 @@ class VectorServiceTestDataFactory: def create_dataset_mock( dataset_id: str = "dataset-123", tenant_id: str = "tenant-123", - doc_form: str = "text_model", + doc_form: str = IndexStructureType.PARAGRAPH_INDEX, indexing_technique: str = "high_quality", embedding_model_provider: str = "openai", embedding_model: str = "text-embedding-ada-002", @@ -493,7 +494,7 @@ class TestVectorService: """ # Arrange dataset = VectorServiceTestDataFactory.create_dataset_mock( - doc_form="text_model", indexing_technique="high_quality" + doc_form=IndexStructureType.PARAGRAPH_INDEX, indexing_technique="high_quality" ) segment = VectorServiceTestDataFactory.create_document_segment_mock() @@ -505,7 +506,7 @@ class TestVectorService: mock_index_processor_factory.return_value.init_index_processor.return_value = mock_index_processor # Act - VectorService.create_segments_vector(keywords_list, [segment], dataset, "text_model") + VectorService.create_segments_vector(keywords_list, [segment], dataset, IndexStructureType.PARAGRAPH_INDEX) # Assert mock_index_processor.load.assert_called_once() @@ -649,7 +650,7 @@ class TestVectorService: mock_index_processor_factory.return_value.init_index_processor.return_value = mock_index_processor # Act - VectorService.create_segments_vector(None, [], dataset, "text_model") + VectorService.create_segments_vector(None, [], dataset, IndexStructureType.PARAGRAPH_INDEX) # Assert mock_index_processor.load.assert_not_called() diff --git a/api/tests/unit_tests/services/workflow/test_workflow_deletion.py b/api/tests/unit_tests/services/workflow/test_workflow_deletion.py deleted file mode 100644 index dfe325648d..0000000000 --- a/api/tests/unit_tests/services/workflow/test_workflow_deletion.py +++ /dev/null @@ -1,127 +0,0 @@ -from unittest.mock import MagicMock - -import pytest -from sqlalchemy.orm import Session - -from models.model import App -from models.workflow import Workflow -from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService - - -@pytest.fixture -def workflow_setup(): - mock_session_maker = MagicMock() - workflow_service = WorkflowService(mock_session_maker) - session = MagicMock(spec=Session) - tenant_id = "test-tenant-id" - workflow_id = "test-workflow-id" - - # Mock workflow - workflow = MagicMock(spec=Workflow) - workflow.id = workflow_id - workflow.tenant_id = tenant_id - workflow.version = "1.0" # Not a draft - workflow.tool_published = False # Not published as a tool by default - - # Mock app - app = MagicMock(spec=App) - app.id = "test-app-id" - app.name = "Test App" - app.workflow_id = None # Not used by an app by default - - return { - "workflow_service": 
workflow_service, - "session": session, - "tenant_id": tenant_id, - "workflow_id": workflow_id, - "workflow": workflow, - "app": app, - } - - -def test_delete_workflow_success(workflow_setup): - # Setup mocks - - # Mock the tool provider query to return None (not published as a tool) - workflow_setup["session"].query.return_value.where.return_value.first.return_value = None - - workflow_setup["session"].scalar = MagicMock( - side_effect=[workflow_setup["workflow"], None] - ) # Return workflow first, then None for app - - # Call the method - result = workflow_setup["workflow_service"].delete_workflow( - session=workflow_setup["session"], - workflow_id=workflow_setup["workflow_id"], - tenant_id=workflow_setup["tenant_id"], - ) - - # Verify - assert result is True - workflow_setup["session"].delete.assert_called_once_with(workflow_setup["workflow"]) - - -def test_delete_workflow_draft_error(workflow_setup): - # Setup mocks - workflow_setup["workflow"].version = "draft" - workflow_setup["session"].scalar = MagicMock(return_value=workflow_setup["workflow"]) - - # Call the method and verify exception - with pytest.raises(DraftWorkflowDeletionError): - workflow_setup["workflow_service"].delete_workflow( - session=workflow_setup["session"], - workflow_id=workflow_setup["workflow_id"], - tenant_id=workflow_setup["tenant_id"], - ) - - # Verify - workflow_setup["session"].delete.assert_not_called() - - -def test_delete_workflow_in_use_by_app_error(workflow_setup): - # Setup mocks - workflow_setup["app"].workflow_id = workflow_setup["workflow_id"] - workflow_setup["session"].scalar = MagicMock( - side_effect=[workflow_setup["workflow"], workflow_setup["app"]] - ) # Return workflow first, then app - - # Call the method and verify exception - with pytest.raises(WorkflowInUseError) as excinfo: - workflow_setup["workflow_service"].delete_workflow( - session=workflow_setup["session"], - workflow_id=workflow_setup["workflow_id"], - tenant_id=workflow_setup["tenant_id"], - ) - - # Verify error message contains app name - assert "Cannot delete workflow that is currently in use by app" in str(excinfo.value) - - # Verify - workflow_setup["session"].delete.assert_not_called() - - -def test_delete_workflow_published_as_tool_error(workflow_setup): - # Setup mocks - from models.tools import WorkflowToolProvider - - # Mock the tool provider query - mock_tool_provider = MagicMock(spec=WorkflowToolProvider) - workflow_setup["session"].query.return_value.where.return_value.first.return_value = mock_tool_provider - - workflow_setup["session"].scalar = MagicMock( - side_effect=[workflow_setup["workflow"], None] - ) # Return workflow first, then None for app - - # Call the method and verify exception - with pytest.raises(WorkflowInUseError) as excinfo: - workflow_setup["workflow_service"].delete_workflow( - session=workflow_setup["session"], - workflow_id=workflow_setup["workflow_id"], - tenant_id=workflow_setup["tenant_id"], - ) - - # Verify error message - assert "Cannot delete workflow that is published as a tool" in str(excinfo.value) - - # Verify - workflow_setup["session"].delete.assert_not_called() diff --git a/api/tests/unit_tests/services/workflow/test_workflow_node_execution_service_repository.py b/api/tests/unit_tests/services/workflow/test_workflow_node_execution_service_repository.py deleted file mode 100644 index 79bf5e94c2..0000000000 --- a/api/tests/unit_tests/services/workflow/test_workflow_node_execution_service_repository.py +++ /dev/null @@ -1,30 +0,0 @@ -from unittest.mock import MagicMock - -import 
pytest - -from repositories.sqlalchemy_api_workflow_node_execution_repository import ( - DifyAPISQLAlchemyWorkflowNodeExecutionRepository, -) - - -class TestSQLAlchemyWorkflowNodeExecutionServiceRepository: - @pytest.fixture - def repository(self): - mock_session_maker = MagicMock() - return DifyAPISQLAlchemyWorkflowNodeExecutionRepository(session_maker=mock_session_maker) - - def test_repository_implements_protocol(self, repository): - """Test that the repository implements the required protocol methods.""" - # Verify all protocol methods are implemented - assert hasattr(repository, "get_node_last_execution") - assert hasattr(repository, "get_executions_by_workflow_run") - assert hasattr(repository, "get_execution_by_id") - - # Verify methods are callable - assert callable(repository.get_node_last_execution) - assert callable(repository.get_executions_by_workflow_run) - assert callable(repository.get_execution_by_id) - assert callable(repository.delete_expired_executions) - assert callable(repository.delete_executions_by_app) - assert callable(repository.get_expired_executions_batch) - assert callable(repository.delete_executions_by_ids) diff --git a/api/tests/unit_tests/tasks/test_clean_dataset_task.py b/api/tests/unit_tests/tasks/test_clean_dataset_task.py index 74ba7f9c34..c0a4d2f113 100644 --- a/api/tests/unit_tests/tasks/test_clean_dataset_task.py +++ b/api/tests/unit_tests/tasks/test_clean_dataset_task.py @@ -16,6 +16,7 @@ from unittest.mock import MagicMock, patch import pytest +from core.rag.index_processor.constant.index_type import IndexStructureType from models.enums import DataSourceType from tasks.clean_dataset_task import clean_dataset_task @@ -186,7 +187,7 @@ class TestErrorHandling: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Assert @@ -231,7 +232,7 @@ class TestPipelineAndWorkflowDeletion: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, pipeline_id=pipeline_id, ) @@ -267,7 +268,7 @@ class TestPipelineAndWorkflowDeletion: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, pipeline_id=None, ) @@ -323,7 +324,7 @@ class TestSegmentAttachmentCleanup: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Assert @@ -368,7 +369,7 @@ class TestSegmentAttachmentCleanup: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Assert - storage delete was attempted @@ -410,7 +411,7 @@ class TestEdgeCases: indexing_technique="high_quality", index_struct='{"type": "paragraph"}', collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Assert @@ -454,7 +455,7 @@ class TestIndexProcessorParameters: indexing_technique=indexing_technique, index_struct=index_struct, collection_binding_id=collection_binding_id, - doc_form="paragraph_index", + doc_form=IndexStructureType.PARAGRAPH_INDEX, ) # Assert diff 
--git a/api/tests/unit_tests/tasks/test_dataset_indexing_task.py b/api/tests/unit_tests/tasks/test_dataset_indexing_task.py index 9d9cfa54b7..027cd3b1ec 100644 --- a/api/tests/unit_tests/tasks/test_dataset_indexing_task.py +++ b/api/tests/unit_tests/tasks/test_dataset_indexing_task.py @@ -15,6 +15,7 @@ from unittest.mock import MagicMock, Mock, patch import pytest from core.indexing_runner import DocumentIsPausedError +from core.rag.index_processor.constant.index_type import IndexStructureType from core.rag.pipeline.queue import TenantIsolatedTaskQueue from enums.cloud_plan import CloudPlan from extensions.ext_redis import redis_client @@ -227,7 +228,7 @@ def mock_documents(document_ids, dataset_id): doc.stopped_at = None doc.processing_started_at = None # optional attribute used in some code paths - doc.doc_form = "text_model" + doc.doc_form = IndexStructureType.PARAGRAPH_INDEX documents.append(doc) return documents diff --git a/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py b/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py index 3668416e36..f49f4535af 100644 --- a/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py +++ b/api/tests/unit_tests/tasks/test_document_indexing_sync_task.py @@ -11,6 +11,7 @@ from unittest.mock import MagicMock, Mock, patch import pytest +from core.rag.index_processor.constant.index_type import IndexStructureType from models.dataset import Dataset, Document from tasks.document_indexing_sync_task import document_indexing_sync_task @@ -62,7 +63,7 @@ def mock_document(document_id, dataset_id, notion_workspace_id, notion_page_id, document.tenant_id = str(uuid.uuid4()) document.data_source_type = "notion_import" document.indexing_status = "completed" - document.doc_form = "text_model" + document.doc_form = IndexStructureType.PARAGRAPH_INDEX document.data_source_info_dict = { "notion_workspace_id": notion_workspace_id, "notion_page_id": notion_page_id, diff --git a/api/uv.lock b/api/uv.lock index 952ec87273..30c5b851bc 100644 --- a/api/uv.lock +++ b/api/uv.lock @@ -169,12 +169,6 @@ version = "1.0.0" source = { registry = "https://pypi.org/simple" } sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330, upload-time = "2025-01-13T05:53:04.931Z" } -[[package]] -name = "alibabacloud-endpoint-util" -version = "0.0.4" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813, upload-time = "2025-06-12T07:20:52.572Z" } - [[package]] name = "alibabacloud-gateway-spi" version = "0.0.3" @@ -186,69 +180,17 @@ sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf7 [[package]] name = "alibabacloud-gpdb20160503" -version = "3.8.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-endpoint-util" }, - { name = "alibabacloud-openapi-util" }, - { name = "alibabacloud-openplatform20191219" }, - { name = "alibabacloud-oss-sdk" }, - { name = "alibabacloud-oss-util" }, - { name = "alibabacloud-tea-fileform" }, - { name = "alibabacloud-tea-openapi" }, - { name = "alibabacloud-tea-util" }, -] -sdist = { url = 
"https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092, upload-time = "2024-07-18T17:09:42.438Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097, upload-time = "2024-07-18T17:09:40.414Z" }, -] - -[[package]] -name = "alibabacloud-openapi-util" -version = "0.2.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-tea-util" }, - { name = "cryptography" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201, upload-time = "2023-10-23T07:44:18.523Z" } - -[[package]] -name = "alibabacloud-openplatform20191219" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-endpoint-util" }, - { name = "alibabacloud-openapi-util" }, - { name = "alibabacloud-tea-openapi" }, - { name = "alibabacloud-tea-util" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038, upload-time = "2022-09-21T06:16:10.683Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204, upload-time = "2022-09-21T06:16:07.844Z" }, -] - -[[package]] -name = "alibabacloud-oss-sdk" -version = "0.1.1" +version = "5.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "alibabacloud-credentials" }, - { name = "alibabacloud-oss-util" }, - { name = "alibabacloud-tea-fileform" }, - { name = "alibabacloud-tea-util" }, - { name = "alibabacloud-tea-xml" }, + { name = "alibabacloud-tea-openapi" }, + { name = "darabonba-core" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434, upload-time = "2025-04-22T12:40:41.717Z" } - -[[package]] -name = "alibabacloud-oss-util" -version = "0.0.6" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-tea" }, +sdist = { url = "https://files.pythonhosted.org/packages/b3/36/69333c7fb7fb5267f338371b14fdd8dbdd503717c97bbc7a6419d155ab4c/alibabacloud_gpdb20160503-5.1.0.tar.gz", hash = "sha256:086ec6d5e39b64f54d0e44bb3fd4fde1a4822a53eb9f6ff7464dff7d19b07b63", size = 295641, upload-time = "2026-03-19T10:09:02.444Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/68/7f/a91a2f9ad97c92fa9a6981587ea0ff789240cea05b17b17b7c244e5bac64/alibabacloud_gpdb20160503-5.1.0-py3-none-any.whl", hash = "sha256:580e4579285a54c7f04570782e0f60423a1997568684187fe88e4110acfb640e", size = 848784, 
upload-time = "2026-03-19T10:09:00.72Z" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008, upload-time = "2021-04-28T09:25:04.056Z" } [[package]] name = "alibabacloud-tea" @@ -260,15 +202,6 @@ dependencies = [ ] sdist = { url = "https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785, upload-time = "2025-03-24T07:34:42.958Z" } -[[package]] -name = "alibabacloud-tea-fileform" -version = "0.0.5" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-tea" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961, upload-time = "2021-04-28T09:22:54.56Z" } - [[package]] name = "alibabacloud-tea-openapi" version = "0.4.3" @@ -297,15 +230,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/72/9e/c394b4e2104766fb28a1e44e3ed36e4c7773b4d05c868e482be99d5635c9/alibabacloud_tea_util-0.3.14-py3-none-any.whl", hash = "sha256:10d3e5c340d8f7ec69dd27345eb2fc5a1dab07875742525edf07bbe86db93bfe", size = 6697, upload-time = "2025-11-19T06:01:07.355Z" }, ] -[[package]] -name = "alibabacloud-tea-xml" -version = "0.0.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "alibabacloud-tea" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466, upload-time = "2025-07-01T08:04:55.144Z" } - [[package]] name = "aliyun-log-python-sdk" version = "0.9.37" @@ -570,28 +494,28 @@ wheels = [ [[package]] name = "basedpyright" -version = "1.38.2" +version = "1.38.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "nodejs-wheel-binaries" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e4/a3/20aa7c4e83f2f614e0036300f3c352775dede0655c66814da16c37b661a9/basedpyright-1.38.2.tar.gz", hash = "sha256:b433b2b8ba745ed7520cdc79a29a03682f3fb00346d272ece5944e9e5e5daa92", size = 25277019, upload-time = "2026-02-26T11:18:43.594Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/58/7abba2c743571a42b2548f07aee556ebc1e4d0bc2b277aeba1ee6c83b0af/basedpyright-1.38.3.tar.gz", hash = "sha256:9725419786afbfad8a9539527f162da02d462afad440b0412fdb3f3cdf179b90", size = 25277430, upload-time = "2026-03-17T13:10:41.526Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ac/12/736cab83626fea3fe65cdafb3ef3d2ee9480c56723f2fd33921537289a5e/basedpyright-1.38.2-py3-none-any.whl", hash = "sha256:153481d37fd19f9e3adedc8629d1d071b10c5f5e49321fb026b74444b7c70e24", size = 12312475, upload-time = "2026-02-26T11:18:40.373Z" }, + { url = "https://files.pythonhosted.org/packages/2c/e3/3ebb5c23bd3abb5fc2053b8a06a889aa5c1cf8cff738c78cb6c1957e90cd/basedpyright-1.38.3-py3-none-any.whl", hash = "sha256:1f15c2e489c67d6c5e896c24b6a63251195c04223a55e4568b8f8e8ed49ca830", size = 12313363, upload-time = "2026-03-17T13:10:47.344Z" }, ] 
[[package]] name = "bce-python-sdk" -version = "0.9.63" +version = "0.9.64" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "future" }, { name = "pycryptodome" }, { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8e/ab/4c2927b01a97562af6a296b722eee79658335795f341a395a12742d5e1a3/bce_python_sdk-0.9.63.tar.gz", hash = "sha256:0c80bc3ac128a0a144bae3b8dff1f397f42c30b36f7677e3a39d8df8e77b1088", size = 284419, upload-time = "2026-03-06T14:54:06.592Z" } +sdist = { url = "https://files.pythonhosted.org/packages/61/33/047e9c1a6c97e0cd4d93a6490abd8fbc2ccd13569462fc0228699edc08bc/bce_python_sdk-0.9.64.tar.gz", hash = "sha256:901bf787c26ad35855a80d65e58d7584c8541f7f0f2af20847830e572e5b622e", size = 287125, upload-time = "2026-03-17T11:24:29.345Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/67/a4/501e978776c7060aa8ba77e68536597e754d938bcdbe1826618acebfbddf/bce_python_sdk-0.9.63-py3-none-any.whl", hash = "sha256:ec66eee8807c6aa4036412592da7e8c9e2cd7fdec494190986288ac2195d8276", size = 400305, upload-time = "2026-03-06T14:53:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/48/7f/dd289582f37ab4effea47b2a8503880db4781ca0fc8e0a8ed5ff493359e5/bce_python_sdk-0.9.64-py3-none-any.whl", hash = "sha256:eaad97e4f0e7d613ae978da3cdc5294e9f724ffca2735f79820037fa1317cd6d", size = 402233, upload-time = "2026-03-17T11:24:24.673Z" }, ] [[package]] @@ -660,14 +584,14 @@ wheels = [ [[package]] name = "bleach" -version = "6.2.0" +version = "6.3.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "webencodings" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" }, + { url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" }, ] [[package]] @@ -706,30 +630,30 @@ wheels = [ [[package]] name = "boto3" -version = "1.42.68" +version = "1.42.73" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore" }, { name = "jmespath" }, { name = "s3transfer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/ae/60c642aa5413e560b671da825329f510b29a77274ed0f580bde77562294d/boto3-1.42.68.tar.gz", hash = "sha256:3f349f967ab38c23425626d130962bcb363e75f042734fe856ea8c5a00eef03c", size = 112761, upload-time = "2026-03-13T19:32:17.137Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e4/8b/d00575be514744ca4839e7d85bf4a8a3c7b6b4574433291e58d14c68ae09/boto3-1.42.73.tar.gz", hash = 
"sha256:d37b58d6cd452ca808dd6823ae19ca65b6244096c5125ef9052988b337298bae", size = 112775, upload-time = "2026-03-20T19:39:52.814Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fb/f6/dc6e993479dbb597d68223fbf61cb026511737696b15bd7d2a33e9b2c24f/boto3-1.42.68-py3-none-any.whl", hash = "sha256:dbff353eb7dc93cbddd7926ed24793e0174c04adbe88860dfa639568442e4962", size = 140556, upload-time = "2026-03-13T19:32:14.951Z" }, + { url = "https://files.pythonhosted.org/packages/aa/05/1fcf03d90abaa3d0b42a6bfd10231dd709493ecbacf794aa2eea5eae6841/boto3-1.42.73-py3-none-any.whl", hash = "sha256:1f81b79b873f130eeab14bb556417a7c66d38f3396b7f2fe3b958b3f9094f455", size = 140556, upload-time = "2026-03-20T19:39:50.298Z" }, ] [[package]] name = "boto3-stubs" -version = "1.42.68" +version = "1.42.73" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "botocore-stubs" }, { name = "types-s3transfer" }, { name = "typing-extensions", marker = "python_full_version < '3.12'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/dd4b0c95ff008bed5a35ab411452ece121b355539d2a0b6dcd62a0c47be5/boto3_stubs-1.42.68.tar.gz", hash = "sha256:96ad1020735619483fb9b4da7a5e694b460bf2e18f84a34d5d175d0ffe8c4653", size = 101372, upload-time = "2026-03-13T19:49:54.867Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/c3/fcc47102c63278af25ad57c93d97dc393f4dbc54c0117a29c78f2b96ec1e/boto3_stubs-1.42.73.tar.gz", hash = "sha256:36f625769b5505c4bc627f16244b98de9e10dae3ac36f1aa0f0ebe2f201dc138", size = 101373, upload-time = "2026-03-20T19:59:51.463Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/68/15/3ca5848917214a168134512a5b45f856a56e913659888947a052e02031b5/boto3_stubs-1.42.68-py3-none-any.whl", hash = "sha256:ed7f98334ef7b2377fa8532190e63dc2c6d1dc895e3d7cb3d6d1c83771b81bf6", size = 70011, upload-time = "2026-03-13T19:49:42.801Z" }, + { url = "https://files.pythonhosted.org/packages/4b/57/d570ba61a2a0c7fe0c8667e41269a0480293cb53e1786d6661a2bd827fc5/boto3_stubs-1.42.73-py3-none-any.whl", hash = "sha256:bd658429069d8215247fc3abc003220cd875c24ab6eda7b3405090408afaacdf", size = 70009, upload-time = "2026-03-20T19:59:43.786Z" }, ] [package.optional-dependencies] @@ -739,16 +663,16 @@ bedrock-runtime = [ [[package]] name = "botocore" -version = "1.42.68" +version = "1.42.73" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jmespath" }, { name = "python-dateutil" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3f/22/87502d5fbbfa8189406a617b30b1e2a3dc0ab2669f7268e91b385c1c1c7a/botocore-1.42.68.tar.gz", hash = "sha256:3951c69e12ac871dda245f48dac5c7dd88ea1bfdd74a8879ec356cf2874b806a", size = 14994514, upload-time = "2026-03-13T19:32:03.577Z" } +sdist = { url = "https://files.pythonhosted.org/packages/28/23/0c88ca116ef63b1ae77c901cd5d2095d22a8dbde9e80df74545db4a061b4/botocore-1.42.73.tar.gz", hash = "sha256:575858641e4949aaf2af1ced145b8524529edf006d075877af6b82ff96ad854c", size = 15008008, upload-time = "2026-03-20T19:39:40.082Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/3c/2a/1428f6594799780fe6ee845d8e6aeffafe026cd16a70c878684e2dcbbfc8/botocore-1.42.68-py3-none-any.whl", hash = "sha256:9df7da26374601f890e2f115bfa573d65bf15b25fe136bb3aac809f6145f52ab", size = 14668816, upload-time = "2026-03-13T19:31:58.572Z" }, + { url = "https://files.pythonhosted.org/packages/8e/65/971f3d55015f4d133a6ff3ad74cd39f4b8dd8f53f7775a3c2ad378ea5145/botocore-1.42.73-py3-none-any.whl", hash = 
"sha256:7b62e2a12f7a1b08eb7360eecd23bb16fe3b7ab7f5617cf91b25476c6f86a0fe", size = 14681861, upload-time = "2026-03-20T19:39:35.341Z" }, ] [[package]] @@ -1290,41 +1214,41 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.4" +version = "7.13.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" }, - { url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" }, - { url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" }, - { url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" }, - { url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" }, - { url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" }, - { url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" }, - { url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, 
upload-time = "2026-02-09T12:56:41.97Z" }, - { url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" }, - { url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" }, - { url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" }, - { url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" }, - { url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" }, - { url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" }, - { url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" }, - { url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" }, - { url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" }, - { url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" }, - { url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" }, - { url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" }, - { url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" }, - { url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" }, - { url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" }, - { url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" }, - { url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" }, - { url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" }, - { url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" }, - { url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" }, - { url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" }, - { url = 
"https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" }, + { url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" }, + { url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" }, + { url = "https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" }, + { url = "https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" }, + { url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time = "2026-03-17T10:30:29.481Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", 
size = 253843, upload-time = "2026-03-17T10:30:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" }, + { url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" }, + { url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" }, + { url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, ] [package.optional-dependencies] @@ -1744,8 +1668,8 @@ requires-dist = [ { name = "arize-phoenix-otel", specifier = "~=0.15.0" }, { name = "azure-identity", specifier = "==1.25.3" }, { name = "beautifulsoup4", specifier = "==4.14.3" }, - { name = "bleach", specifier = "~=6.2.0" }, - { name = "boto3", specifier = 
"==1.42.68" }, + { name = "bleach", specifier = "~=6.3.0" }, + { name = "boto3", specifier = "==1.42.73" }, { name = "bs4", specifier = "~=0.0.1" }, { name = "cachetools", specifier = "~=5.3.0" }, { name = "celery", specifier = "~=5.6.2" }, @@ -1763,7 +1687,7 @@ requires-dist = [ { name = "gevent", specifier = "~=25.9.1" }, { name = "gmpy2", specifier = "~=2.3.0" }, { name = "google-api-core", specifier = ">=2.19.1" }, - { name = "google-api-python-client", specifier = "==2.192.0" }, + { name = "google-api-python-client", specifier = "==2.193.0" }, { name = "google-auth", specifier = ">=2.47.0" }, { name = "google-auth-httplib2", specifier = "==0.3.0" }, { name = "google-cloud-aiplatform", specifier = ">=1.123.0" }, @@ -1776,7 +1700,7 @@ requires-dist = [ { name = "jsonschema", specifier = ">=4.25.1" }, { name = "langfuse", specifier = "~=2.51.3" }, { name = "langsmith", specifier = "~=0.7.16" }, - { name = "litellm", specifier = "==1.82.2" }, + { name = "litellm", specifier = "==1.82.6" }, { name = "markdown", specifier = "~=3.10.2" }, { name = "mlflow-skinny", specifier = ">=3.0.0" }, { name = "numpy", specifier = "~=1.26.4" }, @@ -1815,12 +1739,12 @@ requires-dist = [ { name = "pyyaml", specifier = "~=6.0.1" }, { name = "readabilipy", specifier = "~=0.3.0" }, { name = "redis", extras = ["hiredis"], specifier = "~=7.3.0" }, - { name = "resend", specifier = "~=2.23.0" }, + { name = "resend", specifier = "~=2.26.0" }, { name = "sendgrid", specifier = "~=6.12.3" }, - { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.54.0" }, + { name = "sentry-sdk", extras = ["flask"], specifier = "~=2.55.0" }, { name = "sqlalchemy", specifier = "~=2.0.29" }, { name = "sseclient-py", specifier = "~=1.9.0" }, - { name = "starlette", specifier = "==0.52.1" }, + { name = "starlette", specifier = "==1.0.0" }, { name = "tiktoken", specifier = "~=0.12.0" }, { name = "transformers", specifier = "~=5.3.0" }, { name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.21.5" }, @@ -1846,7 +1770,7 @@ dev = [ { name = "pyrefly", specifier = ">=0.55.0" }, { name = "pytest", specifier = "~=9.0.2" }, { name = "pytest-benchmark", specifier = "~=5.2.3" }, - { name = "pytest-cov", specifier = "~=7.0.0" }, + { name = "pytest-cov", specifier = "~=7.1.0" }, { name = "pytest-env", specifier = "~=1.6.0" }, { name = "pytest-mock", specifier = "~=3.15.1" }, { name = "pytest-timeout", specifier = ">=2.4.0" }, @@ -1912,7 +1836,7 @@ tools = [ { name = "nltk", specifier = "~=3.9.1" }, ] vdb = [ - { name = "alibabacloud-gpdb20160503", specifier = "~=3.8.0" }, + { name = "alibabacloud-gpdb20160503", specifier = "~=5.1.0" }, { name = "alibabacloud-tea-openapi", specifier = "~=0.4.3" }, { name = "chromadb", specifier = "==0.5.20" }, { name = "clickhouse-connect", specifier = "~=0.14.1" }, @@ -2501,7 +2425,7 @@ grpc = [ [[package]] name = "google-api-python-client" -version = "2.192.0" +version = "2.193.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -2510,9 +2434,9 @@ dependencies = [ { name = "httplib2" }, { name = "uritemplate" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/85/d8/489052a40935e45b9b5b3d6accc14b041360c1507bdc659c2e1a19aaa3ff/google_api_python_client-2.192.0.tar.gz", hash = "sha256:d48cfa6078fadea788425481b007af33fe0ab6537b78f37da914fb6fc112eb27", size = 14209505, upload-time = "2026-03-05T15:17:01.598Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/90/f4/e14b6815d3b1885328dd209676a3a4c704882743ac94e18ef0093894f5c8/google_api_python_client-2.193.0.tar.gz", hash = "sha256:8f88d16e89d11341e0a8b199cafde0fb7e6b44260dffb88d451577cbd1bb5d33", size = 14281006, upload-time = "2026-03-17T18:25:29.415Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/76/ec4128f00fefb9011635ae2abc67d7dacd05c8559378f8f05f0c907c38d8/google_api_python_client-2.192.0-py3-none-any.whl", hash = "sha256:63a57d4457cd97df1d63eb89c5fda03c5a50588dcbc32c0115dd1433c08f4b62", size = 14783267, upload-time = "2026-03-05T15:16:58.804Z" }, + { url = "https://files.pythonhosted.org/packages/f0/6d/fe75167797790a56d17799b75e1129bb93f7ff061efc7b36e9731bd4be2b/google_api_python_client-2.193.0-py3-none-any.whl", hash = "sha256:c42aa324b822109901cfecab5dc4fc3915d35a7b376835233c916c70610322db", size = 14856490, upload-time = "2026-03-17T18:25:26.608Z" }, ] [[package]] @@ -2548,7 +2472,7 @@ wheels = [ [[package]] name = "google-cloud-aiplatform" -version = "1.141.0" +version = "1.142.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser" }, @@ -2564,9 +2488,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ac/dc/1209c7aab43bd7233cf631165a3b1b4284d22fc7fe7387c66228d07868ab/google_cloud_aiplatform-1.141.0.tar.gz", hash = "sha256:e3b1cdb28865dd862aac9c685dfc5ac076488705aba0a5354016efadcddd59c6", size = 10152688, upload-time = "2026-03-10T22:20:08.692Z" } +sdist = { url = "https://files.pythonhosted.org/packages/41/0d/3063a0512d60cf18854a279e00ccb796429545464345ef821cf77cb93d05/google_cloud_aiplatform-1.142.0.tar.gz", hash = "sha256:87b49e002703dc14885093e9b264587db84222bef5f70f5a442d03f41beecdd1", size = 10207993, upload-time = "2026-03-20T22:49:13.797Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/fc/428af69a69ff2e477e7f5e12d227b31fe5790f1a8234aacd54297f49c836/google_cloud_aiplatform-1.141.0-py2.py3-none-any.whl", hash = "sha256:6bd25b4d514c40b8181ca703e1b313ad6d0454ab8006fc9907fb3e9f672f31d1", size = 8358409, upload-time = "2026-03-10T22:20:04.871Z" }, + { url = "https://files.pythonhosted.org/packages/59/8b/f29646d3fa940f0e38cfcc12137f4851856b50d7486a3c05103ebc78d82d/google_cloud_aiplatform-1.142.0-py2.py3-none-any.whl", hash = "sha256:17c91db9b613cbbafb2c36335b123686aeb2b4b8448be5134b565ae07165a39a", size = 8388991, upload-time = "2026-03-20T22:49:10.334Z" }, ] [[package]] @@ -2619,7 +2543,7 @@ wheels = [ [[package]] name = "google-cloud-storage" -version = "3.9.0" +version = "3.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -2629,9 +2553,9 @@ dependencies = [ { name = "google-resumable-media" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/b1/4f0798e88285b50dfc60ed3a7de071def538b358db2da468c2e0deecbb40/google_cloud_storage-3.9.0.tar.gz", hash = "sha256:f2d8ca7db2f652be757e92573b2196e10fbc09649b5c016f8b422ad593c641cc", size = 17298544, upload-time = "2026-02-02T13:36:34.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/e3/747759eebc72e420c25903d6bc231d0ceb110b66ac7e6ee3f350417152cd/google_cloud_storage-3.10.0.tar.gz", hash = "sha256:1aeebf097c27d718d84077059a28d7e87f136f3700212215f1ceeae1d1c5d504", size = 17309829, upload-time = "2026-03-18T15:54:11.875Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/46/0b/816a6ae3c9fd096937d2e5f9670558908811d57d59ddf69dd4b83b326fd1/google_cloud_storage-3.9.0-py3-none-any.whl", hash = "sha256:2dce75a9e8b3387078cbbdad44757d410ecdb916101f8ba308abf202b6968066", size = 321324, upload-time = "2026-02-02T13:36:32.271Z" }, + { url = "https://files.pythonhosted.org/packages/29/e2/d58442f4daee5babd9255cf492a1f3d114357164072f8339a22a3ad460a2/google_cloud_storage-3.10.0-py3-none-any.whl", hash = "sha256:0072e7783b201e45af78fd9779894cdb6bec2bf922ee932f3fcc16f8bce9b9a3", size = 324382, upload-time = "2026-03-18T15:54:10.091Z" }, ] [[package]] @@ -3460,7 +3384,7 @@ wheels = [ [[package]] name = "langsmith" -version = "0.7.17" +version = "0.7.22" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, @@ -3473,9 +3397,9 @@ dependencies = [ { name = "xxhash" }, { name = "zstandard" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/71/79/81041dde07a974e728db7def23c1c7255950b8874102925cc77093bc847d/langsmith-0.7.17.tar.gz", hash = "sha256:6c1b0c2863cdd6636d2a58b8d5b1b80060703d98cac2593f4233e09ac25b5a9d", size = 1132228, upload-time = "2026-03-12T20:41:10.808Z" } +sdist = { url = "https://files.pythonhosted.org/packages/be/2a/2d5e6c67396fd228670af278c4da7bd6db2b8d11deaf6f108490b6d3f561/langsmith-0.7.22.tar.gz", hash = "sha256:35bfe795d648b069958280760564632fd28ebc9921c04f3e209c0db6a6c7dc04", size = 1134923, upload-time = "2026-03-19T22:45:23.492Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/34/31/62689d57f4d25792bd6a3c05c868771899481be2f3e31f9e71d31e1ac4ab/langsmith-0.7.17-py3-none-any.whl", hash = "sha256:cbec10460cb6c6ecc94c18c807be88a9984838144ae6c4693c9f859f378d7d02", size = 359147, upload-time = "2026-03-12T20:41:08.758Z" }, + { url = "https://files.pythonhosted.org/packages/1a/94/1f5d72655ab6534129540843776c40eff757387b88e798d8b3bf7e313fd4/langsmith-0.7.22-py3-none-any.whl", hash = "sha256:6e9d5148314d74e86748cb9d3898632cad0320c9323d95f70f969e5bc078eee4", size = 359927, upload-time = "2026-03-19T22:45:21.603Z" }, ] [[package]] @@ -3523,7 +3447,7 @@ wheels = [ [[package]] name = "litellm" -version = "1.82.2" +version = "1.82.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "aiohttp" }, @@ -3539,9 +3463,9 @@ dependencies = [ { name = "tiktoken" }, { name = "tokenizers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/12/010a86643f12ac0b004032d5927c260094299a84ed38b5ed20a8f8c7e3c4/litellm-1.82.2.tar.gz", hash = "sha256:f5f4c4049f344a88bf80b2e421bb927807687c99624515d7ff4152d533ec9dcb", size = 17353218, upload-time = "2026-03-13T21:24:24.5Z" } +sdist = { url = "https://files.pythonhosted.org/packages/29/75/1c537aa458426a9127a92bc2273787b2f987f4e5044e21f01f2eed5244fd/litellm-1.82.6.tar.gz", hash = "sha256:2aa1c2da21fe940c33613aa447119674a3ad4d2ad5eb064e4d5ce5ee42420136", size = 17414147, upload-time = "2026-03-22T06:36:00.452Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/e4/87e3ca82a8bf6e6bfffb42a539a1350dd6ced1b7169397bd439ba56fde10/litellm-1.82.2-py3-none-any.whl", hash = "sha256:641ed024774fa3d5b4dd9347f0efb1e31fa422fba2a6500aabedee085d1194cb", size = 15524224, upload-time = "2026-03-13T21:24:21.288Z" }, + { url = "https://files.pythonhosted.org/packages/02/6c/5327667e6dbe9e98cbfbd4261c8e91386a52e38f41419575854248bbab6a/litellm-1.82.6-py3-none-any.whl", hash = "sha256:164a3ef3e19f309e3cabc199bef3d2045212712fefdfa25fc7f75884a5b5b205", size = 15591595, upload-time = "2026-03-22T06:35:56.795Z" }, ] 
[[package]] @@ -4538,7 +4462,7 @@ wheels = [ [[package]] name = "opik" -version = "1.10.39" +version = "1.10.45" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "boto3-stubs", extra = ["bedrock-runtime"] }, @@ -4557,9 +4481,9 @@ dependencies = [ { name = "tqdm" }, { name = "uuid6" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b5/0f/b1e00a18cac16b4f36bf6cecc2de962fda810a9416d1159c48f46b81f5ec/opik-1.10.39.tar.gz", hash = "sha256:4d808eb2137070fc5d92a3bed3c3100d9cccfb35f4f0b71ea9990733f293dbb2", size = 780312, upload-time = "2026-03-12T14:08:25.746Z" } +sdist = { url = "https://files.pythonhosted.org/packages/85/17/edea6308347cec62e6828de7c573c596559c502b54fa4f0c88a52e2e81f5/opik-1.10.45.tar.gz", hash = "sha256:d8d8627ba03d12def46965e03d58f611daaf5cf878b3d087c53fe1159788c140", size = 789876, upload-time = "2026-03-20T11:35:12.457Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/24/0f4404907a98b4aec4508504570a78a61a3a8b5e451c67326632695ba8e6/opik-1.10.39-py3-none-any.whl", hash = "sha256:a72d735b9afac62e5262294b2f704aca89ec31f5c9beda17504815f7423870c3", size = 1317833, upload-time = "2026-03-12T14:08:23.954Z" }, + { url = "https://files.pythonhosted.org/packages/b7/17/150e9eecfa28cb23f7a0bfe83ae1486a11022b97fe6d12328b455784658d/opik-1.10.45-py3-none-any.whl", hash = "sha256:e8050d9e5e0d92ff587f156eacbdd02099897f39cfe79a98380b6c8ae9906b95", size = 1337714, upload-time = "2026-03-20T11:35:10.237Z" }, ] [[package]] @@ -5275,15 +5199,15 @@ wheels = [ [[package]] name = "pydantic-extra-types" -version = "2.11.0" +version = "2.11.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/fd/35/2fee58b1316a73e025728583d3b1447218a97e621933fc776fb8c0f2ebdd/pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e", size = 157226, upload-time = "2025-12-31T16:18:27.944Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/71/dba38ee2651f84f7842206adbd2233d8bbdb59fb85e9fa14232486a8c471/pydantic_extra_types-2.11.1.tar.gz", hash = "sha256:46792d2307383859e923d8fcefa82108b1a141f8a9c0198982b3832ab5ef1049", size = 172002, upload-time = "2026-03-16T08:08:03.92Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/17/fabd56da47096d240dd45ba627bead0333b0cf0ee8ada9bec579287dadf3/pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6", size = 74296, upload-time = "2025-12-31T16:18:26.38Z" }, + { url = "https://files.pythonhosted.org/packages/17/c1/3226e6d7f5a4f736f38ac11a6fbb262d701889802595cdb0f53a885ac2e0/pydantic_extra_types-2.11.1-py3-none-any.whl", hash = "sha256:1722ea2bddae5628ace25f2aa685b69978ef533123e5638cfbddb999e0100ec1", size = 79526, upload-time = "2026-03-16T08:08:02.533Z" }, ] [[package]] @@ -5523,16 +5447,16 @@ wheels = [ [[package]] name = "pytest-cov" -version = "7.0.0" +version = "7.1.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, ] [[package]] @@ -5968,15 +5892,15 @@ wheels = [ [[package]] name = "resend" -version = "2.23.0" +version = "2.26.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/a3/20003e7d14604fef778bd30c69604df3560a657a95a5c29a9688610759b6/resend-2.23.0.tar.gz", hash = "sha256:df613827dcc40eb1c9de2e5ff600cd4081b89b206537dec8067af1a5016d23c7", size = 31416, upload-time = "2026-02-23T19:01:57.603Z" } +sdist = { url = "https://files.pythonhosted.org/packages/07/ff/6a4e5e758fc2145c6a7d8563934d8ee24bf96a0212d7ec7d1af1f155bb74/resend-2.26.0.tar.gz", hash = "sha256:957a6a59dc597ce27fbd6d5383220dd9cc497fab99d4f3d775c8a42a449a569e", size = 36238, upload-time = "2026-03-20T22:49:09.728Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e3/35/64df775b8cd95e89798fd7b1b7fcafa975b6b09f559c10c0650e65b33580/resend-2.23.0-py2.py3-none-any.whl", hash = "sha256:eca6d28a1ffd36c1fc489fa83cb6b511f384792c9f07465f7c92d96c8b4d5636", size = 52599, upload-time = "2026-02-23T19:01:55.962Z" }, + { url = "https://files.pythonhosted.org/packages/16/c2/f88d3299d97aa1d36a923d0846fe185fcf5355ca898c954b2e5a79f090b5/resend-2.26.0-py2.py3-none-any.whl", hash = "sha256:5e25a804a84a68df504f2ade5369ac37e0139e37788a1f20b66c88696595b4bc", size = 57699, upload-time = "2026-03-20T22:49:08.354Z" }, ] [[package]] @@ -6057,27 +5981,27 @@ wheels = [ [[package]] name = "ruff" -version = "0.15.6" +version = "0.15.7" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, - { url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, - { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, - { url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, - { url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, - { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, - { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, - { url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, - { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, - { url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, - { url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, - { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, - { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, - { url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, - { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, - { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, + { url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" }, + { url = "https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" }, + { url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" }, + { url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" }, + { url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = 
"2026-03-19T16:26:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" }, + { url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" }, + { url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" }, + { url = "https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" }, + { url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" }, ] [[package]] @@ -6116,14 +6040,14 @@ wheels = [ [[package]] name = "scipy-stubs" -version = "1.17.1.2" +version = "1.17.1.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "optype", extra = ["numpy"] }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c7/ab/43f681ffba42f363b7ed6b767fd215d1e26006578214ff8330586a11bf95/scipy_stubs-1.17.1.2.tar.gz", hash = 
"sha256:2ecadc8c87a3b61aaf7379d6d6b10f1038a829c53b9efe5b174fb97fc8b52237", size = 388354, upload-time = "2026-03-15T22:33:20.449Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/59/59c6cc3f9970154b9ed6b1aff42a0185cdd60cef54adc0404b9e77972221/scipy_stubs-1.17.1.3.tar.gz", hash = "sha256:5eb87a8d23d726706259b012ebe76a4a96a9ae9e141fc59bf55fc8eac2ed9e0f", size = 392185, upload-time = "2026-03-22T22:11:58.34Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/0b/ec4fe720c1202d9df729a3e9d9b7e4d2da9f6e7f28bd2877b7d0769f4f75/scipy_stubs-1.17.1.2-py3-none-any.whl", hash = "sha256:f19e8f5273dbe3b7ee6a9554678c3973b9695fa66b91f29206d00830a1536c06", size = 594377, upload-time = "2026-03-15T22:33:18.684Z" }, + { url = "https://files.pythonhosted.org/packages/2c/d4/94304532c0a75a55526119043dd44a9bd1541a21e14483cbb54261c527d2/scipy_stubs-1.17.1.3-py3-none-any.whl", hash = "sha256:7b91d3f05aa47da06fbca14eb6c5bb4c28994e9245fd250cc847e375bab31297", size = 597933, upload-time = "2026-03-22T22:11:56.525Z" }, ] [[package]] @@ -6142,15 +6066,15 @@ wheels = [ [[package]] name = "sentry-sdk" -version = "2.54.0" +version = "2.55.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c8/e9/2e3a46c304e7fa21eaa70612f60354e32699c7102eb961f67448e222ad7c/sentry_sdk-2.54.0.tar.gz", hash = "sha256:2620c2575128d009b11b20f7feb81e4e4e8ae08ec1d36cbc845705060b45cc1b", size = 413813, upload-time = "2026-03-02T15:12:41.355Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/b8/285293dc60fc198fffc3fcdbc7c6d4e646e0f74e61461c355d40faa64ceb/sentry_sdk-2.55.0.tar.gz", hash = "sha256:3774c4d8820720ca4101548131b9c162f4c9426eb7f4d24aca453012a7470f69", size = 424505, upload-time = "2026-03-17T14:15:51.707Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/53/39/be412cc86bc6247b8f69e9383d7950711bd86f8d0a4a4b0fe8fad685bc21/sentry_sdk-2.54.0-py2.py3-none-any.whl", hash = "sha256:fd74e0e281dcda63afff095d23ebcd6e97006102cdc8e78a29f19ecdf796a0de", size = 439198, upload-time = "2026-03-02T15:12:39.546Z" }, + { url = "https://files.pythonhosted.org/packages/9a/66/20465097782d7e1e742d846407ea7262d338c6e876ddddad38ca8907b38f/sentry_sdk-2.55.0-py2.py3-none-any.whl", hash = "sha256:97026981cb15699394474a196b88503a393cbc58d182ece0d3abe12b9bd978d4", size = 449284, upload-time = "2026-03-17T14:15:49.604Z" }, ] [package.optional-dependencies] @@ -6386,15 +6310,15 @@ wheels = [ [[package]] name = "starlette" -version = "0.52.1" +version = "1.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", 
size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, + { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, ] [[package]] @@ -6803,11 +6727,11 @@ wheels = [ [[package]] name = "types-cachetools" -version = "6.2.0.20251022" +version = "6.2.0.20260317" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/3b/a8/f9bcc7f1be63af43ef0170a773e2d88817bcc7c9d8769f2228c802826efe/types_cachetools-6.2.0.20251022.tar.gz", hash = "sha256:f1d3c736f0f741e89ec10f0e1b0138625023e21eb33603a930c149e0318c0cef", size = 9608, upload-time = "2025-10-22T03:03:58.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/7f/16a4d8344c28193a5a74358028c2d2f753f0d9658dd98b9e1967c50045a2/types_cachetools-6.2.0.20260317.tar.gz", hash = "sha256:6d91855bcc944665897c125e720aa3c80aace929b77a64e796343701df4f61c6", size = 9812, upload-time = "2026-03-17T04:06:32.007Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/2d/8d821ed80f6c2c5b427f650bf4dc25b80676ed63d03388e4b637d2557107/types_cachetools-6.2.0.20251022-py3-none-any.whl", hash = "sha256:698eb17b8f16b661b90624708b6915f33dbac2d185db499ed57e4997e7962cad", size = 9341, upload-time = "2025-10-22T03:03:57.036Z" }, + { url = "https://files.pythonhosted.org/packages/17/9a/b00b23054934c4d569c19f7278c4fb32746cd36a64a175a216d3073a4713/types_cachetools-6.2.0.20260317-py3-none-any.whl", hash = "sha256:92fa9bc50e4629e31fca67ceb3fb1de71791e314fa16c0a0d2728724dc222c8b", size = 9346, upload-time = "2026-03-17T04:06:31.184Z" }, ] [[package]] @@ -6851,11 +6775,11 @@ wheels = [ [[package]] name = "types-docutils" -version = "0.22.3.20260316" +version = "0.22.3.20260322" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9f/27/a7f16b3a2fad0a4ddd85a668319f9a1d0311c4bd9578894f6471c7e6c788/types_docutils-0.22.3.20260316.tar.gz", hash = "sha256:8ef27d565b9831ff094fe2eac75337a74151013e2d21ecabd445c2955f891564", size = 57263, upload-time = "2026-03-16T04:29:12.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/44/bb/243a87fc1605a4a94c2c343d6dbddbf0d7ef7c0b9550f360b8cda8e82c39/types_docutils-0.22.3.20260322.tar.gz", hash = "sha256:e2450bb997283c3141ec5db3e436b91f0aa26efe35eb9165178ca976ccb4930b", size = 57311, upload-time = "2026-03-22T04:08:44.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/70/60/c1f22b7cfc4837d5419e5a2d8702c7d65f03343f866364b71cccd8a73b79/types_docutils-0.22.3.20260316-py3-none-any.whl", hash = "sha256:083c7091b8072c242998ec51da1bf1492f0332387da81c3b085efbf5ca754c7d", size = 91968, upload-time = "2026-03-16T04:29:11.114Z" }, + { url = "https://files.pythonhosted.org/packages/c6/4a/22c090cd4615a16917dff817cbe7c5956da376c961e024c241cd962d2c3d/types_docutils-0.22.3.20260322-py3-none-any.whl", hash = "sha256:681d4510ce9b80a0c6a593f0f9843d81f8caa786db7b39ba04d9fd5480ac4442", size = 91978, upload-time = "2026-03-22T04:08:43.117Z" }, ] [[package]] @@ -6885,15 +6809,15 @@ wheels = [ [[package]] name = "types-gevent" -version = "25.9.0.20251228" +version = "25.9.0.20260322" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "types-greenlet" }, { name = "types-psutil" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/06/85/c5043c4472f82c8ee3d9e0673eb4093c7d16770a26541a137a53a1d096f6/types_gevent-25.9.0.20251228.tar.gz", hash = "sha256:423ef9891d25c5a3af236c3e9aace4c444c86ff773fe13ef22731bc61d59abef", size = 38063, upload-time = "2025-12-28T03:28:28.651Z" } +sdist = { url = "https://files.pythonhosted.org/packages/34/f0/14a99ddcaa69b559fa7cec8c9de880b792bebb0b848ae865d94ea9058533/types_gevent-25.9.0.20260322.tar.gz", hash = "sha256:91257920845762f09753c08aa20fad1743ac13d2de8bcf23f4b8fe967d803732", size = 38241, upload-time = "2026-03-22T04:08:55.213Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c8/b7/a2d6b652ab5a26318b68cafd58c46fafb9b15c5313d2d76a70b838febb4b/types_gevent-25.9.0.20251228-py3-none-any.whl", hash = "sha256:e2e225af4fface9241c16044983eb2fc3993f2d13d801f55c2932848649b7f2f", size = 55486, upload-time = "2025-12-28T03:28:27.382Z" }, + { url = "https://files.pythonhosted.org/packages/89/0f/964440b57eb4ddb4aca03479a4093852e1ce79010d1c5967234e6f5d6bd9/types_gevent-25.9.0.20260322-py3-none-any.whl", hash = "sha256:21b3c269b3a20ecb0e4668289c63b97d21694d84a004ab059c1e32ab970eacc2", size = 55500, upload-time = "2026-03-22T04:08:54.103Z" }, ] [[package]] @@ -6976,11 +6900,11 @@ wheels = [ [[package]] name = "types-openpyxl" -version = "3.1.5.20260316" +version = "3.1.5.20260322" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/38/32f8ee633dd66ca6d52b8853b9fd45dc3869490195a6ed435d5c868b9c2d/types_openpyxl-3.1.5.20260316.tar.gz", hash = "sha256:081dda9427ea1141e5649e3dcf630e7013a4cf254a5862a7e0a3f53c123b7ceb", size = 101318, upload-time = "2026-03-16T04:29:05.004Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/bf/15240de4d68192d2a1f385ef2f6f1ecb29b85d2f3791dd2e2d5b980be30f/types_openpyxl-3.1.5.20260322.tar.gz", hash = "sha256:a61d66ebe1e49697853c6db8e0929e1cda2c96755e71fb676ed7fc48dfdcf697", size = 101325, upload-time = "2026-03-22T04:08:40.426Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/df/b87ae6226ed7cc84b9e43119c489c7f053a9a25e209e0ebb5d84bc36fa37/types_openpyxl-3.1.5.20260316-py3-none-any.whl", hash = "sha256:38e7e125df520fb7eb72cb1129c9f024eb99ef9564aad2c27f68f080c26bcf2d", size = 166084, upload-time = "2026-03-16T04:29:03.657Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b4/c14191b30bcb266365b124b2bb4e67ecd68425a78ba77ee026f33667daa9/types_openpyxl-3.1.5.20260322-py3-none-any.whl", hash = "sha256:2f515f0b0bbfb04bfb587de34f7522d90b5151a8da7bbbd11ecec4ca40f64238", size = 166102, upload-time = "2026-03-22T04:08:39.174Z" }, ] [[package]] @@ -7055,11 +6979,11 @@ wheels = [ [[package]] name = "types-python-dateutil" -version = "2.9.0.20260305" +version = "2.9.0.20260323" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/c7/025c624f347e10476b439a6619a95f1d200250ea88e7ccea6e09e48a7544/types_python_dateutil-2.9.0.20260305.tar.gz", hash = "sha256:389717c9f64d8f769f36d55a01873915b37e97e52ce21928198d210fbd393c8b", size = 16885, upload-time = "2026-03-05T04:00:47.409Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e9/02/f72df9ef5ffc4f959b83cb80c8aa03eb8718a43e563ecd99ccffe265fa89/types_python_dateutil-2.9.0.20260323.tar.gz", hash = "sha256:a107aef5841db41ace381dbbbd7e4945220fc940f7a72172a0be5a92d9ab7164", size = 16897, upload-time = "2026-03-23T04:15:14.829Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/0a/77/8c0d1ec97f0d9707ad3d8fa270ab8964e7b31b076d2f641c94987395cc75/types_python_dateutil-2.9.0.20260305-py3-none-any.whl", hash = "sha256:a3be9ca444d38cadabd756cfbb29780d8b338ae2a3020e73c266a83cc3025dd7", size = 18419, upload-time = "2026-03-05T04:00:46.392Z" }, + { url = "https://files.pythonhosted.org/packages/92/c1/b661838b97453e699a215451f2e22cee750eaaf4ea4619b34bdaf01221a4/types_python_dateutil-2.9.0.20260323-py3-none-any.whl", hash = "sha256:a23a50a07f6eb87e729d4cb0c2eb511c81761eeb3f505db2c1413be94aae8335", size = 18433, upload-time = "2026-03-23T04:15:13.683Z" }, ] [[package]] @@ -7073,11 +6997,11 @@ wheels = [ [[package]] name = "types-pywin32" -version = "311.0.0.20260316" +version = "311.0.0.20260323" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/17/a8/b4652002a854fcfe5d272872a0ae2d5df0e9dc482e1a6dfb5e97b905b76f/types_pywin32-311.0.0.20260316.tar.gz", hash = "sha256:c136fa489fe6279a13bca167b750414e18d657169b7cf398025856dc363004e8", size = 329956, upload-time = "2026-03-16T04:28:57.366Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b5/cc/f03ddb7412ac2fc2238358b617c2d5919ba96812dff8d3081f3b2754bb83/types_pywin32-311.0.0.20260323.tar.gz", hash = "sha256:2e8dc6a59fedccbc51b241651ce1e8aa58488934f517debf23a9c6d0ff329b4b", size = 332263, upload-time = "2026-03-23T04:15:20.004Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f0/83/704698d93788cf1c2f5e236eae2b37f1b2152ef84dc66b4b83f6c7487b76/types_pywin32-311.0.0.20260316-py3-none-any.whl", hash = "sha256:abb643d50012386d697af49384cc0e6e475eab76b0ca2a7f93d480d0862b3692", size = 392959, upload-time = "2026-03-16T04:28:56.104Z" }, + { url = "https://files.pythonhosted.org/packages/dc/82/d786d5d8b846e3cbe1ee52da8945560b111c789b42c3771b2129b312ab94/types_pywin32-311.0.0.20260323-py3-none-any.whl", hash = "sha256:2f2b03fc72ae77ccbb0ee258da0f181c3a38bd8602f6e332e42587b3b0d5f095", size = 395435, upload-time = "2026-03-23T04:15:18.76Z" }, ] [[package]] @@ -7173,16 +7097,16 @@ wheels = [ [[package]] name = "types-tensorflow" -version = "2.18.0.20260224" +version = "2.18.0.20260322" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "numpy" }, { name = "types-protobuf" }, { name = "types-requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/af/cb/4914c2fbc1cf8a8d1ef2a7c727bb6f694879be85edeee880a0c88e696af8/types_tensorflow-2.18.0.20260224.tar.gz", hash = "sha256:9b0ccc91c79c88791e43d3f80d6c879748fa0361409c5ff23c7ffe3709be00f2", size = 258786, upload-time = "2026-02-24T04:06:45.613Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4a/cb/81dfaa2680031a6e087bcdfaf1c0556371098e229aee541e21c81a381065/types_tensorflow-2.18.0.20260322.tar.gz", hash = "sha256:135dc6ca06cc647a002e1bca5c5c99516fde51efd08e46c48a9b1916fc5df07f", size = 259030, upload-time = "2026-03-22T04:09:14.069Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d4/1d/a1c3c60f0eb1a204500dbdc66e3d18aafabc86ad07a8eca71ea05bc8c5a8/types_tensorflow-2.18.0.20260224-py3-none-any.whl", hash = "sha256:6a25f5f41f3e06f28c1f65c6e09f484d4ba0031d6d8df83a39df9d890245eefc", size = 329746, upload-time = "2026-02-24T04:06:44.4Z" }, + { url = "https://files.pythonhosted.org/packages/5b/0c/a178061450b640e53577e2c423ad22bf5d3f692f6bfeeb12156d02b531ef/types_tensorflow-2.18.0.20260322-py3-none-any.whl", hash = "sha256:d8776b6daacdb279e64f105f9dcbc0b8e3544b9a2f2eb71ec6ea5955081f65e6", size = 329771, upload-time 
= "2026-03-22T04:09:12.844Z" }, ] [[package]] diff --git a/docker/.env.example b/docker/.env.example index 9d6cd65318..8cf77cf56b 100644 --- a/docker/.env.example +++ b/docker/.env.example @@ -771,6 +771,9 @@ BAIDU_VECTOR_DB_SHARD=1 BAIDU_VECTOR_DB_REPLICAS=3 BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER=DEFAULT_ANALYZER BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE=COARSE_MODE +BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT=500 +BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO=0.05 +BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS=300 # VikingDB configurations, only available when VECTOR_STORE is `vikingdb` VIKINGDB_ACCESS_KEY=your-ak diff --git a/docker/dify-env-sync.py b/docker/dify-env-sync.py new file mode 100755 index 0000000000..d7c762748c --- /dev/null +++ b/docker/dify-env-sync.py @@ -0,0 +1,440 @@ +#!/usr/bin/env python3 + +# ================================================================ +# Dify Environment Variables Synchronization Script +# +# Features: +# - Synchronize latest settings from .env.example to .env +# - Preserve custom settings in existing .env +# - Add new environment variables +# - Detect removed environment variables +# - Create backup files +# ================================================================ + +import argparse +import re +import shutil +import sys +from datetime import datetime +from pathlib import Path + +# ANSI color codes +RED = "\033[0;31m" +GREEN = "\033[0;32m" +YELLOW = "\033[1;33m" +BLUE = "\033[0;34m" +NC = "\033[0m" # No Color + + +def supports_color() -> bool: + """Return True if the terminal supports ANSI color codes.""" + return hasattr(sys.stdout, "isatty") and sys.stdout.isatty() + + +def log_info(message: str) -> None: + """Print an informational message in blue.""" + if supports_color(): + print(f"{BLUE}[INFO]{NC} {message}") + else: + print(f"[INFO] {message}") + + +def log_success(message: str) -> None: + """Print a success message in green.""" + if supports_color(): + print(f"{GREEN}[SUCCESS]{NC} {message}") + else: + print(f"[SUCCESS] {message}") + + +def log_warning(message: str) -> None: + """Print a warning message in yellow to stderr.""" + if supports_color(): + print(f"{YELLOW}[WARNING]{NC} {message}", file=sys.stderr) + else: + print(f"[WARNING] {message}", file=sys.stderr) + + +def log_error(message: str) -> None: + """Print an error message in red to stderr.""" + if supports_color(): + print(f"{RED}[ERROR]{NC} {message}", file=sys.stderr) + else: + print(f"[ERROR] {message}", file=sys.stderr) + + +def parse_env_file(path: Path) -> dict[str, str]: + """Parse an .env-style file and return a mapping of key to raw value. + + Lines that are blank or start with '#' (after optional whitespace) are + skipped. Only lines containing '=' are considered variable definitions. + + Args: + path: Path to the .env file to parse. + + Returns: + Ordered dict mapping variable name to its value string. + """ + variables: dict[str, str] = {} + with path.open(encoding="utf-8") as fh: + for line in fh: + line = line.rstrip("\n") + # Skip blank lines and comment lines + stripped = line.strip() + if not stripped or stripped.startswith("#"): + continue + if "=" not in line: + continue + key, _, value = line.partition("=") + key = key.strip() + if key: + variables[key] = value.strip() + return variables + + +def check_files(work_dir: Path) -> None: + """Verify required files exist; create .env from .env.example if absent. + + Args: + work_dir: Directory that must contain .env.example (and optionally .env). 
+
+    Raises:
+        SystemExit: If .env.example does not exist.
+    """
+    log_info("Checking required files...")
+
+    example_file = work_dir / ".env.example"
+    env_file = work_dir / ".env"
+
+    if not example_file.exists():
+        log_error(".env.example file not found")
+        sys.exit(1)
+
+    if not env_file.exists():
+        log_warning(".env file does not exist. Creating from .env.example.")
+        shutil.copy2(example_file, env_file)
+        log_success(".env file created")
+
+    log_success("Required files verified")
+
+
+def create_backup(work_dir: Path) -> None:
+    """Create a timestamped backup of the current .env file.
+
+    Backups are placed in ``<work_dir>/env-backup/`` with the filename
+    ``.env.backup_<timestamp>``.
+
+    Args:
+        work_dir: Directory containing the .env file to back up.
+    """
+    env_file = work_dir / ".env"
+    if not env_file.exists():
+        return
+
+    backup_dir = work_dir / "env-backup"
+    if not backup_dir.exists():
+        backup_dir.mkdir(parents=True)
+        log_info(f"Created backup directory: {backup_dir}")
+
+    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+    backup_file = backup_dir / f".env.backup_{timestamp}"
+    shutil.copy2(env_file, backup_file)
+    log_success(f"Backed up existing .env to {backup_file}")
+
+
+def analyze_value_change(current: str, recommended: str) -> str | None:
+    """Analyse what kind of change occurred between two env values.
+
+    Args:
+        current: Value currently set in .env.
+        recommended: Value present in .env.example.
+
+    Returns:
+        A human-readable description string, or None when no analysis applies.
+    """
+    use_colors = supports_color()
+
+    def colorize(color: str, text: str) -> str:
+        return f"{color}{text}{NC}" if use_colors else text
+
+    if not current and recommended:
+        return colorize(RED, " -> Setting from empty to recommended value")
+    if current and not recommended:
+        return colorize(RED, " -> Recommended value changed to empty")
+
+    # Numeric comparison
+    if re.fullmatch(r"\d+", current) and re.fullmatch(r"\d+", recommended):
+        cur_int, rec_int = int(current), int(recommended)
+        if cur_int < rec_int:
+            return colorize(BLUE, f" -> Numeric increase ({current} < {recommended})")
+        if cur_int > rec_int:
+            return colorize(YELLOW, f" -> Numeric decrease ({current} > {recommended})")
+        return None
+
+    # Boolean comparison
+    if current.lower() in {"true", "false"} and recommended.lower() in {"true", "false"}:
+        if current.lower() != recommended.lower():
+            return colorize(BLUE, f" -> Boolean value change ({current} -> {recommended})")
+        return None
+
+    # URL / endpoint
+    if current.startswith(("http://", "https://")) or recommended.startswith(("http://", "https://")):
+        return colorize(BLUE, " -> URL/endpoint change")
+
+    # File path
+    if current.startswith("/") or recommended.startswith("/"):
+        return colorize(BLUE, " -> File path change")
+
+    # String length
+    if len(current) != len(recommended):
+        return colorize(YELLOW, f" -> String length change ({len(current)} -> {len(recommended)} characters)")
+
+    return None
+
+
+def detect_differences(env_vars: dict[str, str], example_vars: dict[str, str]) -> dict[str, tuple[str, str]]:
+    """Find variables whose values differ between .env and .env.example.
+
+    Only variables present in *both* files are compared; new or removed
+    variables are handled by separate functions.
+
+    Args:
+        env_vars: Parsed key/value pairs from .env.
+        example_vars: Parsed key/value pairs from .env.example.
+
+    Returns:
+        Mapping of key -> (env_value, example_value) for every key whose
+        values differ.
+ """ + log_info("Detecting differences between .env and .env.example...") + + diffs: dict[str, tuple[str, str]] = {} + for key, example_value in example_vars.items(): + if key in env_vars and env_vars[key] != example_value: + diffs[key] = (env_vars[key], example_value) + + if diffs: + log_success(f"Detected differences in {len(diffs)} environment variables") + show_differences_detail(diffs) + else: + log_info("No differences detected") + + return diffs + + +def show_differences_detail(diffs: dict[str, tuple[str, str]]) -> None: + """Print a formatted table of differing environment variables. + + Args: + diffs: Mapping of key -> (current_value, recommended_value). + """ + use_colors = supports_color() + + log_info("") + log_info("=== Environment Variable Differences ===") + + if not diffs: + log_info("No differences to display") + return + + for count, (key, (env_value, example_value)) in enumerate(diffs.items(), start=1): + print() + if use_colors: + print(f"{YELLOW}[{count}] {key}{NC}") + print(f" {GREEN}.env (current){NC} : {env_value}") + print(f" {BLUE}.env.example (recommended){NC} : {example_value}") + else: + print(f"[{count}] {key}") + print(f" .env (current) : {env_value}") + print(f" .env.example (recommended) : {example_value}") + + analysis = analyze_value_change(env_value, example_value) + if analysis: + print(analysis) + + print() + log_info("=== Difference Analysis Complete ===") + log_info("Note: Consider changing to the recommended values above.") + log_info("Current implementation preserves .env values.") + print() + + +def detect_removed_variables(env_vars: dict[str, str], example_vars: dict[str, str]) -> list[str]: + """Identify variables present in .env but absent from .env.example. + + Args: + env_vars: Parsed key/value pairs from .env. + example_vars: Parsed key/value pairs from .env.example. + + Returns: + Sorted list of variable names that no longer appear in .env.example. + """ + log_info("Detecting removed environment variables...") + + removed = sorted(set(env_vars) - set(example_vars)) + + if removed: + log_warning("The following environment variables have been removed from .env.example:") + for var in removed: + log_warning(f" - {var}") + log_warning("Consider manually removing these variables from .env") + else: + log_success("No removed environment variables found") + + return removed + + +def sync_env_file(work_dir: Path, env_vars: dict[str, str], diffs: dict[str, tuple[str, str]]) -> None: + """Rewrite .env based on .env.example while preserving custom values. + + The output file follows the exact line structure of .env.example + (preserving comments, blank lines, and ordering). For every variable + that exists in .env with a different value from the example, the + current .env value is kept. Variables that are new in .env.example + (not present in .env at all) are added with the example's default. + + Args: + work_dir: Directory containing .env and .env.example. + env_vars: Parsed key/value pairs from the original .env. + diffs: Keys whose .env values differ from .env.example (to preserve). 
+ """ + log_info("Starting partial synchronization of .env file...") + + example_file = work_dir / ".env.example" + new_env_file = work_dir / ".env.new" + + # Keys whose current .env value should override the example default + preserved_keys: set[str] = set(diffs.keys()) + + preserved_count = 0 + updated_count = 0 + + env_var_pattern = re.compile(r"^([A-Za-z_][A-Za-z0-9_]*)\s*=") + + with example_file.open(encoding="utf-8") as src, new_env_file.open("w", encoding="utf-8") as dst: + for line in src: + raw_line = line.rstrip("\n") + match = env_var_pattern.match(raw_line) + if match: + key = match.group(1) + if key in preserved_keys: + # Write the preserved value from .env + dst.write(f"{key}={env_vars[key]}\n") + log_info(f" Preserved: {key} (.env value)") + preserved_count += 1 + else: + # Use the example value (covers new vars and unchanged ones) + dst.write(line if line.endswith("\n") else raw_line + "\n") + updated_count += 1 + else: + # Blank line, comment, or non-variable line — keep as-is + dst.write(line if line.endswith("\n") else raw_line + "\n") + + # Atomically replace the original .env + try: + new_env_file.replace(work_dir / ".env") + except OSError as exc: + log_error(f"Failed to replace .env file: {exc}") + new_env_file.unlink(missing_ok=True) + sys.exit(1) + + log_success("Successfully created new .env file") + log_success("Partial synchronization of .env file completed") + log_info(f" Preserved .env values: {preserved_count}") + log_info(f" Updated to .env.example values: {updated_count}") + + +def show_statistics(work_dir: Path) -> None: + """Print a summary of variable counts from both env files. + + Args: + work_dir: Directory containing .env and .env.example. + """ + log_info("Synchronization statistics:") + + example_file = work_dir / ".env.example" + env_file = work_dir / ".env" + + example_count = len(parse_env_file(example_file)) if example_file.exists() else 0 + env_count = len(parse_env_file(env_file)) if env_file.exists() else 0 + + log_info(f" .env.example environment variables: {example_count}") + log_info(f" .env environment variables: {env_count}") + + +def build_arg_parser() -> argparse.ArgumentParser: + """Build and return the CLI argument parser. + + Returns: + Configured ArgumentParser instance. + """ + parser = argparse.ArgumentParser( + prog="dify-env-sync", + description=( + "Synchronize .env with .env.example: add new variables, " + "preserve custom values, and report removed variables." + ), + formatter_class=argparse.RawDescriptionHelpFormatter, + epilog=( + "Examples:\n" + " # Run from the docker/ directory (default)\n" + " python dify-env-sync.py\n\n" + " # Specify a custom working directory\n" + " python dify-env-sync.py --dir /path/to/docker\n" + ), + ) + parser.add_argument( + "--dir", + metavar="DIRECTORY", + default=".", + help="Working directory containing .env and .env.example (default: current directory)", + ) + parser.add_argument( + "--no-backup", + action="store_true", + default=False, + help="Skip creating a timestamped backup of the existing .env file", + ) + return parser + + +def main() -> None: + """Orchestrate the complete environment variable synchronization process.""" + parser = build_arg_parser() + args = parser.parse_args() + + work_dir = Path(args.dir).resolve() + + log_info("=== Dify Environment Variables Synchronization Script ===") + log_info(f"Execution started: {datetime.now()}") + log_info(f"Working directory: {work_dir}") + + # 1. Verify prerequisites + check_files(work_dir) + + # 2. 
Backup existing .env + if not args.no_backup: + create_backup(work_dir) + + # 3. Parse both files + env_vars = parse_env_file(work_dir / ".env") + example_vars = parse_env_file(work_dir / ".env.example") + + # 4. Report differences (values that changed in the example) + diffs = detect_differences(env_vars, example_vars) + + # 5. Report variables removed from the example + detect_removed_variables(env_vars, example_vars) + + # 6. Rewrite .env + sync_env_file(work_dir, env_vars, diffs) + + # 7. Print summary statistics + show_statistics(work_dir) + + log_success("=== Synchronization process completed successfully ===") + log_info(f"Execution finished: {datetime.now()}") + + +if __name__ == "__main__": + main() diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml index bf72a0f623..6e11cac678 100644 --- a/docker/docker-compose.yaml +++ b/docker/docker-compose.yaml @@ -345,6 +345,9 @@ x-shared-env: &shared-api-worker-env BAIDU_VECTOR_DB_REPLICAS: ${BAIDU_VECTOR_DB_REPLICAS:-3} BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER: ${BAIDU_VECTOR_DB_INVERTED_INDEX_ANALYZER:-DEFAULT_ANALYZER} BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE: ${BAIDU_VECTOR_DB_INVERTED_INDEX_PARSER_MODE:-COARSE_MODE} + BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT:-500} + BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO: ${BAIDU_VECTOR_DB_AUTO_BUILD_ROW_COUNT_INCREMENT_RATIO:-0.05} + BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS: ${BAIDU_VECTOR_DB_REBUILD_INDEX_TIMEOUT_IN_SECONDS:-300} VIKINGDB_ACCESS_KEY: ${VIKINGDB_ACCESS_KEY:-your-ak} VIKINGDB_SECRET_KEY: ${VIKINGDB_SECRET_KEY:-your-sk} VIKINGDB_REGION: ${VIKINGDB_REGION:-cn-shanghai} diff --git a/docker/ssrf_proxy/squid.conf.template b/docker/ssrf_proxy/squid.conf.template index 256e669c8d..fbe9ebc448 100644 --- a/docker/ssrf_proxy/squid.conf.template +++ b/docker/ssrf_proxy/squid.conf.template @@ -28,6 +28,7 @@ http_access deny manager http_access allow localhost include /etc/squid/conf.d/*.conf http_access deny all +tcp_outgoing_address 0.0.0.0 ################################## Proxy Server ################################ http_port ${HTTP_PORT} diff --git a/docs/eu-ai-act-compliance.md b/docs/eu-ai-act-compliance.md new file mode 100644 index 0000000000..5fa29eed3f --- /dev/null +++ b/docs/eu-ai-act-compliance.md @@ -0,0 +1,186 @@ +# EU AI Act Compliance Guide for Dify Deployers + +Dify is an LLMOps platform for building RAG pipelines, agents, and AI workflows. If you deploy Dify in the EU — whether self-hosted or using a cloud provider — the EU AI Act applies to your deployment. This guide covers what the regulation requires and how Dify's architecture maps to those requirements. + +## Is your system in scope? + +The detailed obligations in Articles 12, 13, and 14 only apply to **high-risk AI systems** as defined in Annex III of the EU AI Act. 
A Dify application is high-risk if it is used for: + +- **Recruitment and HR** — screening candidates, evaluating employee performance, allocating tasks +- **Credit scoring and insurance** — assessing creditworthiness or setting premiums +- **Law enforcement** — profiling, criminal risk assessment, border control +- **Critical infrastructure** — managing energy, water, transport, or telecommunications systems +- **Education assessment** — grading students, determining admissions +- **Essential public services** — evaluating eligibility for benefits, housing, or emergency services + +Most Dify deployments (customer-facing chatbots, internal knowledge bases, content generation workflows) are **not** high-risk. If your Dify application does not fall into one of the categories above: + +- **Article 50** (end-user transparency) still applies if users interact with your application directly. See the [Article 50 section](#article-50-end-user-transparency) below. +- **GDPR** still applies if you process personal data. See the [GDPR section](#gdpr-considerations) below. +- The high-risk obligations (Articles 9-15) are less likely to apply, but risk classification is context-dependent. **Do not self-classify without legal review.** Focus on Article 50 (transparency) and GDPR (data protection) as your baseline obligations. + +If you are unsure whether your use case qualifies as high-risk, consult a qualified legal professional before proceeding. + +## Self-hosted vs cloud: different compliance profiles + +| Deployment | Your role | Dify's role | Who handles compliance? | +|-----------|----------|-------------|------------------------| +| **Self-hosted** | Provider and deployer | Framework provider — obligations under Article 25 apply only if Dify is placed on the market or put into service as part of a complete AI system bearing its name or trademark | You | +| **Dify Cloud** | Deployer | Provider and processor | Shared — Dify handles SOC 2 and GDPR for the platform; you handle AI Act obligations for your specific use case | + +Dify Cloud already has SOC 2 Type II and GDPR compliance for the platform itself. But the EU AI Act adds obligations specific to AI systems that SOC 2 does not cover: risk classification, technical documentation, transparency, and human oversight. + +## Supported providers and services + +Dify integrates with a broad range of AI providers and data stores. The following are the key ones relevant to compliance: + +- **AI providers:** HuggingFace (core), plus integrations with OpenAI, Anthropic, Google, and 100+ models via provider plugins +- **Model identifiers include:** gpt-4o, gpt-3.5-turbo, claude-3-opus, gemini-2.5-flash, whisper-1, and others +- **Vector database connections:** Extensive RAG infrastructure supporting numerous vector stores + +Dify's plugin architecture means actual provider usage depends on your configuration. Document which providers and models are active in your deployment. 
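+One way to keep that record current is to maintain a small, machine-readable inventory next to your deployment configuration. The sketch below is a minimal illustration, not part of Dify: the script name, fields, and example entries are assumptions you should replace with the providers and models actually enabled in your workspace.
+
+```python
+# provider_inventory.py - hypothetical helper for Annex IV documentation.
+# Replace the example entries with the providers/models enabled in your Dify deployment.
+from dataclasses import dataclass
+
+
+@dataclass
+class ProviderRecord:
+    provider: str       # e.g. "openai", "anthropic", "self-hosted"
+    service: str        # model or service identifier as configured in Dify
+    purpose: str        # "llm", "embedding", "vector-store", ...
+    region: str         # where the provider processes data
+    dpa_in_place: bool  # Data Processing Agreement signed (cloud processors only)
+
+
+# Example entries only - not a statement about any real deployment.
+INVENTORY = [
+    ProviderRecord("openai", "gpt-4o", "llm", "US", True),
+    ProviderRecord("openai", "text-embedding-3-small", "embedding", "US", True),
+    ProviderRecord("self-hosted", "qdrant", "vector-store", "EU", False),
+]
+
+
+def to_markdown(records: list[ProviderRecord]) -> str:
+    """Render the inventory as a Markdown table for the technical documentation."""
+    rows = [
+        "| Provider | Model / Service | Purpose | Region | DPA |",
+        "|----------|-----------------|---------|--------|-----|",
+    ]
+    for r in records:
+        dpa = "yes" if r.dpa_in_place else "n/a"
+        rows.append(f"| {r.provider} | {r.service} | {r.purpose} | {r.region} | {dpa} |")
+    return "\n".join(rows)
+
+
+if __name__ == "__main__":
+    print(to_markdown(INVENTORY))
+```
+
+Regenerate the table whenever you enable or remove a provider plugin, so the documented inventory stays in step with what is actually running.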
+ +## Data flow diagram + +A typical Dify RAG deployment: + +```mermaid +graph LR + USER((User)) -->|query| DIFY[Dify Platform] + DIFY -->|prompts| LLM([LLM Provider]) + LLM -->|responses| DIFY + DIFY -->|documents| EMBED([Embedding Model]) + EMBED -->|vectors| DIFY + DIFY -->|store/retrieve| VS[(Vector Store)] + DIFY -->|knowledge| KB[(Knowledge Base)] + DIFY -->|response| USER + + classDef processor fill:#60a5fa,stroke:#1e40af,color:#000 + classDef controller fill:#4ade80,stroke:#166534,color:#000 + classDef app fill:#a78bfa,stroke:#5b21b6,color:#000 + classDef user fill:#f472b6,stroke:#be185d,color:#000 + + class USER user + class DIFY app + class LLM processor + class EMBED processor + class VS controller + class KB controller +``` + +**GDPR roles** (providers are typically processors for customer-submitted data, but the exact role depends on each provider's terms of service and processing purpose; deployers should review each provider's DPA): +- **Cloud LLM providers (OpenAI, Anthropic, Google)** typically act as processors — requires DPA. +- **Cloud embedding services** typically act as processors — requires DPA. +- **Self-hosted vector stores (Weaviate, Qdrant, pgvector):** Your organization remains the controller — no third-party transfer. +- **Cloud vector stores (Pinecone, Zilliz Cloud)** typically act as processors — requires DPA. +- **Knowledge base documents:** Your organization is the controller — stored in your infrastructure. + +## Article 11: Technical documentation + +High-risk systems need Annex IV documentation. For Dify deployments, key sections include: + +| Section | What Dify provides | What you must document | +|---------|-------------------|----------------------| +| General description | Platform capabilities, supported models | Your specific use case, intended users, deployment context | +| Development process | Dify's architecture, plugin system | Your RAG pipeline design, prompt engineering, knowledge base curation | +| Monitoring | Dify's built-in logging and analytics | Your monitoring plan, alert thresholds, incident response | +| Performance metrics | Dify's evaluation features | Your accuracy benchmarks, quality thresholds, bias testing | +| Risk management | — | Risk assessment for your specific use case | + +Some sections can be derived from Dify's architecture and your deployment configuration, as shown in the table above. The remaining sections require your input. + +## Article 12: Record-keeping + +Dify's built-in logging covers several Article 12 requirements: + +| Requirement | Dify Feature | Status | +|------------|-------------|--------| +| Conversation logs | Full conversation history with timestamps | **Covered** | +| Model tracking | Model name recorded per interaction | **Covered** | +| Token usage | Token counts per message | **Covered** | +| Cost tracking | Cost per conversation (if provider reports it) | **Partial** | +| Document retrieval | RAG source documents logged | **Covered** | +| User identification | User session tracking | **Covered** | +| Error logging | Failed generation logs | **Covered** | +| Data retention | Configurable | **Your responsibility** | + +**Retention periods:** The required retention period depends on your role under the Act. Article 18 requires **providers** of high-risk systems to retain logs and technical documentation for **10 years** after market placement. Article 26(6) requires **deployers** to retain logs for at least **6 months**. 
If you self-host Dify and have substantially modified the system, you may be classified as a provider rather than a deployer. Confirm the applicable retention period with legal counsel. + +## Article 13: Transparency to deployers + +Article 13 requires providers of high-risk AI systems to supply deployers with the information needed to understand and operate the system correctly. This is a **documentation obligation**, not a logging obligation. For Dify deployments, this means the upstream LLM and embedding providers must give you: + +- Instructions for use, including intended purpose and known limitations +- Accuracy metrics and performance benchmarks +- Known or foreseeable risks and residual risks after mitigation +- Technical specifications: input/output formats, training data characteristics, model architecture details + +As a deployer, collect model cards, system documentation, and accuracy reports from each AI provider your Dify application uses. Maintain these as part of your Annex IV technical documentation. + +Dify's platform features provide **supporting evidence** that can inform Article 13 documentation, but they do not satisfy Article 13 on their own: +- **Source attribution** — Dify's RAG citation feature shows which documents informed the response, supporting deployer-side auditing +- **Model identification** — Dify logs which LLM model generates responses, providing evidence for system documentation +- **Conversation logs** — execution history helps compile performance and behavior evidence + +You must independently produce system documentation covering how your specific Dify deployment uses AI, its intended purpose, performance characteristics, and residual risks. + +## Article 50: End-user transparency + +Article 50 requires deployers to inform end users that they are interacting with an AI system. This is a separate obligation from Article 13 and applies even to limited-risk systems. + +For Dify applications serving end users: + +1. **Disclose AI involvement** — tell users they are interacting with an AI system +2. **AI-generated content labeling** — identify AI-generated content as such (e.g., clear labeling in the UI) + +Dify's "citation" feature also supports end-user transparency by showing users which knowledge base documents informed the answer. + +> **Note:** Article 50 applies to chatbots and systems interacting directly with natural persons. It has a separate scope from the high-risk designation under Annex III — it applies even to limited-risk systems. + +## Article 14: Human oversight + +Article 14 requires that high-risk AI systems be designed so that natural persons can effectively oversee them. 
Dify provides **automated technical safeguards** that support human oversight, but they are not a substitute for it: + +| Dify Feature | What It Does | Oversight Role | +|-------------|-------------|----------------| +| Annotation/feedback system | Human review of AI outputs | **Direct oversight** — humans evaluate and correct AI responses | +| Content moderation | Built-in filtering before responses reach users | **Automated safeguard** — reduces harmful outputs but does not replace human judgment on edge cases | +| Rate limiting | Controls on API usage | **Automated safeguard** — bounds system behavior, supports overseer's ability to maintain control | +| Workflow control | Insert human review steps between AI generation and output | **Oversight enabler** — allows building approval gates into the pipeline | + +These automated controls are necessary building blocks, but Article 14 compliance requires **human oversight procedures** on top of them: +- **Escalation procedures** — define what happens when moderation triggers or edge cases arise (who is notified, what action is taken) +- **Human review pipeline** — for high-stakes decisions, route AI outputs to a qualified person before they take effect +- **Override mechanism** — a human must be able to halt AI responses or override the system's output +- **Competence requirements** — the human overseer must understand the system's capabilities, limitations, and the context of its outputs + +### Recommended pattern + +For high-risk use cases (HR, legal, medical), configure your Dify workflow to require human approval before the AI response is delivered to the end user or acted upon. + +## Knowledge base compliance + +Dify's knowledge base feature has specific compliance implications: + +1. **Data provenance:** Document where your knowledge base documents come from. Article 10 requires data governance for training data; knowledge bases are analogous. +2. **Update tracking:** When you add, remove, or update documents in the knowledge base, log the change. The AI system's behavior changes with its knowledge base. +3. **PII in documents:** If knowledge base documents contain personal data, GDPR applies to the entire RAG pipeline. Implement access controls and consider PII redaction before indexing. +4. **Copyright:** Ensure you have the right to use the documents in your knowledge base for AI-assisted generation. + +## GDPR considerations + +1. **Legal basis** (Article 6): Document why AI processing of user queries is necessary +2. **Data Processing Agreements** (Article 28): Required for each cloud LLM and embedding provider +3. **Data minimization:** Only include necessary context in prompts; avoid sending entire documents when a relevant excerpt suffices +4. **Right to erasure:** If a user requests deletion, ensure their conversations are removed from Dify's logs AND any vector store entries derived from their data +5. **Cross-border transfers:** Providers based outside the EEA — including US-based providers (OpenAI, Anthropic), and any other non-EEA providers you route to — require Standard Contractual Clauses (SCCs) or equivalent safeguards under Chapter V of the GDPR. Review each provider's transfer mechanism individually. + +## Resources + +- [EU AI Act full text](https://artificialintelligenceact.eu/) +- [Dify documentation](https://docs.dify.ai/) +- [Dify SOC 2 compliance](https://dify.ai/trust) + +--- + +*This is not legal advice. 
Consult a qualified professional for compliance decisions.* diff --git a/sdks/nodejs-client/package.json b/sdks/nodejs-client/package.json index 7c8a293446..728aa0d054 100644 --- a/sdks/nodejs-client/package.json +++ b/sdks/nodejs-client/package.json @@ -69,6 +69,7 @@ }, "pnpm": { "overrides": { + "flatted@<=3.4.1": "3.4.2", "rollup@>=4.0.0,<4.59.0": "4.59.0" } } diff --git a/sdks/nodejs-client/pnpm-lock.yaml b/sdks/nodejs-client/pnpm-lock.yaml index c4b299cd73..c9081420f5 100644 --- a/sdks/nodejs-client/pnpm-lock.yaml +++ b/sdks/nodejs-client/pnpm-lock.yaml @@ -5,6 +5,7 @@ settings: excludeLinksFromLockfile: false overrides: + flatted@<=3.4.1: 3.4.2 rollup@>=4.0.0,<4.59.0: 4.59.0 importers: @@ -324,66 +325,79 @@ packages: resolution: {integrity: sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==} cpu: [arm] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm-musleabihf@4.59.0': resolution: {integrity: sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==} cpu: [arm] os: [linux] + libc: [musl] '@rollup/rollup-linux-arm64-gnu@4.59.0': resolution: {integrity: sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==} cpu: [arm64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-arm64-musl@4.59.0': resolution: {integrity: sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==} cpu: [arm64] os: [linux] + libc: [musl] '@rollup/rollup-linux-loong64-gnu@4.59.0': resolution: {integrity: sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==} cpu: [loong64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-loong64-musl@4.59.0': resolution: {integrity: sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==} cpu: [loong64] os: [linux] + libc: [musl] '@rollup/rollup-linux-ppc64-gnu@4.59.0': resolution: {integrity: sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==} cpu: [ppc64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-ppc64-musl@4.59.0': resolution: {integrity: sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==} cpu: [ppc64] os: [linux] + libc: [musl] '@rollup/rollup-linux-riscv64-gnu@4.59.0': resolution: {integrity: sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==} cpu: [riscv64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-riscv64-musl@4.59.0': resolution: {integrity: sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==} cpu: [riscv64] os: [linux] + libc: [musl] '@rollup/rollup-linux-s390x-gnu@4.59.0': resolution: {integrity: sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==} cpu: [s390x] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-gnu@4.59.0': resolution: {integrity: sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==} cpu: [x64] os: [linux] + libc: [glibc] '@rollup/rollup-linux-x64-musl@4.59.0': resolution: {integrity: sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==} cpu: [x64] os: [linux] + libc: [musl] '@rollup/rollup-openbsd-x64@4.59.0': resolution: {integrity: sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==} @@ -741,8 +755,8 @@ packages: resolution: {integrity: 
sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==} engines: {node: '>=16'} - flatted@3.4.1: - resolution: {integrity: sha512-IxfVbRFVlV8V/yRaGzk0UVIcsKKHMSfYw66T/u4nTwlWteQePsxe//LjudR1AMX4tZW3WFCh3Zqa/sjlqpbURQ==} + flatted@3.4.2: + resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==} follow-redirects@1.15.11: resolution: {integrity: sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==} @@ -1836,10 +1850,10 @@ snapshots: flat-cache@4.0.1: dependencies: - flatted: 3.4.1 + flatted: 3.4.2 keyv: 4.5.4 - flatted@3.4.1: {} + flatted@3.4.2: {} follow-redirects@1.15.11: {} diff --git a/web/__tests__/plugins/plugin-install-flow.test.ts b/web/__tests__/plugins/plugin-install-flow.test.ts index 8edb6705d4..8fa2246198 100644 --- a/web/__tests__/plugins/plugin-install-flow.test.ts +++ b/web/__tests__/plugins/plugin-install-flow.test.ts @@ -12,8 +12,16 @@ vi.mock('@/config', () => ({ })) const mockToastNotify = vi.fn() -vi.mock('@/app/components/base/toast', () => ({ - default: { notify: (...args: unknown[]) => mockToastNotify(...args) }, +vi.mock('@/app/components/base/ui/toast', () => ({ + toast: Object.assign((message: string, options?: { type?: string }) => mockToastNotify({ type: options?.type, message }), { + success: (message: string) => mockToastNotify({ type: 'success', message }), + error: (message: string) => mockToastNotify({ type: 'error', message }), + warning: (message: string) => mockToastNotify({ type: 'warning', message }), + info: (message: string) => mockToastNotify({ type: 'info', message }), + dismiss: vi.fn(), + update: vi.fn(), + promise: vi.fn(), + }), })) const mockUploadGitHub = vi.fn() diff --git a/web/app/components/apps/index.tsx b/web/app/components/apps/index.tsx index dce9de190d..b6ca60bd7b 100644 --- a/web/app/components/apps/index.tsx +++ b/web/app/components/apps/index.tsx @@ -8,12 +8,14 @@ import AppListContext from '@/context/app-list-context' import useDocumentTitle from '@/hooks/use-document-title' import { useImportDSL } from '@/hooks/use-import-dsl' import { DSLImportMode } from '@/models/app' +import dynamic from '@/next/dynamic' import { fetchAppDetail } from '@/service/explore' -import DSLConfirmModal from '../app/create-from-dsl-modal/dsl-confirm-modal' -import CreateAppModal from '../explore/create-app-modal' -import TryApp from '../explore/try-app' import List from './list' +const DSLConfirmModal = dynamic(() => import('../app/create-from-dsl-modal/dsl-confirm-modal'), { ssr: false }) +const CreateAppModal = dynamic(() => import('../explore/create-app-modal'), { ssr: false }) +const TryApp = dynamic(() => import('../explore/try-app'), { ssr: false }) + const Apps = () => { const { t } = useTranslation() diff --git a/web/app/components/apps/list.tsx b/web/app/components/apps/list.tsx index 0d52bd468c..2ef344f816 100644 --- a/web/app/components/apps/list.tsx +++ b/web/app/components/apps/list.tsx @@ -5,11 +5,11 @@ import { useDebounceFn } from 'ahooks' import { parseAsStringLiteral, useQueryState } from 'nuqs' import { useCallback, useEffect, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' +import Checkbox from '@/app/components/base/checkbox' import Input from '@/app/components/base/input' import TabSliderNew from '@/app/components/base/tab-slider-new' import TagFilter from '@/app/components/base/tag-management/filter' import { useStore as useTagStore } from 
'@/app/components/base/tag-management/store' -import CheckboxWithLabel from '@/app/components/datasets/create/website/base/checkbox-with-label' import { NEED_REFRESH_APP_LIST_KEY } from '@/config' import { useAppContext } from '@/context/app-context' import { useGlobalPublicStore } from '@/context/global-public-context' @@ -205,12 +205,12 @@ const List: FC = ({ options={options} />
- + { - return IS_CLOUD_EDITION && !!AMPLITUDE_API_KEY -} - // Map URL pathname to English page name for consistent Amplitude tracking const getEnglishPageName = (pathname: string): string => { // Remove leading slash and get the first segment @@ -59,7 +54,7 @@ const AmplitudeProvider: FC = ({ }) => { useEffect(() => { // Only enable in Saas edition with valid API key - if (!isAmplitudeEnabled()) + if (!isAmplitudeEnabled) return // Initialize Amplitude diff --git a/web/app/components/base/amplitude/__tests__/AmplitudeProvider.spec.tsx b/web/app/components/base/amplitude/__tests__/AmplitudeProvider.spec.tsx index b30da72091..5835634eb7 100644 --- a/web/app/components/base/amplitude/__tests__/AmplitudeProvider.spec.tsx +++ b/web/app/components/base/amplitude/__tests__/AmplitudeProvider.spec.tsx @@ -2,14 +2,24 @@ import * as amplitude from '@amplitude/analytics-browser' import { sessionReplayPlugin } from '@amplitude/plugin-session-replay-browser' import { render } from '@testing-library/react' import { beforeEach, describe, expect, it, vi } from 'vitest' -import AmplitudeProvider, { isAmplitudeEnabled } from '../AmplitudeProvider' +import AmplitudeProvider from '../AmplitudeProvider' const mockConfig = vi.hoisted(() => ({ AMPLITUDE_API_KEY: 'test-api-key', IS_CLOUD_EDITION: true, })) -vi.mock('@/config', () => mockConfig) +vi.mock('@/config', () => ({ + get AMPLITUDE_API_KEY() { + return mockConfig.AMPLITUDE_API_KEY + }, + get IS_CLOUD_EDITION() { + return mockConfig.IS_CLOUD_EDITION + }, + get isAmplitudeEnabled() { + return mockConfig.IS_CLOUD_EDITION && !!mockConfig.AMPLITUDE_API_KEY + }, +})) vi.mock('@amplitude/analytics-browser', () => ({ init: vi.fn(), @@ -27,22 +37,6 @@ describe('AmplitudeProvider', () => { mockConfig.IS_CLOUD_EDITION = true }) - describe('isAmplitudeEnabled', () => { - it('returns true when cloud edition and api key present', () => { - expect(isAmplitudeEnabled()).toBe(true) - }) - - it('returns false when cloud edition but no api key', () => { - mockConfig.AMPLITUDE_API_KEY = '' - expect(isAmplitudeEnabled()).toBe(false) - }) - - it('returns false when not cloud edition', () => { - mockConfig.IS_CLOUD_EDITION = false - expect(isAmplitudeEnabled()).toBe(false) - }) - }) - describe('Component', () => { it('initializes amplitude when enabled', () => { render() diff --git a/web/app/components/base/amplitude/__tests__/index.spec.ts b/web/app/components/base/amplitude/__tests__/index.spec.ts deleted file mode 100644 index 2d7ad6ab84..0000000000 --- a/web/app/components/base/amplitude/__tests__/index.spec.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { describe, expect, it } from 'vitest' -import AmplitudeProvider, { isAmplitudeEnabled } from '../AmplitudeProvider' -import indexDefault, { - isAmplitudeEnabled as indexIsAmplitudeEnabled, - resetUser, - setUserId, - setUserProperties, - trackEvent, -} from '../index' -import { - resetUser as utilsResetUser, - setUserId as utilsSetUserId, - setUserProperties as utilsSetUserProperties, - trackEvent as utilsTrackEvent, -} from '../utils' - -describe('Amplitude index exports', () => { - it('exports AmplitudeProvider as default', () => { - expect(indexDefault).toBe(AmplitudeProvider) - }) - - it('exports isAmplitudeEnabled', () => { - expect(indexIsAmplitudeEnabled).toBe(isAmplitudeEnabled) - }) - - it('exports utils', () => { - expect(resetUser).toBe(utilsResetUser) - expect(setUserId).toBe(utilsSetUserId) - expect(setUserProperties).toBe(utilsSetUserProperties) - expect(trackEvent).toBe(utilsTrackEvent) - }) -}) diff --git 
a/web/app/components/base/amplitude/__tests__/utils.spec.ts b/web/app/components/base/amplitude/__tests__/utils.spec.ts index ecbc57e387..f1ff5db1e3 100644 --- a/web/app/components/base/amplitude/__tests__/utils.spec.ts +++ b/web/app/components/base/amplitude/__tests__/utils.spec.ts @@ -20,8 +20,10 @@ const MockIdentify = vi.hoisted(() => }, ) -vi.mock('../AmplitudeProvider', () => ({ - isAmplitudeEnabled: () => mockState.enabled, +vi.mock('@/config', () => ({ + get isAmplitudeEnabled() { + return mockState.enabled + }, })) vi.mock('@amplitude/analytics-browser', () => ({ diff --git a/web/app/components/base/amplitude/index.ts b/web/app/components/base/amplitude/index.ts index acc792339e..44cbf728e2 100644 --- a/web/app/components/base/amplitude/index.ts +++ b/web/app/components/base/amplitude/index.ts @@ -1,2 +1,2 @@ -export { default, isAmplitudeEnabled } from './AmplitudeProvider' +export { default } from './lazy-amplitude-provider' export { resetUser, setUserId, setUserProperties, trackEvent } from './utils' diff --git a/web/app/components/base/amplitude/lazy-amplitude-provider.tsx b/web/app/components/base/amplitude/lazy-amplitude-provider.tsx new file mode 100644 index 0000000000..5dfa0e7b53 --- /dev/null +++ b/web/app/components/base/amplitude/lazy-amplitude-provider.tsx @@ -0,0 +1,11 @@ +'use client' + +import type { FC } from 'react' +import type { IAmplitudeProps } from './AmplitudeProvider' +import dynamic from '@/next/dynamic' + +const AmplitudeProvider = dynamic(() => import('./AmplitudeProvider'), { ssr: false }) + +const LazyAmplitudeProvider: FC = props => + +export default LazyAmplitudeProvider diff --git a/web/app/components/base/amplitude/utils.ts b/web/app/components/base/amplitude/utils.ts index 57b96243ec..8faa8e852e 100644 --- a/web/app/components/base/amplitude/utils.ts +++ b/web/app/components/base/amplitude/utils.ts @@ -1,5 +1,5 @@ import * as amplitude from '@amplitude/analytics-browser' -import { isAmplitudeEnabled } from './AmplitudeProvider' +import { isAmplitudeEnabled } from '@/config' /** * Track custom event @@ -7,7 +7,7 @@ import { isAmplitudeEnabled } from './AmplitudeProvider' * @param eventProperties Event properties (optional) */ export const trackEvent = (eventName: string, eventProperties?: Record) => { - if (!isAmplitudeEnabled()) + if (!isAmplitudeEnabled) return amplitude.track(eventName, eventProperties) } @@ -17,7 +17,7 @@ export const trackEvent = (eventName: string, eventProperties?: Record { - if (!isAmplitudeEnabled()) + if (!isAmplitudeEnabled) return amplitude.setUserId(userId) } @@ -27,7 +27,7 @@ export const setUserId = (userId: string) => { * @param properties User properties */ export const setUserProperties = (properties: Record) => { - if (!isAmplitudeEnabled()) + if (!isAmplitudeEnabled) return const identifyEvent = new amplitude.Identify() Object.entries(properties).forEach(([key, value]) => { @@ -40,7 +40,7 @@ export const setUserProperties = (properties: Record) => { * Reset user (e.g., when user logs out) */ export const resetUser = () => { - if (!isAmplitudeEnabled()) + if (!isAmplitudeEnabled) return amplitude.reset() } diff --git a/web/app/components/base/markdown-blocks/__tests__/code-block.spec.tsx b/web/app/components/base/markdown-blocks/__tests__/code-block.spec.tsx index 308232fd0f..745b7657d7 100644 --- a/web/app/components/base/markdown-blocks/__tests__/code-block.spec.tsx +++ b/web/app/components/base/markdown-blocks/__tests__/code-block.spec.tsx @@ -21,6 +21,8 @@ let clientWidthSpy: { mockRestore: () => void } | 
null = null let clientHeightSpy: { mockRestore: () => void } | null = null let offsetWidthSpy: { mockRestore: () => void } | null = null let offsetHeightSpy: { mockRestore: () => void } | null = null +let consoleErrorSpy: ReturnType | null = null +let consoleWarnSpy: ReturnType | null = null type AudioContextCtor = new () => unknown type WindowWithLegacyAudio = Window & { @@ -83,6 +85,8 @@ describe('CodeBlock', () => { beforeEach(() => { vi.clearAllMocks() mockUseTheme.mockReturnValue({ theme: Theme.light }) + consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {}) + consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}) clientWidthSpy = vi.spyOn(HTMLElement.prototype, 'clientWidth', 'get').mockReturnValue(900) clientHeightSpy = vi.spyOn(HTMLElement.prototype, 'clientHeight', 'get').mockReturnValue(400) offsetWidthSpy = vi.spyOn(HTMLElement.prototype, 'offsetWidth', 'get').mockReturnValue(900) @@ -98,6 +102,10 @@ describe('CodeBlock', () => { afterEach(() => { vi.useRealTimers() + consoleErrorSpy?.mockRestore() + consoleWarnSpy?.mockRestore() + consoleErrorSpy = null + consoleWarnSpy = null clientWidthSpy?.mockRestore() clientHeightSpy?.mockRestore() offsetWidthSpy?.mockRestore() diff --git a/web/app/components/base/markdown-blocks/code-block.tsx b/web/app/components/base/markdown-blocks/code-block.tsx index b36d8d7788..412c61d52d 100644 --- a/web/app/components/base/markdown-blocks/code-block.tsx +++ b/web/app/components/base/markdown-blocks/code-block.tsx @@ -85,13 +85,30 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any const processedRef = useRef(false) // Track if content was successfully processed const isInitialRenderRef = useRef(true) // Track if this is initial render const chartInstanceRef = useRef(null) // Direct reference to ECharts instance - const resizeTimerRef = useRef(null) // For debounce handling + const resizeTimerRef = useRef | null>(null) // For debounce handling + const chartReadyTimerRef = useRef | null>(null) const finishedEventCountRef = useRef(0) // Track finished event trigger count const match = /language-(\w+)/.exec(className || '') const language = match?.[1] const languageShowName = getCorrectCapitalizationLanguageName(language || '') const isDarkMode = theme === Theme.dark + const clearResizeTimer = useCallback(() => { + if (!resizeTimerRef.current) + return + + clearTimeout(resizeTimerRef.current) + resizeTimerRef.current = null + }, []) + + const clearChartReadyTimer = useCallback(() => { + if (!chartReadyTimerRef.current) + return + + clearTimeout(chartReadyTimerRef.current) + chartReadyTimerRef.current = null + }, []) + const echartsStyle = useMemo(() => ({ height: '350px', width: '100%', @@ -104,26 +121,27 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any // Debounce resize operations const debouncedResize = useCallback(() => { - if (resizeTimerRef.current) - clearTimeout(resizeTimerRef.current) + clearResizeTimer() resizeTimerRef.current = setTimeout(() => { if (chartInstanceRef.current) chartInstanceRef.current.resize() resizeTimerRef.current = null }, 200) - }, []) + }, [clearResizeTimer]) // Handle ECharts instance initialization const handleChartReady = useCallback((instance: any) => { chartInstanceRef.current = instance // Force resize to ensure timeline displays correctly - setTimeout(() => { + clearChartReadyTimer() + chartReadyTimerRef.current = setTimeout(() => { if (chartInstanceRef.current) chartInstanceRef.current.resize() + 
chartReadyTimerRef.current = null }, 200) - }, []) + }, [clearChartReadyTimer]) // Store event handlers in useMemo to avoid recreating them const echartsEvents = useMemo(() => ({ @@ -157,10 +175,20 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any return () => { window.removeEventListener('resize', handleResize) - if (resizeTimerRef.current) - clearTimeout(resizeTimerRef.current) + clearResizeTimer() + clearChartReadyTimer() + chartInstanceRef.current = null } - }, [language, debouncedResize]) + }, [language, debouncedResize, clearResizeTimer, clearChartReadyTimer]) + + useEffect(() => { + return () => { + clearResizeTimer() + clearChartReadyTimer() + chartInstanceRef.current = null + echartsRef.current = null + } + }, [clearResizeTimer, clearChartReadyTimer]) // Process chart data when content changes useEffect(() => { // Only process echarts content diff --git a/web/app/components/billing/pricing/__tests__/header.spec.tsx b/web/app/components/billing/pricing/__tests__/header.spec.tsx index 0aadc3b0ce..cb8991ff42 100644 --- a/web/app/components/billing/pricing/__tests__/header.spec.tsx +++ b/web/app/components/billing/pricing/__tests__/header.spec.tsx @@ -1,12 +1,14 @@ import { fireEvent, render, screen } from '@testing-library/react' import * as React from 'react' -import { Dialog } from '@/app/components/base/ui/dialog' +import { Dialog, DialogContent } from '@/app/components/base/ui/dialog' import Header from '../header' function renderHeader(onClose: () => void) { return render( -
+ +
+
, ) } @@ -24,7 +26,7 @@ describe('Header', () => { expect(screen.getByText('billing.plansCommon.title.plans')).toBeInTheDocument() expect(screen.getByText('billing.plansCommon.title.description')).toBeInTheDocument() - expect(screen.getByRole('button')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'common.operation.close' })).toBeInTheDocument() }) }) @@ -33,7 +35,7 @@ describe('Header', () => { const handleClose = vi.fn() renderHeader(handleClose) - fireEvent.click(screen.getByRole('button')) + fireEvent.click(screen.getByRole('button', { name: 'common.operation.close' })) expect(handleClose).toHaveBeenCalledTimes(1) }) @@ -41,11 +43,11 @@ describe('Header', () => { describe('Edge Cases', () => { it('should render structural elements with translation keys', () => { - const { container } = renderHeader(vi.fn()) + renderHeader(vi.fn()) - expect(container.querySelector('span')).toBeInTheDocument() - expect(container.querySelector('p')).toBeInTheDocument() - expect(screen.getByRole('button')).toBeInTheDocument() + expect(screen.getByText('billing.plansCommon.title.plans')).toBeInTheDocument() + expect(screen.getByText('billing.plansCommon.title.description')).toBeInTheDocument() + expect(screen.getByRole('button', { name: 'common.operation.close' })).toBeInTheDocument() }) }) }) diff --git a/web/app/components/billing/pricing/__tests__/index.spec.tsx b/web/app/components/billing/pricing/__tests__/index.spec.tsx index 36848cd463..a8d0a4329e 100644 --- a/web/app/components/billing/pricing/__tests__/index.spec.tsx +++ b/web/app/components/billing/pricing/__tests__/index.spec.tsx @@ -68,6 +68,7 @@ describe('Pricing', () => { it('should render pricing header and localized footer link', () => { render() + expect(screen.getByRole('dialog', { name: 'billing.plansCommon.title.plans' })).toBeInTheDocument() expect(screen.getByText('billing.plansCommon.title.plans')).toBeInTheDocument() expect(screen.getByTestId('pricing-link')).toHaveAttribute('href', 'https://dify.ai/en/pricing#plans-and-features') }) diff --git a/web/app/components/billing/pricing/footer.tsx b/web/app/components/billing/pricing/footer.tsx index 0d3fd965b0..1422ec1cb1 100644 --- a/web/app/components/billing/pricing/footer.tsx +++ b/web/app/components/billing/pricing/footer.tsx @@ -28,8 +28,9 @@ const Footer = ({ {t('plansCommon.comparePlanAndFeatures', { ns: 'billing' })} diff --git a/web/app/components/billing/pricing/header.tsx b/web/app/components/billing/pricing/header.tsx index d0ffe100db..5ab1895667 100644 --- a/web/app/components/billing/pricing/header.tsx +++ b/web/app/components/billing/pricing/header.tsx @@ -1,5 +1,6 @@ import * as React from 'react' import { useTranslation } from 'react-i18next' +import { DialogDescription, DialogTitle } from '@/app/components/base/ui/dialog' import { cn } from '@/utils/classnames' import Button from '../../base/button' import DifyLogo from '../../base/logo/dify-logo' @@ -18,24 +19,25 @@ const Header = ({
-
+ - {t('plansCommon.title.plans', { ns: 'billing' })} - +
-

+ {t('plansCommon.title.description', { ns: 'billing' })} -

+ ))} diff --git a/web/app/components/devtools/agentation-loader.tsx b/web/app/components/devtools/agentation-loader.tsx new file mode 100644 index 0000000000..87e1b44c87 --- /dev/null +++ b/web/app/components/devtools/agentation-loader.tsx @@ -0,0 +1,13 @@ +'use client' + +import { IS_DEV } from '@/config' +import dynamic from '@/next/dynamic' + +const Agentation = dynamic(() => import('agentation').then(module => module.Agentation), { ssr: false }) + +export function AgentationLoader() { + if (!IS_DEV) + return null + + return +} diff --git a/web/app/components/explore/banner/__tests__/banner-item.spec.tsx b/web/app/components/explore/banner/__tests__/banner-item.spec.tsx index de35814e8e..2d07cbddd8 100644 --- a/web/app/components/explore/banner/__tests__/banner-item.spec.tsx +++ b/web/app/components/explore/banner/__tests__/banner-item.spec.tsx @@ -1,3 +1,4 @@ +import type { ComponentProps } from 'react' import type { Banner } from '@/models/app' import { cleanup, fireEvent, render, screen } from '@testing-library/react' import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' @@ -5,6 +6,11 @@ import { BannerItem } from '../banner-item' const mockScrollTo = vi.fn() const mockSlideNodes = vi.fn() +const mockTrackEvent = vi.fn() + +vi.mock('@/app/components/base/amplitude', () => ({ + trackEvent: (...args: unknown[]) => mockTrackEvent(...args), +})) vi.mock('@/app/components/base/carousel', () => ({ useCarousel: () => ({ @@ -48,19 +54,34 @@ class MockResizeObserver { } } +const renderBannerItem = ( + banner: Banner = createMockBanner(), + props: Partial> = {}, +) => { + return render( + , + ) +} + describe('BannerItem', () => { let mockWindowOpen: ReturnType beforeEach(() => { mockWindowOpen = vi.spyOn(window, 'open').mockImplementation(() => null) - mockSlideNodes.mockReturnValue([{}, {}, {}]) // 3 slides + mockSlideNodes.mockReturnValue([{}, {}, {}]) vi.stubGlobal('ResizeObserver', MockResizeObserver) Object.defineProperty(window, 'innerWidth', { writable: true, configurable: true, - value: 1400, // Above RESPONSIVE_BREAKPOINT (1200) + value: 1400, }) }) @@ -73,81 +94,51 @@ describe('BannerItem', () => { describe('basic rendering', () => { it('renders banner category', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('Featured')).toBeInTheDocument() }) it('renders banner title', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('Test Banner Title')).toBeInTheDocument() }) it('renders banner description', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('Test banner description text')).toBeInTheDocument() }) it('renders banner image with correct src and alt', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() const image = screen.getByRole('img') expect(image).toHaveAttribute('src', 'https://example.com/image.png') expect(image).toHaveAttribute('alt', 'Test Banner Title') }) it('renders view more text', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('explore.banner.viewMore')).toBeInTheDocument() }) }) describe('click handling', () => { - it('opens banner link in new tab when clicked', () => { + it('opens banner link in new tab and tracks click when clicked', () => { const banner = createMockBanner({ link: 'https://test-link.com' }) - render( - , - ) + renderBannerItem(banner, { sort: 2, language: 
'zh-Hans', accountId: 'account-123' }) const bannerElement = screen.getByText('Test Banner Title').closest('div[class*="cursor-pointer"]') fireEvent.click(bannerElement!) + expect(mockTrackEvent).toHaveBeenCalledWith('explore_banner_click', expect.objectContaining({ + banner_id: 'banner-1', + title: 'Test Banner Title', + sort: 2, + link: 'https://test-link.com', + page: 'explore', + language: 'zh-Hans', + account_id: 'account-123', + event_time: expect.any(Number), + })) expect(mockWindowOpen).toHaveBeenCalledWith( 'https://test-link.com', '_blank', @@ -155,18 +146,16 @@ describe('BannerItem', () => { ) }) - it('does not open window when banner has no link', () => { + it('tracks click even when banner has no link', () => { const banner = createMockBanner({ link: '' }) - render( - , - ) + renderBannerItem(banner) const bannerElement = screen.getByText('Test Banner Title').closest('div[class*="cursor-pointer"]') fireEvent.click(bannerElement!) + expect(mockTrackEvent).toHaveBeenCalledWith('explore_banner_click', expect.objectContaining({ + link: '', + })) expect(mockWindowOpen).not.toHaveBeenCalled() }) }) @@ -174,28 +163,13 @@ describe('BannerItem', () => { describe('slide indicators', () => { it('renders correct number of indicator buttons', () => { mockSlideNodes.mockReturnValue([{}, {}, {}]) - const banner = createMockBanner() - render( - , - ) - - const buttons = screen.getAllByRole('button') - expect(buttons).toHaveLength(3) + renderBannerItem() + expect(screen.getAllByRole('button')).toHaveLength(3) }) it('renders indicator buttons with correct numbers', () => { mockSlideNodes.mockReturnValue([{}, {}, {}]) - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('01')).toBeInTheDocument() expect(screen.getByText('02')).toBeInTheDocument() expect(screen.getByText('03')).toBeInTheDocument() @@ -203,13 +177,7 @@ describe('BannerItem', () => { it('calls scrollTo when indicator is clicked', () => { mockSlideNodes.mockReturnValue([{}, {}, {}]) - const banner = createMockBanner() - render( - , - ) + renderBannerItem() const secondIndicator = screen.getByText('02').closest('button') fireEvent.click(secondIndicator!) 
@@ -219,81 +187,39 @@ describe('BannerItem', () => { it('renders no indicators when no slides', () => { mockSlideNodes.mockReturnValue([]) - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.queryByRole('button')).not.toBeInTheDocument() }) }) describe('isPaused prop', () => { it('defaults isPaused to false', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('Test Banner Title')).toBeInTheDocument() }) it('accepts isPaused prop', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem(createMockBanner(), { isPaused: true }) expect(screen.getByText('Test Banner Title')).toBeInTheDocument() }) }) describe('responsive behavior', () => { it('sets up ResizeObserver on mount', () => { - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(mockResizeObserverObserve).toHaveBeenCalled() }) it('adds resize event listener on mount', () => { const addEventListenerSpy = vi.spyOn(window, 'addEventListener') - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(addEventListenerSpy).toHaveBeenCalledWith('resize', expect.any(Function)) addEventListenerSpy.mockRestore() }) it('removes resize event listener on unmount', () => { const removeEventListenerSpy = vi.spyOn(window, 'removeEventListener') - const banner = createMockBanner() - const { unmount } = render( - , - ) + const { unmount } = renderBannerItem() unmount() @@ -308,14 +234,7 @@ describe('BannerItem', () => { value: 1000, }) - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('Test Banner Title')).toBeInTheDocument() }) @@ -326,14 +245,7 @@ describe('BannerItem', () => { value: 800, }) - const banner = createMockBanner() - render( - , - ) - + renderBannerItem() expect(screen.getByText('explore.banner.viewMore')).toBeInTheDocument() }) }) @@ -348,13 +260,8 @@ describe('BannerItem', () => { 'img-src': 'https://example.com/img.png', }, } as Partial) - render( - , - ) + renderBannerItem(banner) expect(screen.getByText('Very Long Category Name')).toBeInTheDocument() }) @@ -367,13 +274,8 @@ describe('BannerItem', () => { 'img-src': 'https://example.com/img.png', }, } as Partial) - render( - , - ) + renderBannerItem(banner) const titleElement = screen.getByText('A Very Long Title That Should Be Truncated Eventually') expect(titleElement).toHaveClass('line-clamp-2') }) @@ -387,13 +289,8 @@ describe('BannerItem', () => { 'img-src': 'https://example.com/img.png', }, } as Partial) - render( - , - ) + renderBannerItem(banner) const descriptionElement = screen.getByText(/A very long description/) expect(descriptionElement).toHaveClass('line-clamp-4') }) @@ -402,56 +299,26 @@ describe('BannerItem', () => { describe('slide calculation', () => { it('calculates next index correctly for first slide', () => { mockSlideNodes.mockReturnValue([{}, {}, {}]) - const banner = createMockBanner() - render( - , - ) - - const buttons = screen.getAllByRole('button') - expect(buttons).toHaveLength(3) + renderBannerItem() + expect(screen.getAllByRole('button')).toHaveLength(3) }) it('handles single slide case', () => { mockSlideNodes.mockReturnValue([{}]) - const banner = createMockBanner() - render( - , - ) - - const buttons = screen.getAllByRole('button') - expect(buttons).toHaveLength(1) + renderBannerItem() + expect(screen.getAllByRole('button')).toHaveLength(1) }) }) describe('wrapper styling', () => { it('has cursor-pointer class', () => { - 
const banner = createMockBanner() - const { container } = render( - , - ) - + const { container } = renderBannerItem() const wrapper = container.firstChild as HTMLElement expect(wrapper).toHaveClass('cursor-pointer') }) it('has rounded-2xl class', () => { - const banner = createMockBanner() - const { container } = render( - , - ) - + const { container } = renderBannerItem() const wrapper = container.firstChild as HTMLElement expect(wrapper).toHaveClass('rounded-2xl') }) diff --git a/web/app/components/explore/banner/__tests__/banner.spec.tsx b/web/app/components/explore/banner/__tests__/banner.spec.tsx index d6d0aa44a8..069aaf02dc 100644 --- a/web/app/components/explore/banner/__tests__/banner.spec.tsx +++ b/web/app/components/explore/banner/__tests__/banner.spec.tsx @@ -6,6 +6,8 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest' import Banner from '../banner' const mockUseGetBanners = vi.fn() +const mockUseSelector = vi.fn() +const mockTrackEvent = vi.fn() vi.mock('@/service/use-explore', () => ({ useGetBanners: (...args: unknown[]) => mockUseGetBanners(...args), @@ -15,6 +17,14 @@ vi.mock('@/context/i18n', () => ({ useLocale: () => 'en-US', })) +vi.mock('@/context/app-context', () => ({ + useSelector: (...args: unknown[]) => mockUseSelector(...args), +})) + +vi.mock('@/app/components/base/amplitude', () => ({ + trackEvent: (...args: unknown[]) => mockTrackEvent(...args), +})) + vi.mock('@/app/components/base/carousel', () => ({ Carousel: Object.assign( ({ children, onMouseEnter, onMouseLeave, className }: { @@ -54,9 +64,12 @@ vi.mock('@/app/components/base/carousel', () => ({ })) vi.mock('../banner-item', () => ({ - BannerItem: ({ banner, autoplayDelay, isPaused }: { + BannerItem: ({ banner, autoplayDelay, isPaused, sort, language, accountId }: { banner: BannerType autoplayDelay: number + sort: number + language: string + accountId?: string isPaused?: boolean }) => (
({ data-banner-id={banner.id} data-autoplay-delay={autoplayDelay} data-is-paused={isPaused} + data-sort={sort} + data-language={language} + data-account-id={accountId} > BannerItem: {' '} @@ -87,6 +103,11 @@ const createMockBanner = (id: string, status: string = 'enabled', title: string describe('Banner', () => { beforeEach(() => { vi.useFakeTimers() + mockUseSelector.mockImplementation(selector => selector({ + userProfile: { + id: 'account-123', + }, + })) }) afterEach(() => { @@ -235,6 +256,59 @@ describe('Banner', () => { expect(screen.getByTestId('carousel')).toHaveClass('rounded-2xl') }) + + it('tracks enabled banner impressions with expected payload', () => { + mockUseGetBanners.mockReturnValue({ + data: [ + createMockBanner('1', 'enabled', 'Enabled Banner 1'), + createMockBanner('2', 'disabled', 'Disabled Banner'), + createMockBanner('3', 'enabled', 'Enabled Banner 2'), + ], + isLoading: false, + isError: false, + }) + + render() + + expect(mockTrackEvent).toHaveBeenCalledTimes(2) + expect(mockTrackEvent).toHaveBeenNthCalledWith(1, 'explore_banner_impression', expect.objectContaining({ + banner_id: '1', + title: 'Enabled Banner 1', + sort: 1, + link: 'https://example.com', + page: 'explore', + language: 'en-US', + account_id: 'account-123', + event_time: expect.any(Number), + })) + expect(mockTrackEvent).toHaveBeenNthCalledWith(2, 'explore_banner_impression', expect.objectContaining({ + banner_id: '3', + title: 'Enabled Banner 2', + sort: 2, + link: 'https://example.com', + page: 'explore', + language: 'en-US', + account_id: 'account-123', + event_time: expect.any(Number), + })) + }) + + it('does not track impressions when account id is unavailable', () => { + mockUseSelector.mockImplementation(selector => selector({ + userProfile: { + id: '', + }, + })) + mockUseGetBanners.mockReturnValue({ + data: [createMockBanner('1', 'enabled', 'Enabled Banner 1')], + isLoading: false, + isError: false, + }) + + render() + + expect(mockTrackEvent).not.toHaveBeenCalled() + }) }) describe('hover behavior', () => { @@ -435,8 +509,25 @@ describe('Banner', () => { const bannerItems = screen.getAllByTestId('banner-item') expect(bannerItems[0]).toHaveAttribute('data-banner-id', '1') + expect(bannerItems[0]).toHaveAttribute('data-sort', '1') expect(bannerItems[1]).toHaveAttribute('data-banner-id', '2') + expect(bannerItems[1]).toHaveAttribute('data-sort', '2') expect(bannerItems[2]).toHaveAttribute('data-banner-id', '3') + expect(bannerItems[2]).toHaveAttribute('data-sort', '3') + }) + + it('passes tracking context to banner item', () => { + mockUseGetBanners.mockReturnValue({ + data: [createMockBanner('1', 'enabled', 'Banner 1')], + isLoading: false, + isError: false, + }) + + render() + + const bannerItem = screen.getByTestId('banner-item') + expect(bannerItem).toHaveAttribute('data-language', 'en-US') + expect(bannerItem).toHaveAttribute('data-account-id', 'account-123') }) }) diff --git a/web/app/components/explore/banner/banner-item.tsx b/web/app/components/explore/banner/banner-item.tsx index d90a1060f9..c1e48bf420 100644 --- a/web/app/components/explore/banner/banner-item.tsx +++ b/web/app/components/explore/banner/banner-item.tsx @@ -4,6 +4,7 @@ import type { Banner } from '@/models/app' import { RiArrowRightLine } from '@remixicon/react' import { useCallback, useEffect, useMemo, useRef, useState } from 'react' import { useTranslation } from 'react-i18next' +import { trackEvent } from '@/app/components/base/amplitude' import { useCarousel } from '@/app/components/base/carousel' import { cn } 
from '@/utils/classnames' import { IndicatorButton } from './indicator-button' @@ -11,6 +12,9 @@ import { IndicatorButton } from './indicator-button' type BannerItemProps = { banner: Banner autoplayDelay: number + sort: number + language: string + accountId?: string isPaused?: boolean } @@ -20,7 +24,14 @@ const INDICATOR_WIDTH = 20 const INDICATOR_GAP = 8 const MIN_VIEW_MORE_WIDTH = 480 -export const BannerItem: FC = ({ banner, autoplayDelay, isPaused = false }) => { +export const BannerItem: FC = ({ + banner, + autoplayDelay, + sort, + language, + accountId, + isPaused = false, +}) => { const { t } = useTranslation() const { api, selectedIndex } = useCarousel() const { category, title, description, 'img-src': imgSrc } = banner.content @@ -91,9 +102,21 @@ export const BannerItem: FC = ({ banner, autoplayDelay, isPause const handleBannerClick = useCallback(() => { incrementResetKey() + + trackEvent('explore_banner_click', { + banner_id: banner.id, + title: banner.content.title, + sort, + link: banner.link, + page: 'explore', + language, + account_id: accountId, + event_time: Date.now(), + }) + if (banner.link) window.open(banner.link, '_blank', 'noopener,noreferrer') - }, [banner.link, incrementResetKey]) + }, [accountId, banner, incrementResetKey, language, sort]) const handleIndicatorClick = useCallback((index: number) => { incrementResetKey() diff --git a/web/app/components/explore/banner/banner.tsx b/web/app/components/explore/banner/banner.tsx index 4ec0efdb2b..a320bb1974 100644 --- a/web/app/components/explore/banner/banner.tsx +++ b/web/app/components/explore/banner/banner.tsx @@ -1,7 +1,9 @@ import type { FC } from 'react' import * as React from 'react' import { useEffect, useMemo, useRef, useState } from 'react' +import { trackEvent } from '@/app/components/base/amplitude' import { Carousel } from '@/app/components/base/carousel' +import { useSelector } from '@/context/app-context' import { useLocale } from '@/context/i18n' import { useGetBanners } from '@/service/use-explore' import Loading from '../../base/loading' @@ -23,9 +25,11 @@ const LoadingState: FC = () => ( const Banner: FC = () => { const locale = useLocale() const { data: banners, isLoading, isError } = useGetBanners(locale) + const accountId = useSelector(s => s.userProfile.id) const [isHovered, setIsHovered] = useState(false) const [isResizing, setIsResizing] = useState(false) const resizeTimerRef = useRef(null) + const trackedBannerIdsRef = useRef>(new Set()) const enabledBanners = useMemo( () => banners?.filter(banner => banner.status === 'enabled') ?? 
[], @@ -56,6 +60,28 @@ const Banner: FC = () => { } }, []) + useEffect(() => { + if (!accountId) + return + + enabledBanners.forEach((banner, index) => { + if (trackedBannerIdsRef.current.has(banner.id)) + return + + trackEvent('explore_banner_impression', { + banner_id: banner.id, + title: banner.content.title, + sort: index + 1, + link: banner.link, + page: 'explore', + language: locale, + account_id: accountId, + event_time: Date.now(), + }) + trackedBannerIdsRef.current.add(banner.id) + }) + }, [accountId, enabledBanners, locale]) + if (isLoading) return @@ -77,12 +103,15 @@ const Banner: FC = () => { onMouseLeave={() => setIsHovered(false)} > - {enabledBanners.map(banner => ( + {enabledBanners.map((banner, index) => ( ))} diff --git a/web/app/components/header/account-dropdown/__tests__/index.spec.tsx b/web/app/components/header/account-dropdown/__tests__/index.spec.tsx index eb4d543e66..9d4226c33a 100644 --- a/web/app/components/header/account-dropdown/__tests__/index.spec.tsx +++ b/web/app/components/header/account-dropdown/__tests__/index.spec.tsx @@ -69,6 +69,7 @@ vi.mock('@/context/i18n', () => ({ const { mockConfig, mockEnv } = vi.hoisted(() => ({ mockConfig: { IS_CLOUD_EDITION: false, + AMPLITUDE_API_KEY: '', ZENDESK_WIDGET_KEY: '', SUPPORT_EMAIL_ADDRESS: '', }, @@ -80,6 +81,8 @@ const { mockConfig, mockEnv } = vi.hoisted(() => ({ })) vi.mock('@/config', () => ({ get IS_CLOUD_EDITION() { return mockConfig.IS_CLOUD_EDITION }, + get AMPLITUDE_API_KEY() { return mockConfig.AMPLITUDE_API_KEY }, + get isAmplitudeEnabled() { return mockConfig.IS_CLOUD_EDITION && !!mockConfig.AMPLITUDE_API_KEY }, get ZENDESK_WIDGET_KEY() { return mockConfig.ZENDESK_WIDGET_KEY }, get SUPPORT_EMAIL_ADDRESS() { return mockConfig.SUPPORT_EMAIL_ADDRESS }, IS_DEV: false, diff --git a/web/app/components/header/account-setting/__tests__/index.spec.tsx b/web/app/components/header/account-setting/__tests__/index.spec.tsx index 2aa9db4771..279af0b114 100644 --- a/web/app/components/header/account-setting/__tests__/index.spec.tsx +++ b/web/app/components/header/account-setting/__tests__/index.spec.tsx @@ -315,14 +315,14 @@ describe('AccountSetting', () => { it('should handle scroll event in panel', () => { // Act renderAccountSetting() - const scrollContainer = screen.getByRole('dialog').querySelector('.overflow-y-auto') + const scrollContainer = screen.getByRole('dialog').querySelector('.overscroll-contain') // Assert expect(scrollContainer).toBeInTheDocument() if (scrollContainer) { // Scroll down fireEvent.scroll(scrollContainer, { target: { scrollTop: 100 } }) - expect(scrollContainer).toHaveClass('overflow-y-auto') + expect(scrollContainer).toHaveClass('overscroll-contain') // Scroll back up fireEvent.scroll(scrollContainer, { target: { scrollTop: 0 } }) diff --git a/web/app/components/header/account-setting/api-based-extension-page/modal.tsx b/web/app/components/header/account-setting/api-based-extension-page/modal.tsx index efe6c46dcc..5f1492f14a 100644 --- a/web/app/components/header/account-setting/api-based-extension-page/modal.tsx +++ b/web/app/components/header/account-setting/api-based-extension-page/modal.tsx @@ -78,6 +78,7 @@ const ApiBasedExtensionModal: FC = ({
diff --git a/web/app/components/header/account-setting/api-based-extension-page/selector.tsx b/web/app/components/header/account-setting/api-based-extension-page/selector.tsx
index 38acb73154..62052aece6 100644
--- a/web/app/components/header/account-setting/api-based-extension-page/selector.tsx
+++ b/web/app/components/header/account-setting/api-based-extension-page/selector.tsx
@@ -69,7 +69,7 @@ const ApiBasedExtensionSelector: FC = ({
)
}
-
+
diff --git a/web/app/components/header/account-setting/data-source-page-new/configure.tsx b/web/app/components/header/account-setting/data-source-page-new/configure.tsx
index a3dba783e1..484338d333 100644
--- a/web/app/components/header/account-setting/data-source-page-new/configure.tsx
+++ b/web/app/components/header/account-setting/data-source-page-new/configure.tsx
@@ -84,7 +84,7 @@ const Configure = ({
{t('dataSource.configure', { ns: 'common' })}
-
+
{
!!canOAuth && (
@@ -104,7 +104,7 @@ const Configure = ({
}
{
!!canApiKey && !!canOAuth && (
-
+
OR
diff --git a/web/app/components/header/account-setting/data-source-page-new/operator.tsx b/web/app/components/header/account-setting/data-source-page-new/operator.tsx
index 14bdee4fd0..c5b2a948de 100644
--- a/web/app/components/header/account-setting/data-source-page-new/operator.tsx
+++ b/web/app/components/header/account-setting/data-source-page-new/operator.tsx
@@ -39,7 +39,7 @@ const Operator = ({
text: (
- {t('auth.setDefault', { ns: 'plugin' })}
+ {t('auth.setDefault', { ns: 'plugin' })}
),
},
@@ -51,7 +51,7 @@ const Operator = ({
text: (
- {t('operation.rename', { ns: 'common' })}
+ {t('operation.rename', { ns: 'common' })}
),
},
@@ -66,7 +66,7 @@ const Operator = ({
text: (
- {t('operation.edit', { ns: 'common' })}
+ {t('operation.edit', { ns: 'common' })}
),
},
@@ -81,7 +81,7 @@ const Operator = ({
text: (
- {t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}
+ {t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}
),
},
@@ -98,7 +98,7 @@ const Operator = ({
text: (
-
+
{t('operation.remove', { ns: 'common' })}
@@ -122,7 +122,7 @@ const Operator = ({ items={items} secondItems={secondItems} onSelect={handleSelect} - popupClassName="z-[61]" + popupClassName="z-[1002]" triggerProps={{ size: 'l', }} diff --git a/web/app/components/header/account-setting/data-source-page/data-source-notion/__tests__/index.spec.tsx b/web/app/components/header/account-setting/data-source-page/data-source-notion/__tests__/index.spec.tsx deleted file mode 100644 index dad82d81b9..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-notion/__tests__/index.spec.tsx +++ /dev/null @@ -1,462 +0,0 @@ -import type { UseQueryResult } from '@tanstack/react-query' -import type { AppContextValue } from '@/context/app-context' -import type { DataSourceNotion as TDataSourceNotion } from '@/models/common' -import { fireEvent, render, screen, waitFor, within } from '@testing-library/react' -import { useAppContext } from '@/context/app-context' -import { useDataSourceIntegrates, useInvalidDataSourceIntegrates, useNotionConnection } from '@/service/use-common' -import DataSourceNotion from '../index' - -/** - * DataSourceNotion Component Tests - * Using Unit approach with real Panel and sibling components to test Notion integration logic. - */ - -type MockQueryResult = UseQueryResult - -// Mock dependencies -vi.mock('@/context/app-context', () => ({ - useAppContext: vi.fn(), -})) - -vi.mock('@/service/common', () => ({ - syncDataSourceNotion: vi.fn(), - updateDataSourceNotionAction: vi.fn(), -})) - -vi.mock('@/service/use-common', () => ({ - useDataSourceIntegrates: vi.fn(), - useNotionConnection: vi.fn(), - useInvalidDataSourceIntegrates: vi.fn(), -})) - -describe('DataSourceNotion Component', () => { - const mockWorkspaces: TDataSourceNotion[] = [ - { - id: 'ws-1', - provider: 'notion', - is_bound: true, - source_info: { - workspace_name: 'Workspace 1', - workspace_icon: 'https://example.com/icon-1.png', - workspace_id: 'notion-ws-1', - total: 10, - pages: [], - }, - }, - ] - - const baseAppContext: AppContextValue = { - userProfile: { id: 'test-user-id', name: 'test-user', email: 'test@example.com', avatar: '', avatar_url: '', is_password_set: true }, - mutateUserProfile: vi.fn(), - currentWorkspace: { id: 'ws-id', name: 'Workspace', plan: 'basic', status: 'normal', created_at: 0, role: 'owner', providers: [], trial_credits: 0, trial_credits_used: 0, next_credit_reset_date: 0 }, - isCurrentWorkspaceManager: true, - isCurrentWorkspaceOwner: true, - isCurrentWorkspaceEditor: true, - isCurrentWorkspaceDatasetOperator: false, - mutateCurrentWorkspace: vi.fn(), - langGeniusVersionInfo: { current_version: '0.1.0', latest_version: '0.1.1', version: '0.1.1', release_date: '', release_notes: '', can_auto_update: false, current_env: 'test' }, - useSelector: vi.fn(), - isLoadingCurrentWorkspace: false, - isValidatingCurrentWorkspace: false, - } - - /* eslint-disable-next-line ts/no-explicit-any */ - const mockQuerySuccess = (data: T): MockQueryResult => ({ data, isSuccess: true, isError: false, isLoading: false, isPending: false, status: 'success', error: null, fetchStatus: 'idle' } as any) - /* eslint-disable-next-line ts/no-explicit-any */ - const mockQueryPending = (): MockQueryResult => ({ data: undefined, isSuccess: false, isError: false, isLoading: true, isPending: true, status: 'pending', error: null, fetchStatus: 'fetching' } as any) - - const originalLocation = window.location - - beforeEach(() => { - vi.clearAllMocks() - vi.mocked(useAppContext).mockReturnValue(baseAppContext) - 
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [] })) - vi.mocked(useNotionConnection).mockReturnValue(mockQueryPending()) - vi.mocked(useInvalidDataSourceIntegrates).mockReturnValue(vi.fn()) - - const locationMock = { href: '', assign: vi.fn() } - Object.defineProperty(window, 'location', { value: locationMock, writable: true, configurable: true }) - - // Clear document body to avoid toast leaks between tests - document.body.innerHTML = '' - }) - - afterEach(() => { - Object.defineProperty(window, 'location', { value: originalLocation, writable: true, configurable: true }) - }) - - const getWorkspaceItem = (name: string) => { - const nameEl = screen.getByText(name) - return (nameEl.closest('div[class*="workspace-item"]') || nameEl.parentElement) as HTMLElement - } - - describe('Rendering', () => { - it('should render with no workspaces initially and call integration hook', () => { - // Act - render() - - // Assert - expect(screen.getByText('common.dataSource.notion.title')).toBeInTheDocument() - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined }) - }) - - it('should render with provided workspaces and pass initialData to hook', () => { - // Arrange - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces })) - - // Act - render() - - // Assert - expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument() - expect(screen.getByText('Workspace 1')).toBeInTheDocument() - expect(screen.getByText('common.dataSource.notion.connected')).toBeInTheDocument() - expect(screen.getByAltText('workspace icon')).toHaveAttribute('src', 'https://example.com/icon-1.png') - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: mockWorkspaces } }) - }) - - it('should handle workspaces prop being an empty array', () => { - // Act - render() - - // Assert - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: [] } }) - }) - - it('should handle optional workspaces configurations', () => { - // Branch: workspaces passed as undefined - const { rerender } = render() - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined }) - - // Branch: workspaces passed as null - /* eslint-disable-next-line ts/no-explicit-any */ - rerender() - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined }) - - // Branch: workspaces passed as [] - rerender() - expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: [] } }) - }) - - it('should handle cases where integrates data is loading or broken', () => { - // Act (Loading) - const { rerender } = render() - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQueryPending()) - rerender() - // Assert - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - - // Act (Broken) - const brokenData = {} as { data: TDataSourceNotion[] } - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess(brokenData)) - rerender() - // Assert - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - }) - - it('should handle integrates being nullish', () => { - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: 
undefined, isSuccess: true } as any) - render() - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - }) - - it('should handle integrates data being nullish', () => { - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: { data: null }, isSuccess: true } as any) - render() - expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument() - }) - - it('should handle integrates data being valid', () => { - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: { data: [{ id: '1', is_bound: true, source_info: { workspace_name: 'W', workspace_icon: 'https://example.com/i.png', total: 1, pages: [] } }] }, isSuccess: true } as any) - render() - expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument() - }) - - it('should cover all possible falsy/nullish branches for integrates and workspaces', () => { - /* eslint-disable-next-line ts/no-explicit-any */ - const { rerender } = render() - - const integratesCases = [ - undefined, - null, - {}, - { data: null }, - { data: undefined }, - { data: [] }, - { data: [mockWorkspaces[0]] }, - { data: false }, - { data: 0 }, - { data: '' }, - 123, - 'string', - false, - ] - - integratesCases.forEach((val) => { - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: val, isSuccess: true } as any) - /* eslint-disable-next-line ts/no-explicit-any */ - rerender() - }) - - expect(useDataSourceIntegrates).toHaveBeenCalled() - }) - }) - - describe('User Permissions', () => { - it('should pass readOnly as false when user is a manager', () => { - // Arrange - vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: true }) - - // Act - render() - - // Assert - expect(screen.getByText('common.dataSource.notion.title').closest('div')).not.toHaveClass('grayscale') - }) - - it('should pass readOnly as true when user is NOT a manager', () => { - // Arrange - vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: false }) - - // Act - render() - - // Assert - expect(screen.getByText('common.dataSource.connect')).toHaveClass('opacity-50', 'grayscale') - }) - }) - - describe('Configure and Auth Actions', () => { - it('should handle configure action when user is workspace manager', () => { - // Arrange - render() - - // Act - fireEvent.click(screen.getByText('common.dataSource.connect')) - - // Assert - expect(useNotionConnection).toHaveBeenCalledWith(true) - }) - - it('should block configure action when user is NOT workspace manager', () => { - // Arrange - vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: false }) - render() - - // Act - fireEvent.click(screen.getByText('common.dataSource.connect')) - - // Assert - expect(useNotionConnection).toHaveBeenCalledWith(false) - }) - - it('should redirect if auth URL is available when "Auth Again" is clicked', async () => { - // Arrange - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces })) - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http://auth-url' })) - render() - - // Act - const workspaceItem = getWorkspaceItem('Workspace 1') - const actionBtn = within(workspaceItem).getByRole('button') - fireEvent.click(actionBtn) - const authAgainBtn = await 
screen.findByText('common.dataSource.notion.changeAuthorizedPages') - fireEvent.click(authAgainBtn) - - // Assert - expect(window.location.href).toBe('http://auth-url') - }) - - it('should trigger connection flow if URL is missing when "Auth Again" is clicked', async () => { - // Arrange - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces })) - render() - - // Act - const workspaceItem = getWorkspaceItem('Workspace 1') - const actionBtn = within(workspaceItem).getByRole('button') - fireEvent.click(actionBtn) - const authAgainBtn = await screen.findByText('common.dataSource.notion.changeAuthorizedPages') - fireEvent.click(authAgainBtn) - - // Assert - expect(useNotionConnection).toHaveBeenCalledWith(true) - }) - }) - - describe('Side Effects (Redirection and Toast)', () => { - it('should redirect automatically when connection data returns an http URL', async () => { - // Arrange - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http://redirect-url' })) - - // Act - render() - - // Assert - await waitFor(() => { - expect(window.location.href).toBe('http://redirect-url') - }) - }) - - it('should show toast notification when connection data is "internal"', async () => { - // Arrange - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'internal' })) - - // Act - render() - - // Assert - expect(await screen.findByText('common.dataSource.notion.integratedAlert')).toBeInTheDocument() - }) - - it('should handle various data types and missing properties in connection data correctly', async () => { - // Arrange & Act (Unknown string) - const { rerender } = render() - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'unknown' })) - rerender() - // Assert - await waitFor(() => { - expect(window.location.href).toBe('') - expect(screen.queryByText('common.dataSource.notion.integratedAlert')).not.toBeInTheDocument() - }) - - // Act (Broken object) - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({} as any)) - rerender() - // Assert - await waitFor(() => { - expect(window.location.href).toBe('') - }) - - // Act (Non-string) - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 123 } as any)) - rerender() - // Assert - await waitFor(() => { - expect(window.location.href).toBe('') - }) - }) - - it('should redirect if data starts with "http" even if it is just "http"', async () => { - // Arrange - vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http' })) - - // Act - render() - - // Assert - await waitFor(() => { - expect(window.location.href).toBe('http') - }) - }) - - it('should skip side effect logic if connection data is an object but missing the "data" property', async () => { - // Arrange - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useNotionConnection).mockReturnValue({} as any) - - // Act - render() - - // Assert - await waitFor(() => { - expect(window.location.href).toBe('') - }) - }) - - it('should skip side effect logic if data.data is falsy', async () => { - // Arrange - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useNotionConnection).mockReturnValue({ data: { data: null } } as any) - - // Act - render() - - // Assert - await waitFor(() => { - expect(window.location.href).toBe('') - }) - }) - }) - - describe('Additional Action Edge Cases', () => { - it.each([ - undefined, - null, - {}, - { 
data: undefined }, - { data: null }, - { data: '' }, - { data: 0 }, - { data: false }, - { data: 'http' }, - { data: 'internal' }, - { data: 'unknown' }, - ])('should cover connection data branch: %s', async (val) => { - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces })) - /* eslint-disable-next-line ts/no-explicit-any */ - vi.mocked(useNotionConnection).mockReturnValue({ data: val, isSuccess: true } as any) - - render() - - // Trigger handleAuthAgain with these values - const workspaceItem = getWorkspaceItem('Workspace 1') - const actionBtn = within(workspaceItem).getByRole('button') - fireEvent.click(actionBtn) - const authAgainBtn = await screen.findByText('common.dataSource.notion.changeAuthorizedPages') - fireEvent.click(authAgainBtn) - - expect(useNotionConnection).toHaveBeenCalled() - }) - }) - - describe('Edge Cases in Workspace Data', () => { - it('should render correctly with missing source_info optional fields', async () => { - // Arrange - const workspaceWithMissingInfo: TDataSourceNotion = { - id: 'ws-2', - provider: 'notion', - is_bound: false, - source_info: { workspace_name: 'Workspace 2', workspace_id: 'notion-ws-2', workspace_icon: null, pages: [] }, - } - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [workspaceWithMissingInfo] })) - - // Act - render() - - // Assert - expect(screen.getByText('Workspace 2')).toBeInTheDocument() - - const workspaceItem = getWorkspaceItem('Workspace 2') - const actionBtn = within(workspaceItem).getByRole('button') - fireEvent.click(actionBtn) - - expect(await screen.findByText('0 common.dataSource.notion.pagesAuthorized')).toBeInTheDocument() - }) - - it('should display inactive status correctly for unbound workspaces', () => { - // Arrange - const inactiveWS: TDataSourceNotion = { - id: 'ws-3', - provider: 'notion', - is_bound: false, - source_info: { workspace_name: 'Workspace 3', workspace_icon: 'https://example.com/icon-3.png', workspace_id: 'notion-ws-3', total: 5, pages: [] }, - } - vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [inactiveWS] })) - - // Act - render() - - // Assert - expect(screen.getByText('common.dataSource.notion.disconnected')).toBeInTheDocument() - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-notion/index.tsx b/web/app/components/header/account-setting/data-source-page/data-source-notion/index.tsx deleted file mode 100644 index 0959383f29..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-notion/index.tsx +++ /dev/null @@ -1,103 +0,0 @@ -'use client' -import type { FC } from 'react' -import type { DataSourceNotion as TDataSourceNotion } from '@/models/common' -import { noop } from 'es-toolkit/function' -import * as React from 'react' -import { useEffect, useState } from 'react' -import { useTranslation } from 'react-i18next' -import NotionIcon from '@/app/components/base/notion-icon' -import Toast from '@/app/components/base/toast' -import { useAppContext } from '@/context/app-context' -import { useDataSourceIntegrates, useNotionConnection } from '@/service/use-common' -import Panel from '../panel' -import { DataSourceType } from '../panel/types' - -const Icon: FC<{ - src: string - name: string - className: string -}> = ({ src, name, className }) => { - return ( - - ) -} -type Props = { - workspaces?: TDataSourceNotion[] -} - -const DataSourceNotion: FC = ({ - workspaces, -}) => { - const { isCurrentWorkspaceManager } = 
useAppContext() - const [canConnectNotion, setCanConnectNotion] = useState(false) - const { data: integrates } = useDataSourceIntegrates({ - initialData: workspaces ? { data: workspaces } : undefined, - }) - const { data } = useNotionConnection(canConnectNotion) - const { t } = useTranslation() - - const resolvedWorkspaces = integrates?.data ?? [] - const connected = !!resolvedWorkspaces.length - - const handleConnectNotion = () => { - if (!isCurrentWorkspaceManager) - return - - setCanConnectNotion(true) - } - - const handleAuthAgain = () => { - if (data?.data) - window.location.href = data.data - else - setCanConnectNotion(true) - } - - useEffect(() => { - if (data && 'data' in data) { - if (data.data && typeof data.data === 'string' && data.data.startsWith('http')) { - window.location.href = data.data - } - else if (data.data === 'internal') { - Toast.notify({ - type: 'info', - message: t('dataSource.notion.integratedAlert', { ns: 'common' }), - }) - } - } - }, [data, t]) - - return ( - ({ - id: workspace.id, - logo: ({ className }: { className: string }) => ( - - ), - name: workspace.source_info.workspace_name, - isActive: workspace.is_bound, - notionConfig: { - total: workspace.source_info.total || 0, - }, - }))} - onRemove={noop} // handled in operation/index.tsx - notionActions={{ - onChangeAuthorizedPage: handleAuthAgain, - }} - /> - ) -} -export default React.memo(DataSourceNotion) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/__tests__/index.spec.tsx b/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/__tests__/index.spec.tsx deleted file mode 100644 index f433b10020..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/__tests__/index.spec.tsx +++ /dev/null @@ -1,137 +0,0 @@ -import { fireEvent, render, screen, waitFor, within } from '@testing-library/react' -import { syncDataSourceNotion, updateDataSourceNotionAction } from '@/service/common' -import { useInvalidDataSourceIntegrates } from '@/service/use-common' -import Operate from '../index' - -/** - * Operate Component (Notion) Tests - * This component provides actions like Sync, Change Pages, and Remove for Notion data sources. 
- */ - -// Mock services and toast -vi.mock('@/service/common', () => ({ - syncDataSourceNotion: vi.fn(), - updateDataSourceNotionAction: vi.fn(), -})) - -vi.mock('@/service/use-common', () => ({ - useInvalidDataSourceIntegrates: vi.fn(), -})) - -describe('Operate Component (Notion)', () => { - const mockPayload = { - id: 'test-notion-id', - total: 5, - } - const mockOnAuthAgain = vi.fn() - const mockInvalidate = vi.fn() - - beforeEach(() => { - vi.clearAllMocks() - vi.mocked(useInvalidDataSourceIntegrates).mockReturnValue(mockInvalidate) - vi.mocked(syncDataSourceNotion).mockResolvedValue({ result: 'success' }) - vi.mocked(updateDataSourceNotionAction).mockResolvedValue({ result: 'success' }) - }) - - describe('Rendering', () => { - it('should render the menu button initially', () => { - // Act - const { container } = render() - - // Assert - const menuButton = within(container).getByRole('button') - expect(menuButton).toBeInTheDocument() - expect(menuButton).not.toHaveClass('bg-state-base-hover') - }) - - it('should open the menu and show all options when clicked', async () => { - // Arrange - const { container } = render() - const menuButton = within(container).getByRole('button') - - // Act - fireEvent.click(menuButton) - - // Assert - expect(await screen.findByText('common.dataSource.notion.changeAuthorizedPages')).toBeInTheDocument() - expect(screen.getByText('common.dataSource.notion.sync')).toBeInTheDocument() - expect(screen.getByText('common.dataSource.notion.remove')).toBeInTheDocument() - expect(screen.getByText(/5/)).toBeInTheDocument() - expect(screen.getByText(/common.dataSource.notion.pagesAuthorized/)).toBeInTheDocument() - expect(menuButton).toHaveClass('bg-state-base-hover') - }) - }) - - describe('Menu Actions', () => { - it('should call onAuthAgain when Change Authorized Pages is clicked', async () => { - // Arrange - const { container } = render() - fireEvent.click(within(container).getByRole('button')) - const option = await screen.findByText('common.dataSource.notion.changeAuthorizedPages') - - // Act - fireEvent.click(option) - - // Assert - expect(mockOnAuthAgain).toHaveBeenCalledTimes(1) - }) - - it('should call handleSync, show success toast, and invalidate cache when Sync is clicked', async () => { - // Arrange - const { container } = render() - fireEvent.click(within(container).getByRole('button')) - const syncBtn = await screen.findByText('common.dataSource.notion.sync') - - // Act - fireEvent.click(syncBtn) - - // Assert - await waitFor(() => { - expect(syncDataSourceNotion).toHaveBeenCalledWith({ - url: `/oauth/data-source/notion/${mockPayload.id}/sync`, - }) - }) - expect((await screen.findAllByText('common.api.success')).length).toBeGreaterThan(0) - expect(mockInvalidate).toHaveBeenCalledTimes(1) - }) - - it('should call handleRemove, show success toast, and invalidate cache when Remove is clicked', async () => { - // Arrange - const { container } = render() - fireEvent.click(within(container).getByRole('button')) - const removeBtn = await screen.findByText('common.dataSource.notion.remove') - - // Act - fireEvent.click(removeBtn) - - // Assert - await waitFor(() => { - expect(updateDataSourceNotionAction).toHaveBeenCalledWith({ - url: `/data-source/integrates/${mockPayload.id}/disable`, - }) - }) - expect((await screen.findAllByText('common.api.success')).length).toBeGreaterThan(0) - expect(mockInvalidate).toHaveBeenCalledTimes(1) - }) - }) - - describe('State Transitions', () => { - it('should toggle the open class on the button based on menu 
visibility', async () => { - // Arrange - const { container } = render() - const menuButton = within(container).getByRole('button') - - // Act (Open) - fireEvent.click(menuButton) - // Assert - expect(menuButton).toHaveClass('bg-state-base-hover') - - // Act (Close - click again) - fireEvent.click(menuButton) - // Assert - await waitFor(() => { - expect(menuButton).not.toHaveClass('bg-state-base-hover') - }) - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/index.tsx b/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/index.tsx deleted file mode 100644 index 043eb3c846..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-notion/operate/index.tsx +++ /dev/null @@ -1,103 +0,0 @@ -'use client' -import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react' -import { - RiDeleteBinLine, - RiLoopLeftLine, - RiMoreFill, - RiStickyNoteAddLine, -} from '@remixicon/react' -import { Fragment } from 'react' -import { useTranslation } from 'react-i18next' -import Toast from '@/app/components/base/toast' -import { syncDataSourceNotion, updateDataSourceNotionAction } from '@/service/common' -import { useInvalidDataSourceIntegrates } from '@/service/use-common' -import { cn } from '@/utils/classnames' - -type OperateProps = { - payload: { - id: string - total: number - } - onAuthAgain: () => void -} -export default function Operate({ - payload, - onAuthAgain, -}: OperateProps) { - const { t } = useTranslation() - const invalidateDataSourceIntegrates = useInvalidDataSourceIntegrates() - - const updateIntegrates = () => { - Toast.notify({ - type: 'success', - message: t('api.success', { ns: 'common' }), - }) - invalidateDataSourceIntegrates() - } - const handleSync = async () => { - await syncDataSourceNotion({ url: `/oauth/data-source/notion/${payload.id}/sync` }) - updateIntegrates() - } - const handleRemove = async () => { - await updateDataSourceNotionAction({ url: `/data-source/integrates/${payload.id}/disable` }) - updateIntegrates() - } - - return ( - - { - ({ open }) => ( - <> - - - - - -
- -
- -
-
{t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}
-
- {payload.total} - {' '} - {t('dataSource.notion.pagesAuthorized', { ns: 'common' })} -
-
-
-
- -
- -
{t('dataSource.notion.sync', { ns: 'common' })}
-
-
-
- -
-
- -
{t('dataSource.notion.remove', { ns: 'common' })}
-
-
-
-
-
- - ) - } -
- ) -} diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-firecrawl-modal.spec.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-firecrawl-modal.spec.tsx deleted file mode 100644 index dadda4a349..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-firecrawl-modal.spec.tsx +++ /dev/null @@ -1,204 +0,0 @@ -import type { CommonResponse } from '@/models/common' -import { fireEvent, render, screen, waitFor } from '@testing-library/react' -import userEvent from '@testing-library/user-event' - -import { createDataSourceApiKeyBinding } from '@/service/datasets' -import ConfigFirecrawlModal from '../config-firecrawl-modal' - -/** - * ConfigFirecrawlModal Component Tests - * Tests validation, save logic, and basic rendering for the Firecrawl configuration modal. - */ - -vi.mock('@/service/datasets', () => ({ - createDataSourceApiKeyBinding: vi.fn(), -})) - -describe('ConfigFirecrawlModal Component', () => { - const mockOnCancel = vi.fn() - const mockOnSaved = vi.fn() - - beforeEach(() => { - vi.clearAllMocks() - }) - - describe('Initial Rendering', () => { - it('should render the modal with all fields and buttons', () => { - // Act - render() - - // Assert - expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument() - expect(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder')).toBeInTheDocument() - expect(screen.getByPlaceholderText('https://api.firecrawl.dev')).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.save/i })).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.cancel/i })).toBeInTheDocument() - expect(screen.getByRole('link', { name: /datasetCreation\.firecrawl\.getApiKeyLinkText/i })).toHaveAttribute('href', 'https://www.firecrawl.dev/account') - }) - }) - - describe('Form Interactions', () => { - it('should update state when input fields change', async () => { - // Arrange - render() - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder') - const baseUrlInput = screen.getByPlaceholderText('https://api.firecrawl.dev') - - // Act - fireEvent.change(apiKeyInput, { target: { value: 'firecrawl-key' } }) - fireEvent.change(baseUrlInput, { target: { value: 'https://custom.firecrawl.dev' } }) - - // Assert - expect(apiKeyInput).toHaveValue('firecrawl-key') - expect(baseUrlInput).toHaveValue('https://custom.firecrawl.dev') - }) - - it('should call onCancel when cancel button is clicked', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - - // Assert - expect(mockOnCancel).toHaveBeenCalled() - }) - }) - - describe('Validation', () => { - it('should show error when saving without API Key', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument() - }) - expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - - it('should show error for invalid Base URL format', async () => { - const user = userEvent.setup() - // Arrange - render() - const baseUrlInput = 
screen.getByPlaceholderText('https://api.firecrawl.dev') - - // Act - await user.type(baseUrlInput, 'ftp://invalid-url.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(screen.getByText('common.errorMsg.urlError')).toBeInTheDocument() - }) - expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - }) - - describe('Saving Logic', () => { - it('should save successfully with valid API Key and custom URL', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'valid-key') - await user.type(screen.getByPlaceholderText('https://api.firecrawl.dev'), 'http://my-firecrawl.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({ - category: 'website', - provider: 'firecrawl', - credentials: { - auth_type: 'bearer', - config: { - api_key: 'valid-key', - base_url: 'http://my-firecrawl.com', - }, - }, - }) - }) - await waitFor(() => { - expect(screen.getByText('common.api.success')).toBeInTheDocument() - expect(mockOnSaved).toHaveBeenCalled() - }) - }) - - it('should use default Base URL if none is provided during save', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({ - credentials: expect.objectContaining({ - config: expect.objectContaining({ - base_url: 'https://api.firecrawl.dev', - }), - }), - })) - }) - }) - - it('should ignore multiple save clicks while saving is in progress', async () => { - const user = userEvent.setup() - // Arrange - let resolveSave: (value: CommonResponse) => void - const savePromise = new Promise((resolve) => { - resolveSave = resolve - }) - vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise) - render() - await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key') - const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i }) - - // Act - await user.click(saveBtn) - await user.click(saveBtn) - - // Assert - expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1) - - // Cleanup - resolveSave!({ result: 'success' }) - await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1)) - }) - - it('should accept base_url starting with https://', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key') - await user.type(screen.getByPlaceholderText('https://api.firecrawl.dev'), 'https://secure-firecrawl.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({ - 
credentials: expect.objectContaining({ - config: expect.objectContaining({ - base_url: 'https://secure-firecrawl.com', - }), - }), - })) - }) - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-jina-reader-modal.spec.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-jina-reader-modal.spec.tsx deleted file mode 100644 index 26c53993c1..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-jina-reader-modal.spec.tsx +++ /dev/null @@ -1,179 +0,0 @@ -import { render, screen, waitFor } from '@testing-library/react' -import userEvent from '@testing-library/user-event' - -import { DataSourceProvider } from '@/models/common' -import { createDataSourceApiKeyBinding } from '@/service/datasets' -import ConfigJinaReaderModal from '../config-jina-reader-modal' - -/** - * ConfigJinaReaderModal Component Tests - * Tests validation, save logic, and basic rendering for the Jina Reader configuration modal. - */ - -vi.mock('@/service/datasets', () => ({ - createDataSourceApiKeyBinding: vi.fn(), -})) - -describe('ConfigJinaReaderModal Component', () => { - const mockOnCancel = vi.fn() - const mockOnSaved = vi.fn() - - beforeEach(() => { - vi.clearAllMocks() - }) - - describe('Initial Rendering', () => { - it('should render the modal with API Key field and buttons', () => { - // Act - render() - - // Assert - expect(screen.getByText('datasetCreation.jinaReader.configJinaReader')).toBeInTheDocument() - expect(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder')).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.save/i })).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.cancel/i })).toBeInTheDocument() - expect(screen.getByRole('link', { name: /datasetCreation\.jinaReader\.getApiKeyLinkText/i })).toHaveAttribute('href', 'https://jina.ai/reader/') - }) - }) - - describe('Form Interactions', () => { - it('should update state when API Key field changes', async () => { - const user = userEvent.setup() - // Arrange - render() - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder') - - // Act - await user.type(apiKeyInput, 'jina-test-key') - - // Assert - expect(apiKeyInput).toHaveValue('jina-test-key') - }) - - it('should call onCancel when cancel button is clicked', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - - // Assert - expect(mockOnCancel).toHaveBeenCalled() - }) - }) - - describe('Validation', () => { - it('should show error when saving without API Key', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument() - }) - expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - }) - - describe('Saving Logic', () => { - it('should save successfully with valid API Key', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder') - - // Act 
- await user.type(apiKeyInput, 'valid-jina-key') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({ - category: 'website', - provider: DataSourceProvider.jinaReader, - credentials: { - auth_type: 'bearer', - config: { - api_key: 'valid-jina-key', - }, - }, - }) - }) - await waitFor(() => { - expect(screen.getByText('common.api.success')).toBeInTheDocument() - expect(mockOnSaved).toHaveBeenCalled() - }) - }) - - it('should ignore multiple save clicks while saving is in progress', async () => { - const user = userEvent.setup() - // Arrange - let resolveSave: (value: { result: 'success' }) => void - const savePromise = new Promise<{ result: 'success' }>((resolve) => { - resolveSave = resolve - }) - vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise) - render() - await user.type(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder'), 'test-key') - const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i }) - - // Act - await user.click(saveBtn) - await user.click(saveBtn) - - // Assert - expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1) - - // Cleanup - resolveSave!({ result: 'success' }) - await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1)) - }) - - it('should show encryption info and external link in the modal', async () => { - render() - - // Verify PKCS1_OAEP link exists - const pkcsLink = screen.getByText('PKCS1_OAEP') - expect(pkcsLink.closest('a')).toHaveAttribute('href', 'https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html') - - // Verify the Jina Reader external link - const jinaLink = screen.getByRole('link', { name: /datasetCreation\.jinaReader\.getApiKeyLinkText/i }) - expect(jinaLink).toHaveAttribute('target', '_blank') - }) - - it('should return early when save is clicked while already saving (isSaving guard)', async () => { - const user = userEvent.setup() - // Arrange - a save that never resolves so isSaving stays true - let resolveFirst: (value: { result: 'success' }) => void - const neverResolves = new Promise<{ result: 'success' }>((resolve) => { - resolveFirst = resolve - }) - vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(neverResolves) - render() - - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder') - await user.type(apiKeyInput, 'valid-key') - - const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i }) - // First click - starts saving, isSaving becomes true - await user.click(saveBtn) - expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1) - - // Second click using fireEvent bypasses disabled check - hits isSaving guard - const { fireEvent: fe } = await import('@testing-library/react') - fe.click(saveBtn) - // Still only called once because isSaving=true returns early - expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1) - - // Cleanup - resolveFirst!({ result: 'success' }) - await waitFor(() => expect(mockOnSaved).toHaveBeenCalled()) - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-watercrawl-modal.spec.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-watercrawl-modal.spec.tsx deleted file mode 100644 index 6c5961be54..0000000000 --- 
a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/config-watercrawl-modal.spec.tsx +++ /dev/null @@ -1,204 +0,0 @@ -import type { CommonResponse } from '@/models/common' -import { fireEvent, render, screen, waitFor } from '@testing-library/react' -import userEvent from '@testing-library/user-event' - -import { createDataSourceApiKeyBinding } from '@/service/datasets' -import ConfigWatercrawlModal from '../config-watercrawl-modal' - -/** - * ConfigWatercrawlModal Component Tests - * Tests validation, save logic, and basic rendering for the Watercrawl configuration modal. - */ - -vi.mock('@/service/datasets', () => ({ - createDataSourceApiKeyBinding: vi.fn(), -})) - -describe('ConfigWatercrawlModal Component', () => { - const mockOnCancel = vi.fn() - const mockOnSaved = vi.fn() - - beforeEach(() => { - vi.clearAllMocks() - }) - - describe('Initial Rendering', () => { - it('should render the modal with all fields and buttons', () => { - // Act - render() - - // Assert - expect(screen.getByText('datasetCreation.watercrawl.configWatercrawl')).toBeInTheDocument() - expect(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder')).toBeInTheDocument() - expect(screen.getByPlaceholderText('https://app.watercrawl.dev')).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.save/i })).toBeInTheDocument() - expect(screen.getByRole('button', { name: /common\.operation\.cancel/i })).toBeInTheDocument() - expect(screen.getByRole('link', { name: /datasetCreation\.watercrawl\.getApiKeyLinkText/i })).toHaveAttribute('href', 'https://app.watercrawl.dev/') - }) - }) - - describe('Form Interactions', () => { - it('should update state when input fields change', async () => { - // Arrange - render() - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder') - const baseUrlInput = screen.getByPlaceholderText('https://app.watercrawl.dev') - - // Act - fireEvent.change(apiKeyInput, { target: { value: 'water-key' } }) - fireEvent.change(baseUrlInput, { target: { value: 'https://custom.watercrawl.dev' } }) - - // Assert - expect(apiKeyInput).toHaveValue('water-key') - expect(baseUrlInput).toHaveValue('https://custom.watercrawl.dev') - }) - - it('should call onCancel when cancel button is clicked', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - - // Assert - expect(mockOnCancel).toHaveBeenCalled() - }) - }) - - describe('Validation', () => { - it('should show error when saving without API Key', async () => { - const user = userEvent.setup() - // Arrange - render() - - // Act - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument() - }) - expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - - it('should show error for invalid Base URL format', async () => { - const user = userEvent.setup() - // Arrange - render() - const baseUrlInput = screen.getByPlaceholderText('https://app.watercrawl.dev') - - // Act - await user.type(baseUrlInput, 'ftp://invalid-url.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(screen.getByText('common.errorMsg.urlError')).toBeInTheDocument() - }) - 
expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - }) - - describe('Saving Logic', () => { - it('should save successfully with valid API Key and custom URL', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'valid-key') - await user.type(screen.getByPlaceholderText('https://app.watercrawl.dev'), 'http://my-watercrawl.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({ - category: 'website', - provider: 'watercrawl', - credentials: { - auth_type: 'x-api-key', - config: { - api_key: 'valid-key', - base_url: 'http://my-watercrawl.com', - }, - }, - }) - }) - await waitFor(() => { - expect(screen.getByText('common.api.success')).toBeInTheDocument() - expect(mockOnSaved).toHaveBeenCalled() - }) - }) - - it('should use default Base URL if none is provided during save', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({ - credentials: expect.objectContaining({ - config: expect.objectContaining({ - base_url: 'https://app.watercrawl.dev', - }), - }), - })) - }) - }) - - it('should ignore multiple save clicks while saving is in progress', async () => { - const user = userEvent.setup() - // Arrange - let resolveSave: (value: CommonResponse) => void - const savePromise = new Promise((resolve) => { - resolveSave = resolve - }) - vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise) - render() - await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key') - const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i }) - - // Act - await user.click(saveBtn) - await user.click(saveBtn) - - // Assert - expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1) - - // Cleanup - resolveSave!({ result: 'success' }) - await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1)) - }) - - it('should accept base_url starting with https://', async () => { - const user = userEvent.setup() - // Arrange - vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' }) - render() - - // Act - await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key') - await user.type(screen.getByPlaceholderText('https://app.watercrawl.dev'), 'https://secure-watercrawl.com') - await user.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({ - credentials: expect.objectContaining({ - config: expect.objectContaining({ - base_url: 'https://secure-watercrawl.com', - }), - }), - })) - }) - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/index.spec.tsx 
b/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/index.spec.tsx deleted file mode 100644 index 1e95cbd087..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/__tests__/index.spec.tsx +++ /dev/null @@ -1,251 +0,0 @@ -import type { AppContextValue } from '@/context/app-context' -import type { CommonResponse } from '@/models/common' -import { fireEvent, render, screen, waitFor } from '@testing-library/react' - -import { useAppContext } from '@/context/app-context' -import { DataSourceProvider } from '@/models/common' -import { fetchDataSources, removeDataSourceApiKeyBinding } from '@/service/datasets' -import DataSourceWebsite from '../index' - -/** - * DataSourceWebsite Component Tests - * Tests integration of multiple website scraping providers (Firecrawl, WaterCrawl, Jina Reader). - */ - -type DataSourcesResponse = CommonResponse & { - sources: Array<{ id: string, provider: DataSourceProvider }> -} - -// Mock App Context -vi.mock('@/context/app-context', () => ({ - useAppContext: vi.fn(), -})) - -// Mock Service calls -vi.mock('@/service/datasets', () => ({ - fetchDataSources: vi.fn(), - removeDataSourceApiKeyBinding: vi.fn(), - createDataSourceApiKeyBinding: vi.fn(), -})) - -describe('DataSourceWebsite Component', () => { - const mockSources = [ - { id: '1', provider: DataSourceProvider.fireCrawl }, - { id: '2', provider: DataSourceProvider.waterCrawl }, - { id: '3', provider: DataSourceProvider.jinaReader }, - ] - - beforeEach(() => { - vi.clearAllMocks() - vi.mocked(useAppContext).mockReturnValue({ isCurrentWorkspaceManager: true } as unknown as AppContextValue) - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [] } as DataSourcesResponse) - }) - - // Helper to render and wait for initial fetch to complete - const renderAndWait = async (provider: DataSourceProvider) => { - const result = render() - await waitFor(() => expect(fetchDataSources).toHaveBeenCalled()) - return result - } - - describe('Data Initialization', () => { - it('should fetch data sources on mount and reflect configured status', async () => { - // Arrange - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: mockSources } as DataSourcesResponse) - - // Act - await renderAndWait(DataSourceProvider.fireCrawl) - - // Assert - expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument() - }) - - it('should pass readOnly status based on workspace manager permissions', async () => { - // Arrange - vi.mocked(useAppContext).mockReturnValue({ isCurrentWorkspaceManager: false } as unknown as AppContextValue) - - // Act - await renderAndWait(DataSourceProvider.fireCrawl) - - // Assert - expect(screen.getByText('common.dataSource.configure')).toHaveClass('cursor-default') - }) - }) - - describe('Provider Specific Rendering', () => { - it('should render correct logo and name for Firecrawl', async () => { - // Arrange - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[0]] } as DataSourcesResponse) - - // Act - await renderAndWait(DataSourceProvider.fireCrawl) - - // Assert - expect(await screen.findByText('Firecrawl')).toBeInTheDocument() - expect(screen.getByText('🔥')).toBeInTheDocument() - }) - - it('should render correct logo and name for WaterCrawl', async () => { - // Arrange - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[1]] } as DataSourcesResponse) - - // Act - await 
renderAndWait(DataSourceProvider.waterCrawl) - - // Assert - const elements = await screen.findAllByText('WaterCrawl') - expect(elements.length).toBeGreaterThanOrEqual(1) - }) - - it('should render correct logo and name for Jina Reader', async () => { - // Arrange - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[2]] } as DataSourcesResponse) - - // Act - await renderAndWait(DataSourceProvider.jinaReader) - - // Assert - const elements = await screen.findAllByText('Jina Reader') - expect(elements.length).toBeGreaterThanOrEqual(1) - }) - }) - - describe('Modal Interactions', () => { - it('should manage opening and closing of configuration modals', async () => { - // Arrange - await renderAndWait(DataSourceProvider.fireCrawl) - - // Act (Open) - fireEvent.click(screen.getByText('common.dataSource.configure')) - // Assert - expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument() - - // Act (Cancel) - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - // Assert - expect(screen.queryByText('datasetCreation.firecrawl.configFirecrawl')).not.toBeInTheDocument() - }) - - it('should re-fetch sources after saving configuration (Watercrawl)', async () => { - // Arrange - await renderAndWait(DataSourceProvider.waterCrawl) - fireEvent.click(screen.getByText('common.dataSource.configure')) - vi.mocked(fetchDataSources).mockClear() - - // Act - fireEvent.change(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), { target: { value: 'test-key' } }) - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(fetchDataSources).toHaveBeenCalled() - expect(screen.queryByText('datasetCreation.watercrawl.configWatercrawl')).not.toBeInTheDocument() - }) - }) - - it('should re-fetch sources after saving configuration (Jina Reader)', async () => { - // Arrange - await renderAndWait(DataSourceProvider.jinaReader) - fireEvent.click(screen.getByText('common.dataSource.configure')) - vi.mocked(fetchDataSources).mockClear() - - // Act - fireEvent.change(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder'), { target: { value: 'test-key' } }) - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(fetchDataSources).toHaveBeenCalled() - expect(screen.queryByText('datasetCreation.jinaReader.configJinaReader')).not.toBeInTheDocument() - }) - }) - }) - - describe('Management Actions', () => { - it('should handle successful data source removal with toast notification', async () => { - // Arrange - vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[0]] } as DataSourcesResponse) - vi.mocked(removeDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' } as CommonResponse) - await renderAndWait(DataSourceProvider.fireCrawl) - await waitFor(() => expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument()) - - // Act - const removeBtn = screen.getByText('Firecrawl').parentElement?.querySelector('svg')?.parentElement - if (removeBtn) - fireEvent.click(removeBtn) - - // Assert - await waitFor(() => { - expect(removeDataSourceApiKeyBinding).toHaveBeenCalledWith('1') - expect(screen.getByText('common.api.remove')).toBeInTheDocument() - }) - expect(screen.queryByText('common.dataSource.website.configuredCrawlers')).not.toBeInTheDocument() - }) - - it('should 
skip removal API call if no data source ID is present', async () => { - // Arrange - await renderAndWait(DataSourceProvider.fireCrawl) - - // Act - const removeBtn = screen.queryByText('Firecrawl')?.parentElement?.querySelector('svg')?.parentElement - if (removeBtn) - fireEvent.click(removeBtn) - - // Assert - expect(removeDataSourceApiKeyBinding).not.toHaveBeenCalled() - }) - }) - - describe('Firecrawl Save Flow', () => { - it('should re-fetch sources after saving Firecrawl configuration', async () => { - // Arrange - await renderAndWait(DataSourceProvider.fireCrawl) - fireEvent.click(screen.getByText('common.dataSource.configure')) - expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument() - vi.mocked(fetchDataSources).mockClear() - - // Act - fill in required API key field and save - const apiKeyInput = screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder') - fireEvent.change(apiKeyInput, { target: { value: 'test-key' } }) - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i })) - - // Assert - await waitFor(() => { - expect(fetchDataSources).toHaveBeenCalled() - expect(screen.queryByText('datasetCreation.firecrawl.configFirecrawl')).not.toBeInTheDocument() - }) - }) - }) - - describe('Cancel Flow', () => { - it('should close watercrawl modal when cancel is clicked', async () => { - // Arrange - await renderAndWait(DataSourceProvider.waterCrawl) - fireEvent.click(screen.getByText('common.dataSource.configure')) - expect(screen.getByText('datasetCreation.watercrawl.configWatercrawl')).toBeInTheDocument() - - // Act - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - - // Assert - modal closed - await waitFor(() => { - expect(screen.queryByText('datasetCreation.watercrawl.configWatercrawl')).not.toBeInTheDocument() - }) - }) - - it('should close jina reader modal when cancel is clicked', async () => { - // Arrange - await renderAndWait(DataSourceProvider.jinaReader) - fireEvent.click(screen.getByText('common.dataSource.configure')) - expect(screen.getByText('datasetCreation.jinaReader.configJinaReader')).toBeInTheDocument() - - // Act - fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i })) - - // Assert - modal closed - await waitFor(() => { - expect(screen.queryByText('datasetCreation.jinaReader.configJinaReader')).not.toBeInTheDocument() - }) - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/config-firecrawl-modal.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/config-firecrawl-modal.tsx deleted file mode 100644 index d7f15236a7..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/config-firecrawl-modal.tsx +++ /dev/null @@ -1,165 +0,0 @@ -'use client' -import type { FC } from 'react' -import type { FirecrawlConfig } from '@/models/common' -import * as React from 'react' -import { useCallback, useState } from 'react' -import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' -import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general' -import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security' -import { - PortalToFollowElem, - PortalToFollowElemContent, -} from '@/app/components/base/portal-to-follow-elem' -import Toast from '@/app/components/base/toast' -import Field from '@/app/components/datasets/create/website/base/field' 
-import { createDataSourceApiKeyBinding } from '@/service/datasets' - -type Props = { - onCancel: () => void - onSaved: () => void -} - -const I18N_PREFIX = 'firecrawl' - -const DEFAULT_BASE_URL = 'https://api.firecrawl.dev' - -const ConfigFirecrawlModal: FC = ({ - onCancel, - onSaved, -}) => { - const { t } = useTranslation() - const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState({ - api_key: '', - base_url: '', - }) - - const handleConfigChange = useCallback((key: string) => { - return (value: string | number) => { - setConfig(prev => ({ ...prev, [key]: value as string })) - } - }, []) - - const handleSave = useCallback(async () => { - if (isSaving) - return - let errorMsg = '' - if (config.base_url && !((config.base_url.startsWith('http://') || config.base_url.startsWith('https://')))) - errorMsg = t('errorMsg.urlError', { ns: 'common' }) - if (!errorMsg) { - if (!config.api_key) { - errorMsg = t('errorMsg.fieldRequired', { - ns: 'common', - field: 'API Key', - }) - } - } - - if (errorMsg) { - Toast.notify({ - type: 'error', - message: errorMsg, - }) - return - } - const postData = { - category: 'website', - provider: 'firecrawl', - credentials: { - auth_type: 'bearer', - config: { - api_key: config.api_key, - base_url: config.base_url || DEFAULT_BASE_URL, - }, - }, - } - try { - setIsSaving(true) - await createDataSourceApiKeyBinding(postData) - Toast.notify({ - type: 'success', - message: t('api.success', { ns: 'common' }), - }) - } - finally { - setIsSaving(false) - } - - onSaved() - }, [config.api_key, config.base_url, onSaved, t, isSaving]) - - return ( - - -
-
-
-
-
{t(`${I18N_PREFIX}.configFirecrawl`, { ns: 'datasetCreation' })}
-
- -
- - -
-
- - {t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })} - - -
- - -
- -
-
-
-
- - {t('modelProvider.encrypted.front', { ns: 'common' })} - - PKCS1_OAEP - - {t('modelProvider.encrypted.back', { ns: 'common' })} -
-
-
-
-
-
- ) -} -export default React.memo(ConfigFirecrawlModal) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/config-jina-reader-modal.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/config-jina-reader-modal.tsx deleted file mode 100644 index 2374ae6174..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/config-jina-reader-modal.tsx +++ /dev/null @@ -1,144 +0,0 @@ -'use client' -import type { FC } from 'react' -import * as React from 'react' -import { useCallback, useState } from 'react' -import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' -import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general' -import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security' -import { - PortalToFollowElem, - PortalToFollowElemContent, -} from '@/app/components/base/portal-to-follow-elem' -import Toast from '@/app/components/base/toast' -import Field from '@/app/components/datasets/create/website/base/field' -import { DataSourceProvider } from '@/models/common' -import { createDataSourceApiKeyBinding } from '@/service/datasets' - -type Props = { - onCancel: () => void - onSaved: () => void -} - -const I18N_PREFIX = 'jinaReader' - -const ConfigJinaReaderModal: FC = ({ - onCancel, - onSaved, -}) => { - const { t } = useTranslation() - const [isSaving, setIsSaving] = useState(false) - const [apiKey, setApiKey] = useState('') - - const handleSave = useCallback(async () => { - if (isSaving) - return - let errorMsg = '' - if (!errorMsg) { - if (!apiKey) { - errorMsg = t('errorMsg.fieldRequired', { - ns: 'common', - field: 'API Key', - }) - } - } - - if (errorMsg) { - Toast.notify({ - type: 'error', - message: errorMsg, - }) - return - } - const postData = { - category: 'website', - provider: DataSourceProvider.jinaReader, - credentials: { - auth_type: 'bearer', - config: { - api_key: apiKey, - }, - }, - } - try { - setIsSaving(true) - await createDataSourceApiKeyBinding(postData) - Toast.notify({ - type: 'success', - message: t('api.success', { ns: 'common' }), - }) - } - finally { - setIsSaving(false) - } - - onSaved() - }, [apiKey, onSaved, t, isSaving]) - - return ( - - -
-
-
-
-
{t(`${I18N_PREFIX}.configJinaReader`, { ns: 'datasetCreation' })}
-
- -
- setApiKey(value as string)} - placeholder={t(`${I18N_PREFIX}.apiKeyPlaceholder`, { ns: 'datasetCreation' })!} - /> -
-
- - {t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })} - - -
- - -
- -
-
-
-
- - {t('modelProvider.encrypted.front', { ns: 'common' })} - - PKCS1_OAEP - - {t('modelProvider.encrypted.back', { ns: 'common' })} -
-
-
-
-
-
- ) -} -export default React.memo(ConfigJinaReaderModal) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/config-watercrawl-modal.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/config-watercrawl-modal.tsx deleted file mode 100644 index a9399f25cd..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/config-watercrawl-modal.tsx +++ /dev/null @@ -1,165 +0,0 @@ -'use client' -import type { FC } from 'react' -import type { WatercrawlConfig } from '@/models/common' -import * as React from 'react' -import { useCallback, useState } from 'react' -import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' -import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general' -import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security' -import { - PortalToFollowElem, - PortalToFollowElemContent, -} from '@/app/components/base/portal-to-follow-elem' -import Toast from '@/app/components/base/toast' -import Field from '@/app/components/datasets/create/website/base/field' -import { createDataSourceApiKeyBinding } from '@/service/datasets' - -type Props = { - onCancel: () => void - onSaved: () => void -} - -const I18N_PREFIX = 'watercrawl' - -const DEFAULT_BASE_URL = 'https://app.watercrawl.dev' - -const ConfigWatercrawlModal: FC = ({ - onCancel, - onSaved, -}) => { - const { t } = useTranslation() - const [isSaving, setIsSaving] = useState(false) - const [config, setConfig] = useState({ - api_key: '', - base_url: '', - }) - - const handleConfigChange = useCallback((key: string) => { - return (value: string | number) => { - setConfig(prev => ({ ...prev, [key]: value as string })) - } - }, []) - - const handleSave = useCallback(async () => { - if (isSaving) - return - let errorMsg = '' - if (config.base_url && !((config.base_url.startsWith('http://') || config.base_url.startsWith('https://')))) - errorMsg = t('errorMsg.urlError', { ns: 'common' }) - if (!errorMsg) { - if (!config.api_key) { - errorMsg = t('errorMsg.fieldRequired', { - ns: 'common', - field: 'API Key', - }) - } - } - - if (errorMsg) { - Toast.notify({ - type: 'error', - message: errorMsg, - }) - return - } - const postData = { - category: 'website', - provider: 'watercrawl', - credentials: { - auth_type: 'x-api-key', - config: { - api_key: config.api_key, - base_url: config.base_url || DEFAULT_BASE_URL, - }, - }, - } - try { - setIsSaving(true) - await createDataSourceApiKeyBinding(postData) - Toast.notify({ - type: 'success', - message: t('api.success', { ns: 'common' }), - }) - } - finally { - setIsSaving(false) - } - - onSaved() - }, [config.api_key, config.base_url, onSaved, t, isSaving]) - - return ( - - -
-
-
-
-
{t(`${I18N_PREFIX}.configWatercrawl`, { ns: 'datasetCreation' })}
-
- -
- - -
-
- - {t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })} - - -
- - -
- -
-
-
-
- - {t('modelProvider.encrypted.front', { ns: 'common' })} - - PKCS1_OAEP - - {t('modelProvider.encrypted.back', { ns: 'common' })} -
-
-
-
-
-
- ) -} -export default React.memo(ConfigWatercrawlModal) diff --git a/web/app/components/header/account-setting/data-source-page/data-source-website/index.tsx b/web/app/components/header/account-setting/data-source-page/data-source-website/index.tsx deleted file mode 100644 index 22bfb4950e..0000000000 --- a/web/app/components/header/account-setting/data-source-page/data-source-website/index.tsx +++ /dev/null @@ -1,137 +0,0 @@ -'use client' -import type { FC } from 'react' -import type { DataSourceItem } from '@/models/common' -import * as React from 'react' -import { useCallback, useEffect, useState } from 'react' -import { useTranslation } from 'react-i18next' -import Toast from '@/app/components/base/toast' -import s from '@/app/components/datasets/create/website/index.module.css' -import { useAppContext } from '@/context/app-context' -import { DataSourceProvider } from '@/models/common' -import { fetchDataSources, removeDataSourceApiKeyBinding } from '@/service/datasets' -import { cn } from '@/utils/classnames' -import Panel from '../panel' - -import { DataSourceType } from '../panel/types' -import ConfigFirecrawlModal from './config-firecrawl-modal' -import ConfigJinaReaderModal from './config-jina-reader-modal' -import ConfigWatercrawlModal from './config-watercrawl-modal' - -type Props = { - provider: DataSourceProvider -} - -const DataSourceWebsite: FC = ({ provider }) => { - const { t } = useTranslation() - const { isCurrentWorkspaceManager } = useAppContext() - const [sources, setSources] = useState([]) - const checkSetApiKey = useCallback(async () => { - const res = await fetchDataSources() as any - const list = res.sources - setSources(list) - }, []) - - useEffect(() => { - checkSetApiKey() - }, []) - - const [configTarget, setConfigTarget] = useState(null) - const showConfig = useCallback((provider: DataSourceProvider) => { - setConfigTarget(provider) - }, [setConfigTarget]) - - const hideConfig = useCallback(() => { - setConfigTarget(null) - }, [setConfigTarget]) - - const handleAdded = useCallback(() => { - checkSetApiKey() - hideConfig() - }, [checkSetApiKey, hideConfig]) - - const getIdByProvider = (provider: DataSourceProvider): string | undefined => { - const source = sources.find(item => item.provider === provider) - return source?.id - } - - const getProviderName = (provider: DataSourceProvider): string => { - if (provider === DataSourceProvider.fireCrawl) - return 'Firecrawl' - - if (provider === DataSourceProvider.waterCrawl) - return 'WaterCrawl' - - return 'Jina Reader' - } - - const handleRemove = useCallback((provider: DataSourceProvider) => { - return async () => { - const dataSourceId = getIdByProvider(provider) - if (dataSourceId) { - await removeDataSourceApiKeyBinding(dataSourceId) - setSources(sources.filter(item => item.provider !== provider)) - Toast.notify({ - type: 'success', - message: t('api.remove', { ns: 'common' }), - }) - } - } - }, [sources, t]) - - return ( - <> - item.provider === provider) !== undefined} - onConfigure={() => showConfig(provider)} - readOnly={!isCurrentWorkspaceManager} - configuredList={sources.filter(item => item.provider === provider).map(item => ({ - id: item.id, - logo: ({ className }: { className: string }) => { - if (item.provider === DataSourceProvider.fireCrawl) { - return ( -
- 🔥 -
- ) - } - - if (item.provider === DataSourceProvider.waterCrawl) { - return ( -
- -
- ) - } - return ( -
- -
- ) - }, - name: getProviderName(item.provider), - isActive: true, - }))} - onRemove={handleRemove(provider)} - /> - {configTarget === DataSourceProvider.fireCrawl && ( - - )} - {configTarget === DataSourceProvider.waterCrawl && ( - - )} - {configTarget === DataSourceProvider.jinaReader && ( - - )} - - - ) -} -export default React.memo(DataSourceWebsite) diff --git a/web/app/components/header/account-setting/data-source-page/panel/__tests__/config-item.spec.tsx b/web/app/components/header/account-setting/data-source-page/panel/__tests__/config-item.spec.tsx deleted file mode 100644 index 4ad49a8f8f..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/__tests__/config-item.spec.tsx +++ /dev/null @@ -1,213 +0,0 @@ -import type { ConfigItemType } from '../config-item' -import { fireEvent, render, screen } from '@testing-library/react' -import ConfigItem from '../config-item' -import { DataSourceType } from '../types' - -/** - * ConfigItem Component Tests - * Tests rendering of individual configuration items for Notion and Website data sources. - */ - -// Mock Operate component to isolate ConfigItem unit tests. -vi.mock('../../data-source-notion/operate', () => ({ - default: ({ onAuthAgain, payload }: { onAuthAgain: () => void, payload: { id: string, total: number } }) => ( -
- - {JSON.stringify(payload)} -
- ), -})) - -describe('ConfigItem Component', () => { - const mockOnRemove = vi.fn() - const mockOnChangeAuthorizedPage = vi.fn() - const MockLogo = (props: React.SVGProps) => - - const baseNotionPayload: ConfigItemType = { - id: 'notion-1', - logo: MockLogo, - name: 'Notion Workspace', - isActive: true, - notionConfig: { total: 5 }, - } - - const baseWebsitePayload: ConfigItemType = { - id: 'website-1', - logo: MockLogo, - name: 'My Website', - isActive: true, - } - - afterEach(() => { - vi.clearAllMocks() - }) - - describe('Notion Configuration', () => { - it('should render active Notion config item with connected status and operator', () => { - // Act - render( - , - ) - - // Assert - expect(screen.getByTestId('mock-logo')).toBeInTheDocument() - expect(screen.getByText('Notion Workspace')).toBeInTheDocument() - const statusText = screen.getByText('common.dataSource.notion.connected') - expect(statusText).toHaveClass('text-util-colors-green-green-600') - expect(screen.getByTestId('operate-payload')).toHaveTextContent(JSON.stringify({ id: 'notion-1', total: 5 })) - }) - - it('should render inactive Notion config item with disconnected status', () => { - // Arrange - const inactivePayload = { ...baseNotionPayload, isActive: false } - - // Act - render( - , - ) - - // Assert - const statusText = screen.getByText('common.dataSource.notion.disconnected') - expect(statusText).toHaveClass('text-util-colors-warning-warning-600') - }) - - it('should handle auth action through the Operate component', () => { - // Arrange - render( - , - ) - - // Act - fireEvent.click(screen.getByTestId('operate-auth-btn')) - - // Assert - expect(mockOnChangeAuthorizedPage).toHaveBeenCalled() - }) - - it('should fallback to 0 total if notionConfig is missing', () => { - // Arrange - const payloadNoConfig = { ...baseNotionPayload, notionConfig: undefined } - - // Act - render( - , - ) - - // Assert - expect(screen.getByTestId('operate-payload')).toHaveTextContent(JSON.stringify({ id: 'notion-1', total: 0 })) - }) - - it('should handle missing notionActions safely without crashing', () => { - // Arrange - render( - , - ) - - // Act & Assert - expect(() => fireEvent.click(screen.getByTestId('operate-auth-btn'))).not.toThrow() - }) - }) - - describe('Website Configuration', () => { - it('should render active Website config item and hide operator', () => { - // Act - render( - , - ) - - // Assert - expect(screen.getByText('common.dataSource.website.active')).toBeInTheDocument() - expect(screen.queryByTestId('mock-operate')).not.toBeInTheDocument() - }) - - it('should render inactive Website config item', () => { - // Arrange - const inactivePayload = { ...baseWebsitePayload, isActive: false } - - // Act - render( - , - ) - - // Assert - const statusText = screen.getByText('common.dataSource.website.inactive') - expect(statusText).toHaveClass('text-util-colors-warning-warning-600') - }) - - it('should show remove button and trigger onRemove when clicked (not read-only)', () => { - // Arrange - const { container } = render( - , - ) - - // Note: This selector is brittle but necessary since the delete button lacks - // accessible attributes (data-testid, aria-label). Ideally, the component should - // be updated to include proper accessibility attributes. 
- const deleteBtn = container.querySelector('div[class*="cursor-pointer"]') as HTMLElement - - // Act - fireEvent.click(deleteBtn) - - // Assert - expect(mockOnRemove).toHaveBeenCalled() - }) - - it('should hide remove button in read-only mode', () => { - // Arrange - const { container } = render( - , - ) - - // Assert - const deleteBtn = container.querySelector('div[class*="cursor-pointer"]') - expect(deleteBtn).not.toBeInTheDocument() - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/panel/__tests__/index.spec.tsx b/web/app/components/header/account-setting/data-source-page/panel/__tests__/index.spec.tsx deleted file mode 100644 index d83cdb5360..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/__tests__/index.spec.tsx +++ /dev/null @@ -1,226 +0,0 @@ -import type { ConfigItemType } from '../config-item' -import { fireEvent, render, screen } from '@testing-library/react' -import { DataSourceProvider } from '@/models/common' -import Panel from '../index' -import { DataSourceType } from '../types' - -/** - * Panel Component Tests - * Tests layout, conditional rendering, and interactions for data source panels (Notion and Website). - */ - -vi.mock('../../data-source-notion/operate', () => ({ - default: () =>
, -})) - -describe('Panel Component', () => { - const onConfigure = vi.fn() - const onRemove = vi.fn() - const mockConfiguredList: ConfigItemType[] = [ - { id: '1', name: 'Item 1', isActive: true, logo: () => null }, - { id: '2', name: 'Item 2', isActive: false, logo: () => null }, - ] - - beforeEach(() => { - vi.clearAllMocks() - }) - - describe('Notion Panel Rendering', () => { - it('should render Notion panel when not configured and isSupportList is true', () => { - // Act - render( - , - ) - - // Assert - expect(screen.getByText('common.dataSource.notion.title')).toBeInTheDocument() - expect(screen.getByText('common.dataSource.notion.description')).toBeInTheDocument() - const connectBtn = screen.getByText('common.dataSource.connect') - expect(connectBtn).toBeInTheDocument() - - // Act - fireEvent.click(connectBtn) - // Assert - expect(onConfigure).toHaveBeenCalled() - }) - - it('should render Notion panel in readOnly mode when not configured', () => { - // Act - render( - , - ) - - // Assert - const connectBtn = screen.getByText('common.dataSource.connect') - expect(connectBtn).toHaveClass('cursor-default opacity-50 grayscale') - }) - - it('should render Notion panel when configured with list of items', () => { - // Act - render( - , - ) - - // Assert - expect(screen.getByRole('button', { name: 'common.dataSource.configure' })).toBeInTheDocument() - expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument() - expect(screen.getByText('Item 1')).toBeInTheDocument() - expect(screen.getByText('Item 2')).toBeInTheDocument() - }) - - it('should hide connect button for Notion if isSupportList is false', () => { - // Act - render( - , - ) - - // Assert - expect(screen.queryByText('common.dataSource.connect')).not.toBeInTheDocument() - }) - - it('should disable Notion configure button in readOnly mode (configured state)', () => { - // Act - render( - , - ) - - // Assert - const btn = screen.getByRole('button', { name: 'common.dataSource.configure' }) - expect(btn).toBeDisabled() - }) - }) - - describe('Website Panel Rendering', () => { - it('should show correct provider names and handle configuration when not configured', () => { - // Arrange - const { rerender } = render( - , - ) - - // Assert Firecrawl - expect(screen.getByText('🔥 Firecrawl')).toBeInTheDocument() - - // Rerender for WaterCrawl - rerender( - , - ) - expect(screen.getByText('WaterCrawl')).toBeInTheDocument() - - // Rerender for Jina Reader - rerender( - , - ) - expect(screen.getByText('Jina Reader')).toBeInTheDocument() - - // Act - const configBtn = screen.getByText('common.dataSource.configure') - fireEvent.click(configBtn) - // Assert - expect(onConfigure).toHaveBeenCalled() - }) - - it('should handle readOnly mode for Website configuration button', () => { - // Act - render( - , - ) - - // Assert - const configBtn = screen.getByText('common.dataSource.configure') - expect(configBtn).toHaveClass('cursor-default opacity-50 grayscale') - - // Act - fireEvent.click(configBtn) - // Assert - expect(onConfigure).not.toHaveBeenCalled() - }) - - it('should render Website panel correctly when configured with crawlers', () => { - // Act - render( - , - ) - - // Assert - expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument() - expect(screen.getByText('Item 1')).toBeInTheDocument() - expect(screen.getByText('Item 2')).toBeInTheDocument() - }) - }) -}) diff --git a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx 
b/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx deleted file mode 100644 index f62c5e147d..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/config-item.tsx +++ /dev/null @@ -1,85 +0,0 @@ -'use client' -import type { FC } from 'react' -import { - RiDeleteBinLine, -} from '@remixicon/react' -import { noop } from 'es-toolkit/function' -import * as React from 'react' -import { useTranslation } from 'react-i18next' -import { cn } from '@/utils/classnames' -import Indicator from '../../../indicator' -import Operate from '../data-source-notion/operate' -import s from './style.module.css' -import { DataSourceType } from './types' - -export type ConfigItemType = { - id: string - logo: any - name: string - isActive: boolean - notionConfig?: { - total: number - } -} - -type Props = { - type: DataSourceType - payload: ConfigItemType - onRemove: () => void - notionActions?: { - onChangeAuthorizedPage: () => void - } - readOnly: boolean -} - -const ConfigItem: FC = ({ - type, - payload, - onRemove, - notionActions, - readOnly, -}) => { - const { t } = useTranslation() - const isNotion = type === DataSourceType.notion - const isWebsite = type === DataSourceType.website - const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || noop - - return ( -
- -
{payload.name}
- { - payload.isActive - ? - : - } -
- { - payload.isActive - ? t(isNotion ? 'dataSource.notion.connected' : 'dataSource.website.active', { ns: 'common' }) - : t(isNotion ? 'dataSource.notion.disconnected' : 'dataSource.website.inactive', { ns: 'common' }) - } -
-
- {isNotion && ( - - )} - - { - isWebsite && !readOnly && ( -
- -
- ) - } - -
- ) -} -export default React.memo(ConfigItem) diff --git a/web/app/components/header/account-setting/data-source-page/panel/index.tsx b/web/app/components/header/account-setting/data-source-page/panel/index.tsx deleted file mode 100644 index 0909603ae8..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/index.tsx +++ /dev/null @@ -1,151 +0,0 @@ -'use client' -import type { FC } from 'react' -import type { ConfigItemType } from './config-item' -import { RiAddLine } from '@remixicon/react' -import * as React from 'react' -import { useTranslation } from 'react-i18next' -import Button from '@/app/components/base/button' - -import { DataSourceProvider } from '@/models/common' -import { cn } from '@/utils/classnames' -import ConfigItem from './config-item' -import s from './style.module.css' -import { DataSourceType } from './types' - -type Props = { - type: DataSourceType - provider?: DataSourceProvider - isConfigured: boolean - onConfigure: () => void - readOnly: boolean - isSupportList?: boolean - configuredList: ConfigItemType[] - onRemove: () => void - notionActions?: { - onChangeAuthorizedPage: () => void - } -} - -const Panel: FC = ({ - type, - provider, - isConfigured, - onConfigure, - readOnly, - configuredList, - isSupportList, - onRemove, - notionActions, -}) => { - const { t } = useTranslation() - const isNotion = type === DataSourceType.notion - const isWebsite = type === DataSourceType.website - - const getProviderName = (): string => { - if (provider === DataSourceProvider.fireCrawl) - return '🔥 Firecrawl' - if (provider === DataSourceProvider.waterCrawl) - return 'WaterCrawl' - return 'Jina Reader' - } - - return ( -
-
-
-
-
-
{t(`dataSource.${type}.title`, { ns: 'common' })}
- {isWebsite && ( -
- {t('dataSource.website.with', { ns: 'common' })} - {' '} - {getProviderName()} -
- )} -
- { - !isConfigured && ( -
- {t(`dataSource.${type}.description`, { ns: 'common' })} -
- ) - } -
- {isNotion && ( - <> - { - isConfigured - ? ( - - ) - : ( - <> - {isSupportList && ( -
- - {t('dataSource.connect', { ns: 'common' })} -
- )} - - ) - } - - )} - - {isWebsite && !isConfigured && ( -
- {t('dataSource.configure', { ns: 'common' })} -
- )} - -
- { - isConfigured && ( - <> -
-
- {isNotion ? t('dataSource.notion.connectedWorkspace', { ns: 'common' }) : t('dataSource.website.configuredCrawlers', { ns: 'common' })} -
-
-
-
- { - configuredList.map(item => ( - - )) - } -
- - ) - } -
- ) -} -export default React.memo(Panel) diff --git a/web/app/components/header/account-setting/data-source-page/panel/style.module.css b/web/app/components/header/account-setting/data-source-page/panel/style.module.css deleted file mode 100644 index ac9be02205..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/style.module.css +++ /dev/null @@ -1,17 +0,0 @@ -.notion-icon { - background: #ffffff url(../../../assets/notion.svg) center center no-repeat; - background-size: 20px 20px; -} - -.website-icon { - background: #ffffff url(../../../../datasets/create/assets/web.svg) center center no-repeat; - background-size: 20px 20px; -} - -.workspace-item { - box-shadow: 0px 1px 2px rgba(16, 24, 40, 0.05); -} - -.workspace-item:last-of-type { - margin-bottom: 0; -} diff --git a/web/app/components/header/account-setting/data-source-page/panel/types.ts b/web/app/components/header/account-setting/data-source-page/panel/types.ts deleted file mode 100644 index 345bc10f81..0000000000 --- a/web/app/components/header/account-setting/data-source-page/panel/types.ts +++ /dev/null @@ -1,4 +0,0 @@ -export enum DataSourceType { - notion = 'notion', - website = 'website', -} diff --git a/web/app/components/header/account-setting/index.tsx b/web/app/components/header/account-setting/index.tsx index 7e77af2e5f..bfceaeb059 100644 --- a/web/app/components/header/account-setting/index.tsx +++ b/web/app/components/header/account-setting/index.tsx @@ -1,8 +1,9 @@ 'use client' import type { AccountSettingTab } from '@/app/components/header/account-setting/constants' -import { useCallback, useEffect, useRef, useState } from 'react' +import { useCallback, useState } from 'react' import { useTranslation } from 'react-i18next' import SearchInput from '@/app/components/base/search-input' +import { ScrollArea } from '@/app/components/base/ui/scroll-area' import BillingPage from '@/app/components/billing/billing-page' import CustomPage from '@/app/components/custom/custom-page' import { @@ -129,20 +130,6 @@ export default function AccountSetting({ ], }, ] - const scrollRef = useRef(null) - const [scrolled, setScrolled] = useState(false) - useEffect(() => { - const targetElement = scrollRef.current - const scrollHandle = (e: Event) => { - const userScrolled = (e.target as HTMLDivElement).scrollTop > 0 - setScrolled(userScrolled) - } - targetElement?.addEventListener('scroll', scrollHandle) - return () => { - targetElement?.removeEventListener('scroll', scrollHandle) - } - }, []) - const activeItem = [...menuItems[0].items, ...menuItems[1].items].find(item => item.key === activeMenu) const [searchValue, setSearchValue] = useState('') @@ -201,7 +188,7 @@ export default function AccountSetting({ }
-
+
-
-
+ +
{activeItem?.name} {activeItem?.description && ( @@ -241,7 +234,7 @@ export default function AccountSetting({ {activeMenu === ACCOUNT_SETTING_TAB.CUSTOM && } {activeMenu === ACCOUNT_SETTING_TAB.LANGUAGE && }
-
+
diff --git a/web/app/components/header/account-setting/members-page/invite-modal/__tests__/index.spec.tsx b/web/app/components/header/account-setting/members-page/invite-modal/__tests__/index.spec.tsx index d2aeca1b6c..7de1fbeccb 100644 --- a/web/app/components/header/account-setting/members-page/invite-modal/__tests__/index.spec.tsx +++ b/web/app/components/header/account-setting/members-page/invite-modal/__tests__/index.spec.tsx @@ -2,11 +2,15 @@ import type { InvitationResponse } from '@/models/common' import { fireEvent, render, screen, waitFor } from '@testing-library/react' import userEvent from '@testing-library/user-event' import { vi } from 'vitest' -import { ToastContext } from '@/app/components/base/toast/context' +import { toast } from '@/app/components/base/ui/toast' import { useProviderContextSelector } from '@/context/provider-context' import { inviteMember } from '@/service/common' import InviteModal from '../index' +const { mockToastError } = vi.hoisted(() => ({ + mockToastError: vi.fn(), +})) + vi.mock('@/context/provider-context', () => ({ useProviderContextSelector: vi.fn(), useProviderContext: vi.fn(() => ({ @@ -14,6 +18,11 @@ vi.mock('@/context/provider-context', () => ({ })), })) vi.mock('@/service/common') +vi.mock('@/app/components/base/ui/toast', () => ({ + toast: { + error: mockToastError, + }, +})) vi.mock('@/context/i18n', () => ({ useLocale: () => 'en-US', })) @@ -37,7 +46,6 @@ describe('InviteModal', () => { const mockOnCancel = vi.fn() const mockOnSend = vi.fn() const mockRefreshLicenseLimit = vi.fn() - const mockNotify = vi.fn() beforeEach(() => { vi.clearAllMocks() @@ -49,10 +57,11 @@ describe('InviteModal', () => { }) const renderModal = (isEmailSetup = true) => render( - - - , + , ) + const fillEmails = (value: string) => { + fireEvent.change(screen.getByTestId('mock-email-input'), { target: { value } }) + } it('should render invite modal content', async () => { renderModal() @@ -68,12 +77,8 @@ describe('InviteModal', () => { }) it('should enable send button after entering an email', async () => { - const user = userEvent.setup() - renderModal() - - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') expect(screen.getByRole('button', { name: /members\.sendInvite/i })).toBeEnabled() }) @@ -84,7 +89,7 @@ describe('InviteModal', () => { renderModal() - await user.type(screen.getByTestId('mock-email-input'), 'user@example.com') + fillEmails('user@example.com') await user.click(screen.getByRole('button', { name: /members\.sendInvite/i })) await waitFor(() => { @@ -103,8 +108,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') await user.click(screen.getByRole('button', { name: /members\.sendInvite/i })) await waitFor(() => { @@ -116,8 +120,6 @@ describe('InviteModal', () => { }) it('should keep send button disabled when license limit is exceeded', async () => { - const user = userEvent.setup() - vi.mocked(useProviderContextSelector).mockImplementation(selector => selector({ licenseLimit: { workspace_members: { size: 10, limit: 10 } }, refreshLicenseLimit: mockRefreshLicenseLimit, @@ -125,8 +127,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') expect(screen.getByRole('button', { name: /members\.sendInvite/i 
})).toBeDisabled() }) @@ -144,15 +145,11 @@ describe('InviteModal', () => { const user = userEvent.setup() renderModal() - const input = screen.getByTestId('mock-email-input') // Use an email that passes basic validation but fails our strict regex (needs 2+ char TLD) - await user.type(input, 'invalid@email.c') + fillEmails('invalid@email.c') await user.click(screen.getByRole('button', { name: /members\.sendInvite/i })) - expect(mockNotify).toHaveBeenCalledWith({ - type: 'error', - message: 'common.members.emailInvalid', - }) + expect(toast.error).toHaveBeenCalledWith('common.members.emailInvalid') expect(inviteMember).not.toHaveBeenCalled() }) @@ -160,8 +157,7 @@ describe('InviteModal', () => { const user = userEvent.setup() renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') expect(screen.getByText('user@example.com')).toBeInTheDocument() @@ -203,7 +199,7 @@ describe('InviteModal', () => { renderModal() - await user.type(screen.getByTestId('mock-email-input'), 'user@example.com') + fillEmails('user@example.com') await user.click(screen.getByRole('button', { name: /members\.sendInvite/i })) await waitFor(() => { @@ -214,8 +210,6 @@ describe('InviteModal', () => { }) it('should show destructive text color when used size exceeds limit', async () => { - const user = userEvent.setup() - vi.mocked(useProviderContextSelector).mockImplementation(selector => selector({ licenseLimit: { workspace_members: { size: 10, limit: 10 } }, refreshLicenseLimit: mockRefreshLicenseLimit, @@ -223,8 +217,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') // usedSize = 10 + 1 = 11 > limit 10 → destructive color const counter = screen.getByText('11') @@ -241,8 +234,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') const sendBtn = screen.getByRole('button', { name: /members\.sendInvite/i }) @@ -264,8 +256,6 @@ describe('InviteModal', () => { }) it('should show destructive color and disable send button when limit is exactly met with one email', async () => { - const user = userEvent.setup() - // size=10, limit=10 - adding 1 email makes usedSize=11 > limit=10 vi.mocked(useProviderContextSelector).mockImplementation(selector => selector({ licenseLimit: { workspace_members: { size: 10, limit: 10 } }, @@ -274,8 +264,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') // isLimitExceeded=true → button is disabled, cannot submit const sendBtn = screen.getByRole('button', { name: /members\.sendInvite/i }) @@ -293,8 +282,7 @@ describe('InviteModal', () => { renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') const sendBtn = screen.getByRole('button', { name: /members\.sendInvite/i }) @@ -320,11 +308,9 @@ describe('InviteModal', () => { refreshLicenseLimit: mockRefreshLicenseLimit, } as unknown as Parameters[0])) - const user = userEvent.setup() renderModal() - const input = screen.getByTestId('mock-email-input') - await user.type(input, 'user@example.com') + fillEmails('user@example.com') // isLimited=false → no destructive color const counter = 
screen.getByText('1') diff --git a/web/app/components/header/account-setting/members-page/invite-modal/index.module.css b/web/app/components/header/account-setting/members-page/invite-modal/index.module.css deleted file mode 100644 index fbaa1187bd..0000000000 --- a/web/app/components/header/account-setting/members-page/invite-modal/index.module.css +++ /dev/null @@ -1,12 +0,0 @@ -.modal { - padding: 24px 32px !important; - width: 400px !important; -} - -.emailsInput { - background-color: rgb(243 244 246 / var(--tw-bg-opacity)) !important; -} - -.emailBackground { - background-color: white !important; -} diff --git a/web/app/components/header/account-setting/members-page/invite-modal/index.tsx b/web/app/components/header/account-setting/members-page/invite-modal/index.tsx index 8e4e47e0b8..9b4e9fccdc 100644 --- a/web/app/components/header/account-setting/members-page/invite-modal/index.tsx +++ b/web/app/components/header/account-setting/members-page/invite-modal/index.tsx @@ -2,20 +2,17 @@ import type { RoleKey } from './role-selector' import type { InvitationResult } from '@/models/common' import { useBoolean } from 'ahooks' -import { noop } from 'es-toolkit/function' import { useCallback, useEffect, useState } from 'react' import { useTranslation } from 'react-i18next' import { ReactMultiEmail } from 'react-multi-email' -import { useContext } from 'use-context-selector' import Button from '@/app/components/base/button' -import Modal from '@/app/components/base/modal' -import { ToastContext } from '@/app/components/base/toast/context' +import { Dialog, DialogCloseButton, DialogContent, DialogTitle } from '@/app/components/base/ui/dialog' +import { toast } from '@/app/components/base/ui/toast' import { emailRegex } from '@/config' import { useLocale } from '@/context/i18n' import { useProviderContextSelector } from '@/context/provider-context' import { inviteMember } from '@/service/common' import { cn } from '@/utils/classnames' -import s from './index.module.css' import RoleSelector from './role-selector' import 'react-multi-email/dist/style.css' @@ -34,7 +31,6 @@ const InviteModal = ({ const licenseLimit = useProviderContextSelector(s => s.licenseLimit) const refreshLicenseLimit = useProviderContextSelector(s => s.refreshLicenseLimit) const [emails, setEmails] = useState([]) - const { notify } = useContext(ToastContext) const [isLimited, setIsLimited] = useState(false) const [isLimitExceeded, setIsLimitExceeded] = useState(false) const [usedSize, setUsedSize] = useState(licenseLimit.workspace_members.size ?? 0) @@ -74,21 +70,28 @@ const InviteModal = ({ catch { } } else { - notify({ type: 'error', message: t('members.emailInvalid', { ns: 'common' }) }) + toast.error(t('members.emailInvalid', { ns: 'common' })) } setIsSubmitted() - }, [isLimitExceeded, emails, role, locale, onCancel, onSend, notify, t, isSubmitting, refreshLicenseLimit, setIsSubmitted, setIsSubmitting]) + }, [isLimitExceeded, emails, role, locale, onCancel, onSend, t, isSubmitting, refreshLicenseLimit, setIsSubmitted, setIsSubmitting]) return ( -
- -
-
{t('members.inviteTeamMember', { ns: 'common' })}
-
+ { + if (!open) + onCancel() + }} + > + + +
+ + {t('members.inviteTeamMember', { ns: 'common' })} +
{t('members.inviteTeamMemberTip', { ns: 'common' })}
{!isEmailSetup && ( @@ -152,8 +155,8 @@ const InviteModal = ({ {t('members.sendInvite', { ns: 'common' })}
- -
+ + ) } diff --git a/web/app/components/header/account-setting/members-page/invite-modal/role-selector.tsx b/web/app/components/header/account-setting/members-page/invite-modal/role-selector.tsx index e258884b0f..6383b203d9 100644 --- a/web/app/components/header/account-setting/members-page/invite-modal/role-selector.tsx +++ b/web/app/components/header/account-setting/members-page/invite-modal/role-selector.tsx @@ -1,11 +1,10 @@ import * as React from 'react' -import { useState } from 'react' import { useTranslation } from 'react-i18next' import { - PortalToFollowElem, - PortalToFollowElemContent, - PortalToFollowElemTrigger, -} from '@/app/components/base/portal-to-follow-elem' + Popover, + PopoverContent, + PopoverTrigger, +} from '@/app/components/base/ui/popover' import { useProviderContext } from '@/context/provider-context' import { cn } from '@/utils/classnames' @@ -25,115 +24,111 @@ export type RoleSelectorProps = { const RoleSelector = ({ value, onChange }: RoleSelectorProps) => { const { t } = useTranslation() - const [open, setOpen] = useState(false) const { datasetOperatorEnabled } = useProviderContext() + const [open, setOpen] = React.useState(false) return ( - -
- setOpen(v => !v)} - className="block" - > + +
{t('members.invitedAsRole', { ns: 'common', role: t(roleI18nKeyMap[value], { ns: 'common' }) })}
+
+ + +
{ + onChange('normal') + setOpen(false) + }} > -
{t('members.invitedAsRole', { ns: 'common', role: t(roleI18nKeyMap[value], { ns: 'common' }) })}
-
-
- - -
-
-
{ - onChange('normal') - setOpen(false) - }} - > -
-
{t('members.normal', { ns: 'common' })}
-
{t('members.normalTip', { ns: 'common' })}
- {value === 'normal' && ( -
- )} -
-
-
{ - onChange('editor') - setOpen(false) - }} - > -
-
{t('members.editor', { ns: 'common' })}
-
{t('members.editorTip', { ns: 'common' })}
- {value === 'editor' && ( -
- )} -
-
-
{ - onChange('admin') - setOpen(false) - }} - > -
-
{t('members.admin', { ns: 'common' })}
-
{t('members.adminTip', { ns: 'common' })}
- {value === 'admin' && ( -
- )} -
-
- {datasetOperatorEnabled && ( +
+
{t('members.normal', { ns: 'common' })}
+
{t('members.normalTip', { ns: 'common' })}
+ {value === 'normal' && (
{ - onChange('dataset_operator') - setOpen(false) - }} - > -
-
{t('members.datasetOperator', { ns: 'common' })}
-
{t('members.datasetOperatorTip', { ns: 'common' })}
- {value === 'dataset_operator' && ( -
- )} -
-
+ data-testid="role-option-check" + className="i-custom-vender-line-general-check absolute left-0 top-0.5 h-4 w-4 text-text-accent" + /> )}
- -
- +
{ + onChange('editor') + setOpen(false) + }} + > +
+
{t('members.editor', { ns: 'common' })}
+
{t('members.editorTip', { ns: 'common' })}
+ {value === 'editor' && ( +
+ )} +
+
+
{ + onChange('admin') + setOpen(false) + }} + > +
+
{t('members.admin', { ns: 'common' })}
+
{t('members.adminTip', { ns: 'common' })}
+ {value === 'admin' && ( +
+ )} +
+
+ {datasetOperatorEnabled && ( +
{ + onChange('dataset_operator') + setOpen(false) + }} + > +
+
{t('members.datasetOperator', { ns: 'common' })}
+
{t('members.datasetOperatorTip', { ns: 'common' })}
+ {value === 'dataset_operator' && ( +
+ )} +
+
+ )} +
+ + ) } diff --git a/web/app/components/header/account-setting/members-page/invited-modal/index.tsx b/web/app/components/header/account-setting/members-page/invited-modal/index.tsx index 389db4a42d..dbabb384a2 100644 --- a/web/app/components/header/account-setting/members-page/invited-modal/index.tsx +++ b/web/app/components/header/account-setting/members-page/invited-modal/index.tsx @@ -1,15 +1,10 @@ import type { InvitationResult } from '@/models/common' -import { XMarkIcon } from '@heroicons/react/24/outline' -import { CheckCircleIcon } from '@heroicons/react/24/solid' -import { RiQuestionLine } from '@remixicon/react' -import { noop } from 'es-toolkit/function' import { useMemo } from 'react' import { useTranslation } from 'react-i18next' import Button from '@/app/components/base/button' -import Modal from '@/app/components/base/modal' -import Tooltip from '@/app/components/base/tooltip' +import { Dialog, DialogCloseButton, DialogContent, DialogTitle } from '@/app/components/base/ui/dialog' +import { Tooltip, TooltipContent, TooltipTrigger } from '@/app/components/base/ui/tooltip' import { IS_CE_EDITION } from '@/config' -import s from './index.module.css' import InvitationLink from './invitation-link' export type SuccessInvitationResult = Extract @@ -29,8 +24,18 @@ const InvitedModal = ({ const failedInvitationResults = useMemo(() => invitationResults?.filter(item => item.status !== 'success') as FailedInvitationResult[], [invitationResults]) return ( -
- + { + if (!open) + onCancel() + }} + > + +
- +
-
-
{t('members.invitationSent', { ns: 'common' })}
+ {t('members.invitationSent', { ns: 'common' })} {!IS_CE_EDITION && (
{t('members.invitationSentTip', { ns: 'common' })}
)} @@ -54,7 +58,7 @@ const InvitedModal = ({ !!successInvitationResults.length && ( <> -
{t('members.invitationLink', { ns: 'common' })}
+
{t('members.invitationLink', { ns: 'common' })}
{successInvitationResults.map(item => )} @@ -64,18 +68,23 @@ const InvitedModal = ({ !!failedInvitationResults.length && ( <> -
{t('members.failedInvitationEmails', { ns: 'common' })}
+
{t('members.failedInvitationEmails', { ns: 'common' })}
{ failedInvitationResults.map(item => (
- -
- {item.email} - -
+ + + {item.email} +
+
+ )} + /> + + {item.message} +
), @@ -97,8 +106,8 @@ const InvitedModal = ({ {t('members.ok', { ns: 'common' })}
- -
+
+
) } diff --git a/web/app/components/header/account-setting/members-page/invited-modal/invitation-link.tsx b/web/app/components/header/account-setting/members-page/invited-modal/invitation-link.tsx index 8f55660fd8..0c5874c4dc 100644 --- a/web/app/components/header/account-setting/members-page/invited-modal/invitation-link.tsx +++ b/web/app/components/header/account-setting/members-page/invited-modal/invitation-link.tsx @@ -4,7 +4,7 @@ import copy from 'copy-to-clipboard' import { t } from 'i18next' import * as React from 'react' import { useCallback, useEffect, useState } from 'react' -import Tooltip from '@/app/components/base/tooltip' +import { Tooltip, TooltipContent, TooltipTrigger } from '@/app/components/base/ui/tooltip' import s from './index.module.css' type IInvitationLinkProps = { @@ -38,20 +38,28 @@ const InvitationLink = ({
- -
{value.url}
+ + {value.url}
} + /> + + {isCopied ? t('copied', { ns: 'appApi' }) : t('copy', { ns: 'appApi' })} +
- -
-
-
-
+ + +
+
+
+ )} + /> + + {isCopied ? t('copied', { ns: 'appApi' }) : t('copy', { ns: 'appApi' })} +
diff --git a/web/app/components/header/account-setting/members-page/operation/index.tsx b/web/app/components/header/account-setting/members-page/operation/index.tsx index 35c4676d5f..e2b14b9078 100644 --- a/web/app/components/header/account-setting/members-page/operation/index.tsx +++ b/web/app/components/header/account-setting/members-page/operation/index.tsx @@ -102,7 +102,7 @@ const Operation = ({
- +
{ diff --git a/web/app/components/header/account-setting/members-page/transfer-ownership-modal/index.tsx b/web/app/components/header/account-setting/members-page/transfer-ownership-modal/index.tsx index 099a146866..6a2af9ffdb 100644 --- a/web/app/components/header/account-setting/members-page/transfer-ownership-modal/index.tsx +++ b/web/app/components/header/account-setting/members-page/transfer-ownership-modal/index.tsx @@ -141,6 +141,7 @@ const TransferOwnershipModal = ({ onClose, show }: Props) => {
= ({
- +
{renderTrigger(open)} - +
{ @@ -136,7 +136,7 @@ const AddCustomModel = ({ modelName={model.model} />
{model.model} @@ -148,7 +148,7 @@ const AddCustomModel = ({ { !notAllowCustomCredential && (
{ handleOpenModalForAddNewCustomModel() setOpen(false) diff --git a/web/app/components/header/account-setting/model-provider-page/model-auth/authorized/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-auth/authorized/index.tsx index e2f859b09d..15101a6542 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-auth/authorized/index.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-auth/authorized/index.tsx @@ -164,7 +164,7 @@ const Authorized = ({ > {renderTrigger(mergedIsOpen)} - +
{ popupTitle && ( -
+
{popupTitle}
) @@ -218,7 +218,7 @@ const Authorized = ({ } : undefined, )} - className="system-xs-medium flex h-[40px] cursor-pointer items-center px-3 text-text-accent-light-mode-only" + className="flex h-[40px] cursor-pointer items-center px-3 text-text-accent-light-mode-only system-xs-medium" > {t('modelProvider.auth.addModelCredential', { ns: 'common' })} diff --git a/web/app/components/header/account-setting/model-provider-page/model-auth/credential-selector.tsx b/web/app/components/header/account-setting/model-provider-page/model-auth/credential-selector.tsx index 52513e7aeb..dd1d8e6eb9 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-auth/credential-selector.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-auth/credential-selector.tsx @@ -53,14 +53,14 @@ const CredentialSelector = ({ triggerPopupSameWidth > !disabled && setOpen(v => !v)}> -
+
{ selectedCredential && (
{ !selectedCredential.addNewCredential && } -
{selectedCredential.credential_name}
+
{selectedCredential.credential_name}
{ selectedCredential.from_enterprise && ( Enterprise @@ -71,13 +71,13 @@ const CredentialSelector = ({ } { !selectedCredential && ( -
{t('modelProvider.auth.selectModelCredential', { ns: 'common' })}
+
{t('modelProvider.auth.selectModelCredential', { ns: 'common' })}
) }
- +
{ @@ -98,7 +98,7 @@ const CredentialSelector = ({ { !notAllowAddNewCredential && (
diff --git a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/__tests__/index.spec.tsx b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/__tests__/index.spec.tsx index 496058bf9b..5c8d5e7489 100644 --- a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/__tests__/index.spec.tsx +++ b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/__tests__/index.spec.tsx @@ -1,7 +1,6 @@ import { fireEvent, render, screen, waitFor } from '@testing-library/react' import ModelParameterModal from '../index' -let isAPIKeySet = true let parameterRules: Array> | undefined = [ { name: 'temperature', @@ -40,7 +39,7 @@ let activeTextGenerationModelList: Array> = [ vi.mock('@/context/provider-context', () => ({ useProviderContext: () => ({ - isAPIKeySet, + isAPIKeySet: true, }), })) @@ -50,6 +49,7 @@ vi.mock('@/service/use-common', () => ({ data: parameterRules, }, isLoading: isRulesLoading, + isPending: isRulesLoading, }), })) @@ -62,12 +62,18 @@ vi.mock('../../hooks', () => ({ })) vi.mock('../parameter-item', () => ({ - default: ({ parameterRule, onChange, onSwitch }: { + default: ({ parameterRule, onChange, onSwitch, nodesOutputVars, availableNodes }: { parameterRule: { name: string, label: { en_US: string } } onChange: (v: number) => void onSwitch: (checked: boolean, val: unknown) => void + nodesOutputVars?: unknown[] + availableNodes?: unknown[] }) => ( -
+
{parameterRule.label.en_US} @@ -119,7 +125,6 @@ describe('ModelParameterModal', () => { beforeEach(() => { vi.clearAllMocks() - isAPIKeySet = true isRulesLoading = false parameterRules = [ { @@ -233,6 +238,26 @@ describe('ModelParameterModal', () => { expect(screen.getByTestId('model-selector')).toBeInTheDocument() }) + it('should pass nodesOutputVars and availableNodes to ParameterItem', () => { + const mockNodesOutputVars = [{ nodeId: 'n1', title: 'Node', vars: [] }] + const mockAvailableNodes = [{ id: 'n1', data: { title: 'Node', type: 'llm' } }] + + render( + , + ) + + fireEvent.click(screen.getByText('Open Settings')) + + const paramEl = screen.getByTestId('param-temperature') + expect(paramEl).toHaveAttribute('data-has-nodes-output-vars', 'true') + expect(paramEl).toHaveAttribute('data-has-available-nodes', 'true') + }) + it('should support custom triggers, workflow mode, and missing default model values', async () => { render( ({ @@ -18,6 +23,29 @@ vi.mock('@/app/components/base/tag-input', () => ({ ), })) +let promptEditorOnChange: ((text: string) => void) | undefined +let capturedWorkflowNodesMap: Record | undefined + +vi.mock('@/app/components/base/prompt-editor', () => ({ + default: ({ value, onChange, workflowVariableBlock }: { + value: string + onChange: (text: string) => void + workflowVariableBlock?: { + show: boolean + variables: NodeOutPutVar[] + workflowNodesMap?: Record + } + }) => { + promptEditorOnChange = onChange + capturedWorkflowNodesMap = workflowVariableBlock?.workflowNodesMap + return ( +
+ {value} +
+ ) + }, +})) + describe('ParameterItem', () => { const createRule = (overrides: Partial = {}): ModelParameterRule => ({ name: 'temp', @@ -30,9 +58,10 @@ describe('ParameterItem', () => { beforeEach(() => { vi.clearAllMocks() + promptEditorOnChange = undefined + capturedWorkflowNodesMap = undefined }) - // Float tests it('should render float controls and clamp numeric input to max', () => { const onChange = vi.fn() render() @@ -50,7 +79,6 @@ describe('ParameterItem', () => { expect(onChange).toHaveBeenCalledWith(0.1) }) - // Int tests it('should render int controls and clamp numeric input', () => { const onChange = vi.fn() render() @@ -75,22 +103,17 @@ describe('ParameterItem', () => { it('should render int input without slider if min or max is missing', () => { render() expect(screen.queryByRole('slider')).not.toBeInTheDocument() - // No max -> precision step expect(screen.getByRole('spinbutton')).toHaveAttribute('step', '0') }) - // Slider events (uses generic value mock for slider) it('should handle slide change and clamp values', () => { const onChange = vi.fn() render() - // Test that the actual slider triggers the onChange logic correctly - // The implementation of Slider uses onChange(val) directly via the mock fireEvent.click(screen.getByTestId('slider-btn')) expect(onChange).toHaveBeenCalledWith(2) }) - // Text & String tests it('should render exact string input and propagate text changes', () => { const onChange = vi.fn() render() @@ -109,21 +132,17 @@ describe('ParameterItem', () => { it('should render select for string with options', () => { render() - // Select renders the selected value in the trigger expect(screen.getByText('a')).toBeInTheDocument() }) - // Tag Tests it('should render tag input for tag type', () => { const onChange = vi.fn() render() expect(screen.getByText('placeholder')).toBeInTheDocument() - // Trigger mock tag input fireEvent.click(screen.getByTestId('tag-input')) expect(onChange).toHaveBeenCalledWith(['tag1', 'tag2']) }) - // Boolean tests it('should render boolean radios and update value on click', () => { const onChange = vi.fn() render() @@ -131,7 +150,6 @@ describe('ParameterItem', () => { expect(onChange).toHaveBeenCalledWith(false) }) - // Switch tests it('should call onSwitch with current value when optional switch is toggled off', () => { const onSwitch = vi.fn() render() @@ -146,7 +164,6 @@ describe('ParameterItem', () => { expect(screen.queryByRole('switch')).not.toBeInTheDocument() }) - // Default Value Fallbacks (rendering without value) it('should use default values if value is undefined', () => { const { rerender } = render() expect(screen.getByRole('spinbutton')).toHaveValue(0.5) @@ -158,26 +175,102 @@ describe('ParameterItem', () => { expect(screen.getByText('True')).toBeInTheDocument() expect(screen.getByText('False')).toBeInTheDocument() - // Without default - rerender() // min is 0 by default in createRule + rerender() expect(screen.getByRole('spinbutton')).toHaveValue(0) }) - // Input Blur it('should reset input to actual bound value on blur', () => { render() const input = screen.getByRole('spinbutton') - // change local state (which triggers clamp internally to let's say 1.4 -> 1 but leaves input text, though handleInputChange updates local state) - // Actually our test fires a change so localValue = 1, then blur sets it fireEvent.change(input, { target: { value: '5' } }) fireEvent.blur(input) expect(input).toHaveValue(1) }) - // Unsupported it('should render no input for unsupported parameter type', () => { render() 
     expect(screen.queryByRole('textbox')).not.toBeInTheDocument()
     expect(screen.queryByRole('spinbutton')).not.toBeInTheDocument()
   })
+
+  describe('workflow variable reference', () => {
+    const mockNodesOutputVars: NodeOutPutVar[] = [
+      { nodeId: 'node1', title: 'LLM Node', vars: [] },
+    ]
+    const mockAvailableNodes: Node[] = [
+      { id: 'node1', type: 'custom', position: { x: 0, y: 0 }, data: { title: 'LLM Node', type: BlockEnum.LLM } } as Node,
+      { id: 'start', type: 'custom', position: { x: 0, y: 0 }, data: { title: 'Start', type: BlockEnum.Start } } as Node,
+    ]
+
+    it('should build workflowNodesMap and render PromptEditor for string type', () => {
+      const onChange = vi.fn()
+      render(
+        ,
+      )
+
+      const editor = screen.getByTestId('prompt-editor')
+      expect(editor).toBeInTheDocument()
+      expect(editor).toHaveAttribute('data-has-workflow-vars', 'true')
+      expect(capturedWorkflowNodesMap).toBeDefined()
+      expect(capturedWorkflowNodesMap!.node1.title).toBe('LLM Node')
+      expect(capturedWorkflowNodesMap!.sys.title).toBe('workflow.blocks.start')
+      expect(capturedWorkflowNodesMap!.sys.type).toBe(BlockEnum.Start)
+
+      promptEditorOnChange?.('updated text')
+      expect(onChange).toHaveBeenCalledWith('updated text')
+    })
+
+    it('should build workflowNodesMap and render PromptEditor for text type', () => {
+      const onChange = vi.fn()
+      render(
+        ,
+      )
+
+      const editor = screen.getByTestId('prompt-editor')
+      expect(editor).toBeInTheDocument()
+      expect(editor).toHaveAttribute('data-has-workflow-vars', 'true')
+      expect(capturedWorkflowNodesMap).toBeDefined()
+
+      promptEditorOnChange?.('new long text')
+      expect(onChange).toHaveBeenCalledWith('new long text')
+    })
+
+    it('should fall back to plain input when not in workflow mode for string type', () => {
+      render(
+        ,
+      )
+
+      expect(screen.queryByTestId('prompt-editor')).not.toBeInTheDocument()
+      expect(screen.getByRole('textbox')).toBeInTheDocument()
+    })
+
+    it('should return undefined workflowNodesMap when not in workflow mode', () => {
+      render(
+        ,
+      )
+
+      expect(capturedWorkflowNodesMap).toBeUndefined()
+    })
+  })
 })
diff --git a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/index.tsx b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/index.tsx
index 6b4018e2aa..ccb2c67a0d 100644
--- a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/index.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/index.tsx
@@ -9,6 +9,10 @@ import type {
 } from '../declarations'
 import type { ParameterValue } from './parameter-item'
 import type { TriggerProps } from './trigger'
+import type {
+  Node,
+  NodeOutPutVar,
+} from '@/app/components/workflow/types'
 import { useMemo, useRef, useState } from 'react'
 import { useTranslation } from 'react-i18next'
 import { ArrowNarrowLeft } from '@/app/components/base/icons/src/vender/line/arrows'
@@ -45,6 +49,8 @@ export type ModelParameterModalProps = {
   readonly?: boolean
   isInWorkflow?: boolean
   scope?: string
+  nodesOutputVars?: NodeOutPutVar[]
+  availableNodes?: Node[]
 }
 const ModelParameterModal: FC = ({
@@ -61,11 +67,18 @@ const ModelParameterModal: FC = ({
   renderTrigger,
   readonly,
   isInWorkflow,
+  nodesOutputVars,
+  availableNodes,
 }) => {
   const { t } = useTranslation()
   const [open, setOpen] = useState(false)
   const settingsIconRef = useRef(null)
-  const { data: parameterRulesData, isLoading } = useModelParameterRules(provider, modelId)
+  const {
+    data: parameterRulesData,
+    isPending,
+    isLoading,
+  } = useModelParameterRules(provider, modelId)
+  const isRulesLoading = isPending || isLoading
   const {
     currentProvider,
     currentModel,
@@ -191,7 +204,7 @@ const ModelParameterModal: FC = ({
   }
   {
-    isLoading
+    isRulesLoading
       ?
       : (
        [
@@ -205,6 +218,8 @@ const ModelParameterModal: FC = ({
            onChange={v => handleParamChange(parameter.name, v)}
            onSwitch={(checked, assignValue) => handleSwitch(parameter.name, checked, assignValue)}
            isInWorkflow={isInWorkflow}
+           nodesOutputVars={nodesOutputVars}
+           availableNodes={availableNodes}
          />
        ))
       )
@@ -213,7 +228,7 @@ const ModelParameterModal: FC = ({
     )
   }
   {
-    !parameterRules.length && isLoading && (
+    !parameterRules.length && isRulesLoading && (
     )
   }
diff --git a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx
index 86fb6d81d0..01e3f45371 100644
--- a/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx
+++ b/web/app/components/header/account-setting/model-provider-page/model-parameter-modal/parameter-item.tsx
@@ -1,11 +1,18 @@
 import type { ModelParameterRule } from '../declarations'
-import { useEffect, useRef, useState } from 'react'
+import type {
+  Node,
+  NodeOutPutVar,
+} from '@/app/components/workflow/types'
+import { useEffect, useMemo, useRef, useState } from 'react'
+import { useTranslation } from 'react-i18next'
+import PromptEditor from '@/app/components/base/prompt-editor'
 import Radio from '@/app/components/base/radio'
 import Slider from '@/app/components/base/slider'
 import Switch from '@/app/components/base/switch'
 import TagInput from '@/app/components/base/tag-input'
 import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/app/components/base/ui/select'
 import { Tooltip, TooltipContent, TooltipTrigger } from '@/app/components/base/ui/tooltip'
+import { BlockEnum } from '@/app/components/workflow/types'
 import { cn } from '@/utils/classnames'
 import { useLanguage } from '../hooks'
 import { isNullOrUndefined } from '../utils'
@@ -18,18 +25,43 @@ type ParameterItemProps = {
   onChange?: (value: ParameterValue) => void
   onSwitch?: (checked: boolean, assignValue: ParameterValue) => void
   isInWorkflow?: boolean
+  nodesOutputVars?: NodeOutPutVar[]
+  availableNodes?: Node[]
 }
+
 function ParameterItem({
   parameterRule,
   value,
   onChange,
   onSwitch,
   isInWorkflow,
+  nodesOutputVars,
+  availableNodes = [],
 }: ParameterItemProps) {
+  const { t } = useTranslation()
   const language = useLanguage()
   const [localValue, setLocalValue] = useState(value)
   const numberInputRef = useRef(null)
+  const workflowNodesMap = useMemo(() => {
+    if (!isInWorkflow || !availableNodes.length)
+      return undefined
+
+    return availableNodes.reduce>>((acc, node) => {
+      acc[node.id] = {
+        title: node.data.title,
+        type: node.data.type,
+      }
+      if (node.data.type === BlockEnum.Start) {
+        acc.sys = {
+          title: t('blocks.start', { ns: 'workflow' }),
+          type: BlockEnum.Start,
+        }
+      }
+      return acc
+    }, {})
+  }, [availableNodes, isInWorkflow, t])
+
   const getDefaultValue = () => {
     let defaultValue: ParameterValue
@@ -196,6 +228,25 @@ function ParameterItem({
   }
   if (parameterRule.type === 'string' && !parameterRule.options?.length) {
+    if (isInWorkflow && nodesOutputVars) {
+      return (
+
+            { handleInputChange(text) }}
+            workflowVariableBlock={{
+              show: true,
+              variables: nodesOutputVars,
+              workflowNodesMap,
+            }}
+            editable
+          />
+
+      )
+    },
+    return (
+            { handleInputChange(text) }}
+            workflowVariableBlock={{
+              show: true,
+              variables: nodesOutputVars,
+              workflowNodesMap,
+            }}
+            editable
+          />
+
+      )
+    }
+    return (