Merge branch 'main' into refactor/auth-services-typeddict

This commit is contained in:
Asuka Minato 2026-03-23 21:29:00 +09:00 committed by GitHub
commit 0cf9eec8ec
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
215 changed files with 18744 additions and 8414 deletions

View File

@ -120,7 +120,7 @@ jobs:
- name: Run Claude Code for Translation Sync
if: steps.detect_changes.outputs.CHANGED_FILES != ''
uses: anthropics/claude-code-action@df37d2f0760a4b5683a6e617c9325bc1a36443f6 # v1.0.75
uses: anthropics/claude-code-action@6062f3709600659be5e47fcddf2cf76993c235c2 # v1.0.76
with:
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
github_token: ${{ secrets.GITHUB_TOKEN }}

View File

@ -4,7 +4,7 @@ from typing import Literal
from flask import request
from flask_restx import Resource, fields, marshal_with
from pydantic import BaseModel, Field, field_validator
from sqlalchemy import exists, select
from sqlalchemy import exists, func, select
from werkzeug.exceptions import InternalServerError, NotFound
from controllers.common.schema import register_schema_models
@ -244,27 +244,25 @@ class ChatMessageListApi(Resource):
def get(self, app_model):
args = ChatMessagesQuery.model_validate(request.args.to_dict())
conversation = (
db.session.query(Conversation)
conversation = db.session.scalar(
select(Conversation)
.where(Conversation.id == args.conversation_id, Conversation.app_id == app_model.id)
.first()
.limit(1)
)
if not conversation:
raise NotFound("Conversation Not Exists.")
if args.first_id:
first_message = (
db.session.query(Message)
.where(Message.conversation_id == conversation.id, Message.id == args.first_id)
.first()
first_message = db.session.scalar(
select(Message).where(Message.conversation_id == conversation.id, Message.id == args.first_id).limit(1)
)
if not first_message:
raise NotFound("First message not found")
history_messages = (
db.session.query(Message)
history_messages = db.session.scalars(
select(Message)
.where(
Message.conversation_id == conversation.id,
Message.created_at < first_message.created_at,
@ -272,16 +270,14 @@ class ChatMessageListApi(Resource):
)
.order_by(Message.created_at.desc())
.limit(args.limit)
.all()
)
).all()
else:
history_messages = (
db.session.query(Message)
history_messages = db.session.scalars(
select(Message)
.where(Message.conversation_id == conversation.id)
.order_by(Message.created_at.desc())
.limit(args.limit)
.all()
)
).all()
# Initialize has_more based on whether we have a full page
if len(history_messages) == args.limit:
@ -326,7 +322,9 @@ class MessageFeedbackApi(Resource):
message_id = str(args.message_id)
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first()
message = db.session.scalar(
select(Message).where(Message.id == message_id, Message.app_id == app_model.id).limit(1)
)
if not message:
raise NotFound("Message Not Exists.")
@ -375,7 +373,9 @@ class MessageAnnotationCountApi(Resource):
@login_required
@account_initialization_required
def get(self, app_model):
count = db.session.query(MessageAnnotation).where(MessageAnnotation.app_id == app_model.id).count()
count = db.session.scalar(
select(func.count(MessageAnnotation.id)).where(MessageAnnotation.app_id == app_model.id)
)
return {"count": count}
@ -479,7 +479,9 @@ class MessageApi(Resource):
def get(self, app_model, message_id: str):
message_id = str(message_id)
message = db.session.query(Message).where(Message.id == message_id, Message.app_id == app_model.id).first()
message = db.session.scalar(
select(Message).where(Message.id == message_id, Message.app_id == app_model.id).limit(1)
)
if not message:
raise NotFound("Message Not Exists.")

View File

@ -1,4 +1,5 @@
import logging
import urllib.parse
import httpx
from flask import current_app, redirect, request
@ -112,6 +113,9 @@ class OAuthCallback(Resource):
error_text = e.response.text
logger.exception("An error occurred during the OAuth process with %s: %s", provider, error_text)
return {"error": "OAuth process failed"}, 400
except ValueError as e:
logger.warning("OAuth error with %s", provider, exc_info=True)
return redirect(f"{dify_config.CONSOLE_WEB_URL}/signin?message={urllib.parse.quote(str(e))}")
if invite_token and RegisterService.is_valid_invite_token(invite_token):
invitation = RegisterService.get_invitation_by_token(token=invite_token)

View File

@ -181,10 +181,6 @@ class ArizePhoenixDataTrace(BaseTraceInstance):
arize_phoenix_config: ArizeConfig | PhoenixConfig,
):
super().__init__(arize_phoenix_config)
import logging
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
self.arize_phoenix_config = arize_phoenix_config
self.tracer, self.processor = setup_tracer(arize_phoenix_config)
self.project = arize_phoenix_config.project

View File

@ -50,7 +50,7 @@ class BuiltinTool(Tool):
return ModelInvocationUtils.invoke(
user_id=user_id,
tenant_id=self.runtime.tenant_id or "",
tool_type="builtin",
tool_type=ToolProviderType.BUILT_IN,
tool_name=self.entity.identity.name,
prompt_messages=prompt_messages,
)

View File

@ -38,7 +38,7 @@ class ToolLabelManager:
db.session.add(
ToolLabelBinding(
tool_id=provider_id,
tool_type=controller.provider_type.value,
tool_type=controller.provider_type,
label_name=label,
)
)
@ -58,7 +58,7 @@ class ToolLabelManager:
raise ValueError("Unsupported tool type")
stmt = select(ToolLabelBinding.label_name).where(
ToolLabelBinding.tool_id == provider_id,
ToolLabelBinding.tool_type == controller.provider_type.value,
ToolLabelBinding.tool_type == controller.provider_type,
)
labels = db.session.scalars(stmt).all()

View File

@ -9,6 +9,7 @@ from decimal import Decimal
from typing import cast
from core.model_manager import ModelManager
from core.tools.entities.tool_entities import ToolProviderType
from dify_graph.model_runtime.entities.llm_entities import LLMResult
from dify_graph.model_runtime.entities.message_entities import PromptMessage
from dify_graph.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
@ -78,7 +79,7 @@ class ModelInvocationUtils:
@staticmethod
def invoke(
user_id: str, tenant_id: str, tool_type: str, tool_name: str, prompt_messages: list[PromptMessage]
user_id: str, tenant_id: str, tool_type: ToolProviderType, tool_name: str, prompt_messages: list[PromptMessage]
) -> LLMResult:
"""
invoke model with parameters in user's own context

View File

@ -1,16 +1,19 @@
import logging
import sys
import urllib.parse
from dataclasses import dataclass
from typing import NotRequired
import httpx
from pydantic import TypeAdapter
from pydantic import TypeAdapter, ValidationError
if sys.version_info >= (3, 12):
from typing import TypedDict
else:
from typing_extensions import TypedDict
logger = logging.getLogger(__name__)
JsonObject = dict[str, object]
JsonObjectList = list[JsonObject]
@ -30,8 +33,8 @@ class GitHubEmailRecord(TypedDict, total=False):
class GitHubRawUserInfo(TypedDict):
id: int | str
login: str
name: NotRequired[str]
email: NotRequired[str]
name: NotRequired[str | None]
email: NotRequired[str | None]
class GoogleRawUserInfo(TypedDict):
@ -127,9 +130,14 @@ class GitHubOAuth(OAuth):
response.raise_for_status()
user_info = GITHUB_RAW_USER_INFO_ADAPTER.validate_python(_json_object(response))
email_response = httpx.get(self._EMAIL_INFO_URL, headers=headers)
email_info = GITHUB_EMAIL_RECORDS_ADAPTER.validate_python(_json_list(email_response))
primary_email = next((email for email in email_info if email.get("primary") is True), None)
try:
email_response = httpx.get(self._EMAIL_INFO_URL, headers=headers)
email_response.raise_for_status()
email_info = GITHUB_EMAIL_RECORDS_ADAPTER.validate_python(_json_list(email_response))
primary_email = next((email for email in email_info if email.get("primary") is True), None)
except (httpx.HTTPStatusError, ValidationError):
logger.warning("Failed to retrieve email from GitHub /user/emails endpoint", exc_info=True)
primary_email = None
return {**user_info, "email": primary_email.get("email", "") if primary_email else ""}
@ -137,8 +145,11 @@ class GitHubOAuth(OAuth):
payload = GITHUB_RAW_USER_INFO_ADAPTER.validate_python(raw_info)
email = payload.get("email")
if not email:
email = f"{payload['id']}+{payload['login']}@users.noreply.github.com"
return OAuthUserInfo(id=str(payload["id"]), name=str(payload.get("name", "")), email=email)
raise ValueError(
'Dify currently not supports the "Keep my email addresses private" feature,'
" please disable it and login again"
)
return OAuthUserInfo(id=str(payload["id"]), name=str(payload.get("name") or ""), email=email)
class GoogleOAuth(OAuth):

View File

@ -66,8 +66,8 @@ class HumanInputContent(ExecutionExtraContent):
form_id: Mapped[str] = mapped_column(StringUUID, nullable=True)
@classmethod
def new(cls, form_id: str, message_id: str | None) -> "HumanInputContent":
return cls(form_id=form_id, message_id=message_id)
def new(cls, *, workflow_run_id: str, form_id: str, message_id: str | None) -> "HumanInputContent":
return cls(workflow_run_id=workflow_run_id, form_id=form_id, message_id=message_id)
form: Mapped["HumanInputForm"] = relationship(
"HumanInputForm",

View File

@ -43,6 +43,7 @@ from .enums import (
MessageChainType,
MessageFileBelongsTo,
MessageStatus,
TagType,
)
from .provider_ids import GenericProviderID
from .types import EnumText, LongText, StringUUID
@ -2404,7 +2405,7 @@ class Tag(TypeBase):
StringUUID, insert_default=lambda: str(uuid4()), default_factory=lambda: str(uuid4()), init=False
)
tenant_id: Mapped[str | None] = mapped_column(StringUUID, nullable=True)
type: Mapped[str] = mapped_column(String(16), nullable=False)
type: Mapped[TagType] = mapped_column(EnumText(TagType, length=16), nullable=False)
name: Mapped[str] = mapped_column(String(255), nullable=False)
created_by: Mapped[str] = mapped_column(StringUUID, nullable=False)
created_at: Mapped[datetime] = mapped_column(

View File

@ -13,12 +13,16 @@ from sqlalchemy.orm import Mapped, mapped_column
from core.tools.entities.common_entities import I18nObject
from core.tools.entities.tool_bundle import ApiToolBundle
from core.tools.entities.tool_entities import ApiProviderSchemaType, WorkflowToolParameterConfiguration
from core.tools.entities.tool_entities import (
ApiProviderSchemaType,
ToolProviderType,
WorkflowToolParameterConfiguration,
)
from .base import TypeBase
from .engine import db
from .model import Account, App, Tenant
from .types import LongText, StringUUID
from .types import EnumText, LongText, StringUUID
if TYPE_CHECKING:
from core.entities.mcp_provider import MCPProviderEntity
@ -208,7 +212,7 @@ class ToolLabelBinding(TypeBase):
# tool id
tool_id: Mapped[str] = mapped_column(String(64), nullable=False)
# tool type
tool_type: Mapped[str] = mapped_column(String(40), nullable=False)
tool_type: Mapped[ToolProviderType] = mapped_column(EnumText(ToolProviderType, length=40), nullable=False)
# label name
label_name: Mapped[str] = mapped_column(String(40), nullable=False)
@ -386,7 +390,7 @@ class ToolModelInvoke(TypeBase):
# provider
provider: Mapped[str] = mapped_column(String(255), nullable=False)
# type
tool_type: Mapped[str] = mapped_column(String(40), nullable=False)
tool_type: Mapped[ToolProviderType] = mapped_column(EnumText(ToolProviderType, length=40), nullable=False)
# tool name
tool_name: Mapped[str] = mapped_column(String(128), nullable=False)
# invoke parameters

View File

@ -8,7 +8,7 @@ dependencies = [
"arize-phoenix-otel~=0.15.0",
"azure-identity==1.25.3",
"beautifulsoup4==4.14.3",
"boto3==1.42.68",
"boto3==1.42.73",
"bs4~=0.0.1",
"cachetools~=5.3.0",
"celery~=5.6.2",
@ -23,7 +23,7 @@ dependencies = [
"gevent~=25.9.1",
"gmpy2~=2.3.0",
"google-api-core>=2.19.1",
"google-api-python-client==2.192.0",
"google-api-python-client==2.193.0",
"google-auth>=2.47.0",
"google-auth-httplib2==0.3.0",
"google-cloud-aiplatform>=1.123.0",
@ -40,7 +40,7 @@ dependencies = [
"numpy~=1.26.4",
"openpyxl~=3.1.5",
"opik~=1.10.37",
"litellm==1.82.2", # Pinned to avoid madoka dependency issue
"litellm==1.82.6", # Pinned to avoid madoka dependency issue
"opentelemetry-api==1.28.0",
"opentelemetry-distro==0.49b0",
"opentelemetry-exporter-otlp==1.28.0",
@ -72,13 +72,14 @@ dependencies = [
"pyyaml~=6.0.1",
"readabilipy~=0.3.0",
"redis[hiredis]~=7.3.0",
"resend~=2.23.0",
"sentry-sdk[flask]~=2.54.0",
"resend~=2.26.0",
"sentry-sdk[flask]~=2.55.0",
"sqlalchemy~=2.0.29",
"starlette==0.52.1",
"starlette==1.0.0",
"tiktoken~=0.12.0",
"transformers~=5.3.0",
"unstructured[docx,epub,md,ppt,pptx]~=0.21.5",
"pypandoc~=1.13",
"yarl~=1.23.0",
"webvtt-py~=0.5.1",
"sseclient-py~=1.9.0",
@ -91,7 +92,7 @@ dependencies = [
"apscheduler>=3.11.0",
"weave>=0.52.16",
"fastopenapi[flask]>=0.7.0",
"bleach~=6.2.0",
"bleach~=6.3.0",
]
# Before adding new dependency, consider place it in
# alphabet order (a-z) and suitable group.
@ -118,7 +119,7 @@ dev = [
"ruff~=0.15.5",
"pytest~=9.0.2",
"pytest-benchmark~=5.2.3",
"pytest-cov~=7.0.0",
"pytest-cov~=7.1.0",
"pytest-env~=1.6.0",
"pytest-mock~=3.15.1",
"testcontainers~=4.14.1",
@ -202,7 +203,7 @@ tools = ["cloudscraper~=1.2.71", "nltk~=3.9.1"]
# Required by vector store clients
############################################################
vdb = [
"alibabacloud_gpdb20160503~=3.8.0",
"alibabacloud_gpdb20160503~=5.1.0",
"alibabacloud_tea_openapi~=0.4.3",
"chromadb==0.5.20",
"clickhouse-connect~=0.14.1",

View File

@ -335,7 +335,11 @@ class BillingService:
# Redis returns bytes, decode to string and parse JSON
json_str = cached_value.decode("utf-8") if isinstance(cached_value, bytes) else cached_value
plan_dict = json.loads(json_str)
# NOTE (hj24): New billing versions may return timestamp as str, and validate_python
# in non-strict mode will coerce it to the expected int type.
# To preserve compatibility, always keep non-strict mode here and avoid strict mode.
subscription_plan = subscription_adapter.validate_python(plan_dict)
# NOTE END
tenant_plans[tenant_id] = subscription_plan
except Exception:
logger.exception(

View File

@ -7,6 +7,7 @@ from werkzeug.exceptions import NotFound
from extensions.ext_database import db
from models.dataset import Dataset
from models.enums import TagType
from models.model import App, Tag, TagBinding
@ -83,7 +84,7 @@ class TagService:
raise ValueError("Tag name already exists")
tag = Tag(
name=args["name"],
type=args["type"],
type=TagType(args["type"]),
created_by=current_user.id,
tenant_id=current_user.current_tenant_id,
)

View File

@ -0,0 +1,342 @@
"""Authenticated controller integration tests for console message APIs."""
from datetime import timedelta
from decimal import Decimal
from unittest.mock import patch
from uuid import uuid4
import pytest
from flask.testing import FlaskClient
from sqlalchemy import select
from sqlalchemy.orm import Session
from controllers.console.app.message import ChatMessagesQuery, FeedbackExportQuery, MessageFeedbackPayload
from controllers.console.app.message import attach_message_extra_contents as _attach_message_extra_contents
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from libs.datetime_utils import naive_utc_now
from models.enums import ConversationFromSource, FeedbackRating
from models.model import AppMode, Conversation, Message, MessageAnnotation, MessageFeedback
from services.errors.conversation import ConversationNotExistsError
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
create_console_app,
)
def _create_conversation(db_session: Session, app_id: str, account_id: str, mode: AppMode) -> Conversation:
    """Insert and return a minimal console-sourced Conversation row for the given app."""
    field_values: dict = {
        "app_id": app_id,
        "app_model_config_id": None,
        "model_provider": None,
        "model_id": "",
        "override_model_configs": None,
        "mode": mode,
        "name": "Test Conversation",
        "inputs": {},
        "introduction": "",
        "system_instruction": "",
        "system_instruction_tokens": 0,
        "status": "normal",
        "from_source": ConversationFromSource.CONSOLE,
        "from_account_id": account_id,
    }
    row = Conversation(**field_values)
    db_session.add(row)
    db_session.commit()
    return row
def _create_message(
    db_session: Session,
    app_id: str,
    conversation_id: str,
    account_id: str,
    *,
    created_at_offset_seconds: int = 0,
) -> Message:
    """Insert and return a console-sourced Message, optionally shifted in time.

    The offset lets callers create messages with a deterministic ordering by
    ``created_at`` (e.g. for pagination tests).
    """
    timestamp = naive_utc_now() + timedelta(seconds=created_at_offset_seconds)
    unit_price = Decimal("0.0001")
    price_unit = Decimal("0.001")
    row = Message(
        app_id=app_id,
        model_provider=None,
        model_id="",
        override_model_configs=None,
        conversation_id=conversation_id,
        inputs={},
        query="Hello",
        message={"type": "text", "content": "Hello"},
        message_tokens=1,
        message_unit_price=unit_price,
        message_price_unit=price_unit,
        answer="Hi there",
        answer_tokens=1,
        answer_unit_price=unit_price,
        answer_price_unit=price_unit,
        parent_message_id=None,
        provider_response_latency=0,
        total_price=Decimal("0.0002"),
        currency="USD",
        from_source=ConversationFromSource.CONSOLE,
        from_account_id=account_id,
        created_at=timestamp,
        updated_at=timestamp,
        app_mode=AppMode.CHAT,
    )
    db_session.add(row)
    db_session.commit()
    return row
class TestMessageValidators:
    """Unit checks for the field validators on the console message query/payload schemas."""

    # A syntactically valid UUID reused across the validator checks.
    _VALID_UUID = "123e4567-e89b-12d3-a456-426614174000"

    def test_chat_messages_query_validators(self) -> None:
        # Empty strings normalize to None; non-empty values pass through unchanged.
        assert ChatMessagesQuery.empty_to_none("") is None
        assert ChatMessagesQuery.empty_to_none("val") == "val"
        # None is accepted, and a well-formed UUID is returned as-is.
        assert ChatMessagesQuery.validate_uuid(None) is None
        assert ChatMessagesQuery.validate_uuid(self._VALID_UUID) == self._VALID_UUID

    def test_message_feedback_validators(self) -> None:
        assert MessageFeedbackPayload.validate_message_id(self._VALID_UUID) == self._VALID_UUID

    def test_feedback_export_validators(self) -> None:
        # parse_bool accepts None, real booleans, and common truthy/falsy strings,
        # and rejects anything else.
        assert FeedbackExportQuery.parse_bool(None) is None
        assert FeedbackExportQuery.parse_bool(True) is True
        assert FeedbackExportQuery.parse_bool("1") is True
        assert FeedbackExportQuery.parse_bool("0") is False
        assert FeedbackExportQuery.parse_bool("off") is False
        with pytest.raises(ValueError):
            FeedbackExportQuery.parse_bool("invalid")
def test_chat_message_list_not_found(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing chat messages for an unknown conversation id yields a 404 not_found payload."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    auth_headers = authenticate_console_client(test_client_with_containers, account)
    missing_conversation_id = str(uuid4())
    resp = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/chat-messages",
        query_string={"conversation_id": missing_conversation_id},
        headers=auth_headers,
    )
    assert resp.status_code == 404
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "not_found"
def test_chat_message_list_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """With limit=1 and two messages, the newer message is returned and has_more is True."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode)
    # Two messages one second apart; the endpoint orders by created_at desc,
    # so with limit=1 only the later ("second") message is expected in the page.
    _create_message(db_session_with_containers, app.id, conversation.id, account.id, created_at_offset_seconds=0)
    second = _create_message(
        db_session_with_containers,
        app.id,
        conversation.id,
        account.id,
        created_at_offset_seconds=1,
    )
    # Patch with the real function as side_effect: behavior is unchanged, but the
    # call goes through a mock (making it observable) without stubbing the result.
    with patch(
        "controllers.console.app.message.attach_message_extra_contents",
        side_effect=_attach_message_extra_contents,
    ):
        response = test_client_with_containers.get(
            f"/console/api/apps/{app.id}/chat-messages",
            query_string={"conversation_id": conversation.id, "limit": 1},
            headers=authenticate_console_client(test_client_with_containers, account),
        )
    assert response.status_code == 200
    payload = response.get_json()
    assert payload is not None
    assert payload["limit"] == 1
    # A full page (len == limit) signals more history is available.
    assert payload["has_more"] is True
    assert len(payload["data"]) == 1
    assert payload["data"][0]["id"] == second.id
def test_message_feedback_not_found(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Posting feedback for a nonexistent message id yields a 404 not_found payload."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    auth_headers = authenticate_console_client(test_client_with_containers, account)
    feedback_body = {"message_id": str(uuid4()), "rating": "like"}
    resp = test_client_with_containers.post(
        f"/console/api/apps/{app.id}/feedbacks",
        json=feedback_body,
        headers=auth_headers,
    )
    assert resp.status_code == 404
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "not_found"
def test_message_feedback_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A 'like' feedback on an existing message succeeds and persists a LIKE row."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode)
    message = _create_message(db_session_with_containers, app.id, conversation.id, account.id)
    response = test_client_with_containers.post(
        f"/console/api/apps/{app.id}/feedbacks",
        json={"message_id": message.id, "rating": "like"},
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    assert response.get_json() == {"result": "success"}
    # Verify the side effect directly in the database, not just the API response.
    feedback = db_session_with_containers.scalar(
        select(MessageFeedback).where(MessageFeedback.message_id == message.id)
    )
    assert feedback is not None
    assert feedback.rating == FeedbackRating.LIKE
    assert feedback.from_account_id == account.id
def test_message_annotation_count(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The annotations/count endpoint reports the number of annotations for the app."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode)
    message = _create_message(db_session_with_containers, app.id, conversation.id, account.id)
    # One annotation seeded directly; the endpoint should count exactly it.
    db_session_with_containers.add(
        MessageAnnotation(
            app_id=app.id,
            conversation_id=conversation.id,
            message_id=message.id,
            question="Q",
            content="A",
            account_id=account.id,
        )
    )
    db_session_with_containers.commit()
    response = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/annotations/count",
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    assert response.get_json() == {"count": 1}
def test_message_suggested_questions_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Suggested questions returned by the service are forwarded verbatim to the client."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    # The message does not need to exist: the service call is stubbed out below.
    message_id = str(uuid4())
    with patch(
        "controllers.console.app.message.MessageService.get_suggested_questions_after_answer",
        return_value=["q1", "q2"],
    ):
        response = test_client_with_containers.get(
            f"/console/api/apps/{app.id}/chat-messages/{message_id}/suggested-questions",
            headers=authenticate_console_client(test_client_with_containers, account),
        )
    assert response.status_code == 200
    assert response.get_json() == {"data": ["q1", "q2"]}
@pytest.mark.parametrize(
    ("exc", "expected_status", "expected_code"),
    [
        (MessageNotExistsError(), 404, "not_found"),
        (ConversationNotExistsError(), 404, "not_found"),
        (ProviderTokenNotInitError(), 400, "provider_not_initialize"),
        (QuotaExceededError(), 400, "provider_quota_exceeded"),
        (ModelCurrentlyNotSupportError(), 400, "model_currently_not_support"),
        (SuggestedQuestionsAfterAnswerDisabledError(), 403, "app_suggested_questions_after_answer_disabled"),
        (Exception(), 500, "internal_server_error"),
    ],
)
def test_message_suggested_questions_errors(
    exc: Exception,
    expected_status: int,
    expected_code: str,
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Each service-layer exception maps to the expected HTTP status and error code."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    message_id = str(uuid4())
    # Force the service call to raise; the controller's exception-to-response
    # mapping is what is under test here.
    with patch(
        "controllers.console.app.message.MessageService.get_suggested_questions_after_answer",
        side_effect=exc,
    ):
        response = test_client_with_containers.get(
            f"/console/api/apps/{app.id}/chat-messages/{message_id}/suggested-questions",
            headers=authenticate_console_client(test_client_with_containers, account),
        )
    assert response.status_code == expected_status
    payload = response.get_json()
    assert payload is not None
    assert payload["code"] == expected_code
def test_message_feedback_export_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The feedback export endpoint relays whatever FeedbackService.export_feedbacks returns."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    # Stub the service: only the controller's routing/serialization is under test.
    with patch("services.feedback_service.FeedbackService.export_feedbacks", return_value={"exported": True}):
        response = test_client_with_containers.get(
            f"/console/api/apps/{app.id}/feedbacks/export",
            headers=authenticate_console_client(test_client_with_containers, account),
        )
    assert response.status_code == 200
    assert response.get_json() == {"exported": True}
def test_message_api_get_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Fetching a single message by id returns its serialized representation."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, app.mode)
    message = _create_message(db_session_with_containers, app.id, conversation.id, account.id)
    # side_effect delegates to the real function, so behavior is unchanged while
    # the call is still interceptable.
    with patch(
        "controllers.console.app.message.attach_message_extra_contents",
        side_effect=_attach_message_extra_contents,
    ):
        response = test_client_with_containers.get(
            f"/console/api/apps/{app.id}/messages/{message.id}",
            headers=authenticate_console_client(test_client_with_containers, account),
        )
    assert response.status_code == 200
    payload = response.get_json()
    assert payload is not None
    assert payload["id"] == message.id

View File

@ -0,0 +1,334 @@
"""Controller integration tests for console statistic routes."""
from datetime import timedelta
from decimal import Decimal
from unittest.mock import patch
from uuid import uuid4
from flask.testing import FlaskClient
from sqlalchemy.orm import Session
from core.app.entities.app_invoke_entities import InvokeFrom
from libs.datetime_utils import naive_utc_now
from models.enums import ConversationFromSource, FeedbackFromSource, FeedbackRating
from models.model import AppMode, Conversation, Message, MessageFeedback
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
create_console_app,
)
def _create_conversation(
    db_session: Session,
    app_id: str,
    account_id: str,
    *,
    mode: AppMode,
    created_at_offset_days: int = 0,
) -> Conversation:
    """Insert and return a console-sourced Conversation, optionally shifted by whole days."""
    timestamp = naive_utc_now() + timedelta(days=created_at_offset_days)
    field_values: dict = {
        "app_id": app_id,
        "app_model_config_id": None,
        "model_provider": None,
        "model_id": "",
        "override_model_configs": None,
        "mode": mode,
        "name": "Stats Conversation",
        "inputs": {},
        "introduction": "",
        "system_instruction": "",
        "system_instruction_tokens": 0,
        "status": "normal",
        "from_source": ConversationFromSource.CONSOLE,
        "from_account_id": account_id,
        "created_at": timestamp,
        "updated_at": timestamp,
    }
    row = Conversation(**field_values)
    db_session.add(row)
    db_session.commit()
    return row
def _create_message(
    db_session: Session,
    app_id: str,
    conversation_id: str,
    *,
    from_account_id: str | None,
    from_end_user_id: str | None = None,
    message_tokens: int = 1,
    answer_tokens: int = 1,
    total_price: Decimal = Decimal("0.01"),
    provider_response_latency: float = 1.0,
    created_at_offset_days: int = 0,
) -> Message:
    """Insert and return a Message tailored for the statistics endpoints.

    Token counts, price, latency, and the day offset are parameterized so tests
    can shape the aggregates (token-costs, latency averages, per-day buckets).
    Exactly one of ``from_account_id`` / ``from_end_user_id`` is normally set,
    matching console vs end-user traffic.
    """
    created_at = naive_utc_now() + timedelta(days=created_at_offset_days)
    message = Message(
        app_id=app_id,
        model_provider=None,
        model_id="",
        override_model_configs=None,
        conversation_id=conversation_id,
        inputs={},
        query="Hello",
        message={"type": "text", "content": "Hello"},
        message_tokens=message_tokens,
        message_unit_price=Decimal("0.001"),
        message_price_unit=Decimal("0.001"),
        answer="Hi there",
        answer_tokens=answer_tokens,
        answer_unit_price=Decimal("0.001"),
        answer_price_unit=Decimal("0.001"),
        parent_message_id=None,
        provider_response_latency=provider_response_latency,
        total_price=total_price,
        currency="USD",
        invoke_from=InvokeFrom.EXPLORE,
        from_source=ConversationFromSource.CONSOLE,
        from_end_user_id=from_end_user_id,
        from_account_id=from_account_id,
        created_at=created_at,
        updated_at=created_at,
        app_mode=AppMode.CHAT,
    )
    db_session.add(message)
    db_session.commit()
    return message
def _create_like_feedback(
    db_session: Session,
    app_id: str,
    conversation_id: str,
    message_id: str,
    account_id: str,
) -> None:
    """Attach an admin-sourced LIKE feedback row to the given message and commit."""
    feedback = MessageFeedback(
        app_id=app_id,
        conversation_id=conversation_id,
        message_id=message_id,
        rating=FeedbackRating.LIKE,
        from_source=FeedbackFromSource.ADMIN,
        from_account_id=account_id,
    )
    db_session.add(feedback)
    db_session.commit()
def test_daily_message_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """One message today shows up as message_count == 1 in the daily bucket."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode)
    _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id)
    response = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/statistics/daily-messages",
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    assert response.get_json()["data"][0]["message_count"] == 1
def test_daily_conversation_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Two messages in one conversation count as a single daily conversation."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode)
    # Both messages belong to the same conversation, so the endpoint should
    # deduplicate and report conversation_count == 1.
    _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id)
    _create_message(db_session_with_containers, app.id, conversation.id, from_account_id=account.id)
    response = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/statistics/daily-conversations",
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    assert response.get_json()["data"][0]["conversation_count"] == 1
def test_daily_terminals_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A message from an end user (not a console account) is counted as one terminal."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode)
    # from_account_id=None + from_end_user_id set marks the message as end-user traffic.
    _create_message(
        db_session_with_containers,
        app.id,
        conversation.id,
        from_account_id=None,
        from_end_user_id=str(uuid4()),
    )
    response = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/statistics/daily-end-users",
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    assert response.get_json()["data"][0]["terminal_count"] == 1
def test_daily_token_cost_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Token counts sum message + answer tokens, and total price is reported exactly."""
    account, tenant = create_console_account_and_tenant(db_session_with_containers)
    app = create_console_app(db_session_with_containers, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(db_session_with_containers, app.id, account.id, mode=app.mode)
    # 40 prompt tokens + 60 completion tokens -> expected token_count of 100.
    _create_message(
        db_session_with_containers,
        app.id,
        conversation.id,
        from_account_id=account.id,
        message_tokens=40,
        answer_tokens=60,
        total_price=Decimal("0.02"),
    )
    response = test_client_with_containers.get(
        f"/console/api/apps/{app.id}/statistics/token-costs",
        headers=authenticate_console_client(test_client_with_containers, account),
    )
    assert response.status_code == 200
    payload = response.get_json()
    assert payload["data"][0]["token_count"] == 100
    # Compare via Decimal to avoid float representation issues in the JSON payload.
    assert Decimal(payload["data"][0]["total_price"]) == Decimal("0.02")
def test_average_session_interaction_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Two messages in a single conversation average out to 2.0 interactions."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(session, app.id, account.id, mode=app.mode)
    for _ in range(2):
        _create_message(session, app.id, conversation.id, from_account_id=account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/statistics/average-session-interactions",
        headers=headers,
    )
    assert resp.status_code == 200
    assert resp.get_json()["data"][0]["interactions"] == 2.0
def test_user_satisfaction_rate_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """One like across ten messages is reported as a rate of 100.0."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(session, app.id, account.id, mode=app.mode)
    liked = _create_message(session, app.id, conversation.id, from_account_id=account.id)
    for _ in range(9):
        _create_message(session, app.id, conversation.id, from_account_id=account.id)
    _create_like_feedback(session, app.id, conversation.id, liked.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/statistics/user-satisfaction-rate",
        headers=headers,
    )
    assert resp.status_code == 200
    assert resp.get_json()["data"][0]["rate"] == 100.0
def test_average_response_time_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Provider latency recorded in seconds is reported in milliseconds."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.COMPLETION)
    conversation = _create_conversation(session, app.id, account.id, mode=app.mode)
    _create_message(
        session,
        app.id,
        conversation.id,
        from_account_id=account.id,
        provider_response_latency=1.234,
    )
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/statistics/average-response-time",
        headers=headers,
    )
    assert resp.status_code == 200
    # 1.234 s -> 1234.0 ms.
    assert resp.get_json()["data"][0]["latency"] == 1234.0
def test_tokens_per_second_statistic(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """31 answer tokens over a 2.0 s latency comes out at 15.5 tokens/second."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.CHAT)
    conversation = _create_conversation(session, app.id, account.id, mode=app.mode)
    _create_message(
        session,
        app.id,
        conversation.id,
        from_account_id=account.id,
        answer_tokens=31,
        provider_response_latency=2.0,
    )
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/statistics/tokens-per-second",
        headers=headers,
    )
    assert resp.status_code == 200
    assert resp.get_json()["data"][0]["tps"] == 15.5
def test_invalid_time_range(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A time-range parse failure surfaces as HTTP 400 with the raised message."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.CHAT)
    headers = authenticate_console_client(client, account)
    with patch("controllers.console.app.statistic.parse_time_range", side_effect=ValueError("Invalid time")):
        resp = client.get(
            f"/console/api/apps/{app.id}/statistics/daily-messages?start=invalid&end=invalid",
            headers=headers,
        )
    assert resp.status_code == 400
    assert resp.get_json()["message"] == "Invalid time"
def test_time_range_params_passed(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Raw start/end query params are forwarded verbatim with the UTC timezone."""
    from datetime import datetime

    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.CHAT)
    window = (datetime.now(), datetime.now())
    headers = authenticate_console_client(client, account)
    with patch("controllers.console.app.statistic.parse_time_range", return_value=window) as mock_parse:
        resp = client.get(
            f"/console/api/apps/{app.id}/statistics/daily-messages?start=something&end=something",
            headers=headers,
        )
    assert resp.status_code == 200
    mock_parse.assert_called_once_with("something", "something", "UTC")

View File

@ -0,0 +1,415 @@
"""Authenticated controller integration tests for workflow draft variable APIs."""
import uuid
from flask.testing import FlaskClient
from sqlalchemy import select
from sqlalchemy.orm import Session
from dify_graph.constants import CONVERSATION_VARIABLE_NODE_ID, ENVIRONMENT_VARIABLE_NODE_ID
from dify_graph.variables.segments import StringSegment
from factories.variable_factory import segment_to_variable
from models import Workflow
from models.model import AppMode
from models.workflow import WorkflowDraftVariable
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
create_console_app,
)
def _create_draft_workflow(
    db_session: Session,
    app_id: str,
    tenant_id: str,
    account_id: str,
    *,
    environment_variables: list | None = None,
    conversation_variables: list | None = None,
) -> Workflow:
    """Persist and return a draft workflow with an empty graph for *app_id*."""
    draft = Workflow.new(
        tenant_id=tenant_id,
        app_id=app_id,
        type="workflow",
        version=Workflow.VERSION_DRAFT,
        graph='{"nodes": [], "edges": []}',
        features="{}",
        created_by=account_id,
        environment_variables=environment_variables or [],
        conversation_variables=conversation_variables or [],
        rag_pipeline_variables=[],
    )
    db_session.add(draft)
    db_session.commit()
    return draft
def _create_node_variable(
    db_session: Session,
    app_id: str,
    user_id: str,
    *,
    node_id: str = "node_1",
    name: str = "test_var",
) -> WorkflowDraftVariable:
    """Persist a visible, editable node-scoped draft variable and return it."""
    draft_var = WorkflowDraftVariable.new_node_variable(
        app_id=app_id,
        user_id=user_id,
        node_id=node_id,
        name=name,
        value=StringSegment(value="test_value"),
        node_execution_id=str(uuid.uuid4()),
        visible=True,
        editable=True,
    )
    db_session.add(draft_var)
    db_session.commit()
    return draft_var
def _create_system_variable(
    db_session: Session, app_id: str, user_id: str, name: str = "query"
) -> WorkflowDraftVariable:
    """Persist an editable system-scoped draft variable and return it."""
    sys_var = WorkflowDraftVariable.new_sys_variable(
        app_id=app_id,
        user_id=user_id,
        name=name,
        value=StringSegment(value="system-value"),
        node_execution_id=str(uuid.uuid4()),
        editable=True,
    )
    db_session.add(sys_var)
    db_session.commit()
    return sys_var
def _build_environment_variable(name: str, value: str):
    """Build a string-typed variable addressed under the environment node."""
    selector = [ENVIRONMENT_VARIABLE_NODE_ID, name]
    return segment_to_variable(
        segment=StringSegment(value=value),
        selector=selector,
        name=name,
        description=f"Environment variable {name}",
    )
def _build_conversation_variable(name: str, value: str):
    """Build a string-typed variable addressed under the conversation node."""
    selector = [CONVERSATION_VARIABLE_NODE_ID, name]
    return segment_to_variable(
        segment=StringSegment(value=value),
        selector=selector,
        name=name,
        description=f"Conversation variable {name}",
    )
def test_workflow_variable_collection_get_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """An app with a draft workflow but no variables returns an empty page."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/variables?page=1&limit=20",
        headers=headers,
    )
    assert resp.status_code == 200
    assert resp.get_json() == {"items": [], "total": 0}
def test_workflow_variable_collection_get_not_exist(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Without a draft workflow the variable listing 404s with a typed code."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/variables",
        headers=headers,
    )
    assert resp.status_code == 404
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "draft_workflow_not_exist"
def test_workflow_variable_collection_delete(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Deleting the collection removes every draft variable owned by the caller."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_node_variable(session, app.id, account.id)
    _create_node_variable(session, app.id, account.id, node_id="node_2", name="other_var")
    headers = authenticate_console_client(client, account)
    resp = client.delete(
        f"/console/api/apps/{app.id}/workflows/draft/variables",
        headers=headers,
    )
    assert resp.status_code == 204
    leftover = session.scalars(
        select(WorkflowDraftVariable).where(
            WorkflowDraftVariable.app_id == app.id,
            WorkflowDraftVariable.user_id == account.id,
        )
    ).all()
    assert leftover == []
def test_node_variable_collection_get_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing node variables returns only those scoped to the requested node."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    node_var = _create_node_variable(session, app.id, account.id, node_id="node_123")
    # Variable on a different node must not appear in the listing.
    _create_node_variable(session, app.id, account.id, node_id="node_456", name="other")
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/nodes/node_123/variables",
        headers=headers,
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert [entry["id"] for entry in body["items"]] == [node_var.id]
def test_node_variable_collection_get_invalid_node_id(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The reserved "sys" node id is rejected with a 400 invalid_param error."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/nodes/sys/variables",
        headers=headers,
    )
    assert resp.status_code == 400
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "invalid_param"
def test_node_variable_collection_delete(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Deleting one node's variables leaves other nodes' variables intact."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    # Capture ids up front: the target row is gone after the DELETE.
    deleted_id = _create_node_variable(session, app.id, account.id, node_id="node_123").id
    kept_id = _create_node_variable(session, app.id, account.id, node_id="node_456").id
    headers = authenticate_console_client(client, account)
    resp = client.delete(
        f"/console/api/apps/{app.id}/workflows/draft/nodes/node_123/variables",
        headers=headers,
    )
    assert resp.status_code == 204

    def _lookup(variable_id: str):
        return session.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable_id))

    assert _lookup(deleted_id) is None
    assert _lookup(kept_id) is not None
def test_variable_api_get_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Fetching a draft variable by id returns its serialized fields."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    variable = _create_node_variable(session, app.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}",
        headers=headers,
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert body["id"] == variable.id
    assert body["name"] == "test_var"
def test_variable_api_get_not_found(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """An unknown variable id yields 404 with the not_found error code."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/variables/{uuid.uuid4()}",
        headers=headers,
    )
    assert resp.status_code == 404
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "not_found"
def test_variable_api_patch_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Renaming a variable updates both the response payload and the stored row."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    variable = _create_node_variable(session, app.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.patch(
        f"/console/api/apps/{app.id}/workflows/draft/variables/{variable.id}",
        headers=headers,
        json={"name": "renamed_var"},
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert body["id"] == variable.id
    assert body["name"] == "renamed_var"
    stored = session.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable.id))
    assert stored is not None
    assert stored.name == "renamed_var"
def test_variable_api_delete_success(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Deleting a variable by id removes its row and returns 204."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    variable = _create_node_variable(session, app.id, account.id)
    variable_id = variable.id
    headers = authenticate_console_client(client, account)
    resp = client.delete(
        f"/console/api/apps/{app.id}/workflows/draft/variables/{variable_id}",
        headers=headers,
    )
    assert resp.status_code == 204
    stored = session.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable_id))
    assert stored is None
def test_variable_reset_api_put_success_returns_no_content_without_execution(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Resetting a variable with no matching execution returns 204 and removes the row."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(session, app.id, tenant.id, account.id)
    variable = _create_node_variable(session, app.id, account.id)
    variable_id = variable.id
    headers = authenticate_console_client(client, account)
    resp = client.put(
        f"/console/api/apps/{app.id}/workflows/draft/variables/{variable_id}/reset",
        headers=headers,
    )
    assert resp.status_code == 204
    stored = session.scalar(select(WorkflowDraftVariable).where(WorkflowDraftVariable.id == variable_id))
    assert stored is None
def test_conversation_variable_collection_get(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing conversation variables returns the declared names and leaves one draft row each."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(
        session,
        app.id,
        tenant.id,
        account.id,
        conversation_variables=[_build_conversation_variable("session_name", "Alice")],
    )
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/conversation-variables",
        headers=headers,
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert [entry["name"] for entry in body["items"]] == ["session_name"]
    rows = session.scalars(
        select(WorkflowDraftVariable).where(
            WorkflowDraftVariable.app_id == app.id,
            WorkflowDraftVariable.user_id == account.id,
            WorkflowDraftVariable.node_id == CONVERSATION_VARIABLE_NODE_ID,
        )
    ).all()
    assert len(rows) == 1
def test_system_variable_collection_get(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing system variables returns the stored system-scoped rows."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    sys_var = _create_system_variable(session, app.id, account.id)
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/system-variables",
        headers=headers,
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert [entry["id"] for entry in body["items"]] == [sys_var.id]
def test_environment_variable_collection_get(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing environment variables exposes the declared name and value."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    app = create_console_app(session, tenant.id, account.id, AppMode.WORKFLOW)
    _create_draft_workflow(
        session,
        app.id,
        tenant.id,
        account.id,
        environment_variables=[_build_environment_variable("api_key", "secret-value")],
    )
    headers = authenticate_console_client(client, account)
    resp = client.get(
        f"/console/api/apps/{app.id}/workflows/draft/environment-variables",
        headers=headers,
    )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    first = body["items"][0]
    assert first["name"] == "api_key"
    assert first["value"] == "secret-value"

View File

@ -0,0 +1,131 @@
"""Controller integration tests for API key data source auth routes."""
import json
from unittest.mock import patch
from flask.testing import FlaskClient
from sqlalchemy import select
from sqlalchemy.orm import Session
from models.source import DataSourceApiKeyAuthBinding
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
)
def test_get_api_key_auth_data_source(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Listing data sources returns the tenant's API-key bindings."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    binding = DataSourceApiKeyAuthBinding(
        tenant_id=tenant.id,
        category="api_key",
        provider="custom_provider",
        credentials=json.dumps({"auth_type": "api_key", "config": {"api_key": "encrypted"}}),
        disabled=False,
    )
    session.add(binding)
    session.commit()
    headers = authenticate_console_client(client, account)
    resp = client.get("/console/api/api-key-auth/data-source", headers=headers)
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert len(body["sources"]) == 1
    assert body["sources"][0]["provider"] == "custom_provider"
def test_get_api_key_auth_data_source_empty(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A tenant without bindings gets an empty sources list."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, _tenant = create_console_account_and_tenant(session)
    headers = authenticate_console_client(client, account)
    resp = client.get("/console/api/api-key-auth/data-source", headers=headers)
    assert resp.status_code == 200
    assert resp.get_json() == {"sources": []}
def test_create_binding_successful(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """With validation and creation mocked out, binding creation reports success."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, _tenant = create_console_account_and_tenant(session)
    headers = authenticate_console_client(client, account)
    with (
        patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args"),
        patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth"),
    ):
        resp = client.post(
            "/console/api/api-key-auth/data-source/binding",
            json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}},
            headers=headers,
        )
    assert resp.status_code == 200
    assert resp.get_json() == {"result": "success"}
def test_create_binding_failure(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A service-level ValueError surfaces as a 500 auth_failed error with its message."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, _tenant = create_console_account_and_tenant(session)
    headers = authenticate_console_client(client, account)
    with (
        patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args"),
        patch(
            "controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth",
            side_effect=ValueError("Invalid structure"),
        ),
    ):
        resp = client.post(
            "/console/api/api-key-auth/data-source/binding",
            json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}},
            headers=headers,
        )
    assert resp.status_code == 500
    body = resp.get_json()
    assert body is not None
    assert body["code"] == "auth_failed"
    assert body["message"] == "Invalid structure"
def test_delete_binding_successful(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Deleting a binding removes its row and returns 204."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    binding = DataSourceApiKeyAuthBinding(
        tenant_id=tenant.id,
        category="api_key",
        provider="custom_provider",
        credentials=json.dumps({"auth_type": "api_key", "config": {"api_key": "encrypted"}}),
        disabled=False,
    )
    session.add(binding)
    session.commit()
    binding_id = binding.id
    headers = authenticate_console_client(client, account)
    resp = client.delete(
        f"/console/api/api-key-auth/data-source/{binding_id}",
        headers=headers,
    )
    assert resp.status_code == 204
    stored = session.scalar(
        select(DataSourceApiKeyAuthBinding).where(DataSourceApiKeyAuthBinding.id == binding_id)
    )
    assert stored is None

View File

@ -0,0 +1,120 @@
"""Controller integration tests for console OAuth data source routes."""
from unittest.mock import MagicMock, patch
from flask.testing import FlaskClient
from sqlalchemy.orm import Session
from models.source import DataSourceOauthBinding
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
)
def test_get_oauth_url_successful(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The OAuth URL endpoint returns the provider's authorization URL."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    provider = MagicMock()
    provider.get_authorization_url.return_value = "http://oauth.provider/auth"
    with (
        patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}),
        patch("controllers.console.auth.data_source_oauth.dify_config.NOTION_INTEGRATION_TYPE", None),
    ):
        resp = client.get(
            "/console/api/oauth/data-source/notion",
            headers=authenticate_console_client(client, account),
        )
    # Sanity: the authenticated account operates in the created tenant.
    assert tenant.id == account.current_tenant_id
    assert resp.status_code == 200
    assert resp.get_json() == {"data": "http://oauth.provider/auth"}
    provider.get_authorization_url.assert_called_once()
def test_get_oauth_url_invalid_provider(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Requesting an unregistered provider yields a 400 error payload."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, _tenant = create_console_account_and_tenant(session)
    fake_providers = {"notion": MagicMock()}
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value=fake_providers):
        resp = client.get(
            "/console/api/oauth/data-source/unknown_provider",
            headers=authenticate_console_client(client, account),
        )
    assert resp.status_code == 400
    assert resp.get_json() == {"error": "Invalid provider"}
def test_oauth_callback_successful(test_client_with_containers: FlaskClient) -> None:
    """The OAuth callback redirects, carrying the provider code through."""
    fake_providers = {"notion": MagicMock()}
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value=fake_providers):
        resp = test_client_with_containers.get("/console/api/oauth/data-source/callback/notion?code=mock_code")
    assert resp.status_code == 302
    assert "code=mock_code" in resp.location
def test_oauth_callback_missing_code(test_client_with_containers: FlaskClient) -> None:
    """A callback without a code redirects with an access-denied error."""
    fake_providers = {"notion": MagicMock()}
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value=fake_providers):
        resp = test_client_with_containers.get("/console/api/oauth/data-source/callback/notion")
    assert resp.status_code == 302
    assert "error=Access%20denied" in resp.location
def test_oauth_callback_invalid_provider(test_client_with_containers: FlaskClient) -> None:
    """A callback for an unregistered provider is rejected with 400."""
    fake_providers = {"notion": MagicMock()}
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value=fake_providers):
        resp = test_client_with_containers.get("/console/api/oauth/data-source/callback/invalid?code=mock_code")
    assert resp.status_code == 400
    assert resp.get_json() == {"error": "Invalid provider"}
def test_get_binding_successful(test_client_with_containers: FlaskClient) -> None:
    """Binding exchanges the query code for an access token via the provider."""
    provider = MagicMock()
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}):
        resp = test_client_with_containers.get("/console/api/oauth/data-source/binding/notion?code=auth_code_123")
    assert resp.status_code == 200
    assert resp.get_json() == {"result": "success"}
    provider.get_access_token.assert_called_once_with("auth_code_123")
def test_get_binding_missing_code(test_client_with_containers: FlaskClient) -> None:
    """Binding with an empty code parameter is rejected with 400."""
    fake_providers = {"notion": MagicMock()}
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value=fake_providers):
        resp = test_client_with_containers.get("/console/api/oauth/data-source/binding/notion?code=")
    assert resp.status_code == 400
    assert resp.get_json() == {"error": "Invalid code"}
def test_sync_successful(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Syncing an existing binding delegates to the provider with the binding id."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, tenant = create_console_account_and_tenant(session)
    binding = DataSourceOauthBinding(
        tenant_id=tenant.id,
        access_token="test-access-token",
        provider="notion",
        source_info={"workspace_name": "Workspace", "workspace_icon": None, "workspace_id": tenant.id, "pages": []},
        disabled=False,
    )
    session.add(binding)
    session.commit()
    provider = MagicMock()
    with patch("controllers.console.auth.data_source_oauth.get_oauth_providers", return_value={"notion": provider}):
        resp = client.get(
            f"/console/api/oauth/data-source/notion/{binding.id}/sync",
            headers=authenticate_console_client(client, account),
        )
    assert resp.status_code == 200
    assert resp.get_json() == {"result": "success"}
    provider.sync_data_source.assert_called_once_with(binding.id)

View File

@ -0,0 +1,365 @@
"""Controller integration tests for console OAuth server routes."""
from unittest.mock import patch
from flask.testing import FlaskClient
from sqlalchemy.orm import Session
from models.model import OAuthProviderApp
from services.oauth_server import OAUTH_ACCESS_TOKEN_EXPIRES_IN
from tests.test_containers_integration_tests.controllers.console.helpers import (
authenticate_console_client,
create_console_account_and_tenant,
ensure_dify_setup,
)
def _build_oauth_provider_app() -> OAuthProviderApp:
    """Return an in-memory OAuth provider app fixture used by the mocked service."""
    provider_app = OAuthProviderApp(
        app_icon="icon_url",
        client_id="test_client_id",
        client_secret="test_secret",
        app_label={"en-US": "Test App"},
        redirect_uris=["http://localhost/callback"],
        scope="read,write",
    )
    return provider_app
def test_oauth_provider_successful_post(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """POSTing a valid client_id/redirect_uri pair returns the provider app metadata."""
    ensure_dify_setup(db_session_with_containers)
    client = test_client_with_containers
    with patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    ):
        resp = client.post(
            "/console/api/oauth/provider",
            json={"client_id": "test_client_id", "redirect_uri": "http://localhost/callback"},
        )
    assert resp.status_code == 200
    body = resp.get_json()
    assert body is not None
    assert body["app_icon"] == "icon_url"
    assert body["app_label"] == {"en-US": "Test App"}
    assert body["scope"] == "read,write"
def test_oauth_provider_invalid_redirect_uri(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A redirect_uri not registered for the app is rejected with 400."""
    ensure_dify_setup(db_session_with_containers)
    client = test_client_with_containers
    with patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    ):
        resp = client.post(
            "/console/api/oauth/provider",
            json={"client_id": "test_client_id", "redirect_uri": "http://invalid/callback"},
        )
    assert resp.status_code == 400
    body = resp.get_json()
    assert body is not None
    assert "redirect_uri is invalid" in body["message"]
def test_oauth_provider_invalid_client_id(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """An unknown client_id (no service mock installed) yields a 404 error."""
    ensure_dify_setup(db_session_with_containers)
    client = test_client_with_containers
    resp = client.post(
        "/console/api/oauth/provider",
        json={"client_id": "test_invalid_client_id", "redirect_uri": "http://localhost/callback"},
    )
    assert resp.status_code == 404
    body = resp.get_json()
    assert body is not None
    assert "client_id is invalid" in body["message"]
def test_oauth_authorize_successful(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Authorizing signs a code bound to the client_id and the logged-in account."""
    session = db_session_with_containers
    client = test_client_with_containers
    account, _tenant = create_console_account_and_tenant(session)
    headers = authenticate_console_client(client, account)
    with (
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
            return_value=_build_oauth_provider_app(),
        ),
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_authorization_code",
            return_value="auth_code_123",
        ) as mock_sign,
    ):
        resp = client.post(
            "/console/api/oauth/provider/authorize",
            json={"client_id": "test_client_id"},
            headers=headers,
        )
    assert resp.status_code == 200
    assert resp.get_json() == {"code": "auth_code_123"}
    mock_sign.assert_called_once_with("test_client_id", account.id)
def test_oauth_token_authorization_code_grant(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The authorization_code grant returns a complete bearer-token payload."""
    ensure_dify_setup(db_session_with_containers)

    grant_request = {
        "client_id": "test_client_id",
        "grant_type": "authorization_code",
        "code": "auth_code",
        "client_secret": "test_secret",
        "redirect_uri": "http://localhost/callback",
    }
    with (
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
            return_value=_build_oauth_provider_app(),
        ),
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token",
            return_value=("access_123", "refresh_123"),
        ),
    ):
        response = test_client_with_containers.post("/console/api/oauth/provider/token", json=grant_request)

    assert response.status_code == 200
    expected_payload = {
        "access_token": "access_123",
        "token_type": "Bearer",
        "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN,
        "refresh_token": "refresh_123",
    }
    assert response.get_json() == expected_payload
def test_oauth_token_authorization_code_grant_missing_code(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Omitting `code` from an authorization_code grant is a 400 error."""
    ensure_dify_setup(db_session_with_containers)

    # Note: the `code` field is deliberately absent from this payload.
    incomplete_request = {
        "client_id": "test_client_id",
        "grant_type": "authorization_code",
        "client_secret": "test_secret",
        "redirect_uri": "http://localhost/callback",
    }
    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post("/console/api/oauth/provider/token", json=incomplete_request)

    assert response.status_code == 400
    assert response.get_json()["message"] == "code is required"
def test_oauth_token_authorization_code_grant_invalid_secret(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A wrong client_secret on the token endpoint is rejected with 400."""
    ensure_dify_setup(db_session_with_containers)

    grant_request = {
        "client_id": "test_client_id",
        "grant_type": "authorization_code",
        "code": "auth_code",
        "client_secret": "invalid_secret",
        "redirect_uri": "http://localhost/callback",
    }
    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post("/console/api/oauth/provider/token", json=grant_request)

    assert response.status_code == 400
    assert response.get_json()["message"] == "client_secret is invalid"
def test_oauth_token_authorization_code_grant_invalid_redirect_uri(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The token endpoint validates redirect_uri against the registered list too."""
    ensure_dify_setup(db_session_with_containers)

    grant_request = {
        "client_id": "test_client_id",
        "grant_type": "authorization_code",
        "code": "auth_code",
        "client_secret": "test_secret",
        "redirect_uri": "http://invalid/callback",
    }
    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post("/console/api/oauth/provider/token", json=grant_request)

    assert response.status_code == 400
    assert response.get_json()["message"] == "redirect_uri is invalid"
def test_oauth_token_refresh_token_grant(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """The refresh_token grant rotates both the access and refresh tokens."""
    ensure_dify_setup(db_session_with_containers)

    refresh_request = {
        "client_id": "test_client_id",
        "grant_type": "refresh_token",
        "refresh_token": "refresh_123",
    }
    with (
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
            return_value=_build_oauth_provider_app(),
        ),
        patch(
            "controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token",
            return_value=("new_access", "new_refresh"),
        ),
    ):
        response = test_client_with_containers.post("/console/api/oauth/provider/token", json=refresh_request)

    assert response.status_code == 200
    expected_payload = {
        "access_token": "new_access",
        "token_type": "Bearer",
        "expires_in": OAUTH_ACCESS_TOKEN_EXPIRES_IN,
        "refresh_token": "new_refresh",
    }
    assert response.get_json() == expected_payload
def test_oauth_token_refresh_token_grant_missing_token(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A refresh_token grant without the token itself is a 400 error."""
    ensure_dify_setup(db_session_with_containers)

    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post(
            "/console/api/oauth/provider/token",
            json={"client_id": "test_client_id", "grant_type": "refresh_token"},
        )

    assert response.status_code == 400
    assert response.get_json()["message"] == "refresh_token is required"
def test_oauth_token_invalid_grant_type(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Any grant_type other than authorization_code / refresh_token is rejected."""
    ensure_dify_setup(db_session_with_containers)

    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post(
            "/console/api/oauth/provider/token",
            json={"client_id": "test_client_id", "grant_type": "invalid_grant"},
        )

    assert response.status_code == 400
    assert response.get_json()["message"] == "invalid grant_type"
def test_oauth_account_successful_retrieval(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """A valid bearer token resolves to the account's profile payload."""
    ensure_dify_setup(db_session_with_containers)
    account, _tenant = create_console_account_and_tenant(db_session_with_containers)
    account.avatar = "avatar_url"
    db_session_with_containers.commit()

    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    token_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token",
        return_value=account,
    )
    with provider_patch, token_patch:
        response = test_client_with_containers.post(
            "/console/api/oauth/provider/account",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "Bearer valid_access_token"},
        )

    assert response.status_code == 200
    expected_profile = {
        "name": "Test User",
        "email": account.email,
        "avatar": "avatar_url",
        "interface_language": "en-US",
        "timezone": "UTC",
    }
    assert response.get_json() == expected_profile
def test_oauth_account_missing_authorization_header(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """Requests without an Authorization header are rejected with 401."""
    ensure_dify_setup(db_session_with_containers)

    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post(
            "/console/api/oauth/provider/account",
            json={"client_id": "test_client_id"},
        )

    assert response.status_code == 401
    assert response.get_json() == {"error": "Authorization header is required"}
def test_oauth_account_invalid_authorization_header_format(
    db_session_with_containers: Session,
    test_client_with_containers: FlaskClient,
) -> None:
    """An Authorization header that is not `Bearer <token>` is rejected with 401."""
    ensure_dify_setup(db_session_with_containers)

    provider_patch = patch(
        "controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app",
        return_value=_build_oauth_provider_app(),
    )
    with provider_patch:
        response = test_client_with_containers.post(
            "/console/api/oauth/provider/account",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "InvalidFormat"},
        )

    assert response.status_code == 401
    assert response.get_json() == {"error": "Invalid Authorization header format"}

View File

@ -0,0 +1,85 @@
"""Shared helpers for authenticated console controller integration tests."""
import uuid
from flask.testing import FlaskClient
from sqlalchemy import select
from sqlalchemy.orm import Session
from configs import dify_config
from constants import HEADER_NAME_CSRF_TOKEN
from libs.datetime_utils import naive_utc_now
from libs.token import _real_cookie_name, generate_csrf_token
from models import Account, DifySetup, Tenant, TenantAccountJoin
from models.account import AccountStatus, TenantAccountRole
from models.model import App, AppMode
from services.account_service import AccountService
def ensure_dify_setup(db_session: Session) -> None:
    """Create a setup marker once so setup-protected console routes can be exercised."""
    already_set_up = db_session.scalar(select(DifySetup).limit(1)) is not None
    if already_set_up:
        return
    # First caller in a test session creates the marker; later calls are no-ops.
    db_session.add(DifySetup(version=dify_config.project.version))
    db_session.commit()
def create_console_account_and_tenant(db_session: Session) -> tuple[Account, Tenant]:
    """Create an initialized owner account with a current tenant."""
    # A random email keeps accounts unique across test invocations.
    account = Account(
        email=f"test-{uuid.uuid4()}@example.com",
        name="Test User",
        interface_language="en-US",
        status=AccountStatus.ACTIVE,
    )
    account.initialized_at = naive_utc_now()
    db_session.add(account)
    db_session.commit()

    tenant = Tenant(name="Test Tenant", status="normal")
    db_session.add(tenant)
    db_session.commit()

    # Link the account to the tenant as its current OWNER.
    membership = TenantAccountJoin(
        tenant_id=tenant.id,
        account_id=account.id,
        role=TenantAccountRole.OWNER,
        current=True,
    )
    db_session.add(membership)
    db_session.commit()

    account.set_tenant_id(tenant.id)
    account.timezone = "UTC"
    db_session.commit()

    ensure_dify_setup(db_session)
    return account, tenant
def create_console_app(db_session: Session, tenant_id: str, account_id: str, mode: AppMode) -> App:
    """Create a minimal app row that can be loaded by get_app_model."""
    app_fields = {
        "tenant_id": tenant_id,
        "name": "Test App",
        "mode": mode,
        "enable_site": True,
        "enable_api": True,
        "created_by": account_id,
    }
    app = App(**app_fields)
    db_session.add(app)
    db_session.commit()
    return app
def authenticate_console_client(test_client: FlaskClient, account: Account) -> dict[str, str]:
    """Attach console auth cookies/headers for endpoints guarded by login_required."""
    jwt_token = AccountService.get_account_jwt_token(account)
    csrf_token = generate_csrf_token(account.id)
    # The CSRF cookie must carry the same value as the header (double-submit check).
    test_client.set_cookie(_real_cookie_name("csrf_token"), csrf_token, domain="localhost")
    headers = {"Authorization": f"Bearer {jwt_token}"}
    headers[HEADER_NAME_CSRF_TOKEN] = csrf_token
    return headers

View File

@ -1,27 +0,0 @@
from __future__ import annotations
from sqlalchemy.orm import sessionmaker
from extensions.ext_database import db
from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository
from tests.test_containers_integration_tests.helpers.execution_extra_content import (
create_human_input_message_fixture,
)
def test_get_by_message_ids_returns_human_input_content(db_session_with_containers):
    """A submitted human-input form is surfaced as content for its message."""
    fixture = create_human_input_message_fixture(db_session_with_containers)
    session_maker = sessionmaker(bind=db.engine, expire_on_commit=False)
    repository = SQLAlchemyExecutionExtraContentRepository(session_maker=session_maker)

    grouped = repository.get_by_message_ids([fixture.message.id])

    # One message queried -> one group, containing exactly one content item.
    assert len(grouped) == 1
    assert len(grouped[0]) == 1
    content = grouped[0][0]
    assert content.submitted is True
    submission = content.form_submission_data
    assert submission is not None
    assert submission.action_id == fixture.action_id
    assert submission.action_text == fixture.action_text
    assert submission.rendered_content == fixture.form.rendered_content

View File

@ -0,0 +1,407 @@
"""Integration tests for SQLAlchemyExecutionExtraContentRepository using Testcontainers.
Part of #32454 — replaces the mock-based unit tests with real database interactions.
"""
from __future__ import annotations
from collections.abc import Generator
from dataclasses import dataclass
from datetime import datetime, timedelta
from decimal import Decimal
from uuid import uuid4
import pytest
from sqlalchemy import Engine, delete, select
from sqlalchemy.orm import Session, sessionmaker
from dify_graph.nodes.human_input.entities import FormDefinition, UserAction
from dify_graph.nodes.human_input.enums import HumanInputFormStatus
from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.enums import ConversationFromSource, InvokeFrom
from models.execution_extra_content import ExecutionExtraContent, HumanInputContent
from models.human_input import (
ConsoleRecipientPayload,
HumanInputDelivery,
HumanInputForm,
HumanInputFormRecipient,
RecipientType,
)
from models.model import App, Conversation, Message
from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository
@dataclass
class _TestScope:
    """Per-test data scope used to isolate DB rows.
    IDs are populated after flushing the base entities to the database.
    """
    # Empty strings mean "not yet seeded"; _seed_base_entities fills these in
    # after flushing each row so the DB-generated IDs are available.
    tenant_id: str = ""  # tenant owning every row created for the test
    app_id: str = ""  # app attached to the tenant
    user_id: str = ""  # account that owns the tenant (OWNER role)
def _cleanup_scope_data(session: Session, scope: _TestScope) -> None:
    """Remove test-created DB rows for a test scope.

    Deletes are issued children-first (recipients/deliveries before forms,
    messages before conversations before apps, joins before accounts/tenants)
    so the commit succeeds even with foreign-key constraints. Do not reorder.
    """
    # Subquery over the scope's forms, reused by the dependent-row deletes below.
    form_ids_subquery = select(HumanInputForm.id).where(
        HumanInputForm.tenant_id == scope.tenant_id,
    )
    session.execute(delete(HumanInputFormRecipient).where(HumanInputFormRecipient.form_id.in_(form_ids_subquery)))
    session.execute(delete(HumanInputDelivery).where(HumanInputDelivery.form_id.in_(form_ids_subquery)))
    # Extra content is keyed by workflow_run_id, so it is resolved via the forms table.
    session.execute(
        delete(ExecutionExtraContent).where(
            ExecutionExtraContent.workflow_run_id.in_(
                select(HumanInputForm.workflow_run_id).where(HumanInputForm.tenant_id == scope.tenant_id)
            )
        )
    )
    session.execute(delete(HumanInputForm).where(HumanInputForm.tenant_id == scope.tenant_id))
    session.execute(delete(Message).where(Message.app_id == scope.app_id))
    session.execute(delete(Conversation).where(Conversation.app_id == scope.app_id))
    session.execute(delete(App).where(App.id == scope.app_id))
    session.execute(delete(TenantAccountJoin).where(TenantAccountJoin.tenant_id == scope.tenant_id))
    session.execute(delete(Account).where(Account.id == scope.user_id))
    session.execute(delete(Tenant).where(Tenant.id == scope.tenant_id))
    # Single commit after all deletes keeps cleanup atomic.
    session.commit()
def _seed_base_entities(session: Session, scope: _TestScope) -> None:
    """Create the base tenant, account, and app needed by tests.

    Each entity is flushed (not committed) immediately after being added so
    its DB-generated ID can be copied into `scope` and used by the next
    entity in the chain. The caller is responsible for the final commit.
    """
    tenant = Tenant(name="Test Tenant")
    session.add(tenant)
    session.flush()
    scope.tenant_id = tenant.id
    # Random email keeps accounts unique across test runs against one DB.
    account = Account(
        name="Test Account",
        email=f"test_{uuid4()}@example.com",
        password="hashed-password",
        password_salt="salt",
        interface_language="en-US",
        timezone="UTC",
    )
    session.add(account)
    session.flush()
    scope.user_id = account.id
    # Membership row making the account the tenant's current OWNER.
    tenant_join = TenantAccountJoin(
        tenant_id=scope.tenant_id,
        account_id=scope.user_id,
        role=TenantAccountRole.OWNER,
        current=True,
    )
    session.add(tenant_join)
    app = App(
        tenant_id=scope.tenant_id,
        name="Test App",
        description="",
        mode="chat",
        icon_type="emoji",
        icon="bot",
        icon_background="#FFFFFF",
        enable_site=False,
        enable_api=True,
        api_rpm=100,
        api_rph=100,
        is_demo=False,
        is_public=False,
        is_universal=False,
        created_by=scope.user_id,
        updated_by=scope.user_id,
    )
    session.add(app)
    session.flush()
    scope.app_id = app.id
def _create_conversation(session: Session, scope: _TestScope) -> Conversation:
    """Persist a minimal chat conversation owned by the scope's account."""
    conversation_fields = {
        "app_id": scope.app_id,
        "mode": "chat",
        "name": "Test Conversation",
        "summary": "",
        "introduction": "",
        "system_instruction": "",
        "status": "normal",
        "invoke_from": InvokeFrom.EXPLORE,
        "from_source": ConversationFromSource.CONSOLE,
        "from_account_id": scope.user_id,
        "from_end_user_id": None,
    }
    conversation = Conversation(**conversation_fields)
    # `inputs` is assigned after construction, matching the model's mutator usage.
    conversation.inputs = {}
    session.add(conversation)
    session.flush()  # flush only, so the id exists within the caller's transaction
    return conversation
def _create_message(
    session: Session,
    scope: _TestScope,
    conversation_id: str,
    workflow_run_id: str,
) -> Message:
    """Persist a message row tied to a conversation and workflow run."""
    message_fields = {
        "app_id": scope.app_id,
        "conversation_id": conversation_id,
        "inputs": {},
        "query": "test query",
        "message": {"messages": []},
        "answer": "test answer",
        "message_tokens": 50,
        "message_unit_price": Decimal("0.001"),
        "answer_tokens": 80,
        "answer_unit_price": Decimal("0.001"),
        "provider_response_latency": 0.5,
        "currency": "USD",
        "from_source": ConversationFromSource.CONSOLE,
        "from_account_id": scope.user_id,
        "workflow_run_id": workflow_run_id,
    }
    message = Message(**message_fields)
    session.add(message)
    session.flush()  # flush only, so the id exists within the caller's transaction
    return message
def _create_submitted_form(
    session: Session,
    scope: _TestScope,
    *,
    workflow_run_id: str,
    action_id: str = "approve",
    action_title: str = "Approve",
    node_title: str = "Approval",
) -> HumanInputForm:
    """Persist a SUBMITTED human-input form with a single selected action.

    Returns the flushed (not committed) form so its id is available to the caller.
    """
    from datetime import UTC

    # datetime.utcnow() is deprecated since Python 3.12; build the equivalent
    # naive-UTC timestamp from an aware "now" so column semantics are unchanged.
    expiration_time = datetime.now(UTC).replace(tzinfo=None) + timedelta(days=1)
    form_definition = FormDefinition(
        form_content="content",
        inputs=[],
        user_actions=[UserAction(id=action_id, title=action_title)],
        rendered_content="rendered",
        expiration_time=expiration_time,
        node_title=node_title,
        display_in_ui=True,
    )
    form = HumanInputForm(
        tenant_id=scope.tenant_id,
        app_id=scope.app_id,
        workflow_run_id=workflow_run_id,
        node_id="node-id",
        form_definition=form_definition.model_dump_json(),
        rendered_content=f"Rendered {action_title}",
        status=HumanInputFormStatus.SUBMITTED,
        expiration_time=expiration_time,
        selected_action_id=action_id,  # marks which user action was taken
    )
    session.add(form)
    session.flush()
    return form
def _create_waiting_form(
    session: Session,
    scope: _TestScope,
    *,
    workflow_run_id: str,
    default_values: dict | None = None,
) -> HumanInputForm:
    """Persist a WAITING human-input form (not yet submitted).

    Returns the flushed (not committed) form so its id is available to the caller.
    """
    from datetime import UTC

    # datetime.utcnow() is deprecated since Python 3.12; build the equivalent
    # naive-UTC timestamp from an aware "now" so column semantics are unchanged.
    expiration_time = datetime.now(UTC).replace(tzinfo=None) + timedelta(days=1)
    form_definition = FormDefinition(
        form_content="content",
        inputs=[],
        user_actions=[UserAction(id="approve", title="Approve")],
        rendered_content="rendered",
        expiration_time=expiration_time,
        default_values=default_values or {"name": "John"},
        node_title="Approval",
        display_in_ui=True,
    )
    form = HumanInputForm(
        tenant_id=scope.tenant_id,
        app_id=scope.app_id,
        workflow_run_id=workflow_run_id,
        node_id="node-id",
        form_definition=form_definition.model_dump_json(),
        rendered_content="Rendered block",
        status=HumanInputFormStatus.WAITING,
        expiration_time=expiration_time,
    )
    session.add(form)
    session.flush()
    return form
def _create_human_input_content(
    session: Session,
    *,
    workflow_run_id: str,
    message_id: str,
    form_id: str,
) -> HumanInputContent:
    """Link a message to a human-input form via an extra-content row (not flushed)."""
    linked_content = HumanInputContent.new(
        workflow_run_id=workflow_run_id,
        message_id=message_id,
        form_id=form_id,
    )
    session.add(linked_content)
    return linked_content
def _create_recipient(
    session: Session,
    *,
    form_id: str,
    delivery_id: str,
    recipient_type: RecipientType = RecipientType.CONSOLE,
    access_token: str = "token-1",
) -> HumanInputFormRecipient:
    """Attach a recipient (with an access token) to a form delivery (not flushed)."""
    recipient = HumanInputFormRecipient(
        form_id=form_id,
        delivery_id=delivery_id,
        recipient_type=recipient_type,
        recipient_payload=ConsoleRecipientPayload(account_id=None).model_dump_json(),
        access_token=access_token,
    )
    session.add(recipient)
    return recipient
def _create_delivery(session: Session, *, form_id: str) -> HumanInputDelivery:
    """Persist a WEBAPP delivery row for a form and flush to obtain its id."""
    # Imported locally, matching the original helper's lazy-import style.
    from dify_graph.nodes.human_input.enums import DeliveryMethodType
    from models.human_input import ConsoleDeliveryPayload

    delivery_row = HumanInputDelivery(
        form_id=form_id,
        delivery_method_type=DeliveryMethodType.WEBAPP,
        channel_payload=ConsoleDeliveryPayload().model_dump_json(),
    )
    session.add(delivery_row)
    session.flush()
    return delivery_row
@pytest.fixture
def repository(db_session_with_containers: Session) -> SQLAlchemyExecutionExtraContentRepository:
    """Build a repository backed by the testcontainers database engine."""
    bind = db_session_with_containers.get_bind()
    assert isinstance(bind, Engine)
    session_maker = sessionmaker(bind=bind, expire_on_commit=False)
    return SQLAlchemyExecutionExtraContentRepository(session_maker)
@pytest.fixture
def test_scope(db_session_with_containers: Session) -> Generator[_TestScope]:
    """Provide an isolated scope and clean related data after each test."""
    scope = _TestScope()
    _seed_base_entities(db_session_with_containers, scope)
    db_session_with_containers.commit()
    try:
        yield scope
    finally:
        # Teardown runs regardless of test outcome, mirroring pytest's
        # post-yield finalization semantics.
        _cleanup_scope_data(db_session_with_containers, scope)
class TestGetByMessageIds:
    """Tests for SQLAlchemyExecutionExtraContentRepository.get_by_message_ids."""
    def test_groups_contents_by_message(
        self,
        db_session_with_containers: Session,
        repository: SQLAlchemyExecutionExtraContentRepository,
        test_scope: _TestScope,
    ) -> None:
        """Submitted forms are correctly mapped and grouped by message ID."""
        workflow_run_id = str(uuid4())
        conversation = _create_conversation(db_session_with_containers, test_scope)
        # Two messages share one workflow run, but only msg1 is linked to content.
        msg1 = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id)
        msg2 = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id)
        form = _create_submitted_form(
            db_session_with_containers,
            test_scope,
            workflow_run_id=workflow_run_id,
            action_id="approve",
            action_title="Approve",
        )
        _create_human_input_content(
            db_session_with_containers,
            workflow_run_id=workflow_run_id,
            message_id=msg1.id,
            form_id=form.id,
        )
        db_session_with_containers.commit()
        # Result groups are positional: one list per requested message id.
        result = repository.get_by_message_ids([msg1.id, msg2.id])
        assert len(result) == 2
        # msg1 has one submitted content
        assert len(result[0]) == 1
        content = result[0][0]
        assert content.submitted is True
        assert content.workflow_run_id == workflow_run_id
        assert content.form_submission_data is not None
        assert content.form_submission_data.action_id == "approve"
        assert content.form_submission_data.action_text == "Approve"
        assert content.form_submission_data.rendered_content == "Rendered Approve"
        assert content.form_submission_data.node_id == "node-id"
        assert content.form_submission_data.node_title == "Approval"
        # msg2 has no content
        assert result[1] == []
    def test_returns_unsubmitted_form_definition(
        self,
        db_session_with_containers: Session,
        repository: SQLAlchemyExecutionExtraContentRepository,
        test_scope: _TestScope,
    ) -> None:
        """Waiting forms return full form_definition with resolved token and defaults."""
        workflow_run_id = str(uuid4())
        conversation = _create_conversation(db_session_with_containers, test_scope)
        msg = _create_message(db_session_with_containers, test_scope, conversation.id, workflow_run_id)
        form = _create_waiting_form(
            db_session_with_containers,
            test_scope,
            workflow_run_id=workflow_run_id,
            default_values={"name": "John"},
        )
        # A delivery + recipient pair supplies the access token the repository
        # resolves into form_token.
        delivery = _create_delivery(db_session_with_containers, form_id=form.id)
        _create_recipient(
            db_session_with_containers,
            form_id=form.id,
            delivery_id=delivery.id,
            access_token="token-1",
        )
        _create_human_input_content(
            db_session_with_containers,
            workflow_run_id=workflow_run_id,
            message_id=msg.id,
            form_id=form.id,
        )
        db_session_with_containers.commit()
        result = repository.get_by_message_ids([msg.id])
        assert len(result) == 1
        assert len(result[0]) == 1
        domain_content = result[0][0]
        assert domain_content.submitted is False
        assert domain_content.workflow_run_id == workflow_run_id
        assert domain_content.form_definition is not None
        form_def = domain_content.form_definition
        assert form_def.form_id == form.id
        assert form_def.node_id == "node-id"
        assert form_def.node_title == "Approval"
        assert form_def.form_content == "Rendered block"
        assert form_def.display_in_ui is True
        assert form_def.form_token == "token-1"
        assert form_def.resolved_default_values == {"name": "John"}
        # expiration_time is exposed as a unix timestamp (int).
        assert form_def.expiration_time == int(form.expiration_time.timestamp())
    def test_empty_message_ids_returns_empty_list(
        self,
        repository: SQLAlchemyExecutionExtraContentRepository,
    ) -> None:
        """Passing no message IDs returns an empty list without hitting the DB."""
        result = repository.get_by_message_ids([])
        assert result == []

View File

@ -0,0 +1,80 @@
"""Testcontainers integration tests for AttachmentService."""
import base64
from datetime import UTC, datetime
from unittest.mock import patch
from uuid import uuid4
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
import services.attachment_service as attachment_service_module
from extensions.ext_database import db
from extensions.storage.storage_type import StorageType
from models.enums import CreatorUserRole
from models.model import UploadFile
from services.attachment_service import AttachmentService
class TestAttachmentService:
    """Integration tests for AttachmentService construction and file retrieval."""
    def _create_upload_file(self, db_session_with_containers, *, tenant_id: str | None = None) -> UploadFile:
        """Persist a minimal UploadFile row; storage itself is mocked in tests."""
        upload_file = UploadFile(
            tenant_id=tenant_id or str(uuid4()),
            storage_type=StorageType.OPENDAL,
            key=f"upload/{uuid4()}.txt",
            name="test-file.txt",
            size=100,
            extension="txt",
            mime_type="text/plain",
            created_by_role=CreatorUserRole.ACCOUNT,
            created_by=str(uuid4()),
            created_at=datetime.now(UTC),
            used=False,
        )
        db_session_with_containers.add(upload_file)
        db_session_with_containers.commit()
        return upload_file
    def test_should_initialize_with_sessionmaker(self):
        """A sessionmaker passed in is stored as-is on the service."""
        session_factory = sessionmaker()
        service = AttachmentService(session_factory=session_factory)
        assert service._session_maker is session_factory
    def test_should_initialize_with_engine(self):
        """An Engine is wrapped into a sessionmaker bound to that engine."""
        engine = create_engine("sqlite:///:memory:")
        service = AttachmentService(session_factory=engine)
        session = service._session_maker()
        try:
            assert session.bind == engine
        finally:
            # Always close the session before disposing the engine.
            session.close()
        engine.dispose()
    @pytest.mark.parametrize("invalid_session_factory", [None, "not-a-session-factory", 1])
    def test_should_raise_assertion_error_for_invalid_session_factory(self, invalid_session_factory):
        """Anything other than a sessionmaker or Engine is rejected at construction."""
        with pytest.raises(AssertionError, match="must be a sessionmaker or an Engine."):
            AttachmentService(session_factory=invalid_session_factory)
    def test_should_return_base64_when_file_exists(self, db_session_with_containers):
        """get_file_base64 loads the stored bytes by key and base64-encodes them."""
        upload_file = self._create_upload_file(db_session_with_containers)
        service = AttachmentService(session_factory=sessionmaker(bind=db.engine))
        with patch.object(attachment_service_module.storage, "load_once", return_value=b"binary-content") as mock_load:
            result = service.get_file_base64(upload_file.id)
        assert result == base64.b64encode(b"binary-content").decode()
        mock_load.assert_called_once_with(upload_file.key)
    def test_should_raise_not_found_when_file_missing(self, db_session_with_containers):
        """An unknown file id raises NotFound before any storage access happens."""
        service = AttachmentService(session_factory=sessionmaker(bind=db.engine))
        with patch.object(attachment_service_module.storage, "load_once") as mock_load:
            with pytest.raises(NotFound, match="File not found"):
                service.get_file_base64(str(uuid4()))
            mock_load.assert_not_called()

View File

@ -0,0 +1,58 @@
"""Testcontainers integration tests for ConversationVariableUpdater."""
from uuid import uuid4
import pytest
from sqlalchemy.orm import sessionmaker
from dify_graph.variables import StringVariable
from extensions.ext_database import db
from models.workflow import ConversationVariable
from services.conversation_variable_updater import ConversationVariableNotFoundError, ConversationVariableUpdater
class TestConversationVariableUpdater:
    """Integration tests for ConversationVariableUpdater against a real database."""
    def _create_conversation_variable(
        self, db_session_with_containers, *, conversation_id: str, variable: StringVariable, app_id: str | None = None
    ) -> ConversationVariable:
        """Persist a ConversationVariable row serialized from the given variable."""
        row = ConversationVariable(
            id=variable.id,
            conversation_id=conversation_id,
            app_id=app_id or str(uuid4()),
            data=variable.model_dump_json(),
        )
        db_session_with_containers.add(row)
        db_session_with_containers.commit()
        return row
    def test_should_update_conversation_variable_data_and_commit(self, db_session_with_containers):
        """update() persists the new serialized value for an existing variable."""
        conversation_id = str(uuid4())
        variable = StringVariable(id=str(uuid4()), name="topic", value="old value")
        self._create_conversation_variable(
            db_session_with_containers, conversation_id=conversation_id, variable=variable
        )
        updated_variable = StringVariable(id=variable.id, name="topic", value="new value")
        updater = ConversationVariableUpdater(sessionmaker(bind=db.engine))
        updater.update(conversation_id=conversation_id, variable=updated_variable)
        # Expire the identity map so the next read reflects the updater's
        # commit (made through a separate session) rather than cached state.
        db_session_with_containers.expire_all()
        row = db_session_with_containers.get(ConversationVariable, (variable.id, conversation_id))
        assert row is not None
        assert row.data == updated_variable.model_dump_json()
    def test_should_raise_not_found_when_variable_missing(self, db_session_with_containers):
        """update() on a non-existent variable raises a domain-specific error."""
        conversation_id = str(uuid4())
        variable = StringVariable(id=str(uuid4()), name="topic", value="value")
        updater = ConversationVariableUpdater(sessionmaker(bind=db.engine))
        with pytest.raises(ConversationVariableNotFoundError, match="conversation variable not found in the database"):
            updater.update(conversation_id=conversation_id, variable=variable)
    def test_should_do_nothing_when_flush_is_called(self, db_session_with_containers):
        """flush() is a no-op that returns None."""
        updater = ConversationVariableUpdater(sessionmaker(bind=db.engine))
        result = updater.flush()
        assert result is None

View File

@ -0,0 +1,103 @@
"""Testcontainers integration tests for CreditPoolService."""
from uuid import uuid4
import pytest
from core.errors.error import QuotaExceededError
from models import TenantCreditPool
from services.credit_pool_service import CreditPoolService
class TestCreditPoolService:
    """Integration tests for CreditPoolService quota creation, checks, and deduction."""
    def _create_tenant_id(self) -> str:
        """Return a fresh random tenant id; no tenant row is created."""
        return str(uuid4())
    def test_create_default_pool(self, db_session_with_containers):
        """A default pool is a trial pool with zero usage and a positive limit."""
        tenant_id = self._create_tenant_id()
        pool = CreditPoolService.create_default_pool(tenant_id)
        assert isinstance(pool, TenantCreditPool)
        assert pool.tenant_id == tenant_id
        assert pool.pool_type == "trial"
        assert pool.quota_used == 0
        assert pool.quota_limit > 0
    def test_get_pool_returns_pool_when_exists(self, db_session_with_containers):
        """get_pool finds a pool by tenant id and pool type."""
        tenant_id = self._create_tenant_id()
        CreditPoolService.create_default_pool(tenant_id)
        result = CreditPoolService.get_pool(tenant_id=tenant_id, pool_type="trial")
        assert result is not None
        assert result.tenant_id == tenant_id
        assert result.pool_type == "trial"
    def test_get_pool_returns_none_when_not_exists(self, db_session_with_containers):
        """get_pool returns None rather than raising for unknown tenants."""
        result = CreditPoolService.get_pool(tenant_id=self._create_tenant_id(), pool_type="trial")
        assert result is None
    def test_check_credits_available_returns_false_when_no_pool(self, db_session_with_containers):
        """Without a pool, availability checks are False (no implicit creation)."""
        result = CreditPoolService.check_credits_available(tenant_id=self._create_tenant_id(), credits_required=10)
        assert result is False
    def test_check_credits_available_returns_true_when_sufficient(self, db_session_with_containers):
        """A fresh default pool can cover a small requirement."""
        tenant_id = self._create_tenant_id()
        CreditPoolService.create_default_pool(tenant_id)
        result = CreditPoolService.check_credits_available(tenant_id=tenant_id, credits_required=10)
        assert result is True
    def test_check_credits_available_returns_false_when_insufficient(self, db_session_with_containers):
        """A fully-consumed pool cannot cover even one credit."""
        tenant_id = self._create_tenant_id()
        pool = CreditPoolService.create_default_pool(tenant_id)
        # Exhaust credits
        pool.quota_used = pool.quota_limit
        db_session_with_containers.commit()
        result = CreditPoolService.check_credits_available(tenant_id=tenant_id, credits_required=1)
        assert result is False
    def test_check_and_deduct_credits_raises_when_no_pool(self, db_session_with_containers):
        """Deducting against a missing pool raises QuotaExceededError."""
        with pytest.raises(QuotaExceededError, match="Credit pool not found"):
            CreditPoolService.check_and_deduct_credits(tenant_id=self._create_tenant_id(), credits_required=10)
    def test_check_and_deduct_credits_raises_when_no_remaining(self, db_session_with_containers):
        """Deducting from an exhausted pool raises QuotaExceededError."""
        tenant_id = self._create_tenant_id()
        pool = CreditPoolService.create_default_pool(tenant_id)
        pool.quota_used = pool.quota_limit
        db_session_with_containers.commit()
        with pytest.raises(QuotaExceededError, match="No credits remaining"):
            CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=10)
    def test_check_and_deduct_credits_deducts_required_amount(self, db_session_with_containers):
        """A deduction within the limit consumes exactly the requested amount."""
        tenant_id = self._create_tenant_id()
        CreditPoolService.create_default_pool(tenant_id)
        credits_required = 10
        result = CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=credits_required)
        assert result == credits_required
        # Expire cached state so the re-read sees the service's own commit.
        db_session_with_containers.expire_all()
        pool = CreditPoolService.get_pool(tenant_id=tenant_id)
        assert pool.quota_used == credits_required
    def test_check_and_deduct_credits_caps_at_remaining(self, db_session_with_containers):
        """A deduction larger than the remainder is capped at what is left."""
        tenant_id = self._create_tenant_id()
        pool = CreditPoolService.create_default_pool(tenant_id)
        remaining = 5
        pool.quota_used = pool.quota_limit - remaining
        db_session_with_containers.commit()
        result = CreditPoolService.check_and_deduct_credits(tenant_id=tenant_id, credits_required=200)
        assert result == remaining
        db_session_with_containers.expire_all()
        updated_pool = CreditPoolService.get_pool(tenant_id=tenant_id)
        assert updated_pool.quota_used == pool.quota_limit

View File

@ -397,6 +397,68 @@ class TestDatasetPermissionServiceClearPartialMemberList:
class TestDatasetServiceCheckDatasetPermission:
"""Verify dataset access checks against persisted partial-member permissions."""
def test_check_dataset_permission_different_tenant_should_fail(self, db_session_with_containers):
"""Test that users from different tenants cannot access dataset."""
owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
other_user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, owner.id, permission=DatasetPermissionEnum.ALL_TEAM
)
with pytest.raises(NoPermissionError):
DatasetService.check_dataset_permission(dataset, other_user)
def test_check_dataset_permission_owner_can_access_any_dataset(self, db_session_with_containers):
"""Test that tenant owners can access any dataset regardless of permission level."""
owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
creator, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
role=TenantAccountRole.NORMAL, tenant=tenant
)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME
)
DatasetService.check_dataset_permission(dataset, owner)
def test_check_dataset_permission_only_me_creator_can_access(self, db_session_with_containers):
"""Test ONLY_ME permission allows only the dataset creator to access."""
creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME
)
DatasetService.check_dataset_permission(dataset, creator)
def test_check_dataset_permission_only_me_others_cannot_access(self, db_session_with_containers):
"""Test ONLY_ME permission denies access to non-creators."""
creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.NORMAL)
other, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
role=TenantAccountRole.NORMAL, tenant=tenant
)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, creator.id, permission=DatasetPermissionEnum.ONLY_ME
)
with pytest.raises(NoPermissionError):
DatasetService.check_dataset_permission(dataset, other)
def test_check_dataset_permission_all_team_allows_access(self, db_session_with_containers):
"""Test ALL_TEAM permission allows any team member to access the dataset."""
creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.NORMAL)
member, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
role=TenantAccountRole.NORMAL, tenant=tenant
)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, creator.id, permission=DatasetPermissionEnum.ALL_TEAM
)
DatasetService.check_dataset_permission(dataset, member)
def test_check_dataset_permission_partial_members_with_permission_success(self, db_session_with_containers):
"""
Test that user with explicit permission can access partial_members dataset.
@@ -443,6 +505,16 @@ class TestDatasetServiceCheckDatasetPermission:
with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
DatasetService.check_dataset_permission(dataset, user)
def test_check_dataset_permission_partial_team_creator_can_access(self, db_session_with_containers):
"""Test PARTIAL_TEAM permission allows creator to access without explicit permission."""
creator, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.EDITOR)
dataset = DatasetPermissionTestDataFactory.create_dataset(
tenant.id, creator.id, permission=DatasetPermissionEnum.PARTIAL_TEAM
)
DatasetService.check_dataset_permission(dataset, creator)
class TestDatasetServiceCheckDatasetOperatorPermission:
"""Verify operator permission checks against persisted partial-member permissions."""

View File

@@ -9,7 +9,7 @@ from werkzeug.exceptions import NotFound
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset
from models.enums import DataSourceType
from models.enums import DataSourceType, TagType
from models.model import App, Tag, TagBinding
from services.tag_service import TagService
@@ -547,7 +547,7 @@ class TestTagService:
assert result is not None
assert len(result) == 1
assert result[0].name == "python_tag"
assert result[0].type == "app"
assert result[0].type == TagType.APP
assert result[0].tenant_id == tenant.id
def test_get_tag_by_tag_name_no_matches(
@@ -638,7 +638,7 @@ class TestTagService:
# Verify all tags are returned
for tag in result:
assert tag.type == "app"
assert tag.type == TagType.APP
assert tag.tenant_id == tenant.id
assert tag.id in [t.id for t in tags]

View File

@@ -0,0 +1,158 @@
"""Testcontainers integration tests for WorkflowService.delete_workflow."""
import json
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session, sessionmaker
from extensions.ext_database import db
from models.account import Account, Tenant, TenantAccountJoin
from models.model import App
from models.tools import WorkflowToolProvider
from models.workflow import Workflow
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService
class TestWorkflowDeletion:
    """Testcontainers-backed tests for ``WorkflowService.delete_workflow``.

    Covers the happy path plus the three guard conditions: draft workflows
    cannot be deleted, nor can workflows referenced by an app or published
    as a workflow tool.
    """

    def _create_tenant_and_account(self, session: Session) -> tuple[Tenant, Account]:
        """Persist a tenant and an owner account joined to it (flushed, not committed)."""
        tenant = Tenant(name=f"Tenant {uuid4()}")
        session.add(tenant)
        session.flush()
        account = Account(
            name=f"Account {uuid4()}",
            email=f"wf_del_{uuid4()}@example.com",
            password="hashed",
            password_salt="salt",
            interface_language="en-US",
            timezone="UTC",
        )
        session.add(account)
        session.flush()
        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role="owner",
            current=True,
        )
        session.add(join)
        session.flush()
        return tenant, account

    def _create_app(self, session: Session, *, tenant: Tenant, account: Account, workflow_id: str | None = None) -> App:
        """Persist a workflow-mode App; ``workflow_id`` optionally pins it to a workflow."""
        app = App(
            tenant_id=tenant.id,
            name=f"App {uuid4()}",
            description="",
            mode="workflow",
            icon_type="emoji",
            icon="bot",
            icon_background="#FFFFFF",
            enable_site=False,
            enable_api=True,
            api_rpm=100,
            api_rph=100,
            is_demo=False,
            is_public=False,
            is_universal=False,
            created_by=account.id,
            updated_by=account.id,
            workflow_id=workflow_id,
        )
        session.add(app)
        session.flush()
        return app

    def _create_workflow(
        self, session: Session, *, tenant: Tenant, app: App, account: Account, version: str = "1.0"
    ) -> Workflow:
        """Persist a minimal (empty-graph) workflow row for ``app`` at ``version``."""
        workflow = Workflow(
            id=str(uuid4()),
            tenant_id=tenant.id,
            app_id=app.id,
            type="workflow",
            version=version,
            graph=json.dumps({"nodes": [], "edges": []}),
            _features=json.dumps({}),
            created_by=account.id,
            updated_by=account.id,
        )
        session.add(workflow)
        session.flush()
        return workflow

    def _create_tool_provider(
        self, session: Session, *, tenant: Tenant, app: App, account: Account, version: str
    ) -> WorkflowToolProvider:
        """Persist a WorkflowToolProvider publishing ``app``'s workflow at ``version`` as a tool."""
        provider = WorkflowToolProvider(
            name=f"tool-{uuid4()}",
            label=f"Tool {uuid4()}",
            icon="wrench",
            app_id=app.id,
            version=version,
            user_id=account.id,
            tenant_id=tenant.id,
            description="test tool provider",
        )
        session.add(provider)
        session.flush()
        return provider

    def test_delete_workflow_success(self, db_session_with_containers):
        """An unreferenced, non-draft workflow is deleted and the row disappears."""
        tenant, account = self._create_tenant_and_account(db_session_with_containers)
        app = self._create_app(db_session_with_containers, tenant=tenant, account=account)
        workflow = self._create_workflow(
            db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0"
        )
        db_session_with_containers.commit()
        workflow_id = workflow.id
        service = WorkflowService(sessionmaker(bind=db.engine))
        result = service.delete_workflow(
            session=db_session_with_containers, workflow_id=workflow_id, tenant_id=tenant.id
        )
        assert result is True
        # Expire cached state so the lookup below actually hits the database.
        db_session_with_containers.expire_all()
        assert db_session_with_containers.get(Workflow, workflow_id) is None

    def test_delete_draft_workflow_raises_error(self, db_session_with_containers):
        """Deleting the draft version is refused with DraftWorkflowDeletionError."""
        tenant, account = self._create_tenant_and_account(db_session_with_containers)
        app = self._create_app(db_session_with_containers, tenant=tenant, account=account)
        workflow = self._create_workflow(
            db_session_with_containers, tenant=tenant, app=app, account=account, version="draft"
        )
        db_session_with_containers.commit()
        service = WorkflowService(sessionmaker(bind=db.engine))
        with pytest.raises(DraftWorkflowDeletionError):
            service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id)

    def test_delete_workflow_in_use_by_app_raises_error(self, db_session_with_containers):
        """A workflow currently referenced by an app cannot be deleted."""
        tenant, account = self._create_tenant_and_account(db_session_with_containers)
        app = self._create_app(db_session_with_containers, tenant=tenant, account=account)
        workflow = self._create_workflow(
            db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0"
        )
        # Point app to this workflow
        app.workflow_id = workflow.id
        db_session_with_containers.commit()
        service = WorkflowService(sessionmaker(bind=db.engine))
        with pytest.raises(WorkflowInUseError, match="currently in use by app"):
            service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id)

    def test_delete_workflow_published_as_tool_raises_error(self, db_session_with_containers):
        """A workflow published as a tool (matching app/version) cannot be deleted."""
        tenant, account = self._create_tenant_and_account(db_session_with_containers)
        app = self._create_app(db_session_with_containers, tenant=tenant, account=account)
        workflow = self._create_workflow(
            db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0"
        )
        self._create_tool_provider(db_session_with_containers, tenant=tenant, app=app, account=account, version="1.0")
        db_session_with_containers.commit()
        service = WorkflowService(sessionmaker(bind=db.engine))
        with pytest.raises(WorkflowInUseError, match="published as a tool"):
            service.delete_workflow(session=db_session_with_containers, workflow_id=workflow.id, tenant_id=tenant.id)

View File

@@ -1,320 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask, request
from werkzeug.exceptions import InternalServerError, NotFound
from werkzeug.local import LocalProxy
from controllers.console.app.error import (
ProviderModelCurrentlyNotSupportError,
ProviderNotInitializeError,
ProviderQuotaExceededError,
)
from controllers.console.app.message import (
ChatMessageListApi,
ChatMessagesQuery,
FeedbackExportQuery,
MessageAnnotationCountApi,
MessageApi,
MessageFeedbackApi,
MessageFeedbackExportApi,
MessageFeedbackPayload,
MessageSuggestedQuestionApi,
)
from controllers.console.explore.error import AppSuggestedQuestionsAfterAnswerDisabledError
from core.errors.error import ModelCurrentlyNotSupportError, ProviderTokenNotInitError, QuotaExceededError
from models import App, AppMode
from services.errors.conversation import ConversationNotExistsError
from services.errors.message import MessageNotExistsError, SuggestedQuestionsAfterAnswerDisabledError
@pytest.fixture
def app():
    """Minimal Flask app configured for testing the message endpoints."""
    flask_app = Flask(__name__)
    flask_app.config.update({"TESTING": True, "RESTX_MASK_HEADER": "X-Fields"})
    return flask_app


@pytest.fixture
def mock_account():
    """Admin/owner Account mock carrying the attributes the endpoints read."""
    from models.account import Account, AccountStatus

    fake = MagicMock(spec=Account)
    fake.id = "user_123"
    fake.timezone = "UTC"
    fake.status = AccountStatus.ACTIVE
    fake.is_admin_or_owner = True
    fake.current_tenant.current_role = "owner"
    fake.has_edit_permission = True
    return fake


@pytest.fixture
def mock_app_model():
    """Chat-mode App mock bound to a fixed app/tenant id pair."""
    model = MagicMock(spec=App)
    model.id = "app_123"
    model.mode = AppMode.CHAT
    model.tenant_id = "tenant_123"
    return model


@pytest.fixture(autouse=True)
def mock_csrf():
    """Disable CSRF validation for every test in this module."""
    with patch("libs.login.check_csrf_token") as mock:
        yield mock
import contextlib
@contextlib.contextmanager
def setup_test_context(
    test_app, endpoint_class, route_path, method, mock_account, mock_app_model, payload=None, qs=None
):
    """Patch auth/db layers and yield ``(api_instance, mock_db, view_args)``.

    Wires a shared ``mock_db`` into every module that touches the session,
    stubs ``current_account_with_tenant`` everywhere it is imported, and routes
    ``db.session.query(App)`` to return ``mock_app_model`` while all other
    queries go to ``mock_db.data_query`` so callers can script their results.
    """
    with (
        patch("extensions.ext_database.db") as mock_db,
        patch("controllers.console.app.wraps.db", mock_db),
        patch("controllers.console.wraps.db", mock_db),
        patch("controllers.console.app.message.db", mock_db),
        patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
        patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
        patch("controllers.console.app.message.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
    ):
        # Set up a generic query mock that usually returns mock_app_model when getting app;
        # both filter()/where() chains are stubbed since either may be used.
        app_query_mock = MagicMock()
        app_query_mock.filter.return_value.first.return_value = mock_app_model
        app_query_mock.filter.return_value.filter.return_value.first.return_value = mock_app_model
        app_query_mock.where.return_value.first.return_value = mock_app_model
        app_query_mock.where.return_value.where.return_value.first.return_value = mock_app_model
        data_query_mock = MagicMock()
        def query_side_effect(*args, **kwargs):
            # Queries for the App model get the app mock; everything else
            # (conversations, messages, ...) goes through data_query_mock.
            if args and hasattr(args[0], "__name__") and args[0].__name__ == "App":
                return app_query_mock
            return data_query_mock
        mock_db.session.query.side_effect = query_side_effect
        mock_db.data_query = data_query_mock
        # Let the caller override the stat db query logic
        proxy_mock = LocalProxy(lambda: mock_account)
        query_string = "&".join([f"{k}={v}" for k, v in (qs or {}).items()])
        full_path = f"{route_path}?{query_string}" if qs else route_path
        with (
            patch("libs.login.current_user", proxy_mock),
            patch("flask_login.current_user", proxy_mock),
            patch("controllers.console.app.message.attach_message_extra_contents", return_value=None),
        ):
            with test_app.test_request_context(full_path, method=method, json=payload):
                request.view_args = {"app_id": "app_123"}
                # Derive message_id from the route since view_args is set manually.
                if "suggested-questions" in route_path:
                    # simplistic extraction for message_id
                    parts = route_path.split("chat-messages/")
                    if len(parts) > 1:
                        request.view_args["message_id"] = parts[1].split("/")[0]
                elif "messages/" in route_path and "chat-messages" not in route_path:
                    parts = route_path.split("messages/")
                    if len(parts) > 1:
                        request.view_args["message_id"] = parts[1].split("/")[0]
                api_instance = endpoint_class()
                # Check if it has a dispatch_request or method
                if hasattr(api_instance, method.lower()):
                    yield api_instance, mock_db, request.view_args
class TestMessageValidators:
    """Unit checks for the pydantic validators on the message query/payload models."""

    def test_chat_messages_query_validators(self):
        """empty_to_none blanks empty strings; validate_uuid passes None and valid UUIDs through."""
        valid_uuid = "123e4567-e89b-12d3-a456-426614174000"
        assert ChatMessagesQuery.empty_to_none("") is None
        assert ChatMessagesQuery.empty_to_none("val") == "val"
        assert ChatMessagesQuery.validate_uuid(None) is None
        assert ChatMessagesQuery.validate_uuid(valid_uuid) == valid_uuid

    def test_message_feedback_validators(self):
        """A well-formed UUID is accepted unchanged by validate_message_id."""
        valid_uuid = "123e4567-e89b-12d3-a456-426614174000"
        assert MessageFeedbackPayload.validate_message_id(valid_uuid) == valid_uuid

    def test_feedback_export_validators(self):
        """parse_bool maps truthy/falsy tokens, keeps None, and rejects anything else."""
        assert FeedbackExportQuery.parse_bool(None) is None
        for raw, expected in ((True, True), ("1", True), ("0", False), ("off", False)):
            assert FeedbackExportQuery.parse_bool(raw) is expected
        with pytest.raises(ValueError):
            FeedbackExportQuery.parse_bool("invalid")
class TestMessageEndpoints:
    """Controller-level tests for the console message APIs, fully mocked via setup_test_context."""

    def test_chat_message_list_not_found(self, app, mock_account, mock_app_model):
        """404 when the requested conversation does not exist."""
        with setup_test_context(
            app,
            ChatMessageListApi,
            "/apps/app_123/chat-messages",
            "GET",
            mock_account,
            mock_app_model,
            qs={"conversation_id": "123e4567-e89b-12d3-a456-426614174000"},
        ) as (api, mock_db, v_args):
            # Conversation lookup returns nothing.
            mock_db.data_query.where.return_value.first.return_value = None
            with pytest.raises(NotFound):
                api.get(**v_args)

    def test_chat_message_list_success(self, app, mock_account, mock_app_model):
        """Happy path: one message page with has_more computed from the scalar query."""
        with setup_test_context(
            app,
            ChatMessageListApi,
            "/apps/app_123/chat-messages",
            "GET",
            mock_account,
            mock_app_model,
            qs={"conversation_id": "123e4567-e89b-12d3-a456-426614174000", "limit": 1},
        ) as (api, mock_db, v_args):
            mock_conv = MagicMock()
            mock_conv.id = "123e4567-e89b-12d3-a456-426614174000"
            # Message mock with every attribute the marshaller touches.
            mock_msg = MagicMock()
            mock_msg.id = "msg_123"
            mock_msg.feedbacks = []
            mock_msg.annotation = None
            mock_msg.annotation_hit_history = None
            mock_msg.agent_thoughts = []
            mock_msg.message_files = []
            mock_msg.extra_contents = []
            mock_msg.message = {}
            mock_msg.message_metadata_dict = {}
            # mock returns
            q_mock = mock_db.data_query
            q_mock.where.return_value.first.side_effect = [mock_conv]
            q_mock.where.return_value.order_by.return_value.limit.return_value.all.return_value = [mock_msg]
            # scalar() drives the has_more flag.
            mock_db.session.scalar.return_value = False
            resp = api.get(**v_args)
            assert resp["limit"] == 1
            assert resp["has_more"] is False
            assert len(resp["data"]) == 1

    def test_message_feedback_not_found(self, app, mock_account, mock_app_model):
        """404 when the target message does not exist."""
        with setup_test_context(
            app,
            MessageFeedbackApi,
            "/apps/app_123/feedbacks",
            "POST",
            mock_account,
            mock_app_model,
            payload={"message_id": "123e4567-e89b-12d3-a456-426614174000"},
        ) as (api, mock_db, v_args):
            mock_db.data_query.where.return_value.first.return_value = None
            with pytest.raises(NotFound):
                api.post(**v_args)

    def test_message_feedback_success(self, app, mock_account, mock_app_model):
        """Posting a 'like' rating on an existing message returns success."""
        payload = {"message_id": "123e4567-e89b-12d3-a456-426614174000", "rating": "like"}
        with setup_test_context(
            app, MessageFeedbackApi, "/apps/app_123/feedbacks", "POST", mock_account, mock_app_model, payload=payload
        ) as (api, mock_db, v_args):
            mock_msg = MagicMock()
            # No pre-existing admin feedback on the message.
            mock_msg.admin_feedback = None
            mock_db.data_query.where.return_value.first.return_value = mock_msg
            resp = api.post(**v_args)
            assert resp == {"result": "success"}

    def test_message_annotation_count(self, app, mock_account, mock_app_model):
        """The annotation count endpoint surfaces the query's count()."""
        with setup_test_context(
            app, MessageAnnotationCountApi, "/apps/app_123/annotations/count", "GET", mock_account, mock_app_model
        ) as (api, mock_db, v_args):
            mock_db.data_query.where.return_value.count.return_value = 5
            resp = api.get(**v_args)
            assert resp == {"count": 5}

    @patch("controllers.console.app.message.MessageService")
    def test_message_suggested_questions_success(self, mock_msg_srv, app, mock_account, mock_app_model):
        """Suggested questions from the service are returned under 'data'."""
        mock_msg_srv.get_suggested_questions_after_answer.return_value = ["q1", "q2"]
        with setup_test_context(
            app,
            MessageSuggestedQuestionApi,
            "/apps/app_123/chat-messages/msg_123/suggested-questions",
            "GET",
            mock_account,
            mock_app_model,
        ) as (api, mock_db, v_args):
            resp = api.get(**v_args)
            assert resp == {"data": ["q1", "q2"]}

    @pytest.mark.parametrize(
        ("exc", "expected_exc"),
        [
            (MessageNotExistsError, NotFound),
            (ConversationNotExistsError, NotFound),
            (ProviderTokenNotInitError, ProviderNotInitializeError),
            (QuotaExceededError, ProviderQuotaExceededError),
            (ModelCurrentlyNotSupportError, ProviderModelCurrentlyNotSupportError),
            (SuggestedQuestionsAfterAnswerDisabledError, AppSuggestedQuestionsAfterAnswerDisabledError),
            (Exception, InternalServerError),
        ],
    )
    @patch("controllers.console.app.message.MessageService")
    def test_message_suggested_questions_errors(
        self, mock_msg_srv, exc, expected_exc, app, mock_account, mock_app_model
    ):
        """Each service-layer error is translated to its HTTP counterpart."""
        mock_msg_srv.get_suggested_questions_after_answer.side_effect = exc()
        with setup_test_context(
            app,
            MessageSuggestedQuestionApi,
            "/apps/app_123/chat-messages/msg_123/suggested-questions",
            "GET",
            mock_account,
            mock_app_model,
        ) as (api, mock_db, v_args):
            with pytest.raises(expected_exc):
                api.get(**v_args)

    @patch("services.feedback_service.FeedbackService.export_feedbacks")
    def test_message_feedback_export_success(self, mock_export, app, mock_account, mock_app_model):
        """The export endpoint passes the service result straight through."""
        mock_export.return_value = {"exported": True}
        with setup_test_context(
            app, MessageFeedbackExportApi, "/apps/app_123/feedbacks/export", "GET", mock_account, mock_app_model
        ) as (api, mock_db, v_args):
            resp = api.get(**v_args)
            assert resp == {"exported": True}

    def test_message_api_get_success(self, app, mock_account, mock_app_model):
        """Fetching a single message marshals it and echoes its id."""
        with setup_test_context(
            app, MessageApi, "/apps/app_123/messages/msg_123", "GET", mock_account, mock_app_model
        ) as (api, mock_db, v_args):
            mock_msg = MagicMock()
            mock_msg.id = "msg_123"
            mock_msg.feedbacks = []
            mock_msg.annotation = None
            mock_msg.annotation_hit_history = None
            mock_msg.agent_thoughts = []
            mock_msg.message_files = []
            mock_msg.extra_contents = []
            mock_msg.message = {}
            mock_msg.message_metadata_dict = {}
            mock_db.data_query.where.return_value.first.return_value = mock_msg
            resp = api.get(**v_args)
            assert resp["id"] == "msg_123"

View File

@@ -1,275 +0,0 @@
from decimal import Decimal
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask, request
from werkzeug.local import LocalProxy
from controllers.console.app.statistic import (
AverageResponseTimeStatistic,
AverageSessionInteractionStatistic,
DailyConversationStatistic,
DailyMessageStatistic,
DailyTerminalsStatistic,
DailyTokenCostStatistic,
TokensPerSecondStatistic,
UserSatisfactionRateStatistic,
)
from models import App, AppMode
@pytest.fixture
def app():
    """Minimal Flask app for exercising the statistic endpoints."""
    flask_app = Flask(__name__)
    flask_app.config.update({"TESTING": True})
    return flask_app


@pytest.fixture
def mock_account():
    """Admin/owner Account mock carrying the attributes the endpoints read."""
    from models.account import Account, AccountStatus

    fake = MagicMock(spec=Account)
    fake.id = "user_123"
    fake.timezone = "UTC"
    fake.status = AccountStatus.ACTIVE
    fake.is_admin_or_owner = True
    fake.current_tenant.current_role = "owner"
    fake.has_edit_permission = True
    return fake


@pytest.fixture
def mock_app_model():
    """Chat-mode App mock bound to a fixed app/tenant id pair."""
    model = MagicMock(spec=App)
    model.id = "app_123"
    model.mode = AppMode.CHAT
    model.tenant_id = "tenant_123"
    return model


@pytest.fixture(autouse=True)
def mock_csrf():
    """Disable CSRF validation for every test in this module."""
    with patch("libs.login.check_csrf_token") as mock:
        yield mock
def setup_test_context(
    test_app, endpoint_class, route_path, mock_account, mock_app_model, mock_rs, mock_parse_ret=(None, None)
):
    """Run a statistic endpoint GET with auth and db patched; return its response.

    ``mock_rs`` is the iterable of rows the patched ``db.engine.begin()``
    connection yields from ``execute``.
    NOTE(review): ``mock_parse_ret`` is accepted but never used — callers patch
    ``parse_time_range`` themselves; confirm whether the parameter can be dropped.
    """
    with (
        patch("controllers.console.app.statistic.db") as mock_db_stat,
        patch("controllers.console.app.wraps.db") as mock_db_wraps,
        patch("controllers.console.wraps.db", mock_db_wraps),
        patch(
            "controllers.console.app.statistic.current_account_with_tenant", return_value=(mock_account, "tenant_123")
        ),
        patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
        patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
    ):
        # db.engine.begin() context manager yields a connection whose execute()
        # returns the caller-supplied result rows.
        mock_conn = MagicMock()
        mock_conn.execute.return_value = mock_rs
        mock_begin = MagicMock()
        mock_begin.__enter__.return_value = mock_conn
        mock_db_stat.engine.begin.return_value = mock_begin
        # App lookups through either filter()/where() chains return the app mock.
        mock_query = MagicMock()
        mock_query.filter.return_value.first.return_value = mock_app_model
        mock_query.filter.return_value.filter.return_value.first.return_value = mock_app_model
        mock_query.where.return_value.first.return_value = mock_app_model
        mock_query.where.return_value.where.return_value.first.return_value = mock_app_model
        mock_db_wraps.session.query.return_value = mock_query
        proxy_mock = LocalProxy(lambda: mock_account)
        with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock):
            with test_app.test_request_context(route_path, method="GET"):
                request.view_args = {"app_id": "app_123"}
                api_instance = endpoint_class()
                response = api_instance.get(app_id="app_123")
                return response
class TestStatisticEndpoints:
    """Controller-level tests for the per-app statistic endpoints, with the
    database connection and time-range parsing fully mocked."""

    def test_daily_message_statistic(self, app, mock_account, mock_app_model):
        """Daily message rows are surfaced with their message_count."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.message_count = 10
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                DailyMessageStatistic,
                "/apps/app_123/statistics/daily-messages?start=2023-01-01 00:00&end=2023-01-02 00:00",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["message_count"] == 10

    def test_daily_conversation_statistic(self, app, mock_account, mock_app_model):
        """Daily conversation rows are surfaced with their conversation_count."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.conversation_count = 5
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                DailyConversationStatistic,
                "/apps/app_123/statistics/daily-conversations",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["conversation_count"] == 5

    def test_daily_terminals_statistic(self, app, mock_account, mock_app_model):
        """Daily end-user rows are surfaced with their terminal_count."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.terminal_count = 2
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                DailyTerminalsStatistic,
                "/apps/app_123/statistics/daily-end-users",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["terminal_count"] == 2

    def test_daily_token_cost_statistic(self, app, mock_account, mock_app_model):
        """Token cost rows expose token_count and total_price (as a string)."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.token_count = 100
        mock_row.total_price = Decimal("0.02")
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                DailyTokenCostStatistic,
                "/apps/app_123/statistics/token-costs",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["token_count"] == 100
        assert response.json["data"][0]["total_price"] == "0.02"

    def test_average_session_interaction_statistic(self, app, mock_account, mock_app_model):
        """Interaction averages are rounded to two decimal places."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.interactions = Decimal("3.523")
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                AverageSessionInteractionStatistic,
                "/apps/app_123/statistics/average-session-interactions",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["interactions"] == 3.52

    def test_user_satisfaction_rate_statistic(self, app, mock_account, mock_app_model):
        """Satisfaction rate is derived from feedback_count / message_count."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.message_count = 100
        mock_row.feedback_count = 10
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                UserSatisfactionRateStatistic,
                "/apps/app_123/statistics/user-satisfaction-rate",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["rate"] == 100.0

    def test_average_response_time_statistic(self, app, mock_account, mock_app_model):
        """Latency (seconds) is reported in milliseconds; completion-mode only."""
        mock_app_model.mode = AppMode.COMPLETION
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.latency = 1.234
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                AverageResponseTimeStatistic,
                "/apps/app_123/statistics/average-response-time",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["latency"] == 1234.0

    def test_tokens_per_second_statistic(self, app, mock_account, mock_app_model):
        """Tokens-per-second rows are surfaced under the 'tps' key."""
        mock_row = MagicMock()
        mock_row.date = "2023-01-01"
        mock_row.tokens_per_second = 15.5
        mock_row.interactions = Decimal(0)
        with patch("controllers.console.app.statistic.parse_time_range", return_value=(None, None)):
            response = setup_test_context(
                app,
                TokensPerSecondStatistic,
                "/apps/app_123/statistics/tokens-per-second",
                mock_account,
                mock_app_model,
                [mock_row],
            )
        assert response.status_code == 200
        assert response.json["data"][0]["tps"] == 15.5

    @patch("controllers.console.app.statistic.parse_time_range")
    def test_invalid_time_range(self, mock_parse, app, mock_account, mock_app_model):
        """A ValueError from parse_time_range becomes a 400 BadRequest."""
        mock_parse.side_effect = ValueError("Invalid time")
        from werkzeug.exceptions import BadRequest
        with pytest.raises(BadRequest):
            setup_test_context(
                app,
                DailyMessageStatistic,
                "/apps/app_123/statistics/daily-messages?start=invalid&end=invalid",
                mock_account,
                mock_app_model,
                [],
            )

    @patch("controllers.console.app.statistic.parse_time_range")
    def test_time_range_params_passed(self, mock_parse, app, mock_account, mock_app_model):
        """parse_time_range is invoked exactly once with the query params."""
        import datetime
        start = datetime.datetime.now()
        end = datetime.datetime.now()
        mock_parse.return_value = (start, end)
        response = setup_test_context(
            app,
            DailyMessageStatistic,
            "/apps/app_123/statistics/daily-messages?start=something&end=something",
            mock_account,
            mock_app_model,
            [],
        )
        assert response.status_code == 200
        mock_parse.assert_called_once()

View File

@@ -1,313 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask, request
from werkzeug.local import LocalProxy
from controllers.console.app.error import DraftWorkflowNotExist
from controllers.console.app.workflow_draft_variable import (
ConversationVariableCollectionApi,
EnvironmentVariableCollectionApi,
NodeVariableCollectionApi,
SystemVariableCollectionApi,
VariableApi,
VariableResetApi,
WorkflowVariableCollectionApi,
)
from controllers.web.error import InvalidArgumentError, NotFoundError
from models import App, AppMode
from models.enums import DraftVariableType
@pytest.fixture
def app():
    """Minimal Flask app for exercising the draft-variable endpoints."""
    flask_app = Flask(__name__)
    flask_app.config.update({"TESTING": True, "RESTX_MASK_HEADER": "X-Fields"})
    return flask_app


@pytest.fixture
def mock_account():
    """Admin/owner Account mock carrying the attributes the endpoints read."""
    from models.account import Account, AccountStatus

    fake = MagicMock(spec=Account)
    fake.id = "user_123"
    fake.timezone = "UTC"
    fake.status = AccountStatus.ACTIVE
    fake.is_admin_or_owner = True
    fake.current_tenant.current_role = "owner"
    fake.has_edit_permission = True
    return fake


@pytest.fixture
def mock_app_model():
    """Workflow-mode App mock bound to a fixed app/tenant id pair."""
    model = MagicMock(spec=App)
    model.id = "app_123"
    model.mode = AppMode.WORKFLOW
    model.tenant_id = "tenant_123"
    return model


@pytest.fixture(autouse=True)
def mock_csrf():
    """Disable CSRF validation for every test in this module."""
    with patch("libs.login.check_csrf_token") as mock:
        yield mock
def setup_test_context(test_app, endpoint_class, route_path, method, mock_account, mock_app_model, payload=None):
    """Dispatch one draft-variable endpoint call with auth/db patched; return its result.

    Builds the endpoint instance inside a test request context, fills
    ``request.view_args`` (including node_id / variable_id parsed from the
    route), then calls the handler matching ``method``.
    """
    with (
        patch("controllers.console.app.wraps.db") as mock_db_wraps,
        patch("controllers.console.wraps.db", mock_db_wraps),
        patch("controllers.console.app.workflow_draft_variable.db"),
        patch("controllers.console.app.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
        patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
    ):
        # App lookups through either filter()/where() chains return the app mock.
        mock_query = MagicMock()
        mock_query.filter.return_value.first.return_value = mock_app_model
        mock_query.filter.return_value.filter.return_value.first.return_value = mock_app_model
        mock_query.where.return_value.first.return_value = mock_app_model
        mock_query.where.return_value.where.return_value.first.return_value = mock_app_model
        mock_db_wraps.session.query.return_value = mock_query
        proxy_mock = LocalProxy(lambda: mock_account)
        with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock):
            with test_app.test_request_context(route_path, method=method, json=payload):
                request.view_args = {"app_id": "app_123"}
                # extract node_id or variable_id from path manually since view_args overrides
                if "nodes/" in route_path:
                    request.view_args["node_id"] = route_path.split("nodes/")[1].split("/")[0]
                if "variables/" in route_path:
                    # simplistic extraction; "reset" is a sub-resource, not an id
                    parts = route_path.split("variables/")
                    if len(parts) > 1 and parts[1] and parts[1] != "reset":
                        request.view_args["variable_id"] = parts[1].split("/")[0]
                api_instance = endpoint_class()
                # we just call dispatch_request to avoid manual argument passing
                if hasattr(api_instance, method.lower()):
                    func = getattr(api_instance, method.lower())
                    return func(**request.view_args)
class TestWorkflowDraftVariableEndpoints:
    @staticmethod
    def _mock_workflow_variable(variable_type: DraftVariableType = DraftVariableType.NODE) -> MagicMock:
        """Build a draft-variable mock exposing every attribute the serializers read."""
        class DummyValueType:
            # NOTE(review): always reports NODE regardless of variable_type — confirm intended.
            def exposed_type(self):
                return DraftVariableType.NODE
        mock_var = MagicMock()
        mock_var.app_id = "app_123"
        mock_var.id = "var_123"
        mock_var.name = "test_var"
        mock_var.description = ""
        mock_var.get_variable_type.return_value = variable_type
        mock_var.get_selector.return_value = []
        mock_var.value_type = DummyValueType()
        mock_var.edited = False
        mock_var.visible = True
        mock_var.file_id = None
        mock_var.variable_file = None
        mock_var.is_truncated.return_value = False
        # Serializer copies the value model, then reads .value off the copy.
        mock_var.get_value.return_value.model_copy.return_value.value = "test_value"
        return mock_var
    @patch("controllers.console.app.workflow_draft_variable.WorkflowService")
    @patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
    def test_workflow_variable_collection_get_success(
        self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model
    ):
        """Listing draft variables returns an empty page when the service yields none."""
        mock_wf_srv.return_value.is_workflow_exist.return_value = True
        from services.workflow_draft_variable_service import WorkflowDraftVariableList
        mock_draft_srv.return_value.list_variables_without_values.return_value = WorkflowDraftVariableList(
            variables=[], total=0
        )
        resp = setup_test_context(
            app,
            WorkflowVariableCollectionApi,
            "/apps/app_123/workflows/draft/variables?page=1&limit=20",
            "GET",
            mock_account,
            mock_app_model,
        )
        assert resp == {"items": [], "total": 0}
@patch("controllers.console.app.workflow_draft_variable.WorkflowService")
def test_workflow_variable_collection_get_not_exist(self, mock_wf_srv, app, mock_account, mock_app_model):
mock_wf_srv.return_value.is_workflow_exist.return_value = False
with pytest.raises(DraftWorkflowNotExist):
setup_test_context(
app,
WorkflowVariableCollectionApi,
"/apps/app_123/workflows/draft/variables",
"GET",
mock_account,
mock_app_model,
)
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_workflow_variable_collection_delete(self, mock_draft_srv, app, mock_account, mock_app_model):
resp = setup_test_context(
app,
WorkflowVariableCollectionApi,
"/apps/app_123/workflows/draft/variables",
"DELETE",
mock_account,
mock_app_model,
)
assert resp.status_code == 204
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_node_variable_collection_get_success(self, mock_draft_srv, app, mock_account, mock_app_model):
from services.workflow_draft_variable_service import WorkflowDraftVariableList
mock_draft_srv.return_value.list_node_variables.return_value = WorkflowDraftVariableList(variables=[])
resp = setup_test_context(
app,
NodeVariableCollectionApi,
"/apps/app_123/workflows/draft/nodes/node_123/variables",
"GET",
mock_account,
mock_app_model,
)
assert resp == {"items": []}
def test_node_variable_collection_get_invalid_node_id(self, app, mock_account, mock_app_model):
with pytest.raises(InvalidArgumentError):
setup_test_context(
app,
NodeVariableCollectionApi,
"/apps/app_123/workflows/draft/nodes/sys/variables",
"GET",
mock_account,
mock_app_model,
)
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_node_variable_collection_delete(self, mock_draft_srv, app, mock_account, mock_app_model):
resp = setup_test_context(
app,
NodeVariableCollectionApi,
"/apps/app_123/workflows/draft/nodes/node_123/variables",
"DELETE",
mock_account,
mock_app_model,
)
assert resp.status_code == 204
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_variable_api_get_success(self, mock_draft_srv, app, mock_account, mock_app_model):
mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable()
resp = setup_test_context(
app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "GET", mock_account, mock_app_model
)
assert resp["id"] == "var_123"
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_variable_api_get_not_found(self, mock_draft_srv, app, mock_account, mock_app_model):
mock_draft_srv.return_value.get_variable.return_value = None
with pytest.raises(NotFoundError):
setup_test_context(
app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "GET", mock_account, mock_app_model
)
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_variable_api_patch_success(self, mock_draft_srv, app, mock_account, mock_app_model):
mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable()
resp = setup_test_context(
app,
VariableApi,
"/apps/app_123/workflows/draft/variables/var_123",
"PATCH",
mock_account,
mock_app_model,
payload={"name": "new_name"},
)
assert resp["id"] == "var_123"
mock_draft_srv.return_value.update_variable.assert_called_once()
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_variable_api_delete_success(self, mock_draft_srv, app, mock_account, mock_app_model):
mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable()
resp = setup_test_context(
app, VariableApi, "/apps/app_123/workflows/draft/variables/var_123", "DELETE", mock_account, mock_app_model
)
assert resp.status_code == 204
mock_draft_srv.return_value.delete_variable.assert_called_once()
@patch("controllers.console.app.workflow_draft_variable.WorkflowService")
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_variable_reset_api_put_success(self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model):
mock_wf_srv.return_value.get_draft_workflow.return_value = MagicMock()
mock_draft_srv.return_value.get_variable.return_value = self._mock_workflow_variable()
mock_draft_srv.return_value.reset_variable.return_value = None # means no content
resp = setup_test_context(
app,
VariableResetApi,
"/apps/app_123/workflows/draft/variables/var_123/reset",
"PUT",
mock_account,
mock_app_model,
)
assert resp.status_code == 204
@patch("controllers.console.app.workflow_draft_variable.WorkflowService")
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_conversation_variable_collection_get(self, mock_draft_srv, mock_wf_srv, app, mock_account, mock_app_model):
mock_wf_srv.return_value.get_draft_workflow.return_value = MagicMock()
from services.workflow_draft_variable_service import WorkflowDraftVariableList
mock_draft_srv.return_value.list_conversation_variables.return_value = WorkflowDraftVariableList(variables=[])
resp = setup_test_context(
app,
ConversationVariableCollectionApi,
"/apps/app_123/workflows/draft/conversation-variables",
"GET",
mock_account,
mock_app_model,
)
assert resp == {"items": []}
@patch("controllers.console.app.workflow_draft_variable.WorkflowDraftVariableService")
def test_system_variable_collection_get(self, mock_draft_srv, app, mock_account, mock_app_model):
from services.workflow_draft_variable_service import WorkflowDraftVariableList
mock_draft_srv.return_value.list_system_variables.return_value = WorkflowDraftVariableList(variables=[])
resp = setup_test_context(
app,
SystemVariableCollectionApi,
"/apps/app_123/workflows/draft/system-variables",
"GET",
mock_account,
mock_app_model,
)
assert resp == {"items": []}
@patch("controllers.console.app.workflow_draft_variable.WorkflowService")
def test_environment_variable_collection_get(self, mock_wf_srv, app, mock_account, mock_app_model):
mock_wf = MagicMock()
mock_wf.environment_variables = []
mock_wf_srv.return_value.get_draft_workflow.return_value = mock_wf
resp = setup_test_context(
app,
EnvironmentVariableCollectionApi,
"/apps/app_123/workflows/draft/environment-variables",
"GET",
mock_account,
mock_app_model,
)
assert resp == {"items": []}

View File

@ -1,209 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask
from controllers.console.auth.data_source_bearer_auth import (
ApiKeyAuthDataSource,
ApiKeyAuthDataSourceBinding,
ApiKeyAuthDataSourceBindingDelete,
)
from controllers.console.auth.error import ApiKeyAuthFailedError
class TestApiKeyAuthDataSource:
    """Tests for listing API-key data-source auth bindings.

    Auth decorators are neutralized by patching ``check_csrf_token``,
    ``controllers.console.wraps.db`` and ``current_account_with_tenant``.
    """
    @pytest.fixture
    def app(self):
        """Flask app configured for testing with CSRF disabled."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        app.config["WTF_CSRF_ENABLED"] = False
        return app
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.get_provider_auth_list")
    def test_get_api_key_auth_data_source(self, mock_get_list, mock_db, mock_csrf, app):
        """One existing binding is returned under the 'sources' key."""
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        mock_binding = MagicMock()
        mock_binding.id = "bind_123"
        mock_binding.category = "api_key"
        mock_binding.provider = "custom_provider"
        mock_binding.disabled = False
        # Timestamps are serialized via .timestamp() in the response.
        mock_binding.created_at.timestamp.return_value = 1620000000
        mock_binding.updated_at.timestamp.return_value = 1620000001
        mock_get_list.return_value = [mock_binding]
        with (
            patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
            patch(
                "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant",
                return_value=(mock_account, "tenant_123"),
            ),
        ):
            with app.test_request_context("/console/api/api-key-auth/data-source", method="GET"):
                # Mimic flask-login's proxy: _get_current_object unwraps it.
                proxy_mock = MagicMock()
                proxy_mock._get_current_object.return_value = mock_account
                with patch("libs.login.current_user", proxy_mock):
                    api_instance = ApiKeyAuthDataSource()
                    response = api_instance.get()
                    assert "sources" in response
                    assert len(response["sources"]) == 1
                    assert response["sources"][0]["provider"] == "custom_provider"
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.get_provider_auth_list")
    def test_get_api_key_auth_data_source_empty(self, mock_get_list, mock_db, mock_csrf, app):
        """A None auth list from the service yields an empty 'sources' list."""
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        mock_get_list.return_value = None
        with (
            patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
            patch(
                "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant",
                return_value=(mock_account, "tenant_123"),
            ),
        ):
            with app.test_request_context("/console/api/api-key-auth/data-source", method="GET"):
                proxy_mock = MagicMock()
                proxy_mock._get_current_object.return_value = mock_account
                with patch("libs.login.current_user", proxy_mock):
                    api_instance = ApiKeyAuthDataSource()
                    response = api_instance.get()
                    assert "sources" in response
                    assert len(response["sources"]) == 0
class TestApiKeyAuthDataSourceBinding:
    """Tests for creating an API-key data-source binding via POST."""
    @pytest.fixture
    def app(self):
        """Flask app configured for testing with CSRF disabled."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        app.config["WTF_CSRF_ENABLED"] = False
        return app
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args")
    def test_create_binding_successful(self, mock_validate, mock_create, mock_db, mock_csrf, app):
        """A valid POST validates the args, creates the auth, returns 200."""
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        with (
            patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
            patch(
                "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant",
                return_value=(mock_account, "tenant_123"),
            ),
        ):
            with app.test_request_context(
                "/console/api/api-key-auth/data-source/binding",
                method="POST",
                json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}},
            ):
                # Mimic flask-login's proxy: _get_current_object unwraps it.
                proxy_mock = MagicMock()
                proxy_mock._get_current_object.return_value = mock_account
                with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock):
                    api_instance = ApiKeyAuthDataSourceBinding()
                    response = api_instance.post()
                    assert response[0]["result"] == "success"
                    assert response[1] == 200
                    mock_validate.assert_called_once()
                    mock_create.assert_called_once()
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.create_provider_auth")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.validate_api_key_auth_args")
    def test_create_binding_failure(self, mock_validate, mock_create, mock_db, mock_csrf, app):
        """A ValueError from the service surfaces as ApiKeyAuthFailedError."""
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        mock_create.side_effect = ValueError("Invalid structure")
        with (
            patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, "tenant_123")),
            patch(
                "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant",
                return_value=(mock_account, "tenant_123"),
            ),
        ):
            with app.test_request_context(
                "/console/api/api-key-auth/data-source/binding",
                method="POST",
                json={"category": "api_key", "provider": "custom", "credentials": {"key": "value"}},
            ):
                proxy_mock = MagicMock()
                proxy_mock._get_current_object.return_value = mock_account
                with patch("libs.login.current_user", proxy_mock), patch("flask_login.current_user", proxy_mock):
                    api_instance = ApiKeyAuthDataSourceBinding()
                    with pytest.raises(ApiKeyAuthFailedError, match="Invalid structure"):
                        api_instance.post()
class TestApiKeyAuthDataSourceBindingDelete:
    """Tests for deleting an API-key data-source binding."""

    @pytest.fixture
    def app(self):
        """Flask app configured for testing with CSRF disabled."""
        flask_app = Flask(__name__)
        flask_app.config["TESTING"] = True
        flask_app.config["WTF_CSRF_ENABLED"] = False
        return flask_app

    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_bearer_auth.ApiKeyAuthService.delete_provider_auth")
    def test_delete_binding_successful(self, mock_delete, mock_db, mock_csrf, app):
        """DELETE on an existing binding returns 204 and calls the service."""
        from models.account import Account, AccountStatus

        account_stub = MagicMock(spec=Account)
        account_stub.id = "user_123"
        account_stub.status = AccountStatus.ACTIVE
        account_stub.is_admin_or_owner = True
        account_stub.current_tenant.current_role = "owner"
        tenant_pair = (account_stub, "tenant_123")
        # Mimic flask-login's proxy: _get_current_object unwraps it.
        user_proxy = MagicMock()
        user_proxy._get_current_object.return_value = account_stub
        with (
            patch("controllers.console.wraps.current_account_with_tenant", return_value=tenant_pair),
            patch(
                "controllers.console.auth.data_source_bearer_auth.current_account_with_tenant",
                return_value=tenant_pair,
            ),
            app.test_request_context("/console/api/api-key-auth/data-source/binding_123", method="DELETE"),
            patch("libs.login.current_user", user_proxy),
            patch("flask_login.current_user", user_proxy),
        ):
            result = ApiKeyAuthDataSourceBindingDelete().delete("binding_123")
        assert result[0]["result"] == "success"
        assert result[1] == 204
        mock_delete.assert_called_once_with("tenant_123", "binding_123")

View File

@ -1,192 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask
from werkzeug.local import LocalProxy
from controllers.console.auth.data_source_oauth import (
OAuthDataSource,
OAuthDataSourceBinding,
OAuthDataSourceCallback,
OAuthDataSourceSync,
)
class TestOAuthDataSource:
    """Tests for fetching the OAuth authorization URL of a data source."""
    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app
    # NOTE: patching NOTION_INTEGRATION_TYPE with an explicit replacement
    # (None) means that decorator injects no mock argument; the remaining
    # decorators are injected bottom-up.
    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    @patch("flask_login.current_user")
    @patch("libs.login.current_user")
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.data_source_oauth.dify_config.NOTION_INTEGRATION_TYPE", None)
    def test_get_oauth_url_successful(
        self, mock_db, mock_csrf, mock_libs_user, mock_flask_user, mock_get_providers, app
    ):
        """A known provider yields its authorization URL with status 200."""
        mock_oauth_provider = MagicMock()
        mock_oauth_provider.get_authorization_url.return_value = "http://oauth.provider/auth"
        mock_get_providers.return_value = {"notion": mock_oauth_provider}
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        # NOTE(review): setting return_value on patched current_user mocks
        # looks redundant -- current_user is re-patched with a LocalProxy
        # below; confirm before removing.
        mock_libs_user.return_value = mock_account
        mock_flask_user.return_value = mock_account
        # also patch current_account_with_tenant
        with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())):
            with app.test_request_context("/console/api/oauth/data-source/notion", method="GET"):
                proxy_mock = LocalProxy(lambda: mock_account)
                with patch("libs.login.current_user", proxy_mock):
                    api_instance = OAuthDataSource()
                    response = api_instance.get("notion")
                    assert response[0]["data"] == "http://oauth.provider/auth"
                    assert response[1] == 200
                    mock_oauth_provider.get_authorization_url.assert_called_once()
    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    @patch("flask_login.current_user")
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    def test_get_oauth_url_invalid_provider(self, mock_db, mock_csrf, mock_flask_user, mock_get_providers, app):
        """An unknown provider name is rejected with a 400 error body."""
        mock_get_providers.return_value = {"notion": MagicMock()}
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())):
            with app.test_request_context("/console/api/oauth/data-source/unknown_provider", method="GET"):
                proxy_mock = LocalProxy(lambda: mock_account)
                with patch("libs.login.current_user", proxy_mock):
                    api_instance = OAuthDataSource()
                    response = api_instance.get("unknown_provider")
                    assert response[0]["error"] == "Invalid provider"
                    assert response[1] == 400
class TestOAuthDataSourceCallback:
    """Tests for the OAuth data-source callback endpoint."""

    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        flask_app = Flask(__name__)
        flask_app.config["TESTING"] = True
        return flask_app

    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    def test_oauth_callback_successful(self, mock_get_providers, app):
        """A callback carrying a code redirects (302) with the code forwarded."""
        mock_get_providers.return_value = {"notion": MagicMock()}
        callback_url = "/console/api/oauth/data-source/notion/callback?code=mock_code"
        with app.test_request_context(callback_url, method="GET"):
            result = OAuthDataSourceCallback().get("notion")
        assert result.status_code == 302
        assert "code=mock_code" in result.location

    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    def test_oauth_callback_missing_code(self, mock_get_providers, app):
        """A callback without a code redirects with an access-denied error."""
        mock_get_providers.return_value = {"notion": MagicMock()}
        callback_url = "/console/api/oauth/data-source/notion/callback"
        with app.test_request_context(callback_url, method="GET"):
            result = OAuthDataSourceCallback().get("notion")
        assert result.status_code == 302
        assert "error=Access denied" in result.location

    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    def test_oauth_callback_invalid_provider(self, mock_get_providers, app):
        """An unknown provider name yields a 400 with an error body."""
        mock_get_providers.return_value = {"notion": MagicMock()}
        callback_url = "/console/api/oauth/data-source/invalid/callback?code=mock_code"
        with app.test_request_context(callback_url, method="GET"):
            result = OAuthDataSourceCallback().get("invalid")
        assert result[0]["error"] == "Invalid provider"
        assert result[1] == 400
class TestOAuthDataSourceBinding:
    """Tests for exchanging an OAuth code for a data-source binding."""

    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        flask_app = Flask(__name__)
        flask_app.config["TESTING"] = True
        return flask_app

    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    def test_get_binding_successful(self, mock_get_providers, app):
        """A valid code is exchanged via get_access_token, returning 200."""
        notion_provider = MagicMock()
        notion_provider.get_access_token.return_value = None
        mock_get_providers.return_value = {"notion": notion_provider}
        binding_url = "/console/api/oauth/data-source/notion/binding?code=auth_code_123"
        with app.test_request_context(binding_url, method="GET"):
            result = OAuthDataSourceBinding().get("notion")
        assert result[0]["result"] == "success"
        assert result[1] == 200
        notion_provider.get_access_token.assert_called_once_with("auth_code_123")

    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    def test_get_binding_missing_code(self, mock_get_providers, app):
        """An empty code query parameter is rejected with a 400 error."""
        mock_get_providers.return_value = {"notion": MagicMock()}
        binding_url = "/console/api/oauth/data-source/notion/binding?code="
        with app.test_request_context(binding_url, method="GET"):
            result = OAuthDataSourceBinding().get("notion")
        assert result[0]["error"] == "Invalid code"
        assert result[1] == 400
class TestOAuthDataSourceSync:
    """Tests for triggering a data-source sync for an existing binding."""
    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app
    @patch("controllers.console.auth.data_source_oauth.get_oauth_providers")
    @patch("libs.login.check_csrf_token")
    @patch("controllers.console.wraps.db")
    def test_sync_successful(self, mock_db, mock_csrf, mock_get_providers, app):
        """A sync request delegates to the provider and returns 200."""
        mock_provider = MagicMock()
        mock_provider.sync_data_source.return_value = None
        mock_get_providers.return_value = {"notion": mock_provider}
        from models.account import Account, AccountStatus
        mock_account = MagicMock(spec=Account)
        mock_account.id = "user_123"
        mock_account.status = AccountStatus.ACTIVE
        mock_account.is_admin_or_owner = True
        mock_account.current_tenant.current_role = "owner"
        with patch("controllers.console.wraps.current_account_with_tenant", return_value=(mock_account, MagicMock())):
            with app.test_request_context("/console/api/oauth/data-source/notion/binding_123/sync", method="GET"):
                proxy_mock = LocalProxy(lambda: mock_account)
                with patch("libs.login.current_user", proxy_mock):
                    api_instance = OAuthDataSourceSync()
                    # The route pattern uses <uuid:binding_id>, so we just pass a string for unit testing
                    response = api_instance.get("notion", "binding_123")
                    assert response[0]["result"] == "success"
                    assert response[1] == 200
                    mock_provider.sync_data_source.assert_called_once_with("binding_123")

View File

@ -1,417 +0,0 @@
from unittest.mock import MagicMock, patch
import pytest
from flask import Flask
from werkzeug.exceptions import BadRequest, NotFound
from controllers.console.auth.oauth_server import (
OAuthServerAppApi,
OAuthServerUserAccountApi,
OAuthServerUserAuthorizeApi,
OAuthServerUserTokenApi,
)
class TestOAuthServerAppApi:
    """Tests for resolving an OAuth provider app by client_id/redirect_uri."""
    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app
    @pytest.fixture
    def mock_oauth_provider_app(self):
        """An OAuthProviderApp mock with one registered redirect URI."""
        from models.model import OAuthProviderApp
        oauth_app = MagicMock(spec=OAuthProviderApp)
        oauth_app.client_id = "test_client_id"
        oauth_app.redirect_uris = ["http://localhost/callback"]
        oauth_app.app_icon = "icon_url"
        oauth_app.app_label = "Test App"
        oauth_app.scope = "read,write"
        return oauth_app
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_successful_post(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A matching client_id and redirect_uri return the app metadata."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider",
            method="POST",
            json={"client_id": "test_client_id", "redirect_uri": "http://localhost/callback"},
        ):
            api_instance = OAuthServerAppApi()
            response = api_instance.post()
            assert response["app_icon"] == "icon_url"
            assert response["app_label"] == "Test App"
            assert response["scope"] == "read,write"
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_invalid_redirect_uri(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A redirect_uri not in the registered list raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider",
            method="POST",
            json={"client_id": "test_client_id", "redirect_uri": "http://invalid/callback"},
        ):
            api_instance = OAuthServerAppApi()
            with pytest.raises(BadRequest, match="redirect_uri is invalid"):
                api_instance.post()
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_invalid_client_id(self, mock_get_app, mock_db, app):
        """An unknown client_id raises NotFound."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = None
        with app.test_request_context(
            "/oauth/provider",
            method="POST",
            json={"client_id": "test_invalid_client_id", "redirect_uri": "http://localhost/callback"},
        ):
            api_instance = OAuthServerAppApi()
            with pytest.raises(NotFound, match="client_id is invalid"):
                api_instance.post()
class TestOAuthServerUserAuthorizeApi:
    """Tests for signing an OAuth authorization code for the current user."""
    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app
    @pytest.fixture
    def mock_oauth_provider_app(self):
        """A minimal OAuth provider-app mock with only a client_id."""
        oauth_app = MagicMock()
        oauth_app.client_id = "test_client_id"
        return oauth_app
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    @patch("controllers.console.auth.oauth_server.current_account_with_tenant")
    @patch("controllers.console.wraps.current_account_with_tenant")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_authorization_code")
    @patch("libs.login.check_csrf_token")
    def test_successful_authorize(
        self, mock_csrf, mock_sign, mock_wrap_current, mock_current, mock_get_app, mock_db, app, mock_oauth_provider_app
    ):
        """An authorize POST returns the code signed for (client_id, user)."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        mock_account = MagicMock()
        mock_account.id = "user_123"
        from models.account import AccountStatus
        mock_account.status = AccountStatus.ACTIVE
        mock_current.return_value = (mock_account, MagicMock())
        mock_wrap_current.return_value = (mock_account, MagicMock())
        mock_sign.return_value = "auth_code_123"
        with app.test_request_context("/oauth/provider/authorize", method="POST", json={"client_id": "test_client_id"}):
            with patch("libs.login.current_user", mock_account):
                api_instance = OAuthServerUserAuthorizeApi()
                response = api_instance.post()
                assert response["code"] == "auth_code_123"
                mock_sign.assert_called_once_with("test_client_id", "user_123")
class TestOAuthServerUserTokenApi:
    """Tests for the OAuth token endpoint.

    Covers both grant types (authorization_code, refresh_token) and the
    validation errors for missing code/token, bad secret, bad redirect_uri
    and unknown grant_type.
    """
    @pytest.fixture
    def app(self):
        """Flask app configured for testing."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app
    @pytest.fixture
    def mock_oauth_provider_app(self):
        """An OAuthProviderApp mock with secret and one redirect URI."""
        from models.model import OAuthProviderApp
        oauth_app = MagicMock(spec=OAuthProviderApp)
        oauth_app.client_id = "test_client_id"
        oauth_app.client_secret = "test_secret"
        oauth_app.redirect_uris = ["http://localhost/callback"]
        return oauth_app
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token")
    def test_authorization_code_grant(self, mock_sign, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A complete authorization_code request yields a Bearer token pair."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        mock_sign.return_value = ("access_123", "refresh_123")
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "authorization_code",
                "code": "auth_code",
                "client_secret": "test_secret",
                "redirect_uri": "http://localhost/callback",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            response = api_instance.post()
            assert response["access_token"] == "access_123"
            assert response["refresh_token"] == "refresh_123"
            assert response["token_type"] == "Bearer"
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_authorization_code_grant_missing_code(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """Omitting the code for an authorization_code grant raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "authorization_code",
                "client_secret": "test_secret",
                "redirect_uri": "http://localhost/callback",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            with pytest.raises(BadRequest, match="code is required"):
                api_instance.post()
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_authorization_code_grant_invalid_secret(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A client_secret mismatch raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "authorization_code",
                "code": "auth_code",
                "client_secret": "invalid_secret",
                "redirect_uri": "http://localhost/callback",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            with pytest.raises(BadRequest, match="client_secret is invalid"):
                api_instance.post()
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_authorization_code_grant_invalid_redirect_uri(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A redirect_uri not in the registered list raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "authorization_code",
                "code": "auth_code",
                "client_secret": "test_secret",
                "redirect_uri": "http://invalid/callback",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            with pytest.raises(BadRequest, match="redirect_uri is invalid"):
                api_instance.post()
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.sign_oauth_access_token")
    def test_refresh_token_grant(self, mock_sign, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A refresh_token grant returns a freshly signed token pair."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        mock_sign.return_value = ("new_access", "new_refresh")
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={"client_id": "test_client_id", "grant_type": "refresh_token", "refresh_token": "refresh_123"},
        ):
            api_instance = OAuthServerUserTokenApi()
            response = api_instance.post()
            assert response["access_token"] == "new_access"
            assert response["refresh_token"] == "new_refresh"
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_refresh_token_grant_missing_token(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """Omitting the refresh_token raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "refresh_token",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            with pytest.raises(BadRequest, match="refresh_token is required"):
                api_instance.post()
    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_invalid_grant_type(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """An unsupported grant_type raises BadRequest."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/token",
            method="POST",
            json={
                "client_id": "test_client_id",
                "grant_type": "invalid_grant",
            },
        ):
            api_instance = OAuthServerUserTokenApi()
            with pytest.raises(BadRequest, match="invalid grant_type"):
                api_instance.post()
class TestOAuthServerUserAccountApi:
    """Tests for the OAuth provider endpoint that returns the authorized user's account info."""

    @pytest.fixture
    def app(self):
        """Minimal Flask app used only to build request contexts."""
        app = Flask(__name__)
        app.config["TESTING"] = True
        return app

    @pytest.fixture
    def mock_oauth_provider_app(self):
        """OAuthProviderApp double carrying the client_id the endpoint expects."""
        from models.model import OAuthProviderApp

        # spec keeps attribute access faithful to the real model.
        oauth_app = MagicMock(spec=OAuthProviderApp)
        oauth_app.client_id = "test_client_id"
        return oauth_app

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token")
    def test_successful_account_retrieval(self, mock_validate, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A valid Bearer token should yield the validated account's profile fields."""
        # Satisfy the setup_required wrap's DB probe and the provider lookup.
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        mock_account = MagicMock()
        mock_account.name = "Test User"
        mock_account.email = "test@example.com"
        mock_account.avatar = "avatar_url"
        mock_account.interface_language = "en-US"
        mock_account.timezone = "UTC"
        mock_validate.return_value = mock_account
        with app.test_request_context(
            "/oauth/provider/account",
            method="POST",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "Bearer valid_access_token"},
        ):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            # The endpoint echoes the account's profile fields back to the client.
            assert response["name"] == "Test User"
            assert response["email"] == "test@example.com"
            assert response["avatar"] == "avatar_url"

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_missing_authorization_header(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """Requests without an Authorization header are rejected with 401."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context("/oauth/provider/account", method="POST", json={"client_id": "test_client_id"}):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            # Error path returns a Flask response object, not a dict.
            assert response.status_code == 401
            assert response.json["error"] == "Authorization header is required"

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_invalid_authorization_header_format(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """A header without the '<type> <token>' shape is rejected with 401."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/account",
            method="POST",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "InvalidFormat"},
        ):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            assert response.status_code == 401
            assert response.json["error"] == "Invalid Authorization header format"

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_invalid_token_type(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """Only the Bearer scheme is accepted; other schemes are rejected with 401."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/account",
            method="POST",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "Basic something"},
        ):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            assert response.status_code == 401
            assert response.json["error"] == "token_type is invalid"

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    def test_missing_access_token(self, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """'Bearer ' with no token is treated as a malformed header (401)."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        with app.test_request_context(
            "/oauth/provider/account",
            method="POST",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "Bearer "},
        ):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            assert response.status_code == 401
            assert response.json["error"] == "Invalid Authorization header format"

    @patch("controllers.console.wraps.db")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.get_oauth_provider_app")
    @patch("controllers.console.auth.oauth_server.OAuthServerService.validate_oauth_access_token")
    def test_invalid_access_token(self, mock_validate, mock_get_app, mock_db, app, mock_oauth_provider_app):
        """Token validation returning None maps to a 401 error response."""
        mock_db.session.query.return_value.first.return_value = MagicMock()
        mock_get_app.return_value = mock_oauth_provider_app
        mock_validate.return_value = None
        with app.test_request_context(
            "/oauth/provider/account",
            method="POST",
            json={"client_id": "test_client_id"},
            headers={"Authorization": "Bearer invalid_token"},
        ):
            api_instance = OAuthServerUserAccountApi()
            response = api_instance.post()
            assert response.status_code == 401
            assert response.json["error"] == "access_token or client_id is invalid"

View File

@ -11,6 +11,7 @@ from controllers.console.tag.tags import (
TagListApi,
TagUpdateDeleteApi,
)
from models.enums import TagType
def unwrap(func):
@ -52,7 +53,7 @@ def tag():
tag = MagicMock()
tag.id = "tag-1"
tag.name = "test-tag"
tag.type = "knowledge"
tag.type = TagType.KNOWLEDGE
return tag

View File

@ -35,6 +35,7 @@ from controllers.service_api.dataset.dataset import (
from controllers.service_api.dataset.error import DatasetInUseError, DatasetNameDuplicateError, InvalidActionError
from models.account import Account
from models.dataset import DatasetPermissionEnum
from models.enums import TagType
from services.dataset_service import DatasetPermissionService, DatasetService, DocumentService
from services.tag_service import TagService
@ -277,7 +278,7 @@ class TestDatasetTagsApi:
mock_tag = Mock()
mock_tag.id = "tag_1"
mock_tag.name = "Test Tag"
mock_tag.type = "knowledge"
mock_tag.type = TagType.KNOWLEDGE
mock_tag.binding_count = "0" # Required for Pydantic validation - must be string
mock_tag_service.get_tags.return_value = [mock_tag]
@ -316,7 +317,7 @@ class TestDatasetTagsApi:
mock_tag = Mock()
mock_tag.id = "new_tag_1"
mock_tag.name = "New Tag"
mock_tag.type = "knowledge"
mock_tag.type = TagType.KNOWLEDGE
mock_tag_service.save_tags.return_value = mock_tag
mock_service_api_ns.payload = {"name": "New Tag"}
@ -378,7 +379,7 @@ class TestDatasetTagsApi:
mock_tag = Mock()
mock_tag.id = "tag_1"
mock_tag.name = "Updated Tag"
mock_tag.type = "knowledge"
mock_tag.type = TagType.KNOWLEDGE
mock_tag.binding_count = "5"
mock_tag_service.update_tags.return_value = mock_tag
mock_tag_service.get_tag_binding_count.return_value = 5
@ -866,7 +867,7 @@ class TestTagService:
mock_tag = Mock()
mock_tag.id = str(uuid.uuid4())
mock_tag.name = "New Tag"
mock_tag.type = "knowledge"
mock_tag.type = TagType.KNOWLEDGE
mock_save.return_value = mock_tag
result = TagService.save_tags({"name": "New Tag", "type": "knowledge"})

View File

@ -24,7 +24,7 @@ from core.repositories.sqlalchemy_workflow_node_execution_repository import (
)
from dify_graph.entities import WorkflowNodeExecution
from dify_graph.enums import (
NodeType,
BuiltinNodeTypes,
WorkflowNodeExecutionMetadataKey,
WorkflowNodeExecutionStatus,
)
@ -67,7 +67,7 @@ def _execution(
index=1,
predecessor_node_id=None,
node_id="node-id",
node_type=NodeType.LLM,
node_type=BuiltinNodeTypes.LLM,
title="Title",
inputs=inputs,
outputs=outputs,
@ -387,7 +387,7 @@ def test_to_domain_model_loads_offloaded_files(monkeypatch: pytest.MonkeyPatch)
db_model.index = 1
db_model.predecessor_node_id = None
db_model.node_id = "node"
db_model.node_type = NodeType.LLM
db_model.node_type = BuiltinNodeTypes.LLM
db_model.title = "t"
db_model.inputs = json.dumps({"trunc": "i"})
db_model.process_data = json.dumps({"trunc": "p"})
@ -441,7 +441,7 @@ def test_to_domain_model_returns_early_when_no_offload_data(monkeypatch: pytest.
db_model.index = 1
db_model.predecessor_node_id = None
db_model.node_id = "node"
db_model.node_type = NodeType.LLM
db_model.node_type = BuiltinNodeTypes.LLM
db_model.title = "t"
db_model.inputs = json.dumps({"i": 1})
db_model.process_data = json.dumps({"p": 2})

View File

@ -95,13 +95,11 @@ class TestGitHubOAuth(BaseOAuthTest):
],
"primary@example.com",
),
# User with no emails - fallback to noreply
({"id": 12345, "login": "testuser", "name": "Test User"}, [], "12345+testuser@users.noreply.github.com"),
# User with only secondary email - fallback to noreply
# User with private email (null email and name from API)
(
{"id": 12345, "login": "testuser", "name": "Test User"},
[{"email": "secondary@example.com", "primary": False}],
"12345+testuser@users.noreply.github.com",
{"id": 12345, "login": "testuser", "name": None, "email": None},
[{"email": "primary@example.com", "primary": True}],
"primary@example.com",
),
],
)
@ -118,9 +116,54 @@ class TestGitHubOAuth(BaseOAuthTest):
user_info = oauth.get_user_info("test_token")
assert user_info.id == str(user_data["id"])
assert user_info.name == user_data["name"]
assert user_info.name == (user_data["name"] or "")
assert user_info.email == expected_email
@pytest.mark.parametrize(
    ("user_data", "email_data"),
    [
        # User with no emails
        ({"id": 12345, "login": "testuser", "name": "Test User"}, []),
        # User with only secondary email
        (
            {"id": 12345, "login": "testuser", "name": "Test User"},
            [{"email": "secondary@example.com", "primary": False}],
        ),
        # User with private email and no primary in emails endpoint
        (
            {"id": 12345, "login": "testuser", "name": None, "email": None},
            [],
        ),
    ],
)
@patch("httpx.get", autospec=True)
def test_should_raise_error_when_no_primary_email(self, mock_get, oauth, user_data, email_data):
    """When GitHub exposes no primary email, get_user_info must raise instead of guessing."""
    user_response = MagicMock()
    user_response.json.return_value = user_data
    email_response = MagicMock()
    email_response.json.return_value = email_data
    # First httpx.get call fetches the user profile, the second the email list.
    mock_get.side_effect = [user_response, email_response]
    with pytest.raises(ValueError, match="Keep my email addresses private"):
        oauth.get_user_info("test_token")
@patch("httpx.get", autospec=True)
def test_should_raise_error_when_email_endpoint_fails(self, mock_get, oauth):
user_response = MagicMock()
user_response.json.return_value = {"id": 12345, "login": "testuser", "name": "Test User"}
email_response = MagicMock()
email_response.raise_for_status.side_effect = httpx.HTTPStatusError(
"Forbidden", request=MagicMock(), response=MagicMock()
)
mock_get.side_effect = [user_response, email_response]
with pytest.raises(ValueError, match="Keep my email addresses private"):
oauth.get_user_info("test_token")
@patch("httpx.get", autospec=True)
def test_should_handle_network_errors(self, mock_get, oauth):
mock_get.side_effect = httpx.RequestError("Network error")

View File

@ -12,7 +12,7 @@ This test suite covers:
import json
from uuid import uuid4
from core.tools.entities.tool_entities import ApiProviderSchemaType
from core.tools.entities.tool_entities import ApiProviderSchemaType, ToolProviderType
from models.tools import (
ApiToolProvider,
BuiltinToolProvider,
@ -631,7 +631,7 @@ class TestToolLabelBinding:
"""Test creating a tool label binding."""
# Arrange
tool_id = "google.search"
tool_type = "builtin"
tool_type = ToolProviderType.BUILT_IN
label_name = "search"
# Act
@ -655,7 +655,7 @@ class TestToolLabelBinding:
# Act
label_binding = ToolLabelBinding(
tool_id=tool_id,
tool_type="builtin",
tool_type=ToolProviderType.BUILT_IN,
label_name=label_name,
)
@ -667,7 +667,7 @@ class TestToolLabelBinding:
"""Test multiple labels can be bound to the same tool."""
# Arrange
tool_id = "google.search"
tool_type = "builtin"
tool_type = ToolProviderType.BUILT_IN
# Act
binding1 = ToolLabelBinding(
@ -688,7 +688,7 @@ class TestToolLabelBinding:
def test_tool_label_binding_different_tool_types(self):
"""Test label bindings for different tool types."""
# Arrange
tool_types = ["builtin", "api", "workflow"]
tool_types = [ToolProviderType.BUILT_IN, ToolProviderType.API, ToolProviderType.WORKFLOW]
# Act & Assert
for tool_type in tool_types:
@ -951,12 +951,12 @@ class TestToolProviderRelationships:
# Act
binding1 = ToolLabelBinding(
tool_id=tool_id,
tool_type="builtin",
tool_type=ToolProviderType.BUILT_IN,
label_name="search",
)
binding2 = ToolLabelBinding(
tool_id=tool_id,
tool_type="builtin",
tool_type=ToolProviderType.BUILT_IN,
label_name="web",
)

View File

@ -1,180 +0,0 @@
from __future__ import annotations
from collections.abc import Sequence
from dataclasses import dataclass
from datetime import UTC, datetime, timedelta
from core.entities.execution_extra_content import HumanInputContent as HumanInputContentDomain
from core.entities.execution_extra_content import HumanInputFormSubmissionData
from dify_graph.nodes.human_input.entities import (
FormDefinition,
UserAction,
)
from dify_graph.nodes.human_input.enums import HumanInputFormStatus
from models.execution_extra_content import HumanInputContent as HumanInputContentModel
from models.human_input import ConsoleRecipientPayload, HumanInputForm, HumanInputFormRecipient, RecipientType
from repositories.sqlalchemy_execution_extra_content_repository import SQLAlchemyExecutionExtraContentRepository
class _FakeScalarResult:
    """Minimal stand-in for SQLAlchemy's ScalarResult backed by a fixed snapshot."""

    def __init__(self, values: Sequence[HumanInputContentModel]):
        # Copy defensively so later mutation of the caller's sequence cannot leak in.
        self._values = [*values]

    def all(self) -> list[HumanInputContentModel]:
        # Hand out a fresh list per call, mirroring ScalarResult.all().
        return [*self._values]
class _FakeSession:
    """Context-manager session double that replays queued scalar result batches in FIFO order."""

    def __init__(self, values: Sequence[Sequence[object]]):
        self._values = list(values)

    def scalars(self, _stmt):
        # Each call consumes the next queued batch; an exhausted queue yields empty results.
        batch = self._values.pop(0) if self._values else []
        return _FakeScalarResult(batch)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Never swallow exceptions raised inside the with-block.
        return False
@dataclass
class _FakeSessionMaker:
    """Callable session-factory double: every call returns the single pre-built session."""

    # The session instance handed back on each call.
    session: _FakeSession

    def __call__(self) -> _FakeSession:
        # Mirrors sessionmaker(): invoking the factory yields a session.
        return self.session
def _build_form(action_id: str, action_title: str, rendered_content: str) -> HumanInputForm:
    """Build a SUBMITTED HumanInputForm whose single user action is already selected."""
    # Keep the form unexpired for the duration of the test.
    expiration_time = datetime.now(UTC) + timedelta(days=1)
    definition = FormDefinition(
        form_content="content",
        inputs=[],
        user_actions=[UserAction(id=action_id, title=action_title)],
        rendered_content="rendered",
        expiration_time=expiration_time,
        node_title="Approval",
        display_in_ui=True,
    )
    form = HumanInputForm(
        id=f"form-{action_id}",
        tenant_id="tenant-id",
        app_id="app-id",
        workflow_run_id="workflow-run",
        node_id="node-id",
        # The model stores the definition as its JSON serialization.
        form_definition=definition.model_dump_json(),
        rendered_content=rendered_content,
        status=HumanInputFormStatus.SUBMITTED,
        expiration_time=expiration_time,
    )
    # Record which action the (simulated) user chose.
    form.selected_action_id = action_id
    return form
def _build_content(message_id: str, action_id: str, action_title: str) -> HumanInputContentModel:
    """Build a HumanInputContentModel linked to a freshly built, already-submitted form."""
    form = _build_form(
        action_id=action_id,
        action_title=action_title,
        rendered_content=f"Rendered {action_title}",
    )
    content = HumanInputContentModel(
        id=f"content-{message_id}",
        form_id=form.id,
        message_id=message_id,
        workflow_run_id=form.workflow_run_id,
    )
    # Attach the relationship in memory; no DB session is involved in these tests.
    content.form = form
    return content
def test_get_by_message_ids_groups_contents_by_message() -> None:
    """Contents are grouped per requested message id; messages without contents get empty lists."""
    message_ids = ["msg-1", "msg-2"]
    contents = [_build_content("msg-1", "approve", "Approve")]
    # One queued batch per scalars() call: contents for msg-1, then nothing for msg-2.
    repository = SQLAlchemyExecutionExtraContentRepository(
        session_maker=_FakeSessionMaker(session=_FakeSession(values=[contents, []]))
    )
    result = repository.get_by_message_ids(message_ids)
    assert len(result) == 2
    # Compare via JSON serialization to avoid depending on model object identity.
    assert [content.model_dump(mode="json", exclude_none=True) for content in result[0]] == [
        HumanInputContentDomain(
            workflow_run_id="workflow-run",
            submitted=True,
            form_submission_data=HumanInputFormSubmissionData(
                node_id="node-id",
                node_title="Approval",
                rendered_content="Rendered Approve",
                action_id="approve",
                action_text="Approve",
            ),
        ).model_dump(mode="json", exclude_none=True)
    ]
    assert result[1] == []
def test_get_by_message_ids_returns_unsubmitted_form_definition() -> None:
    """Unsubmitted (WAITING) forms should surface their full form definition, token, and defaults."""
    expiration_time = datetime.now(UTC) + timedelta(days=1)
    definition = FormDefinition(
        form_content="content",
        inputs=[],
        user_actions=[UserAction(id="approve", title="Approve")],
        rendered_content="rendered",
        expiration_time=expiration_time,
        default_values={"name": "John"},
        node_title="Approval",
        display_in_ui=True,
    )
    form = HumanInputForm(
        id="form-1",
        tenant_id="tenant-id",
        app_id="app-id",
        workflow_run_id="workflow-run",
        node_id="node-id",
        form_definition=definition.model_dump_json(),
        rendered_content="Rendered block",
        # WAITING marks the form as not yet submitted.
        status=HumanInputFormStatus.WAITING,
        expiration_time=expiration_time,
    )
    content = HumanInputContentModel(
        id="content-msg-1",
        form_id=form.id,
        message_id="msg-1",
        workflow_run_id=form.workflow_run_id,
    )
    content.form = form
    recipient = HumanInputFormRecipient(
        form_id=form.id,
        delivery_id="delivery-1",
        recipient_type=RecipientType.CONSOLE,
        recipient_payload=ConsoleRecipientPayload(account_id=None).model_dump_json(),
        access_token="token-1",
    )
    # First queued batch answers the contents query, the second the recipients query.
    repository = SQLAlchemyExecutionExtraContentRepository(
        session_maker=_FakeSessionMaker(session=_FakeSession(values=[[content], [recipient]]))
    )
    result = repository.get_by_message_ids(["msg-1"])
    assert len(result) == 1
    assert len(result[0]) == 1
    domain_content = result[0][0]
    assert domain_content.submitted is False
    assert domain_content.workflow_run_id == "workflow-run"
    assert domain_content.form_definition is not None
    assert domain_content.form_definition.expiration_time == int(form.expiration_time.timestamp())
    assert domain_content.form_definition is not None
    form_definition = domain_content.form_definition
    assert form_definition.form_id == "form-1"
    assert form_definition.node_id == "node-id"
    assert form_definition.node_title == "Approval"
    # The domain form_content carries the rendered text, not the raw definition content.
    assert form_definition.form_content == "Rendered block"
    assert form_definition.display_in_ui is True
    # The recipient's access token is exposed as the form token.
    assert form_definition.form_token == "token-1"
    assert form_definition.resolved_default_values == {"name": "John"}
    assert form_definition.expiration_time == int(form.expiration_time.timestamp())

View File

@ -1,73 +0,0 @@
import base64
from unittest.mock import MagicMock, patch
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from werkzeug.exceptions import NotFound
import services.attachment_service as attachment_service_module
from models.model import UploadFile
from services.attachment_service import AttachmentService
class TestAttachmentService:
    """Unit tests for AttachmentService construction and base64 file retrieval."""

    def test_should_initialize_with_sessionmaker_when_sessionmaker_is_provided(self):
        """Test that AttachmentService keeps the provided sessionmaker instance."""
        session_factory = sessionmaker()
        service = AttachmentService(session_factory=session_factory)
        assert service._session_maker is session_factory

    def test_should_initialize_with_bound_sessionmaker_when_engine_is_provided(self):
        """Test that AttachmentService builds a sessionmaker bound to the provided engine."""
        engine = create_engine("sqlite:///:memory:")
        service = AttachmentService(session_factory=engine)
        session = service._session_maker()
        try:
            assert session.bind == engine
        finally:
            # Release the session and dispose the engine even if the assertion fails.
            session.close()
            engine.dispose()

    @pytest.mark.parametrize("invalid_session_factory", [None, "not-a-session-factory", 1])
    def test_should_raise_assertion_error_when_session_factory_type_is_invalid(self, invalid_session_factory):
        """Test that invalid session_factory types are rejected."""
        with pytest.raises(AssertionError, match="must be a sessionmaker or an Engine."):
            AttachmentService(session_factory=invalid_session_factory)

    def test_should_return_base64_encoded_blob_when_file_exists(self):
        """Test that existing files are loaded from storage and returned as base64."""
        service = AttachmentService(session_factory=sessionmaker())
        upload_file = MagicMock(spec=UploadFile)
        upload_file.key = "upload-file-key"
        session = MagicMock()
        # Mirror the query chain: session.query(UploadFile).where(...).first().
        session.query.return_value.where.return_value.first.return_value = upload_file
        service._session_maker = MagicMock(return_value=session)
        with patch.object(attachment_service_module.storage, "load_once", return_value=b"binary-content") as mock_load:
            result = service.get_file_base64("file-123")
            assert result == base64.b64encode(b"binary-content").decode()
            service._session_maker.assert_called_once_with(expire_on_commit=False)
            session.query.assert_called_once_with(UploadFile)
            mock_load.assert_called_once_with("upload-file-key")

    def test_should_raise_not_found_when_file_does_not_exist(self):
        """Test that missing files raise NotFound and never call storage."""
        service = AttachmentService(session_factory=sessionmaker())
        session = MagicMock()
        session.query.return_value.where.return_value.first.return_value = None
        service._session_maker = MagicMock(return_value=session)
        with patch.object(attachment_service_module.storage, "load_once") as mock_load:
            with pytest.raises(NotFound, match="File not found"):
                service.get_file_base64("missing-file")
            service._session_maker.assert_called_once_with(expire_on_commit=False)
            session.query.assert_called_once_with(UploadFile)
            # Storage must not be touched when the DB lookup fails.
            mock_load.assert_not_called()

View File

@ -1303,6 +1303,24 @@ class TestBillingServiceSubscriptionOperations:
# Assert
assert result == {}
def test_get_plan_bulk_converts_string_expiration_date_to_int(self, mock_send_request):
    """get_plan_bulk should coerce a stringified expiration_date into an int."""
    # The billing API sometimes returns numbers as strings; the service must normalize them.
    mock_send_request.return_value = {
        "data": {"tenant-1": {"plan": "sandbox", "expiration_date": "1735689600"}}
    }

    result = BillingService.get_plan_bulk(["tenant-1"])

    assert "tenant-1" in result
    tenant_plan = result["tenant-1"]
    assert isinstance(tenant_plan["expiration_date"], int)
    assert tenant_plan["expiration_date"] == 1735689600
def test_get_plan_bulk_with_invalid_tenant_plan_skipped(self, mock_send_request):
"""Test bulk plan retrieval when one tenant has invalid plan data (should skip that tenant)."""
# Arrange

View File

@ -1,75 +0,0 @@
from types import SimpleNamespace
from unittest.mock import MagicMock
import pytest
from dify_graph.variables import StringVariable
from services.conversation_variable_updater import ConversationVariableNotFoundError, ConversationVariableUpdater
class TestConversationVariableUpdater:
    """Unit tests for ConversationVariableUpdater's update and flush behavior."""

    def test_should_update_conversation_variable_data_and_commit(self):
        """Test update persists serialized variable data when the row exists."""
        conversation_id = "conv-123"
        variable = StringVariable(
            id="var-123",
            name="topic",
            value="new value",
        )
        expected_json = variable.model_dump_json()
        # SimpleNamespace stands in for the ORM row; update mutates its .data in place.
        row = SimpleNamespace(data="old value")
        session = MagicMock()
        session.scalar.return_value = row
        session_context = MagicMock()
        session_context.__enter__.return_value = session
        session_context.__exit__.return_value = None
        session_maker = MagicMock(return_value=session_context)
        updater = ConversationVariableUpdater(session_maker)
        updater.update(conversation_id=conversation_id, variable=variable)
        session_maker.assert_called_once_with()
        session.scalar.assert_called_once()
        # Inspect the compiled SELECT to confirm both lookup keys were bound.
        stmt = session.scalar.call_args.args[0]
        compiled_params = stmt.compile().params
        assert variable.id in compiled_params.values()
        assert conversation_id in compiled_params.values()
        assert row.data == expected_json
        session.commit.assert_called_once()

    def test_should_raise_not_found_error_when_conversation_variable_missing(self):
        """Test update raises ConversationVariableNotFoundError when no matching row exists."""
        conversation_id = "conv-404"
        variable = StringVariable(
            id="var-404",
            name="topic",
            value="value",
        )
        session = MagicMock()
        # scalar returning None means the variable row was not found.
        session.scalar.return_value = None
        session_context = MagicMock()
        session_context.__enter__.return_value = session
        session_context.__exit__.return_value = None
        session_maker = MagicMock(return_value=session_context)
        updater = ConversationVariableUpdater(session_maker)
        with pytest.raises(ConversationVariableNotFoundError, match="conversation variable not found in the database"):
            updater.update(conversation_id=conversation_id, variable=variable)
        # Nothing must be committed when the lookup fails.
        session.commit.assert_not_called()

    def test_should_do_nothing_when_flush_is_called(self):
        """Test flush currently behaves as a no-op and returns None."""
        session_maker = MagicMock()
        updater = ConversationVariableUpdater(session_maker)
        result = updater.flush()
        assert result is None
        # flush must not even open a session.
        session_maker.assert_not_called()

View File

@ -1,157 +0,0 @@
from types import SimpleNamespace
from unittest.mock import MagicMock, patch
import pytest
import services.credit_pool_service as credit_pool_service_module
from core.errors.error import QuotaExceededError
from models import TenantCreditPool
from services.credit_pool_service import CreditPoolService
@pytest.fixture
def mock_credit_deduction_setup():
    """Fixture providing common setup for credit deduction tests.

    Returns a dict with the fake pool, engine, session, its context manager,
    and a tuple of un-started patches so each test controls their lifetime.
    """
    pool = SimpleNamespace(remaining_credits=50)
    fake_engine = MagicMock()
    session = MagicMock()
    session_context = MagicMock()
    session_context.__enter__.return_value = session
    session_context.__exit__.return_value = None
    # Patches are created but NOT started here; tests enter them via `with`.
    mock_get_pool = patch.object(CreditPoolService, "get_pool", return_value=pool)
    mock_db = patch.object(credit_pool_service_module, "db", new=SimpleNamespace(engine=fake_engine))
    mock_session = patch.object(credit_pool_service_module, "Session", return_value=session_context)
    return {
        "pool": pool,
        "fake_engine": fake_engine,
        "session": session,
        "session_context": session_context,
        "patches": (mock_get_pool, mock_db, mock_session),
    }
class TestCreditPoolService:
    """Unit tests covering pool creation, lookup, availability checks, and credit deduction."""

    def test_should_create_default_pool_with_trial_type_and_configured_quota(self):
        """Test create_default_pool persists a trial pool using configured hosted credits."""
        tenant_id = "tenant-123"
        hosted_pool_credits = 5000
        with (
            patch.object(credit_pool_service_module.dify_config, "HOSTED_POOL_CREDITS", hosted_pool_credits),
            patch.object(credit_pool_service_module, "db") as mock_db,
        ):
            pool = CreditPoolService.create_default_pool(tenant_id)
            assert isinstance(pool, TenantCreditPool)
            assert pool.tenant_id == tenant_id
            assert pool.pool_type == "trial"
            assert pool.quota_limit == hosted_pool_credits
            assert pool.quota_used == 0
            # The new pool must be persisted exactly once.
            mock_db.session.add.assert_called_once_with(pool)
            mock_db.session.commit.assert_called_once()

    def test_should_return_first_pool_from_query_when_get_pool_called(self):
        """Test get_pool queries by tenant and pool_type and returns first result."""
        tenant_id = "tenant-123"
        pool_type = "enterprise"
        expected_pool = MagicMock(spec=TenantCreditPool)
        with patch.object(credit_pool_service_module, "db") as mock_db:
            # Wire the query chain: query(TenantCreditPool).filter_by(...).first().
            query = mock_db.session.query.return_value
            filtered_query = query.filter_by.return_value
            filtered_query.first.return_value = expected_pool
            result = CreditPoolService.get_pool(tenant_id=tenant_id, pool_type=pool_type)
            assert result == expected_pool
            mock_db.session.query.assert_called_once_with(TenantCreditPool)
            query.filter_by.assert_called_once_with(tenant_id=tenant_id, pool_type=pool_type)
            filtered_query.first.assert_called_once()

    def test_should_return_false_when_pool_not_found_in_check_credits_available(self):
        """Test check_credits_available returns False when tenant has no pool."""
        with patch.object(CreditPoolService, "get_pool", return_value=None) as mock_get_pool:
            result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=10)
            assert result is False
            # The lookup defaults to the trial pool.
            mock_get_pool.assert_called_once_with("tenant-123", "trial")

    def test_should_return_true_when_remaining_credits_cover_required_amount(self):
        """Test check_credits_available returns True when remaining credits are sufficient."""
        pool = SimpleNamespace(remaining_credits=100)
        with patch.object(CreditPoolService, "get_pool", return_value=pool) as mock_get_pool:
            result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=60)
            assert result is True
            mock_get_pool.assert_called_once_with("tenant-123", "trial")

    def test_should_return_false_when_remaining_credits_are_insufficient(self):
        """Test check_credits_available returns False when required credits exceed remaining credits."""
        pool = SimpleNamespace(remaining_credits=30)
        with patch.object(CreditPoolService, "get_pool", return_value=pool):
            result = CreditPoolService.check_credits_available(tenant_id="tenant-123", credits_required=60)
            assert result is False

    def test_should_raise_quota_exceeded_when_pool_not_found_in_check_and_deduct(self):
        """Test check_and_deduct_credits raises when tenant credit pool does not exist."""
        with patch.object(CreditPoolService, "get_pool", return_value=None):
            with pytest.raises(QuotaExceededError, match="Credit pool not found"):
                CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10)

    def test_should_raise_quota_exceeded_when_pool_has_no_remaining_credits(self):
        """Test check_and_deduct_credits raises when remaining credits are zero or negative."""
        pool = SimpleNamespace(remaining_credits=0)
        with patch.object(CreditPoolService, "get_pool", return_value=pool):
            with pytest.raises(QuotaExceededError, match="No credits remaining"):
                CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10)

    def test_should_deduct_minimum_of_required_and_remaining_credits(self, mock_credit_deduction_setup):
        """Test check_and_deduct_credits updates quota_used by the actual deducted amount."""
        tenant_id = "tenant-123"
        pool_type = "trial"
        credits_required = 200
        remaining_credits = 120
        # Only the available balance can be deducted, not the full requested amount.
        expected_deducted_credits = 120
        mock_credit_deduction_setup["pool"].remaining_credits = remaining_credits
        patches = mock_credit_deduction_setup["patches"]
        session = mock_credit_deduction_setup["session"]
        with patches[0], patches[1], patches[2]:
            result = CreditPoolService.check_and_deduct_credits(
                tenant_id=tenant_id,
                credits_required=credits_required,
                pool_type=pool_type,
            )
            assert result == expected_deducted_credits
            session.execute.assert_called_once()
            session.commit.assert_called_once()
            # The UPDATE statement must carry the tenant, pool type, and deducted amount.
            stmt = session.execute.call_args.args[0]
            compiled_params = stmt.compile().params
            assert tenant_id in compiled_params.values()
            assert pool_type in compiled_params.values()
            assert expected_deducted_credits in compiled_params.values()

    def test_should_raise_quota_exceeded_when_deduction_update_fails(self, mock_credit_deduction_setup):
        """Test check_and_deduct_credits translates DB update failures to QuotaExceededError."""
        mock_credit_deduction_setup["pool"].remaining_credits = 50
        # Make the UPDATE blow up so the failure path is exercised.
        mock_credit_deduction_setup["session"].execute.side_effect = Exception("db failure")
        session = mock_credit_deduction_setup["session"]
        patches = mock_credit_deduction_setup["patches"]
        mock_logger = patch.object(credit_pool_service_module, "logger")
        with patches[0], patches[1], patches[2], mock_logger as mock_logger_obj:
            with pytest.raises(QuotaExceededError, match="Failed to deduct credits"):
                CreditPoolService.check_and_deduct_credits(tenant_id="tenant-123", credits_required=10)
            session.commit.assert_not_called()
            mock_logger_obj.exception.assert_called_once()

View File

@ -1,305 +0,0 @@
from unittest.mock import Mock, patch
import pytest
from models.account import Account, TenantAccountRole
from models.dataset import Dataset, DatasetPermission, DatasetPermissionEnum
from services.dataset_service import DatasetService
from services.errors.account import NoPermissionError
class DatasetPermissionTestDataFactory:
    """Factory class for creating test data and mock objects for dataset permission tests."""

    @staticmethod
    def create_dataset_mock(
        dataset_id: str = "dataset-123",
        tenant_id: str = "test-tenant-123",
        created_by: str = "creator-456",
        permission: DatasetPermissionEnum = DatasetPermissionEnum.ONLY_ME,
        **kwargs,
    ) -> Mock:
        """Create a mock dataset with specified attributes."""
        # spec restricts attribute access to the real Dataset model's surface.
        dataset = Mock(spec=Dataset)
        dataset.id = dataset_id
        dataset.tenant_id = tenant_id
        dataset.created_by = created_by
        dataset.permission = permission
        # Allow tests to override or extend arbitrary attributes.
        for key, value in kwargs.items():
            setattr(dataset, key, value)
        return dataset

    @staticmethod
    def create_user_mock(
        user_id: str = "user-789",
        tenant_id: str = "test-tenant-123",
        role: TenantAccountRole = TenantAccountRole.NORMAL,
        **kwargs,
    ) -> Mock:
        """Create a mock user with specified attributes."""
        user = Mock(spec=Account)
        user.id = user_id
        user.current_tenant_id = tenant_id
        user.current_role = role
        for key, value in kwargs.items():
            setattr(user, key, value)
        return user

    @staticmethod
    def create_dataset_permission_mock(
        dataset_id: str = "dataset-123",
        account_id: str = "user-789",
        **kwargs,
    ) -> Mock:
        """Create a mock dataset permission record."""
        permission = Mock(spec=DatasetPermission)
        permission.dataset_id = dataset_id
        permission.account_id = account_id
        for key, value in kwargs.items():
            setattr(permission, key, value)
        return permission
class TestDatasetPermissionService:
    """
    Comprehensive unit tests for DatasetService.check_dataset_permission method.
    This test suite covers all permission scenarios including:
    - Cross-tenant access restrictions
    - Owner privilege checks
    - Different permission levels (ONLY_ME, ALL_TEAM, PARTIAL_TEAM)
    - Explicit permission checks for PARTIAL_TEAM
    - Error conditions and logging
    """
    @pytest.fixture
    def mock_dataset_service_dependencies(self):
        """Common mock setup for dataset service dependencies."""
        # Replace the SQLAlchemy session used by DatasetService so no real
        # database is touched; tests inspect/stub calls through this mock.
        with patch("services.dataset_service.db.session") as mock_session:
            yield {
                "db_session": mock_session,
            }
    @pytest.fixture
    def mock_logging_dependencies(self):
        """Mock setup for logging tests."""
        # Patch the module-level logger so log calls can be asserted on.
        with patch("services.dataset_service.logger") as mock_logging:
            yield {
                "logging": mock_logging,
            }
    def _assert_permission_check_passes(self, dataset: Mock, user: Mock) -> None:
        """Helper method to verify that permission check passes without raising exceptions."""
        # Should not raise any exception
        DatasetService.check_dataset_permission(dataset, user)
    def _assert_permission_check_fails(
        self, dataset: Mock, user: Mock, expected_message: str = "You do not have permission to access this dataset."
    ) -> None:
        """Helper method to verify that permission check fails with expected error."""
        with pytest.raises(NoPermissionError, match=expected_message):
            DatasetService.check_dataset_permission(dataset, user)
    def _assert_database_query_called(self, mock_session: Mock, dataset_id: str, account_id: str) -> None:
        """Helper method to verify database query calls for permission checks."""
        # Relies on the service using session.query(...).filter_by(...) —
        # asserts the filter_by arguments, not the query() argument itself.
        mock_session.query().filter_by.assert_called_with(dataset_id=dataset_id, account_id=account_id)
    def _assert_database_query_not_called(self, mock_session: Mock) -> None:
        """Helper method to verify that database query was not called."""
        mock_session.query.assert_not_called()
    # ==================== Cross-Tenant Access Tests ====================
    def test_permission_check_different_tenant_should_fail(self):
        """Test that users from different tenants cannot access dataset regardless of other permissions."""
        # Create dataset and user from different tenants
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            tenant_id="tenant-123", permission=DatasetPermissionEnum.ALL_TEAM
        )
        user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="user-789", tenant_id="different-tenant-456", role=TenantAccountRole.EDITOR
        )
        # Should fail due to different tenant
        self._assert_permission_check_fails(dataset, user)
    # ==================== Owner Privilege Tests ====================
    def test_owner_can_access_any_dataset(self):
        """Test that tenant owners can access any dataset regardless of permission level."""
        # Create dataset with restrictive permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ONLY_ME)
        # Create owner user
        owner_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="owner-999", role=TenantAccountRole.OWNER
        )
        # Owner should have access regardless of dataset permission
        self._assert_permission_check_passes(dataset, owner_user)
    # ==================== ONLY_ME Permission Tests ====================
    def test_only_me_permission_creator_can_access(self):
        """Test ONLY_ME permission allows only the dataset creator to access."""
        # Create dataset with ONLY_ME permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME
        )
        # Create creator user
        creator_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="creator-456", role=TenantAccountRole.EDITOR
        )
        # Creator should be able to access
        self._assert_permission_check_passes(dataset, creator_user)
    def test_only_me_permission_others_cannot_access(self):
        """Test ONLY_ME permission denies access to non-creators."""
        # Create dataset with ONLY_ME permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            created_by="creator-456", permission=DatasetPermissionEnum.ONLY_ME
        )
        # Create normal user (not the creator)
        normal_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="normal-789", role=TenantAccountRole.NORMAL
        )
        # Non-creator should be denied access
        self._assert_permission_check_fails(dataset, normal_user)
    # ==================== ALL_TEAM Permission Tests ====================
    def test_all_team_permission_allows_access(self):
        """Test ALL_TEAM permission allows any team member to access the dataset."""
        # Create dataset with ALL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.ALL_TEAM)
        # Create different types of team members
        normal_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="normal-789", role=TenantAccountRole.NORMAL
        )
        editor_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="editor-456", role=TenantAccountRole.EDITOR
        )
        # All team members should have access
        self._assert_permission_check_passes(dataset, normal_user)
        self._assert_permission_check_passes(dataset, editor_user)
    # ==================== PARTIAL_TEAM Permission Tests ====================
    def test_partial_team_permission_creator_can_access(self, mock_dataset_service_dependencies):
        """Test PARTIAL_TEAM permission allows creator to access without database query."""
        # Create dataset with PARTIAL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM
        )
        # Create creator user
        creator_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="creator-456", role=TenantAccountRole.EDITOR
        )
        # Creator should have access without database query
        self._assert_permission_check_passes(dataset, creator_user)
        self._assert_database_query_not_called(mock_dataset_service_dependencies["db_session"])
    def test_partial_team_permission_with_explicit_permission(self, mock_dataset_service_dependencies):
        """Test PARTIAL_TEAM permission allows users with explicit permission records."""
        # Create dataset with PARTIAL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM)
        # Create normal user (not the creator)
        normal_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="normal-789", role=TenantAccountRole.NORMAL
        )
        # Mock database query to return a permission record
        mock_permission = DatasetPermissionTestDataFactory.create_dataset_permission_mock(
            dataset_id=dataset.id, account_id=normal_user.id
        )
        mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = mock_permission
        # User with explicit permission should have access
        self._assert_permission_check_passes(dataset, normal_user)
        self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id)
    def test_partial_team_permission_without_explicit_permission(self, mock_dataset_service_dependencies):
        """Test PARTIAL_TEAM permission denies users without explicit permission records."""
        # Create dataset with PARTIAL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM)
        # Create normal user (not the creator)
        normal_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="normal-789", role=TenantAccountRole.NORMAL
        )
        # Mock database query to return None (no permission record)
        mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None
        # User without explicit permission should be denied access
        self._assert_permission_check_fails(dataset, normal_user)
        self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, normal_user.id)
    def test_partial_team_permission_non_creator_without_permission_fails(self, mock_dataset_service_dependencies):
        """Test that non-creators without explicit permission are denied access to PARTIAL_TEAM datasets."""
        # Create dataset with PARTIAL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM
        )
        # Create a different user (not the creator)
        other_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="other-user-123", role=TenantAccountRole.NORMAL
        )
        # Mock database query to return None (no permission record)
        mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None
        # Non-creator without explicit permission should be denied access
        self._assert_permission_check_fails(dataset, other_user)
        self._assert_database_query_called(mock_dataset_service_dependencies["db_session"], dataset.id, other_user.id)
    # ==================== Enum Usage Tests ====================
    def test_partial_team_permission_uses_correct_enum(self):
        """Test that the method correctly uses DatasetPermissionEnum.PARTIAL_TEAM instead of string literals."""
        # Create dataset with PARTIAL_TEAM permission using enum
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            created_by="creator-456", permission=DatasetPermissionEnum.PARTIAL_TEAM
        )
        # Create creator user
        creator_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="creator-456", role=TenantAccountRole.EDITOR
        )
        # Creator should always have access regardless of permission level
        self._assert_permission_check_passes(dataset, creator_user)
    # ==================== Logging Tests ====================
    def test_permission_denied_logs_debug_message(self, mock_dataset_service_dependencies, mock_logging_dependencies):
        """Test that permission denied events are properly logged for debugging purposes."""
        # Create dataset with PARTIAL_TEAM permission
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(permission=DatasetPermissionEnum.PARTIAL_TEAM)
        # Create normal user (not the creator)
        normal_user = DatasetPermissionTestDataFactory.create_user_mock(
            user_id="normal-789", role=TenantAccountRole.NORMAL
        )
        # Mock database query to return None (no permission record)
        mock_dataset_service_dependencies["db_session"].query().filter_by().first.return_value = None
        # Attempt permission check (should fail)
        with pytest.raises(NoPermissionError):
            DatasetService.check_dataset_permission(dataset, normal_user)
        # Verify debug message was logged with correct user and dataset information
        mock_logging_dependencies["logging"].debug.assert_called_with(
            "User %s does not have permission to access dataset %s", normal_user.id, dataset.id
        )

View File

@ -75,6 +75,7 @@ import pytest
from werkzeug.exceptions import NotFound
from models.dataset import Dataset
from models.enums import TagType
from models.model import App, Tag, TagBinding
from services.tag_service import TagService
@ -102,7 +103,7 @@ class TagServiceTestDataFactory:
def create_tag_mock(
tag_id: str = "tag-123",
name: str = "Test Tag",
tag_type: str = "app",
tag_type: TagType = TagType.APP,
tenant_id: str = "tenant-123",
**kwargs,
) -> Mock:
@ -705,7 +706,7 @@ class TestTagServiceCRUD:
# Verify tag attributes
added_tag = mock_db_session.add.call_args[0][0]
assert added_tag.name == "New Tag", "Tag name should match"
assert added_tag.type == "app", "Tag type should match"
assert added_tag.type == TagType.APP, "Tag type should match"
assert added_tag.created_by == "user-123", "Created by should match current user"
assert added_tag.tenant_id == "tenant-123", "Tenant ID should match current tenant"

View File

@ -1,127 +0,0 @@
from unittest.mock import MagicMock
import pytest
from sqlalchemy.orm import Session
from models.model import App
from models.workflow import Workflow
from services.workflow_service import DraftWorkflowDeletionError, WorkflowInUseError, WorkflowService
@pytest.fixture
def workflow_setup():
    """Provide a WorkflowService wired to mocks, plus a workflow/app pair for deletion tests."""
    session_maker = MagicMock()
    service = WorkflowService(session_maker)
    mock_session = MagicMock(spec=Session)

    tenant_id = "test-tenant-id"
    workflow_id = "test-workflow-id"

    # Workflow under test: a published (non-draft) version that is not a tool.
    mock_workflow = MagicMock(spec=Workflow)
    mock_workflow.id = workflow_id
    mock_workflow.tenant_id = tenant_id
    mock_workflow.version = "1.0"  # Not a draft
    mock_workflow.tool_published = False  # Not published as a tool by default

    # App that could reference the workflow; detached by default.
    mock_app = MagicMock(spec=App)
    mock_app.id = "test-app-id"
    mock_app.name = "Test App"
    mock_app.workflow_id = None  # Not used by an app by default

    return {
        "workflow_service": service,
        "session": mock_session,
        "tenant_id": tenant_id,
        "workflow_id": workflow_id,
        "workflow": mock_workflow,
        "app": mock_app,
    }
def test_delete_workflow_success(workflow_setup):
    """A non-draft workflow that no app uses and no tool publishes is deleted."""
    session = workflow_setup["session"]
    # No WorkflowToolProvider row: the workflow is not published as a tool.
    session.query.return_value.where.return_value.first.return_value = None
    # scalar() answers the workflow lookup first, then the app lookup (no app).
    session.scalar = MagicMock(side_effect=[workflow_setup["workflow"], None])

    deleted = workflow_setup["workflow_service"].delete_workflow(
        session=session,
        workflow_id=workflow_setup["workflow_id"],
        tenant_id=workflow_setup["tenant_id"],
    )

    assert deleted is True
    session.delete.assert_called_once_with(workflow_setup["workflow"])
def test_delete_workflow_draft_error(workflow_setup):
    """Attempting to delete a draft workflow raises and deletes nothing."""
    session = workflow_setup["session"]
    # Mark the workflow as the draft version.
    workflow_setup["workflow"].version = "draft"
    session.scalar = MagicMock(return_value=workflow_setup["workflow"])

    with pytest.raises(DraftWorkflowDeletionError):
        workflow_setup["workflow_service"].delete_workflow(
            session=session,
            workflow_id=workflow_setup["workflow_id"],
            tenant_id=workflow_setup["tenant_id"],
        )

    # The draft must not be removed from the session.
    session.delete.assert_not_called()
def test_delete_workflow_in_use_by_app_error(workflow_setup):
    """Deleting a workflow that an app references raises WorkflowInUseError naming the app."""
    session = workflow_setup["session"]
    # Point the app at the workflow under test.
    workflow_setup["app"].workflow_id = workflow_setup["workflow_id"]
    # scalar() answers the workflow lookup first, then the app lookup.
    session.scalar = MagicMock(side_effect=[workflow_setup["workflow"], workflow_setup["app"]])

    with pytest.raises(WorkflowInUseError) as excinfo:
        workflow_setup["workflow_service"].delete_workflow(
            session=session,
            workflow_id=workflow_setup["workflow_id"],
            tenant_id=workflow_setup["tenant_id"],
        )

    # Error message should mention the in-use condition, and nothing is deleted.
    assert "Cannot delete workflow that is currently in use by app" in str(excinfo.value)
    session.delete.assert_not_called()
def test_delete_workflow_published_as_tool_error(workflow_setup):
    """Deleting a workflow that is published as a tool raises WorkflowInUseError."""
    from models.tools import WorkflowToolProvider

    session = workflow_setup["session"]
    # A tool-provider row exists for this workflow.
    tool_provider = MagicMock(spec=WorkflowToolProvider)
    session.query.return_value.where.return_value.first.return_value = tool_provider
    # scalar() answers the workflow lookup first, then the app lookup (no app).
    session.scalar = MagicMock(side_effect=[workflow_setup["workflow"], None])

    with pytest.raises(WorkflowInUseError) as excinfo:
        workflow_setup["workflow_service"].delete_workflow(
            session=session,
            workflow_id=workflow_setup["workflow_id"],
            tenant_id=workflow_setup["tenant_id"],
        )

    # Error message should mention tool publication, and nothing is deleted.
    assert "Cannot delete workflow that is published as a tool" in str(excinfo.value)
    session.delete.assert_not_called()

View File

@ -169,12 +169,6 @@ version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a0/87/1d7019d23891897cb076b2f7e3c81ab3c2ba91de3bb067196f675d60d34c/alibabacloud-credentials-api-1.0.0.tar.gz", hash = "sha256:8c340038d904f0218d7214a8f4088c31912bfcf279af2cbc7d9be4897a97dd2f", size = 2330, upload-time = "2025-01-13T05:53:04.931Z" }
[[package]]
name = "alibabacloud-endpoint-util"
version = "0.0.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/92/7d/8cc92a95c920e344835b005af6ea45a0db98763ad6ad19299d26892e6c8d/alibabacloud_endpoint_util-0.0.4.tar.gz", hash = "sha256:a593eb8ddd8168d5dc2216cd33111b144f9189fcd6e9ca20e48f358a739bbf90", size = 2813, upload-time = "2025-06-12T07:20:52.572Z" }
[[package]]
name = "alibabacloud-gateway-spi"
version = "0.0.3"
@ -186,69 +180,17 @@ sdist = { url = "https://files.pythonhosted.org/packages/ab/98/d7111245f17935bf7
[[package]]
name = "alibabacloud-gpdb20160503"
version = "3.8.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-endpoint-util" },
{ name = "alibabacloud-openapi-util" },
{ name = "alibabacloud-openplatform20191219" },
{ name = "alibabacloud-oss-sdk" },
{ name = "alibabacloud-oss-util" },
{ name = "alibabacloud-tea-fileform" },
{ name = "alibabacloud-tea-openapi" },
{ name = "alibabacloud-tea-util" },
]
sdist = { url = "https://files.pythonhosted.org/packages/15/6a/cc72e744e95c8f37fa6a84e66ae0b9b57a13ee97a0ef03d94c7127c31d75/alibabacloud_gpdb20160503-3.8.3.tar.gz", hash = "sha256:4dfcc0d9cff5a921d529d76f4bf97e2ceb9dc2fa53f00ab055f08509423d8e30", size = 155092, upload-time = "2024-07-18T17:09:42.438Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ab/36/bce41704b3bf59d607590ec73a42a254c5dea27c0f707aee11d20512a200/alibabacloud_gpdb20160503-3.8.3-py3-none-any.whl", hash = "sha256:06e1c46ce5e4e9d1bcae76e76e51034196c625799d06b2efec8d46a7df323fe8", size = 156097, upload-time = "2024-07-18T17:09:40.414Z" },
]
[[package]]
name = "alibabacloud-openapi-util"
version = "0.2.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-tea-util" },
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f6/50/5f41ab550d7874c623f6e992758429802c4b52a6804db437017e5387de33/alibabacloud_openapi_util-0.2.2.tar.gz", hash = "sha256:ebbc3906f554cb4bf8f513e43e8a33e8b6a3d4a0ef13617a0e14c3dda8ef52a8", size = 7201, upload-time = "2023-10-23T07:44:18.523Z" }
[[package]]
name = "alibabacloud-openplatform20191219"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-endpoint-util" },
{ name = "alibabacloud-openapi-util" },
{ name = "alibabacloud-tea-openapi" },
{ name = "alibabacloud-tea-util" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4f/bf/f7fa2f3657ed352870f442434cb2f27b7f70dcd52a544a1f3998eeaf6d71/alibabacloud_openplatform20191219-2.0.0.tar.gz", hash = "sha256:e67f4c337b7542538746592c6a474bd4ae3a9edccdf62e11a32ca61fad3c9020", size = 5038, upload-time = "2022-09-21T06:16:10.683Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/94/e5/18c75213551eeca9db1f6b41ddcc0bd87b5b6508c75a67f05cd8671847b4/alibabacloud_openplatform20191219-2.0.0-py3-none-any.whl", hash = "sha256:873821c45bca72a6c6ec7a906c9cb21554c122e88893bbac3986934dab30dd36", size = 5204, upload-time = "2022-09-21T06:16:07.844Z" },
]
[[package]]
name = "alibabacloud-oss-sdk"
version = "0.1.1"
version = "5.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-credentials" },
{ name = "alibabacloud-oss-util" },
{ name = "alibabacloud-tea-fileform" },
{ name = "alibabacloud-tea-util" },
{ name = "alibabacloud-tea-xml" },
{ name = "alibabacloud-tea-openapi" },
{ name = "darabonba-core" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7e/d1/f442dd026908fcf55340ca694bb1d027aa91e119e76ae2fbea62f2bde4f4/alibabacloud_oss_sdk-0.1.1.tar.gz", hash = "sha256:f51a368020d0964fcc0978f96736006f49f5ab6a4a4bf4f0b8549e2c659e7358", size = 46434, upload-time = "2025-04-22T12:40:41.717Z" }
[[package]]
name = "alibabacloud-oss-util"
version = "0.0.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-tea" },
sdist = { url = "https://files.pythonhosted.org/packages/b3/36/69333c7fb7fb5267f338371b14fdd8dbdd503717c97bbc7a6419d155ab4c/alibabacloud_gpdb20160503-5.1.0.tar.gz", hash = "sha256:086ec6d5e39b64f54d0e44bb3fd4fde1a4822a53eb9f6ff7464dff7d19b07b63", size = 295641, upload-time = "2026-03-19T10:09:02.444Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/7f/a91a2f9ad97c92fa9a6981587ea0ff789240cea05b17b17b7c244e5bac64/alibabacloud_gpdb20160503-5.1.0-py3-none-any.whl", hash = "sha256:580e4579285a54c7f04570782e0f60423a1997568684187fe88e4110acfb640e", size = 848784, upload-time = "2026-03-19T10:09:00.72Z" },
]
sdist = { url = "https://files.pythonhosted.org/packages/02/7c/d7e812b9968247a302573daebcfef95d0f9a718f7b4bfcca8d3d83e266be/alibabacloud_oss_util-0.0.6.tar.gz", hash = "sha256:d3ecec36632434bd509a113e8cf327dc23e830ac8d9dd6949926f4e334c8b5d6", size = 10008, upload-time = "2021-04-28T09:25:04.056Z" }
[[package]]
name = "alibabacloud-tea"
@ -260,15 +202,6 @@ dependencies = [
]
sdist = { url = "https://files.pythonhosted.org/packages/9a/7d/b22cb9a0d4f396ee0f3f9d7f26b76b9ed93d4101add7867a2c87ed2534f5/alibabacloud-tea-0.4.3.tar.gz", hash = "sha256:ec8053d0aa8d43ebe1deb632d5c5404339b39ec9a18a0707d57765838418504a", size = 8785, upload-time = "2025-03-24T07:34:42.958Z" }
[[package]]
name = "alibabacloud-tea-fileform"
version = "0.0.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-tea" },
]
sdist = { url = "https://files.pythonhosted.org/packages/22/8a/ef8ddf5ee0350984cad2749414b420369fe943e15e6d96b79be45367630e/alibabacloud_tea_fileform-0.0.5.tar.gz", hash = "sha256:fd00a8c9d85e785a7655059e9651f9e91784678881831f60589172387b968ee8", size = 3961, upload-time = "2021-04-28T09:22:54.56Z" }
[[package]]
name = "alibabacloud-tea-openapi"
version = "0.4.3"
@ -297,15 +230,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/72/9e/c394b4e2104766fb28a1e44e3ed36e4c7773b4d05c868e482be99d5635c9/alibabacloud_tea_util-0.3.14-py3-none-any.whl", hash = "sha256:10d3e5c340d8f7ec69dd27345eb2fc5a1dab07875742525edf07bbe86db93bfe", size = 6697, upload-time = "2025-11-19T06:01:07.355Z" },
]
[[package]]
name = "alibabacloud-tea-xml"
version = "0.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "alibabacloud-tea" },
]
sdist = { url = "https://files.pythonhosted.org/packages/32/eb/5e82e419c3061823f3feae9b5681588762929dc4da0176667297c2784c1a/alibabacloud_tea_xml-0.0.3.tar.gz", hash = "sha256:979cb51fadf43de77f41c69fc69c12529728919f849723eb0cd24eb7b048a90c", size = 3466, upload-time = "2025-07-01T08:04:55.144Z" }
[[package]]
name = "aliyun-log-python-sdk"
version = "0.9.37"
@ -570,28 +494,28 @@ wheels = [
[[package]]
name = "basedpyright"
version = "1.38.2"
version = "1.38.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "nodejs-wheel-binaries" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e4/a3/20aa7c4e83f2f614e0036300f3c352775dede0655c66814da16c37b661a9/basedpyright-1.38.2.tar.gz", hash = "sha256:b433b2b8ba745ed7520cdc79a29a03682f3fb00346d272ece5944e9e5e5daa92", size = 25277019, upload-time = "2026-02-26T11:18:43.594Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0f/58/7abba2c743571a42b2548f07aee556ebc1e4d0bc2b277aeba1ee6c83b0af/basedpyright-1.38.3.tar.gz", hash = "sha256:9725419786afbfad8a9539527f162da02d462afad440b0412fdb3f3cdf179b90", size = 25277430, upload-time = "2026-03-17T13:10:41.526Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ac/12/736cab83626fea3fe65cdafb3ef3d2ee9480c56723f2fd33921537289a5e/basedpyright-1.38.2-py3-none-any.whl", hash = "sha256:153481d37fd19f9e3adedc8629d1d071b10c5f5e49321fb026b74444b7c70e24", size = 12312475, upload-time = "2026-02-26T11:18:40.373Z" },
{ url = "https://files.pythonhosted.org/packages/2c/e3/3ebb5c23bd3abb5fc2053b8a06a889aa5c1cf8cff738c78cb6c1957e90cd/basedpyright-1.38.3-py3-none-any.whl", hash = "sha256:1f15c2e489c67d6c5e896c24b6a63251195c04223a55e4568b8f8e8ed49ca830", size = 12313363, upload-time = "2026-03-17T13:10:47.344Z" },
]
[[package]]
name = "bce-python-sdk"
version = "0.9.63"
version = "0.9.64"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "future" },
{ name = "pycryptodome" },
{ name = "six" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/ab/4c2927b01a97562af6a296b722eee79658335795f341a395a12742d5e1a3/bce_python_sdk-0.9.63.tar.gz", hash = "sha256:0c80bc3ac128a0a144bae3b8dff1f397f42c30b36f7677e3a39d8df8e77b1088", size = 284419, upload-time = "2026-03-06T14:54:06.592Z" }
sdist = { url = "https://files.pythonhosted.org/packages/61/33/047e9c1a6c97e0cd4d93a6490abd8fbc2ccd13569462fc0228699edc08bc/bce_python_sdk-0.9.64.tar.gz", hash = "sha256:901bf787c26ad35855a80d65e58d7584c8541f7f0f2af20847830e572e5b622e", size = 287125, upload-time = "2026-03-17T11:24:29.345Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/67/a4/501e978776c7060aa8ba77e68536597e754d938bcdbe1826618acebfbddf/bce_python_sdk-0.9.63-py3-none-any.whl", hash = "sha256:ec66eee8807c6aa4036412592da7e8c9e2cd7fdec494190986288ac2195d8276", size = 400305, upload-time = "2026-03-06T14:53:52.887Z" },
{ url = "https://files.pythonhosted.org/packages/48/7f/dd289582f37ab4effea47b2a8503880db4781ca0fc8e0a8ed5ff493359e5/bce_python_sdk-0.9.64-py3-none-any.whl", hash = "sha256:eaad97e4f0e7d613ae978da3cdc5294e9f724ffca2735f79820037fa1317cd6d", size = 402233, upload-time = "2026-03-17T11:24:24.673Z" },
]
[[package]]
@ -660,14 +584,14 @@ wheels = [
[[package]]
name = "bleach"
version = "6.2.0"
version = "6.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "webencodings" },
]
sdist = { url = "https://files.pythonhosted.org/packages/76/9a/0e33f5054c54d349ea62c277191c020c2d6ef1d65ab2cb1993f91ec846d1/bleach-6.2.0.tar.gz", hash = "sha256:123e894118b8a599fd80d3ec1a6d4cc7ce4e5882b1317a7e1ba69b56e95f991f", size = 203083, upload-time = "2024-10-29T18:30:40.477Z" }
sdist = { url = "https://files.pythonhosted.org/packages/07/18/3c8523962314be6bf4c8989c79ad9531c825210dd13a8669f6b84336e8bd/bleach-6.3.0.tar.gz", hash = "sha256:6f3b91b1c0a02bb9a78b5a454c92506aa0fdf197e1d5e114d2e00c6f64306d22", size = 203533, upload-time = "2025-10-27T17:57:39.211Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fc/55/96142937f66150805c25c4d0f31ee4132fd33497753400734f9dfdcbdc66/bleach-6.2.0-py3-none-any.whl", hash = "sha256:117d9c6097a7c3d22fd578fcd8d35ff1e125df6736f554da4e432fdd63f31e5e", size = 163406, upload-time = "2024-10-29T18:30:38.186Z" },
{ url = "https://files.pythonhosted.org/packages/cd/3a/577b549de0cc09d95f11087ee63c739bba856cd3952697eec4c4bb91350a/bleach-6.3.0-py3-none-any.whl", hash = "sha256:fe10ec77c93ddf3d13a73b035abaac7a9f5e436513864ccdad516693213c65d6", size = 164437, upload-time = "2025-10-27T17:57:37.538Z" },
]
[[package]]
@ -706,30 +630,30 @@ wheels = [
[[package]]
name = "boto3"
version = "1.42.68"
version = "1.42.73"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore" },
{ name = "jmespath" },
{ name = "s3transfer" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/ae/60c642aa5413e560b671da825329f510b29a77274ed0f580bde77562294d/boto3-1.42.68.tar.gz", hash = "sha256:3f349f967ab38c23425626d130962bcb363e75f042734fe856ea8c5a00eef03c", size = 112761, upload-time = "2026-03-13T19:32:17.137Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/8b/d00575be514744ca4839e7d85bf4a8a3c7b6b4574433291e58d14c68ae09/boto3-1.42.73.tar.gz", hash = "sha256:d37b58d6cd452ca808dd6823ae19ca65b6244096c5125ef9052988b337298bae", size = 112775, upload-time = "2026-03-20T19:39:52.814Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/f6/dc6e993479dbb597d68223fbf61cb026511737696b15bd7d2a33e9b2c24f/boto3-1.42.68-py3-none-any.whl", hash = "sha256:dbff353eb7dc93cbddd7926ed24793e0174c04adbe88860dfa639568442e4962", size = 140556, upload-time = "2026-03-13T19:32:14.951Z" },
{ url = "https://files.pythonhosted.org/packages/aa/05/1fcf03d90abaa3d0b42a6bfd10231dd709493ecbacf794aa2eea5eae6841/boto3-1.42.73-py3-none-any.whl", hash = "sha256:1f81b79b873f130eeab14bb556417a7c66d38f3396b7f2fe3b958b3f9094f455", size = 140556, upload-time = "2026-03-20T19:39:50.298Z" },
]
[[package]]
name = "boto3-stubs"
version = "1.42.68"
version = "1.42.73"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "botocore-stubs" },
{ name = "types-s3transfer" },
{ name = "typing-extensions", marker = "python_full_version < '3.12'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/dd4b0c95ff008bed5a35ab411452ece121b355539d2a0b6dcd62a0c47be5/boto3_stubs-1.42.68.tar.gz", hash = "sha256:96ad1020735619483fb9b4da7a5e694b460bf2e18f84a34d5d175d0ffe8c4653", size = 101372, upload-time = "2026-03-13T19:49:54.867Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b9/c3/fcc47102c63278af25ad57c93d97dc393f4dbc54c0117a29c78f2b96ec1e/boto3_stubs-1.42.73.tar.gz", hash = "sha256:36f625769b5505c4bc627f16244b98de9e10dae3ac36f1aa0f0ebe2f201dc138", size = 101373, upload-time = "2026-03-20T19:59:51.463Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/15/3ca5848917214a168134512a5b45f856a56e913659888947a052e02031b5/boto3_stubs-1.42.68-py3-none-any.whl", hash = "sha256:ed7f98334ef7b2377fa8532190e63dc2c6d1dc895e3d7cb3d6d1c83771b81bf6", size = 70011, upload-time = "2026-03-13T19:49:42.801Z" },
{ url = "https://files.pythonhosted.org/packages/4b/57/d570ba61a2a0c7fe0c8667e41269a0480293cb53e1786d6661a2bd827fc5/boto3_stubs-1.42.73-py3-none-any.whl", hash = "sha256:bd658429069d8215247fc3abc003220cd875c24ab6eda7b3405090408afaacdf", size = 70009, upload-time = "2026-03-20T19:59:43.786Z" },
]
[package.optional-dependencies]
@ -739,16 +663,16 @@ bedrock-runtime = [
[[package]]
name = "botocore"
version = "1.42.68"
version = "1.42.73"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jmespath" },
{ name = "python-dateutil" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3f/22/87502d5fbbfa8189406a617b30b1e2a3dc0ab2669f7268e91b385c1c1c7a/botocore-1.42.68.tar.gz", hash = "sha256:3951c69e12ac871dda245f48dac5c7dd88ea1bfdd74a8879ec356cf2874b806a", size = 14994514, upload-time = "2026-03-13T19:32:03.577Z" }
sdist = { url = "https://files.pythonhosted.org/packages/28/23/0c88ca116ef63b1ae77c901cd5d2095d22a8dbde9e80df74545db4a061b4/botocore-1.42.73.tar.gz", hash = "sha256:575858641e4949aaf2af1ced145b8524529edf006d075877af6b82ff96ad854c", size = 15008008, upload-time = "2026-03-20T19:39:40.082Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3c/2a/1428f6594799780fe6ee845d8e6aeffafe026cd16a70c878684e2dcbbfc8/botocore-1.42.68-py3-none-any.whl", hash = "sha256:9df7da26374601f890e2f115bfa573d65bf15b25fe136bb3aac809f6145f52ab", size = 14668816, upload-time = "2026-03-13T19:31:58.572Z" },
{ url = "https://files.pythonhosted.org/packages/8e/65/971f3d55015f4d133a6ff3ad74cd39f4b8dd8f53f7775a3c2ad378ea5145/botocore-1.42.73-py3-none-any.whl", hash = "sha256:7b62e2a12f7a1b08eb7360eecd23bb16fe3b7ab7f5617cf91b25476c6f86a0fe", size = 14681861, upload-time = "2026-03-20T19:39:35.341Z" },
]
[[package]]
@ -1290,41 +1214,41 @@ wheels = [
[[package]]
name = "coverage"
version = "7.13.4"
version = "7.13.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/24/56/95b7e30fa389756cb56630faa728da46a27b8c6eb46f9d557c68fff12b65/coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91", size = 827239, upload-time = "2026-02-09T12:59:03.86Z" }
sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/ad/b59e5b451cf7172b8d1043dc0fa718f23aab379bc1521ee13d4bd9bfa960/coverage-7.13.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d490ba50c3f35dd7c17953c68f3270e7ccd1c6642e2d2afe2d8e720b98f5a053", size = 219278, upload-time = "2026-02-09T12:56:31.673Z" },
{ url = "https://files.pythonhosted.org/packages/f1/17/0cb7ca3de72e5f4ef2ec2fa0089beafbcaaaead1844e8b8a63d35173d77d/coverage-7.13.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:19bc3c88078789f8ef36acb014d7241961dbf883fd2533d18cb1e7a5b4e28b11", size = 219783, upload-time = "2026-02-09T12:56:33.104Z" },
{ url = "https://files.pythonhosted.org/packages/ab/63/325d8e5b11e0eaf6d0f6a44fad444ae58820929a9b0de943fa377fe73e85/coverage-7.13.4-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3998e5a32e62fdf410c0dbd3115df86297995d6e3429af80b8798aad894ca7aa", size = 250200, upload-time = "2026-02-09T12:56:34.474Z" },
{ url = "https://files.pythonhosted.org/packages/76/53/c16972708cbb79f2942922571a687c52bd109a7bd51175aeb7558dff2236/coverage-7.13.4-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:8e264226ec98e01a8e1054314af91ee6cde0eacac4f465cc93b03dbe0bce2fd7", size = 252114, upload-time = "2026-02-09T12:56:35.749Z" },
{ url = "https://files.pythonhosted.org/packages/eb/c2/7ab36d8b8cc412bec9ea2d07c83c48930eb4ba649634ba00cb7e4e0f9017/coverage-7.13.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a3aa4e7b9e416774b21797365b358a6e827ffadaaca81b69ee02946852449f00", size = 254220, upload-time = "2026-02-09T12:56:37.796Z" },
{ url = "https://files.pythonhosted.org/packages/d6/4d/cf52c9a3322c89a0e6febdfbc83bb45c0ed3c64ad14081b9503adee702e7/coverage-7.13.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:71ca20079dd8f27fcf808817e281e90220475cd75115162218d0e27549f95fef", size = 256164, upload-time = "2026-02-09T12:56:39.016Z" },
{ url = "https://files.pythonhosted.org/packages/78/e9/eb1dd17bd6de8289df3580e967e78294f352a5df8a57ff4671ee5fc3dcd0/coverage-7.13.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e2f25215f1a359ab17320b47bcdaca3e6e6356652e8256f2441e4ef972052903", size = 250325, upload-time = "2026-02-09T12:56:40.668Z" },
{ url = "https://files.pythonhosted.org/packages/71/07/8c1542aa873728f72267c07278c5cc0ec91356daf974df21335ccdb46368/coverage-7.13.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d65b2d373032411e86960604dc4edac91fdfb5dca539461cf2cbe78327d1e64f", size = 251913, upload-time = "2026-02-09T12:56:41.97Z" },
{ url = "https://files.pythonhosted.org/packages/74/d7/c62e2c5e4483a748e27868e4c32ad3daa9bdddbba58e1bc7a15e252baa74/coverage-7.13.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94eb63f9b363180aff17de3e7c8760c3ba94664ea2695c52f10111244d16a299", size = 249974, upload-time = "2026-02-09T12:56:43.323Z" },
{ url = "https://files.pythonhosted.org/packages/98/9f/4c5c015a6e98ced54efd0f5cf8d31b88e5504ecb6857585fc0161bb1e600/coverage-7.13.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e856bf6616714c3a9fbc270ab54103f4e685ba236fa98c054e8f87f266c93505", size = 253741, upload-time = "2026-02-09T12:56:45.155Z" },
{ url = "https://files.pythonhosted.org/packages/bd/59/0f4eef89b9f0fcd9633b5d350016f54126ab49426a70ff4c4e87446cabdc/coverage-7.13.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:65dfcbe305c3dfe658492df2d85259e0d79ead4177f9ae724b6fb245198f55d6", size = 249695, upload-time = "2026-02-09T12:56:46.636Z" },
{ url = "https://files.pythonhosted.org/packages/b5/2c/b7476f938deb07166f3eb281a385c262675d688ff4659ad56c6c6b8e2e70/coverage-7.13.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b507778ae8a4c915436ed5c2e05b4a6cecfa70f734e19c22a005152a11c7b6a9", size = 250599, upload-time = "2026-02-09T12:56:48.13Z" },
{ url = "https://files.pythonhosted.org/packages/b8/34/c3420709d9846ee3785b9f2831b4d94f276f38884032dca1457fa83f7476/coverage-7.13.4-cp311-cp311-win32.whl", hash = "sha256:784fc3cf8be001197b652d51d3fd259b1e2262888693a4636e18879f613a62a9", size = 221780, upload-time = "2026-02-09T12:56:50.479Z" },
{ url = "https://files.pythonhosted.org/packages/61/08/3d9c8613079d2b11c185b865de9a4c1a68850cfda2b357fae365cf609f29/coverage-7.13.4-cp311-cp311-win_amd64.whl", hash = "sha256:2421d591f8ca05b308cf0092807308b2facbefe54af7c02ac22548b88b95c98f", size = 222715, upload-time = "2026-02-09T12:56:51.815Z" },
{ url = "https://files.pythonhosted.org/packages/18/1a/54c3c80b2f056164cc0a6cdcb040733760c7c4be9d780fe655f356f433e4/coverage-7.13.4-cp311-cp311-win_arm64.whl", hash = "sha256:79e73a76b854d9c6088fe5d8b2ebe745f8681c55f7397c3c0a016192d681045f", size = 221385, upload-time = "2026-02-09T12:56:53.194Z" },
{ url = "https://files.pythonhosted.org/packages/d1/81/4ce2fdd909c5a0ed1f6dedb88aa57ab79b6d1fbd9b588c1ac7ef45659566/coverage-7.13.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:02231499b08dabbe2b96612993e5fc34217cdae907a51b906ac7fca8027a4459", size = 219449, upload-time = "2026-02-09T12:56:54.889Z" },
{ url = "https://files.pythonhosted.org/packages/5d/96/5238b1efc5922ddbdc9b0db9243152c09777804fb7c02ad1741eb18a11c0/coverage-7.13.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40aa8808140e55dc022b15d8aa7f651b6b3d68b365ea0398f1441e0b04d859c3", size = 219810, upload-time = "2026-02-09T12:56:56.33Z" },
{ url = "https://files.pythonhosted.org/packages/78/72/2f372b726d433c9c35e56377cf1d513b4c16fe51841060d826b95caacec1/coverage-7.13.4-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5b856a8ccf749480024ff3bd7310adaef57bf31fd17e1bfc404b7940b6986634", size = 251308, upload-time = "2026-02-09T12:56:57.858Z" },
{ url = "https://files.pythonhosted.org/packages/5d/a0/2ea570925524ef4e00bb6c82649f5682a77fac5ab910a65c9284de422600/coverage-7.13.4-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2c048ea43875fbf8b45d476ad79f179809c590ec7b79e2035c662e7afa3192e3", size = 254052, upload-time = "2026-02-09T12:56:59.754Z" },
{ url = "https://files.pythonhosted.org/packages/e8/ac/45dc2e19a1939098d783c846e130b8f862fbb50d09e0af663988f2f21973/coverage-7.13.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b7b38448866e83176e28086674fe7368ab8590e4610fb662b44e345b86d63ffa", size = 255165, upload-time = "2026-02-09T12:57:01.287Z" },
{ url = "https://files.pythonhosted.org/packages/2d/4d/26d236ff35abc3b5e63540d3386e4c3b192168c1d96da5cb2f43c640970f/coverage-7.13.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:de6defc1c9badbf8b9e67ae90fd00519186d6ab64e5cc5f3d21359c2a9b2c1d3", size = 257432, upload-time = "2026-02-09T12:57:02.637Z" },
{ url = "https://files.pythonhosted.org/packages/ec/55/14a966c757d1348b2e19caf699415a2a4c4f7feaa4bbc6326a51f5c7dd1b/coverage-7.13.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7eda778067ad7ffccd23ecffce537dface96212576a07924cbf0d8799d2ded5a", size = 251716, upload-time = "2026-02-09T12:57:04.056Z" },
{ url = "https://files.pythonhosted.org/packages/77/33/50116647905837c66d28b2af1321b845d5f5d19be9655cb84d4a0ea806b4/coverage-7.13.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e87f6c587c3f34356c3759f0420693e35e7eb0e2e41e4c011cb6ec6ecbbf1db7", size = 253089, upload-time = "2026-02-09T12:57:05.503Z" },
{ url = "https://files.pythonhosted.org/packages/c2/b4/8efb11a46e3665d92635a56e4f2d4529de6d33f2cb38afd47d779d15fc99/coverage-7.13.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8248977c2e33aecb2ced42fef99f2d319e9904a36e55a8a68b69207fb7e43edc", size = 251232, upload-time = "2026-02-09T12:57:06.879Z" },
{ url = "https://files.pythonhosted.org/packages/51/24/8cd73dd399b812cc76bb0ac260e671c4163093441847ffe058ac9fda1e32/coverage-7.13.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:25381386e80ae727608e662474db537d4df1ecd42379b5ba33c84633a2b36d47", size = 255299, upload-time = "2026-02-09T12:57:08.245Z" },
{ url = "https://files.pythonhosted.org/packages/03/94/0a4b12f1d0e029ce1ccc1c800944a9984cbe7d678e470bb6d3c6bc38a0da/coverage-7.13.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:ee756f00726693e5ba94d6df2bdfd64d4852d23b09bb0bc700e3b30e6f333985", size = 250796, upload-time = "2026-02-09T12:57:10.142Z" },
{ url = "https://files.pythonhosted.org/packages/73/44/6002fbf88f6698ca034360ce474c406be6d5a985b3fdb3401128031eef6b/coverage-7.13.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fdfc1e28e7c7cdce44985b3043bc13bbd9c747520f94a4d7164af8260b3d91f0", size = 252673, upload-time = "2026-02-09T12:57:12.197Z" },
{ url = "https://files.pythonhosted.org/packages/de/c6/a0279f7c00e786be75a749a5674e6fa267bcbd8209cd10c9a450c655dfa7/coverage-7.13.4-cp312-cp312-win32.whl", hash = "sha256:01d4cbc3c283a17fc1e42d614a119f7f438eabb593391283adca8dc86eff1246", size = 221990, upload-time = "2026-02-09T12:57:14.085Z" },
{ url = "https://files.pythonhosted.org/packages/77/4e/c0a25a425fcf5557d9abd18419c95b63922e897bc86c1f327f155ef234a9/coverage-7.13.4-cp312-cp312-win_amd64.whl", hash = "sha256:9401ebc7ef522f01d01d45532c68c5ac40fb27113019b6b7d8b208f6e9baa126", size = 222800, upload-time = "2026-02-09T12:57:15.944Z" },
{ url = "https://files.pythonhosted.org/packages/47/ac/92da44ad9a6f4e3a7debd178949d6f3769bedca33830ce9b1dcdab589a37/coverage-7.13.4-cp312-cp312-win_arm64.whl", hash = "sha256:b1ec7b6b6e93255f952e27ab58fbc68dcc468844b16ecbee881aeb29b6ab4d8d", size = 221415, upload-time = "2026-02-09T12:57:17.497Z" },
{ url = "https://files.pythonhosted.org/packages/0d/4a/331fe2caf6799d591109bb9c08083080f6de90a823695d412a935622abb2/coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0", size = 211242, upload-time = "2026-02-09T12:59:02.032Z" },
{ url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" },
{ url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" },
{ url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" },
{ url = "https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" },
{ url = "https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" },
{ url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" },
{ url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" },
{ url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" },
{ url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time = "2026-03-17T10:30:29.481Z" },
{ url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", size = 253843, upload-time = "2026-03-17T10:30:31.301Z" },
{ url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" },
{ url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" },
{ url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" },
{ url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" },
{ url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" },
{ url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" },
{ url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" },
{ url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" },
{ url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" },
{ url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" },
{ url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" },
{ url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" },
{ url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" },
{ url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" },
{ url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" },
{ url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" },
{ url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" },
{ url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" },
{ url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" },
{ url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" },
{ url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" },
]
[package.optional-dependencies]
@ -1605,6 +1529,7 @@ dependencies = [
{ name = "pydantic-extra-types" },
{ name = "pydantic-settings" },
{ name = "pyjwt" },
{ name = "pypandoc" },
{ name = "pypdfium2" },
{ name = "python-docx" },
{ name = "python-dotenv" },
@ -1743,8 +1668,8 @@ requires-dist = [
{ name = "arize-phoenix-otel", specifier = "~=0.15.0" },
{ name = "azure-identity", specifier = "==1.25.3" },
{ name = "beautifulsoup4", specifier = "==4.14.3" },
{ name = "bleach", specifier = "~=6.2.0" },
{ name = "boto3", specifier = "==1.42.68" },
{ name = "bleach", specifier = "~=6.3.0" },
{ name = "boto3", specifier = "==1.42.73" },
{ name = "bs4", specifier = "~=0.0.1" },
{ name = "cachetools", specifier = "~=5.3.0" },
{ name = "celery", specifier = "~=5.6.2" },
@ -1762,7 +1687,7 @@ requires-dist = [
{ name = "gevent", specifier = "~=25.9.1" },
{ name = "gmpy2", specifier = "~=2.3.0" },
{ name = "google-api-core", specifier = ">=2.19.1" },
{ name = "google-api-python-client", specifier = "==2.192.0" },
{ name = "google-api-python-client", specifier = "==2.193.0" },
{ name = "google-auth", specifier = ">=2.47.0" },
{ name = "google-auth-httplib2", specifier = "==0.3.0" },
{ name = "google-cloud-aiplatform", specifier = ">=1.123.0" },
@ -1775,7 +1700,7 @@ requires-dist = [
{ name = "jsonschema", specifier = ">=4.25.1" },
{ name = "langfuse", specifier = "~=2.51.3" },
{ name = "langsmith", specifier = "~=0.7.16" },
{ name = "litellm", specifier = "==1.82.2" },
{ name = "litellm", specifier = "==1.82.6" },
{ name = "markdown", specifier = "~=3.10.2" },
{ name = "mlflow-skinny", specifier = ">=3.0.0" },
{ name = "numpy", specifier = "~=1.26.4" },
@ -1807,18 +1732,19 @@ requires-dist = [
{ name = "pydantic-extra-types", specifier = "~=2.11.0" },
{ name = "pydantic-settings", specifier = "~=2.13.1" },
{ name = "pyjwt", specifier = "~=2.12.0" },
{ name = "pypandoc", specifier = "~=1.13" },
{ name = "pypdfium2", specifier = "==5.6.0" },
{ name = "python-docx", specifier = "~=1.2.0" },
{ name = "python-dotenv", specifier = "==1.2.2" },
{ name = "pyyaml", specifier = "~=6.0.1" },
{ name = "readabilipy", specifier = "~=0.3.0" },
{ name = "redis", extras = ["hiredis"], specifier = "~=7.3.0" },
{ name = "resend", specifier = "~=2.23.0" },
{ name = "resend", specifier = "~=2.26.0" },
{ name = "sendgrid", specifier = "~=6.12.3" },
{ name = "sentry-sdk", extras = ["flask"], specifier = "~=2.54.0" },
{ name = "sentry-sdk", extras = ["flask"], specifier = "~=2.55.0" },
{ name = "sqlalchemy", specifier = "~=2.0.29" },
{ name = "sseclient-py", specifier = "~=1.9.0" },
{ name = "starlette", specifier = "==0.52.1" },
{ name = "starlette", specifier = "==1.0.0" },
{ name = "tiktoken", specifier = "~=0.12.0" },
{ name = "transformers", specifier = "~=5.3.0" },
{ name = "unstructured", extras = ["docx", "epub", "md", "ppt", "pptx"], specifier = "~=0.21.5" },
@ -1844,7 +1770,7 @@ dev = [
{ name = "pyrefly", specifier = ">=0.55.0" },
{ name = "pytest", specifier = "~=9.0.2" },
{ name = "pytest-benchmark", specifier = "~=5.2.3" },
{ name = "pytest-cov", specifier = "~=7.0.0" },
{ name = "pytest-cov", specifier = "~=7.1.0" },
{ name = "pytest-env", specifier = "~=1.6.0" },
{ name = "pytest-mock", specifier = "~=3.15.1" },
{ name = "pytest-timeout", specifier = ">=2.4.0" },
@ -1910,7 +1836,7 @@ tools = [
{ name = "nltk", specifier = "~=3.9.1" },
]
vdb = [
{ name = "alibabacloud-gpdb20160503", specifier = "~=3.8.0" },
{ name = "alibabacloud-gpdb20160503", specifier = "~=5.1.0" },
{ name = "alibabacloud-tea-openapi", specifier = "~=0.4.3" },
{ name = "chromadb", specifier = "==0.5.20" },
{ name = "clickhouse-connect", specifier = "~=0.14.1" },
@ -2499,7 +2425,7 @@ grpc = [
[[package]]
name = "google-api-python-client"
version = "2.192.0"
version = "2.193.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2508,9 +2434,9 @@ dependencies = [
{ name = "httplib2" },
{ name = "uritemplate" },
]
sdist = { url = "https://files.pythonhosted.org/packages/85/d8/489052a40935e45b9b5b3d6accc14b041360c1507bdc659c2e1a19aaa3ff/google_api_python_client-2.192.0.tar.gz", hash = "sha256:d48cfa6078fadea788425481b007af33fe0ab6537b78f37da914fb6fc112eb27", size = 14209505, upload-time = "2026-03-05T15:17:01.598Z" }
sdist = { url = "https://files.pythonhosted.org/packages/90/f4/e14b6815d3b1885328dd209676a3a4c704882743ac94e18ef0093894f5c8/google_api_python_client-2.193.0.tar.gz", hash = "sha256:8f88d16e89d11341e0a8b199cafde0fb7e6b44260dffb88d451577cbd1bb5d33", size = 14281006, upload-time = "2026-03-17T18:25:29.415Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/76/ec4128f00fefb9011635ae2abc67d7dacd05c8559378f8f05f0c907c38d8/google_api_python_client-2.192.0-py3-none-any.whl", hash = "sha256:63a57d4457cd97df1d63eb89c5fda03c5a50588dcbc32c0115dd1433c08f4b62", size = 14783267, upload-time = "2026-03-05T15:16:58.804Z" },
{ url = "https://files.pythonhosted.org/packages/f0/6d/fe75167797790a56d17799b75e1129bb93f7ff061efc7b36e9731bd4be2b/google_api_python_client-2.193.0-py3-none-any.whl", hash = "sha256:c42aa324b822109901cfecab5dc4fc3915d35a7b376835233c916c70610322db", size = 14856490, upload-time = "2026-03-17T18:25:26.608Z" },
]
[[package]]
@ -2546,7 +2472,7 @@ wheels = [
[[package]]
name = "google-cloud-aiplatform"
version = "1.141.0"
version = "1.142.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docstring-parser" },
@ -2562,9 +2488,9 @@ dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ac/dc/1209c7aab43bd7233cf631165a3b1b4284d22fc7fe7387c66228d07868ab/google_cloud_aiplatform-1.141.0.tar.gz", hash = "sha256:e3b1cdb28865dd862aac9c685dfc5ac076488705aba0a5354016efadcddd59c6", size = 10152688, upload-time = "2026-03-10T22:20:08.692Z" }
sdist = { url = "https://files.pythonhosted.org/packages/41/0d/3063a0512d60cf18854a279e00ccb796429545464345ef821cf77cb93d05/google_cloud_aiplatform-1.142.0.tar.gz", hash = "sha256:87b49e002703dc14885093e9b264587db84222bef5f70f5a442d03f41beecdd1", size = 10207993, upload-time = "2026-03-20T22:49:13.797Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6a/fc/428af69a69ff2e477e7f5e12d227b31fe5790f1a8234aacd54297f49c836/google_cloud_aiplatform-1.141.0-py2.py3-none-any.whl", hash = "sha256:6bd25b4d514c40b8181ca703e1b313ad6d0454ab8006fc9907fb3e9f672f31d1", size = 8358409, upload-time = "2026-03-10T22:20:04.871Z" },
{ url = "https://files.pythonhosted.org/packages/59/8b/f29646d3fa940f0e38cfcc12137f4851856b50d7486a3c05103ebc78d82d/google_cloud_aiplatform-1.142.0-py2.py3-none-any.whl", hash = "sha256:17c91db9b613cbbafb2c36335b123686aeb2b4b8448be5134b565ae07165a39a", size = 8388991, upload-time = "2026-03-20T22:49:10.334Z" },
]
[[package]]
@ -2617,7 +2543,7 @@ wheels = [
[[package]]
name = "google-cloud-storage"
version = "3.9.0"
version = "3.10.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "google-api-core" },
@ -2627,9 +2553,9 @@ dependencies = [
{ name = "google-resumable-media" },
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/b1/4f0798e88285b50dfc60ed3a7de071def538b358db2da468c2e0deecbb40/google_cloud_storage-3.9.0.tar.gz", hash = "sha256:f2d8ca7db2f652be757e92573b2196e10fbc09649b5c016f8b422ad593c641cc", size = 17298544, upload-time = "2026-02-02T13:36:34.119Z" }
sdist = { url = "https://files.pythonhosted.org/packages/7a/e3/747759eebc72e420c25903d6bc231d0ceb110b66ac7e6ee3f350417152cd/google_cloud_storage-3.10.0.tar.gz", hash = "sha256:1aeebf097c27d718d84077059a28d7e87f136f3700212215f1ceeae1d1c5d504", size = 17309829, upload-time = "2026-03-18T15:54:11.875Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/46/0b/816a6ae3c9fd096937d2e5f9670558908811d57d59ddf69dd4b83b326fd1/google_cloud_storage-3.9.0-py3-none-any.whl", hash = "sha256:2dce75a9e8b3387078cbbdad44757d410ecdb916101f8ba308abf202b6968066", size = 321324, upload-time = "2026-02-02T13:36:32.271Z" },
{ url = "https://files.pythonhosted.org/packages/29/e2/d58442f4daee5babd9255cf492a1f3d114357164072f8339a22a3ad460a2/google_cloud_storage-3.10.0-py3-none-any.whl", hash = "sha256:0072e7783b201e45af78fd9779894cdb6bec2bf922ee932f3fcc16f8bce9b9a3", size = 324382, upload-time = "2026-03-18T15:54:10.091Z" },
]
[[package]]
@ -3458,7 +3384,7 @@ wheels = [
[[package]]
name = "langsmith"
version = "0.7.17"
version = "0.7.22"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
@ -3471,9 +3397,9 @@ dependencies = [
{ name = "xxhash" },
{ name = "zstandard" },
]
sdist = { url = "https://files.pythonhosted.org/packages/71/79/81041dde07a974e728db7def23c1c7255950b8874102925cc77093bc847d/langsmith-0.7.17.tar.gz", hash = "sha256:6c1b0c2863cdd6636d2a58b8d5b1b80060703d98cac2593f4233e09ac25b5a9d", size = 1132228, upload-time = "2026-03-12T20:41:10.808Z" }
sdist = { url = "https://files.pythonhosted.org/packages/be/2a/2d5e6c67396fd228670af278c4da7bd6db2b8d11deaf6f108490b6d3f561/langsmith-0.7.22.tar.gz", hash = "sha256:35bfe795d648b069958280760564632fd28ebc9921c04f3e209c0db6a6c7dc04", size = 1134923, upload-time = "2026-03-19T22:45:23.492Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/34/31/62689d57f4d25792bd6a3c05c868771899481be2f3e31f9e71d31e1ac4ab/langsmith-0.7.17-py3-none-any.whl", hash = "sha256:cbec10460cb6c6ecc94c18c807be88a9984838144ae6c4693c9f859f378d7d02", size = 359147, upload-time = "2026-03-12T20:41:08.758Z" },
{ url = "https://files.pythonhosted.org/packages/1a/94/1f5d72655ab6534129540843776c40eff757387b88e798d8b3bf7e313fd4/langsmith-0.7.22-py3-none-any.whl", hash = "sha256:6e9d5148314d74e86748cb9d3898632cad0320c9323d95f70f969e5bc078eee4", size = 359927, upload-time = "2026-03-19T22:45:21.603Z" },
]
[[package]]
@ -3521,7 +3447,7 @@ wheels = [
[[package]]
name = "litellm"
version = "1.82.2"
version = "1.82.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "aiohttp" },
@ -3537,9 +3463,9 @@ dependencies = [
{ name = "tiktoken" },
{ name = "tokenizers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/60/12/010a86643f12ac0b004032d5927c260094299a84ed38b5ed20a8f8c7e3c4/litellm-1.82.2.tar.gz", hash = "sha256:f5f4c4049f344a88bf80b2e421bb927807687c99624515d7ff4152d533ec9dcb", size = 17353218, upload-time = "2026-03-13T21:24:24.5Z" }
sdist = { url = "https://files.pythonhosted.org/packages/29/75/1c537aa458426a9127a92bc2273787b2f987f4e5044e21f01f2eed5244fd/litellm-1.82.6.tar.gz", hash = "sha256:2aa1c2da21fe940c33613aa447119674a3ad4d2ad5eb064e4d5ce5ee42420136", size = 17414147, upload-time = "2026-03-22T06:36:00.452Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/96/e4/87e3ca82a8bf6e6bfffb42a539a1350dd6ced1b7169397bd439ba56fde10/litellm-1.82.2-py3-none-any.whl", hash = "sha256:641ed024774fa3d5b4dd9347f0efb1e31fa422fba2a6500aabedee085d1194cb", size = 15524224, upload-time = "2026-03-13T21:24:21.288Z" },
{ url = "https://files.pythonhosted.org/packages/02/6c/5327667e6dbe9e98cbfbd4261c8e91386a52e38f41419575854248bbab6a/litellm-1.82.6-py3-none-any.whl", hash = "sha256:164a3ef3e19f309e3cabc199bef3d2045212712fefdfa25fc7f75884a5b5b205", size = 15591595, upload-time = "2026-03-22T06:35:56.795Z" },
]
[[package]]
@ -4536,7 +4462,7 @@ wheels = [
[[package]]
name = "opik"
version = "1.10.39"
version = "1.10.45"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "boto3-stubs", extra = ["bedrock-runtime"] },
@ -4555,9 +4481,9 @@ dependencies = [
{ name = "tqdm" },
{ name = "uuid6" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b5/0f/b1e00a18cac16b4f36bf6cecc2de962fda810a9416d1159c48f46b81f5ec/opik-1.10.39.tar.gz", hash = "sha256:4d808eb2137070fc5d92a3bed3c3100d9cccfb35f4f0b71ea9990733f293dbb2", size = 780312, upload-time = "2026-03-12T14:08:25.746Z" }
sdist = { url = "https://files.pythonhosted.org/packages/85/17/edea6308347cec62e6828de7c573c596559c502b54fa4f0c88a52e2e81f5/opik-1.10.45.tar.gz", hash = "sha256:d8d8627ba03d12def46965e03d58f611daaf5cf878b3d087c53fe1159788c140", size = 789876, upload-time = "2026-03-20T11:35:12.457Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/24/0f4404907a98b4aec4508504570a78a61a3a8b5e451c67326632695ba8e6/opik-1.10.39-py3-none-any.whl", hash = "sha256:a72d735b9afac62e5262294b2f704aca89ec31f5c9beda17504815f7423870c3", size = 1317833, upload-time = "2026-03-12T14:08:23.954Z" },
{ url = "https://files.pythonhosted.org/packages/b7/17/150e9eecfa28cb23f7a0bfe83ae1486a11022b97fe6d12328b455784658d/opik-1.10.45-py3-none-any.whl", hash = "sha256:e8050d9e5e0d92ff587f156eacbdd02099897f39cfe79a98380b6c8ae9906b95", size = 1337714, upload-time = "2026-03-20T11:35:10.237Z" },
]
[[package]]
@ -5273,15 +5199,15 @@ wheels = [
[[package]]
name = "pydantic-extra-types"
version = "2.11.0"
version = "2.11.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pydantic" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fd/35/2fee58b1316a73e025728583d3b1447218a97e621933fc776fb8c0f2ebdd/pydantic_extra_types-2.11.0.tar.gz", hash = "sha256:4e9991959d045b75feb775683437a97991d02c138e00b59176571db9ce634f0e", size = 157226, upload-time = "2025-12-31T16:18:27.944Z" }
sdist = { url = "https://files.pythonhosted.org/packages/66/71/dba38ee2651f84f7842206adbd2233d8bbdb59fb85e9fa14232486a8c471/pydantic_extra_types-2.11.1.tar.gz", hash = "sha256:46792d2307383859e923d8fcefa82108b1a141f8a9c0198982b3832ab5ef1049", size = 172002, upload-time = "2026-03-16T08:08:03.92Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/fe/17/fabd56da47096d240dd45ba627bead0333b0cf0ee8ada9bec579287dadf3/pydantic_extra_types-2.11.0-py3-none-any.whl", hash = "sha256:84b864d250a0fc62535b7ec591e36f2c5b4d1325fa0017eb8cda9aeb63b374a6", size = 74296, upload-time = "2025-12-31T16:18:26.38Z" },
{ url = "https://files.pythonhosted.org/packages/17/c1/3226e6d7f5a4f736f38ac11a6fbb262d701889802595cdb0f53a885ac2e0/pydantic_extra_types-2.11.1-py3-none-any.whl", hash = "sha256:1722ea2bddae5628ace25f2aa685b69978ef533123e5638cfbddb999e0100ec1", size = 79526, upload-time = "2026-03-16T08:08:02.533Z" },
]
[[package]]
@ -5380,6 +5306,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d1/7d/037401cecb34728d1c28ea05e196ea3c9d50a1ce0f2172e586e075ff55d8/pyobvector-0.2.25-py3-none-any.whl", hash = "sha256:ae0153f99bd0222783ed7e3951efc31a0d2b462d926b6f86ebd2033409aede8f", size = 64663, upload-time = "2026-03-10T07:18:29.789Z" },
]
[[package]]
name = "pypandoc"
version = "1.17"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/d6/410615fc433e5d1eacc00db2044ae2a9c82302df0d35366fe2bd15de024d/pypandoc-1.17.tar.gz", hash = "sha256:51179abfd6e582a25ed03477541b48836b5bba5a4c3b282a547630793934d799", size = 69071, upload-time = "2026-03-14T22:39:07.21Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0c/86/e2ffa604eacfbec3f430b1d850e7e04c4101eca1a5828f9ae54bf51dfba4/pypandoc-1.17-py3-none-any.whl", hash = "sha256:01fdbffa61edb9f8e82e8faad6954efcb7b6f8f0634aead4d89e322a00225a67", size = 23554, upload-time = "2026-03-14T22:38:46.007Z" },
]
[[package]]
name = "pypandoc-binary"
version = "1.17"
@ -5512,16 +5447,16 @@ wheels = [
[[package]]
name = "pytest-cov"
version = "7.0.0"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "coverage", extra = ["toml"] },
{ name = "pluggy" },
{ name = "pytest" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" },
{ url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" },
]
[[package]]
@ -5957,15 +5892,15 @@ wheels = [
[[package]]
name = "resend"
version = "2.23.0"
version = "2.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/96/a3/20003e7d14604fef778bd30c69604df3560a657a95a5c29a9688610759b6/resend-2.23.0.tar.gz", hash = "sha256:df613827dcc40eb1c9de2e5ff600cd4081b89b206537dec8067af1a5016d23c7", size = 31416, upload-time = "2026-02-23T19:01:57.603Z" }
sdist = { url = "https://files.pythonhosted.org/packages/07/ff/6a4e5e758fc2145c6a7d8563934d8ee24bf96a0212d7ec7d1af1f155bb74/resend-2.26.0.tar.gz", hash = "sha256:957a6a59dc597ce27fbd6d5383220dd9cc497fab99d4f3d775c8a42a449a569e", size = 36238, upload-time = "2026-03-20T22:49:09.728Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/35/64df775b8cd95e89798fd7b1b7fcafa975b6b09f559c10c0650e65b33580/resend-2.23.0-py2.py3-none-any.whl", hash = "sha256:eca6d28a1ffd36c1fc489fa83cb6b511f384792c9f07465f7c92d96c8b4d5636", size = 52599, upload-time = "2026-02-23T19:01:55.962Z" },
{ url = "https://files.pythonhosted.org/packages/16/c2/f88d3299d97aa1d36a923d0846fe185fcf5355ca898c954b2e5a79f090b5/resend-2.26.0-py2.py3-none-any.whl", hash = "sha256:5e25a804a84a68df504f2ade5369ac37e0139e37788a1f20b66c88696595b4bc", size = 57699, upload-time = "2026-03-20T22:49:08.354Z" },
]
[[package]]
@ -6046,27 +5981,27 @@ wheels = [
[[package]]
name = "ruff"
version = "0.15.6"
version = "0.15.7"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" },
{ url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" },
{ url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" },
{ url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" },
{ url = "https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" },
{ url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" },
{ url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" },
{ url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" },
{ url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" },
{ url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" },
{ url = "https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" },
{ url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" },
{ url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" },
{ url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" },
{ url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" },
{ url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = "sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" },
{ url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" },
{ url = "https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" },
{ url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" },
{ url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" },
{ url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" },
{ url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" },
{ url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = "2026-03-19T16:26:48.718Z" },
{ url = "https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" },
{ url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" },
{ url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" },
{ url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" },
{ url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" },
{ url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" },
{ url = "https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" },
{ url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" },
{ url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" },
{ url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" },
]
[[package]]
@ -6105,14 +6040,14 @@ wheels = [
[[package]]
name = "scipy-stubs"
version = "1.17.1.2"
version = "1.17.1.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "optype", extra = ["numpy"] },
]
sdist = { url = "https://files.pythonhosted.org/packages/c7/ab/43f681ffba42f363b7ed6b767fd215d1e26006578214ff8330586a11bf95/scipy_stubs-1.17.1.2.tar.gz", hash = "sha256:2ecadc8c87a3b61aaf7379d6d6b10f1038a829c53b9efe5b174fb97fc8b52237", size = 388354, upload-time = "2026-03-15T22:33:20.449Z" }
sdist = { url = "https://files.pythonhosted.org/packages/a7/59/59c6cc3f9970154b9ed6b1aff42a0185cdd60cef54adc0404b9e77972221/scipy_stubs-1.17.1.3.tar.gz", hash = "sha256:5eb87a8d23d726706259b012ebe76a4a96a9ae9e141fc59bf55fc8eac2ed9e0f", size = 392185, upload-time = "2026-03-22T22:11:58.34Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/0b/ec4fe720c1202d9df729a3e9d9b7e4d2da9f6e7f28bd2877b7d0769f4f75/scipy_stubs-1.17.1.2-py3-none-any.whl", hash = "sha256:f19e8f5273dbe3b7ee6a9554678c3973b9695fa66b91f29206d00830a1536c06", size = 594377, upload-time = "2026-03-15T22:33:18.684Z" },
{ url = "https://files.pythonhosted.org/packages/2c/d4/94304532c0a75a55526119043dd44a9bd1541a21e14483cbb54261c527d2/scipy_stubs-1.17.1.3-py3-none-any.whl", hash = "sha256:7b91d3f05aa47da06fbca14eb6c5bb4c28994e9245fd250cc847e375bab31297", size = 597933, upload-time = "2026-03-22T22:11:56.525Z" },
]
[[package]]
@ -6131,15 +6066,15 @@ wheels = [
[[package]]
name = "sentry-sdk"
version = "2.54.0"
version = "2.55.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c8/e9/2e3a46c304e7fa21eaa70612f60354e32699c7102eb961f67448e222ad7c/sentry_sdk-2.54.0.tar.gz", hash = "sha256:2620c2575128d009b11b20f7feb81e4e4e8ae08ec1d36cbc845705060b45cc1b", size = 413813, upload-time = "2026-03-02T15:12:41.355Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e9/b8/285293dc60fc198fffc3fcdbc7c6d4e646e0f74e61461c355d40faa64ceb/sentry_sdk-2.55.0.tar.gz", hash = "sha256:3774c4d8820720ca4101548131b9c162f4c9426eb7f4d24aca453012a7470f69", size = 424505, upload-time = "2026-03-17T14:15:51.707Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/53/39/be412cc86bc6247b8f69e9383d7950711bd86f8d0a4a4b0fe8fad685bc21/sentry_sdk-2.54.0-py2.py3-none-any.whl", hash = "sha256:fd74e0e281dcda63afff095d23ebcd6e97006102cdc8e78a29f19ecdf796a0de", size = 439198, upload-time = "2026-03-02T15:12:39.546Z" },
{ url = "https://files.pythonhosted.org/packages/9a/66/20465097782d7e1e742d846407ea7262d338c6e876ddddad38ca8907b38f/sentry_sdk-2.55.0-py2.py3-none-any.whl", hash = "sha256:97026981cb15699394474a196b88503a393cbc58d182ece0d3abe12b9bd978d4", size = 449284, upload-time = "2026-03-17T14:15:49.604Z" },
]
[package.optional-dependencies]
@ -6375,15 +6310,15 @@ wheels = [
[[package]]
name = "starlette"
version = "0.52.1"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" }
sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" },
{ url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" },
]
[[package]]
@ -6792,11 +6727,11 @@ wheels = [
[[package]]
name = "types-cachetools"
version = "6.2.0.20251022"
version = "6.2.0.20260317"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3b/a8/f9bcc7f1be63af43ef0170a773e2d88817bcc7c9d8769f2228c802826efe/types_cachetools-6.2.0.20251022.tar.gz", hash = "sha256:f1d3c736f0f741e89ec10f0e1b0138625023e21eb33603a930c149e0318c0cef", size = 9608, upload-time = "2025-10-22T03:03:58.16Z" }
sdist = { url = "https://files.pythonhosted.org/packages/8b/7f/16a4d8344c28193a5a74358028c2d2f753f0d9658dd98b9e1967c50045a2/types_cachetools-6.2.0.20260317.tar.gz", hash = "sha256:6d91855bcc944665897c125e720aa3c80aace929b77a64e796343701df4f61c6", size = 9812, upload-time = "2026-03-17T04:06:32.007Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/98/2d/8d821ed80f6c2c5b427f650bf4dc25b80676ed63d03388e4b637d2557107/types_cachetools-6.2.0.20251022-py3-none-any.whl", hash = "sha256:698eb17b8f16b661b90624708b6915f33dbac2d185db499ed57e4997e7962cad", size = 9341, upload-time = "2025-10-22T03:03:57.036Z" },
{ url = "https://files.pythonhosted.org/packages/17/9a/b00b23054934c4d569c19f7278c4fb32746cd36a64a175a216d3073a4713/types_cachetools-6.2.0.20260317-py3-none-any.whl", hash = "sha256:92fa9bc50e4629e31fca67ceb3fb1de71791e314fa16c0a0d2728724dc222c8b", size = 9346, upload-time = "2026-03-17T04:06:31.184Z" },
]
[[package]]
@ -6840,11 +6775,11 @@ wheels = [
[[package]]
name = "types-docutils"
version = "0.22.3.20260316"
version = "0.22.3.20260322"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/9f/27/a7f16b3a2fad0a4ddd85a668319f9a1d0311c4bd9578894f6471c7e6c788/types_docutils-0.22.3.20260316.tar.gz", hash = "sha256:8ef27d565b9831ff094fe2eac75337a74151013e2d21ecabd445c2955f891564", size = 57263, upload-time = "2026-03-16T04:29:12.211Z" }
sdist = { url = "https://files.pythonhosted.org/packages/44/bb/243a87fc1605a4a94c2c343d6dbddbf0d7ef7c0b9550f360b8cda8e82c39/types_docutils-0.22.3.20260322.tar.gz", hash = "sha256:e2450bb997283c3141ec5db3e436b91f0aa26efe35eb9165178ca976ccb4930b", size = 57311, upload-time = "2026-03-22T04:08:44.064Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/70/60/c1f22b7cfc4837d5419e5a2d8702c7d65f03343f866364b71cccd8a73b79/types_docutils-0.22.3.20260316-py3-none-any.whl", hash = "sha256:083c7091b8072c242998ec51da1bf1492f0332387da81c3b085efbf5ca754c7d", size = 91968, upload-time = "2026-03-16T04:29:11.114Z" },
{ url = "https://files.pythonhosted.org/packages/c6/4a/22c090cd4615a16917dff817cbe7c5956da376c961e024c241cd962d2c3d/types_docutils-0.22.3.20260322-py3-none-any.whl", hash = "sha256:681d4510ce9b80a0c6a593f0f9843d81f8caa786db7b39ba04d9fd5480ac4442", size = 91978, upload-time = "2026-03-22T04:08:43.117Z" },
]
[[package]]
@ -6874,15 +6809,15 @@ wheels = [
[[package]]
name = "types-gevent"
version = "25.9.0.20251228"
version = "25.9.0.20260322"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "types-greenlet" },
{ name = "types-psutil" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/85/c5043c4472f82c8ee3d9e0673eb4093c7d16770a26541a137a53a1d096f6/types_gevent-25.9.0.20251228.tar.gz", hash = "sha256:423ef9891d25c5a3af236c3e9aace4c444c86ff773fe13ef22731bc61d59abef", size = 38063, upload-time = "2025-12-28T03:28:28.651Z" }
sdist = { url = "https://files.pythonhosted.org/packages/34/f0/14a99ddcaa69b559fa7cec8c9de880b792bebb0b848ae865d94ea9058533/types_gevent-25.9.0.20260322.tar.gz", hash = "sha256:91257920845762f09753c08aa20fad1743ac13d2de8bcf23f4b8fe967d803732", size = 38241, upload-time = "2026-03-22T04:08:55.213Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c8/b7/a2d6b652ab5a26318b68cafd58c46fafb9b15c5313d2d76a70b838febb4b/types_gevent-25.9.0.20251228-py3-none-any.whl", hash = "sha256:e2e225af4fface9241c16044983eb2fc3993f2d13d801f55c2932848649b7f2f", size = 55486, upload-time = "2025-12-28T03:28:27.382Z" },
{ url = "https://files.pythonhosted.org/packages/89/0f/964440b57eb4ddb4aca03479a4093852e1ce79010d1c5967234e6f5d6bd9/types_gevent-25.9.0.20260322-py3-none-any.whl", hash = "sha256:21b3c269b3a20ecb0e4668289c63b97d21694d84a004ab059c1e32ab970eacc2", size = 55500, upload-time = "2026-03-22T04:08:54.103Z" },
]
[[package]]
@ -6965,11 +6900,11 @@ wheels = [
[[package]]
name = "types-openpyxl"
version = "3.1.5.20260316"
version = "3.1.5.20260322"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a1/38/32f8ee633dd66ca6d52b8853b9fd45dc3869490195a6ed435d5c868b9c2d/types_openpyxl-3.1.5.20260316.tar.gz", hash = "sha256:081dda9427ea1141e5649e3dcf630e7013a4cf254a5862a7e0a3f53c123b7ceb", size = 101318, upload-time = "2026-03-16T04:29:05.004Z" }
sdist = { url = "https://files.pythonhosted.org/packages/77/bf/15240de4d68192d2a1f385ef2f6f1ecb29b85d2f3791dd2e2d5b980be30f/types_openpyxl-3.1.5.20260322.tar.gz", hash = "sha256:a61d66ebe1e49697853c6db8e0929e1cda2c96755e71fb676ed7fc48dfdcf697", size = 101325, upload-time = "2026-03-22T04:08:40.426Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d5/df/b87ae6226ed7cc84b9e43119c489c7f053a9a25e209e0ebb5d84bc36fa37/types_openpyxl-3.1.5.20260316-py3-none-any.whl", hash = "sha256:38e7e125df520fb7eb72cb1129c9f024eb99ef9564aad2c27f68f080c26bcf2d", size = 166084, upload-time = "2026-03-16T04:29:03.657Z" },
{ url = "https://files.pythonhosted.org/packages/bf/b4/c14191b30bcb266365b124b2bb4e67ecd68425a78ba77ee026f33667daa9/types_openpyxl-3.1.5.20260322-py3-none-any.whl", hash = "sha256:2f515f0b0bbfb04bfb587de34f7522d90b5151a8da7bbbd11ecec4ca40f64238", size = 166102, upload-time = "2026-03-22T04:08:39.174Z" },
]
[[package]]
@ -7044,11 +6979,11 @@ wheels = [
[[package]]
name = "types-python-dateutil"
version = "2.9.0.20260305"
version = "2.9.0.20260323"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/1d/c7/025c624f347e10476b439a6619a95f1d200250ea88e7ccea6e09e48a7544/types_python_dateutil-2.9.0.20260305.tar.gz", hash = "sha256:389717c9f64d8f769f36d55a01873915b37e97e52ce21928198d210fbd393c8b", size = 16885, upload-time = "2026-03-05T04:00:47.409Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e9/02/f72df9ef5ffc4f959b83cb80c8aa03eb8718a43e563ecd99ccffe265fa89/types_python_dateutil-2.9.0.20260323.tar.gz", hash = "sha256:a107aef5841db41ace381dbbbd7e4945220fc940f7a72172a0be5a92d9ab7164", size = 16897, upload-time = "2026-03-23T04:15:14.829Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/77/8c0d1ec97f0d9707ad3d8fa270ab8964e7b31b076d2f641c94987395cc75/types_python_dateutil-2.9.0.20260305-py3-none-any.whl", hash = "sha256:a3be9ca444d38cadabd756cfbb29780d8b338ae2a3020e73c266a83cc3025dd7", size = 18419, upload-time = "2026-03-05T04:00:46.392Z" },
{ url = "https://files.pythonhosted.org/packages/92/c1/b661838b97453e699a215451f2e22cee750eaaf4ea4619b34bdaf01221a4/types_python_dateutil-2.9.0.20260323-py3-none-any.whl", hash = "sha256:a23a50a07f6eb87e729d4cb0c2eb511c81761eeb3f505db2c1413be94aae8335", size = 18433, upload-time = "2026-03-23T04:15:13.683Z" },
]
[[package]]
@ -7062,11 +6997,11 @@ wheels = [
[[package]]
name = "types-pywin32"
version = "311.0.0.20260316"
version = "311.0.0.20260323"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/17/a8/b4652002a854fcfe5d272872a0ae2d5df0e9dc482e1a6dfb5e97b905b76f/types_pywin32-311.0.0.20260316.tar.gz", hash = "sha256:c136fa489fe6279a13bca167b750414e18d657169b7cf398025856dc363004e8", size = 329956, upload-time = "2026-03-16T04:28:57.366Z" }
sdist = { url = "https://files.pythonhosted.org/packages/b5/cc/f03ddb7412ac2fc2238358b617c2d5919ba96812dff8d3081f3b2754bb83/types_pywin32-311.0.0.20260323.tar.gz", hash = "sha256:2e8dc6a59fedccbc51b241651ce1e8aa58488934f517debf23a9c6d0ff329b4b", size = 332263, upload-time = "2026-03-23T04:15:20.004Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f0/83/704698d93788cf1c2f5e236eae2b37f1b2152ef84dc66b4b83f6c7487b76/types_pywin32-311.0.0.20260316-py3-none-any.whl", hash = "sha256:abb643d50012386d697af49384cc0e6e475eab76b0ca2a7f93d480d0862b3692", size = 392959, upload-time = "2026-03-16T04:28:56.104Z" },
{ url = "https://files.pythonhosted.org/packages/dc/82/d786d5d8b846e3cbe1ee52da8945560b111c789b42c3771b2129b312ab94/types_pywin32-311.0.0.20260323-py3-none-any.whl", hash = "sha256:2f2b03fc72ae77ccbb0ee258da0f181c3a38bd8602f6e332e42587b3b0d5f095", size = 395435, upload-time = "2026-03-23T04:15:18.76Z" },
]
[[package]]
@ -7162,16 +7097,16 @@ wheels = [
[[package]]
name = "types-tensorflow"
version = "2.18.0.20260224"
version = "2.18.0.20260322"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "types-protobuf" },
{ name = "types-requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/cb/4914c2fbc1cf8a8d1ef2a7c727bb6f694879be85edeee880a0c88e696af8/types_tensorflow-2.18.0.20260224.tar.gz", hash = "sha256:9b0ccc91c79c88791e43d3f80d6c879748fa0361409c5ff23c7ffe3709be00f2", size = 258786, upload-time = "2026-02-24T04:06:45.613Z" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/cb/81dfaa2680031a6e087bcdfaf1c0556371098e229aee541e21c81a381065/types_tensorflow-2.18.0.20260322.tar.gz", hash = "sha256:135dc6ca06cc647a002e1bca5c5c99516fde51efd08e46c48a9b1916fc5df07f", size = 259030, upload-time = "2026-03-22T04:09:14.069Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d4/1d/a1c3c60f0eb1a204500dbdc66e3d18aafabc86ad07a8eca71ea05bc8c5a8/types_tensorflow-2.18.0.20260224-py3-none-any.whl", hash = "sha256:6a25f5f41f3e06f28c1f65c6e09f484d4ba0031d6d8df83a39df9d890245eefc", size = 329746, upload-time = "2026-02-24T04:06:44.4Z" },
{ url = "https://files.pythonhosted.org/packages/5b/0c/a178061450b640e53577e2c423ad22bf5d3f692f6bfeeb12156d02b531ef/types_tensorflow-2.18.0.20260322-py3-none-any.whl", hash = "sha256:d8776b6daacdb279e64f105f9dcbc0b8e3544b9a2f2eb71ec6ea5955081f65e6", size = 329771, upload-time = "2026-03-22T04:09:12.844Z" },
]
[[package]]

440
docker/dify-env-sync.py Executable file
View File

@ -0,0 +1,440 @@
#!/usr/bin/env python3
# ================================================================
# Dify Environment Variables Synchronization Script
#
# Features:
# - Synchronize latest settings from .env.example to .env
# - Preserve custom settings in existing .env
# - Add new environment variables
# - Detect removed environment variables
# - Create backup files
# ================================================================
import argparse
import re
import shutil
import sys
from datetime import datetime
from pathlib import Path
# ANSI color codes for log prefixes; they are emitted only when stdout is a
# TTY (see supports_color()), so redirected/piped output stays escape-free.
RED = "\033[0;31m"
GREEN = "\033[0;32m"
YELLOW = "\033[1;33m"
BLUE = "\033[0;34m"
NC = "\033[0m"  # No Color — resets terminal attributes after a colored span
def supports_color() -> bool:
    """Report whether stdout looks like a terminal able to render ANSI colors.

    Returns:
        True when sys.stdout exposes isatty() and it reports a real TTY.
    """
    stream = sys.stdout
    return hasattr(stream, "isatty") and stream.isatty()
def log_info(message: str) -> None:
    """Write an informational line to stdout, blue-tinted on color terminals."""
    prefix = f"{BLUE}[INFO]{NC}" if supports_color() else "[INFO]"
    print(f"{prefix} {message}")
def log_success(message: str) -> None:
    """Write a success line to stdout, green-tinted on color terminals."""
    prefix = f"{GREEN}[SUCCESS]{NC}" if supports_color() else "[SUCCESS]"
    print(f"{prefix} {message}")
def log_warning(message: str) -> None:
    """Write a warning line to stderr, yellow-tinted on color terminals."""
    prefix = f"{YELLOW}[WARNING]{NC}" if supports_color() else "[WARNING]"
    print(f"{prefix} {message}", file=sys.stderr)
def log_error(message: str) -> None:
    """Write an error line to stderr, red-tinted on color terminals."""
    prefix = f"{RED}[ERROR]{NC}" if supports_color() else "[ERROR]"
    print(f"{prefix} {message}", file=sys.stderr)
def parse_env_file(path: Path) -> dict[str, str]:
    """Read a dotenv-style file into an insertion-ordered key/value mapping.

    Blank lines and lines whose first non-whitespace character is '#' are
    ignored, as are lines without an '=' separator. Each value keeps
    everything after the first '=', with surrounding whitespace trimmed.

    Args:
        path: Location of the .env-style file to read.

    Returns:
        Dict of variable name -> trimmed value string, in file order.
    """
    result: dict[str, str] = {}
    with path.open(encoding="utf-8") as handle:
        for raw in handle:
            text = raw.strip()
            # Ignore blanks, comments, and lines that define nothing.
            if not text or text.startswith("#") or "=" not in text:
                continue
            name, _, value = text.partition("=")
            name = name.strip()
            if name:
                result[name] = value.strip()
    return result
def check_files(work_dir: Path) -> None:
    """Ensure .env.example exists and bootstrap .env from it when missing.

    Args:
        work_dir: Directory expected to hold .env.example (and possibly .env).

    Raises:
        SystemExit: When .env.example is absent from work_dir.
    """
    log_info("Checking required files...")
    example_path = work_dir / ".env.example"
    env_path = work_dir / ".env"
    # The example file is the source of truth; without it there is nothing
    # to synchronize against.
    if not example_path.exists():
        log_error(".env.example file not found")
        sys.exit(1)
    if not env_path.exists():
        log_warning(".env file does not exist. Creating from .env.example.")
        shutil.copy2(example_path, env_path)
        log_success(".env file created")
    log_success("Required files verified")
def create_backup(work_dir: Path) -> None:
    """Copy the current .env into env-backup/ with a timestamp suffix.

    The backup lands at ``<work_dir>/env-backup/.env.backup_<YYYYMMDD_HHMMSS>``.
    A missing .env is silently ignored (there is nothing to back up yet).

    Args:
        work_dir: Directory whose .env should be preserved.
    """
    source = work_dir / ".env"
    if not source.exists():
        return
    backup_dir = work_dir / "env-backup"
    if not backup_dir.exists():
        backup_dir.mkdir(parents=True)
        log_info(f"Created backup directory: {backup_dir}")
    stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    destination = backup_dir / f".env.backup_{stamp}"
    shutil.copy2(source, destination)
    log_success(f"Backed up existing .env to {destination}")
def analyze_value_change(current: str, recommended: str) -> str | None:
    """Describe the kind of difference between an .env value and its example.

    Heuristics are applied in order: empty-vs-set, integer comparison,
    boolean flip, URL/endpoint, absolute file path, then string-length
    change. The first matching rule wins.

    Args:
        current: Value currently set in .env.
        recommended: Value present in .env.example.

    Returns:
        A human-readable (optionally colorized) description string, or None
        when no heuristic applies (e.g. same-length opaque strings).
    """
    use_colors = supports_color()

    def colorize(color: str, text: str) -> str:
        return f"{color}{text}{NC}" if use_colors else text

    if not current and recommended:
        return colorize(RED, " -> Setting from empty to recommended value")
    if current and not recommended:
        return colorize(RED, " -> Recommended value changed to empty")
    # Numeric comparison — accept an optional leading minus so signed values
    # such as "-1" (a common "unlimited"/"disabled" convention in env files)
    # are compared as integers instead of falling through to the weaker
    # string-length heuristic below.
    if re.fullmatch(r"-?\d+", current) and re.fullmatch(r"-?\d+", recommended):
        cur_int, rec_int = int(current), int(recommended)
        if cur_int < rec_int:
            return colorize(BLUE, f" -> Numeric increase ({current} < {recommended})")
        if cur_int > rec_int:
            return colorize(YELLOW, f" -> Numeric decrease ({current} > {recommended})")
        return None
    # Boolean comparison (case-insensitive true/false)
    if current.lower() in {"true", "false"} and recommended.lower() in {"true", "false"}:
        if current.lower() != recommended.lower():
            return colorize(BLUE, f" -> Boolean value change ({current} -> {recommended})")
        return None
    # URL / endpoint
    if current.startswith(("http://", "https://")) or recommended.startswith(("http://", "https://")):
        return colorize(BLUE, " -> URL/endpoint change")
    # Absolute file path
    if current.startswith("/") or recommended.startswith("/"):
        return colorize(BLUE, " -> File path change")
    # Fallback: flag a length change for otherwise opaque strings
    if len(current) != len(recommended):
        return colorize(YELLOW, f" -> String length change ({len(current)} -> {len(recommended)} characters)")
    return None
def detect_differences(env_vars: dict[str, str], example_vars: dict[str, str]) -> dict[str, tuple[str, str]]:
    """Compare .env against .env.example and collect mismatched values.

    Keys that appear in only one of the two files are ignored here; removed
    keys are reported separately and new keys are handled during sync.

    Args:
        env_vars: Current key/value pairs parsed from .env.
        example_vars: Reference key/value pairs parsed from .env.example.

    Returns:
        Dict of key -> (current_value, recommended_value) for every key
        present in both files whose values differ.
    """
    log_info("Detecting differences between .env and .env.example...")
    diffs = {
        key: (env_vars[key], recommended)
        for key, recommended in example_vars.items()
        if key in env_vars and env_vars[key] != recommended
    }
    if not diffs:
        log_info("No differences detected")
    else:
        log_success(f"Detected differences in {len(diffs)} environment variables")
        show_differences_detail(diffs)
    return diffs
def show_differences_detail(diffs: dict[str, tuple[str, str]]) -> None:
    """Print a formatted, numbered table of differing environment variables.

    For each differing key, the current .env value and the recommended
    .env.example value are printed side by side, followed by a heuristic
    description of the change from analyze_value_change() when one applies.
    Output is purely informational — nothing here mutates any file.

    Args:
        diffs: Mapping of key -> (current_value, recommended_value).
    """
    # Resolve color support once so every row renders consistently.
    use_colors = supports_color()
    log_info("")
    log_info("=== Environment Variable Differences ===")
    if not diffs:
        log_info("No differences to display")
        return
    # enumerate(..., start=1) gives the operator a 1-based row index.
    for count, (key, (env_value, example_value)) in enumerate(diffs.items(), start=1):
        print()
        if use_colors:
            print(f"{YELLOW}[{count}] {key}{NC}")
            print(f"  {GREEN}.env (current){NC}  : {env_value}")
            print(f"  {BLUE}.env.example (recommended){NC}  : {example_value}")
        else:
            print(f"[{count}] {key}")
            print(f"  .env (current)  : {env_value}")
            print(f"  .env.example (recommended)  : {example_value}")
        # Heuristic classification of the change (numeric, boolean, URL, ...).
        analysis = analyze_value_change(env_value, example_value)
        if analysis:
            print(analysis)
    print()
    log_info("=== Difference Analysis Complete ===")
    log_info("Note: Consider changing to the recommended values above.")
    log_info("Current implementation preserves .env values.")
    print()
def detect_removed_variables(env_vars: dict[str, str], example_vars: dict[str, str]) -> list[str]:
    """List variables that exist in .env but were dropped from .env.example.

    Args:
        env_vars: Current key/value pairs parsed from .env.
        example_vars: Reference key/value pairs parsed from .env.example.

    Returns:
        Alphabetically sorted names that appear only in .env.
    """
    log_info("Detecting removed environment variables...")
    removed = sorted(key for key in env_vars if key not in example_vars)
    if not removed:
        log_success("No removed environment variables found")
        return removed
    log_warning("The following environment variables have been removed from .env.example:")
    for name in removed:
        log_warning(f"  - {name}")
    log_warning("Consider manually removing these variables from .env")
    return removed
def sync_env_file(work_dir: Path, env_vars: dict[str, str], diffs: dict[str, tuple[str, str]]) -> None:
    """Rewrite .env based on .env.example while preserving custom values.

    The output file follows the exact line structure of .env.example
    (preserving comments, blank lines, and ordering). For every variable
    that exists in .env with a different value from the example, the
    current .env value is kept. Variables that are new in .env.example
    (not present in .env at all) are added with the example's default.

    NOTE(review): because the output mirrors .env.example line-for-line,
    variables present only in .env are silently dropped here — yet
    detect_removed_variables() tells the user to remove them manually.
    Confirm which behavior is intended. Also, updated_count includes
    variables whose value was already identical to the example.

    Args:
        work_dir: Directory containing .env and .env.example.
        env_vars: Parsed key/value pairs from the original .env.
        diffs: Keys whose .env values differ from .env.example (to preserve).
    """
    log_info("Starting partial synchronization of .env file...")
    example_file = work_dir / ".env.example"
    new_env_file = work_dir / ".env.new"
    # Keys whose current .env value should override the example default
    preserved_keys: set[str] = set(diffs.keys())
    preserved_count = 0
    updated_count = 0
    # Matches "KEY=" at line start; \s* tolerates spaces before '='.
    env_var_pattern = re.compile(r"^([A-Za-z_][A-Za-z0-9_]*)\s*=")
    with example_file.open(encoding="utf-8") as src, new_env_file.open("w", encoding="utf-8") as dst:
        for line in src:
            raw_line = line.rstrip("\n")
            match = env_var_pattern.match(raw_line)
            if match:
                key = match.group(1)
                if key in preserved_keys:
                    # Write the preserved value from .env
                    dst.write(f"{key}={env_vars[key]}\n")
                    log_info(f"  Preserved: {key} (.env value)")
                    preserved_count += 1
                else:
                    # Use the example value (covers new vars and unchanged ones)
                    dst.write(line if line.endswith("\n") else raw_line + "\n")
                    updated_count += 1
            else:
                # Blank line, comment, or non-variable line — keep as-is
                dst.write(line if line.endswith("\n") else raw_line + "\n")
    # Atomically replace the original .env; on failure, clean up the temp
    # file so a stale .env.new is not left behind.
    try:
        new_env_file.replace(work_dir / ".env")
    except OSError as exc:
        log_error(f"Failed to replace .env file: {exc}")
        new_env_file.unlink(missing_ok=True)
        sys.exit(1)
    log_success("Successfully created new .env file")
    log_success("Partial synchronization of .env file completed")
    log_info(f"  Preserved .env values: {preserved_count}")
    log_info(f"  Updated to .env.example values: {updated_count}")
def show_statistics(work_dir: Path) -> None:
    """Log how many variables each env file currently defines.

    Args:
        work_dir: Directory containing .env and .env.example.
    """
    log_info("Synchronization statistics:")
    # Report the example file first, then the live .env, mirroring sync order.
    for label, candidate in (
        (".env.example", work_dir / ".env.example"),
        (".env", work_dir / ".env"),
    ):
        count = len(parse_env_file(candidate)) if candidate.exists() else 0
        log_info(f"  {label} environment variables: {count}")
def build_arg_parser() -> argparse.ArgumentParser:
    """Construct the command-line interface for the sync script.

    Returns:
        An ArgumentParser exposing the --dir and --no-backup options.
    """
    epilog_text = (
        "Examples:\n"
        "  # Run from the docker/ directory (default)\n"
        "  python dify-env-sync.py\n\n"
        "  # Specify a custom working directory\n"
        "  python dify-env-sync.py --dir /path/to/docker\n"
    )
    parser = argparse.ArgumentParser(
        prog="dify-env-sync",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description=(
            "Synchronize .env with .env.example: add new variables, "
            "preserve custom values, and report removed variables."
        ),
        epilog=epilog_text,
    )
    parser.add_argument(
        "--dir",
        metavar="DIRECTORY",
        default=".",
        help="Working directory containing .env and .env.example (default: current directory)",
    )
    parser.add_argument(
        "--no-backup",
        action="store_true",
        default=False,
        help="Skip creating a timestamped backup of the existing .env file",
    )
    return parser
def main() -> None:
    """Run the full .env synchronization workflow end to end."""
    args = build_arg_parser().parse_args()
    work_dir = Path(args.dir).resolve()

    log_info("=== Dify Environment Variables Synchronization Script ===")
    log_info(f"Execution started: {datetime.now()}")
    log_info(f"Working directory: {work_dir}")

    # Validate inputs and bootstrap .env when it is missing entirely.
    check_files(work_dir)

    # Snapshot the current .env unless the user opted out.
    if not args.no_backup:
        create_backup(work_dir)

    # Load both files, report drift and removals, then rewrite .env.
    env_vars = parse_env_file(work_dir / ".env")
    example_vars = parse_env_file(work_dir / ".env.example")
    diffs = detect_differences(env_vars, example_vars)
    detect_removed_variables(env_vars, example_vars)
    sync_env_file(work_dir, env_vars, diffs)

    # Final summary for the operator.
    show_statistics(work_dir)
    log_success("=== Synchronization process completed successfully ===")
    log_info(f"Execution finished: {datetime.now()}")
# Entry point when executed directly (e.g. `python dify-env-sync.py`).
if __name__ == "__main__":
    main()

View File

@ -28,6 +28,7 @@ http_access deny manager
http_access allow localhost
include /etc/squid/conf.d/*.conf
http_access deny all
tcp_outgoing_address 0.0.0.0
################################## Proxy Server ################################
http_port ${HTTP_PORT}

View File

@ -0,0 +1,186 @@
# EU AI Act Compliance Guide for Dify Deployers
Dify is an LLMOps platform for building RAG pipelines, agents, and AI workflows. If you deploy Dify in the EU — whether self-hosted or using a cloud provider — the EU AI Act applies to your deployment. This guide covers what the regulation requires and how Dify's architecture maps to those requirements.
## Is your system in scope?
The detailed obligations in Articles 12, 13, and 14 only apply to **high-risk AI systems** as defined in Annex III of the EU AI Act. A Dify application is high-risk if it is used for:
- **Recruitment and HR** — screening candidates, evaluating employee performance, allocating tasks
- **Credit scoring and insurance** — assessing creditworthiness or setting premiums
- **Law enforcement** — profiling, criminal risk assessment, border control
- **Critical infrastructure** — managing energy, water, transport, or telecommunications systems
- **Education assessment** — grading students, determining admissions
- **Essential public services** — evaluating eligibility for benefits, housing, or emergency services
Most Dify deployments (customer-facing chatbots, internal knowledge bases, content generation workflows) are **not** high-risk. If your Dify application does not fall into one of the categories above:
- **Article 50** (end-user transparency) still applies if users interact with your application directly. See the [Article 50 section](#article-50-end-user-transparency) below.
- **GDPR** still applies if you process personal data. See the [GDPR section](#gdpr-considerations) below.
- The high-risk obligations (Articles 9-15) are less likely to apply, but risk classification is context-dependent. **Do not self-classify without legal review.** Focus on Article 50 (transparency) and GDPR (data protection) as your baseline obligations.
If you are unsure whether your use case qualifies as high-risk, consult a qualified legal professional before proceeding.
## Self-hosted vs cloud: different compliance profiles
| Deployment | Your role | Dify's role | Who handles compliance? |
|-----------|----------|-------------|------------------------|
| **Self-hosted** | Provider and deployer | Framework provider — obligations under Article 25 apply only if Dify is placed on the market or put into service as part of a complete AI system bearing its name or trademark | You |
| **Dify Cloud** | Deployer | Provider and processor | Shared — Dify handles SOC 2 and GDPR for the platform; you handle AI Act obligations for your specific use case |
Dify Cloud already has SOC 2 Type II and GDPR compliance for the platform itself. But the EU AI Act adds obligations specific to AI systems that SOC 2 does not cover: risk classification, technical documentation, transparency, and human oversight.
## Supported providers and services
Dify integrates with a broad range of AI providers and data stores. The following are the key ones relevant to compliance:
- **AI providers:** HuggingFace (core), plus integrations with OpenAI, Anthropic, Google, and 100+ models via provider plugins
- **Model identifiers include:** gpt-4o, gpt-3.5-turbo, claude-3-opus, gemini-2.5-flash, whisper-1, and others
- **Vector database connections:** Extensive RAG infrastructure supporting numerous vector stores
Dify's plugin architecture means actual provider usage depends on your configuration. Document which providers and models are active in your deployment.
## Data flow diagram
A typical Dify RAG deployment:
```mermaid
graph LR
USER((User)) -->|query| DIFY[Dify Platform]
DIFY -->|prompts| LLM([LLM Provider])
LLM -->|responses| DIFY
DIFY -->|documents| EMBED([Embedding Model])
EMBED -->|vectors| DIFY
DIFY -->|store/retrieve| VS[(Vector Store)]
DIFY -->|knowledge| KB[(Knowledge Base)]
DIFY -->|response| USER
classDef processor fill:#60a5fa,stroke:#1e40af,color:#000
classDef controller fill:#4ade80,stroke:#166534,color:#000
classDef app fill:#a78bfa,stroke:#5b21b6,color:#000
classDef user fill:#f472b6,stroke:#be185d,color:#000
class USER user
class DIFY app
class LLM processor
class EMBED processor
class VS controller
class KB controller
```
**GDPR roles** (providers are typically processors for customer-submitted data, but the exact role depends on each provider's terms of service and processing purpose; deployers should review each provider's DPA):
- **Cloud LLM providers (OpenAI, Anthropic, Google)** typically act as processors — requires DPA.
- **Cloud embedding services** typically act as processors — requires DPA.
- **Self-hosted vector stores (Weaviate, Qdrant, pgvector):** Your organization remains the controller — no third-party transfer.
- **Cloud vector stores (Pinecone, Zilliz Cloud)** typically act as processors — requires DPA.
- **Knowledge base documents:** Your organization is the controller — stored in your infrastructure.
## Article 11: Technical documentation
High-risk systems need Annex IV documentation. For Dify deployments, key sections include:
| Section | What Dify provides | What you must document |
|---------|-------------------|----------------------|
| General description | Platform capabilities, supported models | Your specific use case, intended users, deployment context |
| Development process | Dify's architecture, plugin system | Your RAG pipeline design, prompt engineering, knowledge base curation |
| Monitoring | Dify's built-in logging and analytics | Your monitoring plan, alert thresholds, incident response |
| Performance metrics | Dify's evaluation features | Your accuracy benchmarks, quality thresholds, bias testing |
| Risk management | — | Risk assessment for your specific use case |
Some sections can be derived from Dify's architecture and your deployment configuration, as shown in the table above. The remaining sections require your input.
## Article 12: Record-keeping
Dify's built-in logging covers several Article 12 requirements:
| Requirement | Dify Feature | Status |
|------------|-------------|--------|
| Conversation logs | Full conversation history with timestamps | **Covered** |
| Model tracking | Model name recorded per interaction | **Covered** |
| Token usage | Token counts per message | **Covered** |
| Cost tracking | Cost per conversation (if provider reports it) | **Partial** |
| Document retrieval | RAG source documents logged | **Covered** |
| User identification | User session tracking | **Covered** |
| Error logging | Failed generation logs | **Covered** |
| Data retention | Configurable | **Your responsibility** |
**Retention periods:** The required retention period depends on your role under the Act. Article 18 requires **providers** of high-risk systems to retain logs and technical documentation for **10 years** after market placement. Article 26(6) requires **deployers** to retain logs for at least **6 months**. If you self-host Dify and have substantially modified the system, you may be classified as a provider rather than a deployer. Confirm the applicable retention period with legal counsel.
## Article 13: Transparency to deployers
Article 13 requires providers of high-risk AI systems to supply deployers with the information needed to understand and operate the system correctly. This is a **documentation obligation**, not a logging obligation. For Dify deployments, this means the upstream LLM and embedding providers must give you:
- Instructions for use, including intended purpose and known limitations
- Accuracy metrics and performance benchmarks
- Known or foreseeable risks and residual risks after mitigation
- Technical specifications: input/output formats, training data characteristics, model architecture details
As a deployer, collect model cards, system documentation, and accuracy reports from each AI provider your Dify application uses. Maintain these as part of your Annex IV technical documentation.
Dify's platform features provide **supporting evidence** that can inform Article 13 documentation, but they do not satisfy Article 13 on their own:
- **Source attribution** — Dify's RAG citation feature shows which documents informed the response, supporting deployer-side auditing
- **Model identification** — Dify logs which LLM model generates responses, providing evidence for system documentation
- **Conversation logs** — execution history helps compile performance and behavior evidence
You must independently produce system documentation covering how your specific Dify deployment uses AI, its intended purpose, performance characteristics, and residual risks.
## Article 50: End-user transparency
Article 50 requires deployers to inform end users that they are interacting with an AI system. This is a separate obligation from Article 13 and applies even to limited-risk systems.
For Dify applications serving end users:
1. **Disclose AI involvement** — tell users they are interacting with an AI system
2. **AI-generated content labeling** — identify AI-generated content as such (e.g., clear labeling in the UI)
Dify's "citation" feature also supports end-user transparency by showing users which knowledge base documents informed the answer.
> **Note:** Article 50 applies to chatbots and systems interacting directly with natural persons. It has a separate scope from the high-risk designation under Annex III — it applies even to limited-risk systems.
## Article 14: Human oversight
Article 14 requires that high-risk AI systems be designed so that natural persons can effectively oversee them. Dify provides **automated technical safeguards** that support human oversight, but they are not a substitute for it:
| Dify Feature | What It Does | Oversight Role |
|-------------|-------------|----------------|
| Annotation/feedback system | Human review of AI outputs | **Direct oversight** — humans evaluate and correct AI responses |
| Content moderation | Built-in filtering before responses reach users | **Automated safeguard** — reduces harmful outputs but does not replace human judgment on edge cases |
| Rate limiting | Controls on API usage | **Automated safeguard** — bounds system behavior, supports overseer's ability to maintain control |
| Workflow control | Insert human review steps between AI generation and output | **Oversight enabler** — allows building approval gates into the pipeline |
These automated controls are necessary building blocks, but Article 14 compliance requires **human oversight procedures** on top of them:
- **Escalation procedures** — define what happens when moderation triggers or edge cases arise (who is notified, what action is taken)
- **Human review pipeline** — for high-stakes decisions, route AI outputs to a qualified person before they take effect
- **Override mechanism** — a human must be able to halt AI responses or override the system's output
- **Competence requirements** — the human overseer must understand the system's capabilities, limitations, and the context of its outputs
### Recommended pattern
For high-risk use cases (HR, legal, medical), configure your Dify workflow to require human approval before the AI response is delivered to the end user or acted upon.
## Knowledge base compliance
Dify's knowledge base feature has specific compliance implications:
1. **Data provenance:** Document where your knowledge base documents come from. Article 10 requires data governance for training data; knowledge bases are analogous.
2. **Update tracking:** When you add, remove, or update documents in the knowledge base, log the change. The AI system's behavior changes with its knowledge base.
3. **PII in documents:** If knowledge base documents contain personal data, GDPR applies to the entire RAG pipeline. Implement access controls and consider PII redaction before indexing.
4. **Copyright:** Ensure you have the right to use the documents in your knowledge base for AI-assisted generation.
## GDPR considerations
1. **Legal basis** (Article 6): Document why AI processing of user queries is necessary
2. **Data Processing Agreements** (Article 28): Required for each cloud LLM and embedding provider
3. **Data minimization:** Only include necessary context in prompts; avoid sending entire documents when a relevant excerpt suffices
4. **Right to erasure:** If a user requests deletion, ensure their conversations are removed from Dify's logs AND any vector store entries derived from their data
5. **Cross-border transfers:** Providers based outside the EEA — including US-based providers (OpenAI, Anthropic), and any other non-EEA providers you route to — require Standard Contractual Clauses (SCCs) or equivalent safeguards under Chapter V of the GDPR. Review each provider's transfer mechanism individually.
## Resources
- [EU AI Act full text](https://artificialintelligenceact.eu/)
- [Dify documentation](https://docs.dify.ai/)
- [Dify SOC 2 compliance](https://dify.ai/trust)
---
*This is not legal advice. Consult a qualified professional for compliance decisions.*

View File

@ -19,6 +19,10 @@ import { RETRIEVE_METHOD } from '@/types/app'
// --- Mocks ---
const { mockToastError } = vi.hoisted(() => ({
mockToastError: vi.fn(),
}))
const mockMutateDatasets = vi.fn()
const mockInvalidDatasetList = vi.fn()
const mockUpdateDatasetSetting = vi.fn().mockResolvedValue({})
@ -55,8 +59,11 @@ vi.mock('@/app/components/datasets/common/check-rerank-model', () => ({
isReRankModelSelected: () => true,
}))
vi.mock('@/app/components/base/toast', () => ({
default: { notify: vi.fn() },
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
error: mockToastError,
success: vi.fn(),
},
}))
// --- Dataset factory ---
@ -311,7 +318,7 @@ describe('Dataset Settings Flow - Cross-Module Configuration Cascade', () => {
describe('Form Submission Validation → All Fields Together', () => {
it('should reject empty name on save', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
const { result } = renderHook(() => useFormState())
act(() => {
@ -322,10 +329,7 @@ describe('Dataset Settings Flow - Cross-Module Configuration Cascade', () => {
await result.current.handleSave()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
expect(mockUpdateDatasetSetting).not.toHaveBeenCalled()
})

View File

@ -3,7 +3,7 @@ import type { DatasetConfigs } from '@/models/debug'
import { render, screen, waitFor, within } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import * as React from 'react'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import {
useCurrentProviderAndModel,
useModelListAndDefaultModelAndCurrentProviderAndModel,
@ -75,7 +75,7 @@ vi.mock('@/app/components/header/account-setting/model-provider-page/model-param
const mockedUseModelListAndDefaultModelAndCurrentProviderAndModel = useModelListAndDefaultModelAndCurrentProviderAndModel as MockedFunction<typeof useModelListAndDefaultModelAndCurrentProviderAndModel>
const mockedUseCurrentProviderAndModel = useCurrentProviderAndModel as MockedFunction<typeof useCurrentProviderAndModel>
let toastNotifySpy: MockInstance
let toastErrorSpy: MockInstance
const createDatasetConfigs = (overrides: Partial<DatasetConfigs> = {}): DatasetConfigs => {
return {
@ -140,7 +140,7 @@ describe('dataset-config/params-config', () => {
beforeEach(() => {
vi.clearAllMocks()
vi.useRealTimers()
toastNotifySpy = vi.spyOn(Toast, 'notify').mockImplementation(() => ({}))
toastErrorSpy = vi.spyOn(toast, 'error').mockImplementation(() => '')
mockedUseModelListAndDefaultModelAndCurrentProviderAndModel.mockReturnValue({
modelList: [],
defaultModel: undefined,
@ -154,7 +154,7 @@ describe('dataset-config/params-config', () => {
})
afterEach(() => {
toastNotifySpy.mockRestore()
toastErrorSpy.mockRestore()
})
// Rendering tests (REQUIRED)
@ -254,10 +254,7 @@ describe('dataset-config/params-config', () => {
await user.click(dialogScope.getByRole('button', { name: 'common.operation.save' }))
// Assert
expect(toastNotifySpy).toHaveBeenCalledWith({
type: 'error',
message: 'appDebug.datasetConfig.rerankModelRequired',
})
expect(toastErrorSpy).toHaveBeenCalledWith('appDebug.datasetConfig.rerankModelRequired')
expect(screen.getByRole('dialog')).toBeInTheDocument()
})
})

View File

@ -7,7 +7,7 @@ import { useTranslation } from 'react-i18next'
import { useContext } from 'use-context-selector'
import Button from '@/app/components/base/button'
import Modal from '@/app/components/base/modal'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useCurrentProviderAndModel, useModelListAndDefaultModelAndCurrentProviderAndModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import {
@ -66,10 +66,7 @@ const ParamsConfig = ({
}
}
if (errMsg) {
Toast.notify({
type: 'error',
message: errMsg,
})
toast.error(errMsg)
}
return !errMsg
}

View File

@ -158,7 +158,7 @@ function buildChatItemTree(allMessages: IChatItem[]): ChatItemInTree[] {
rootNodes.push(questionNode)
}
else {
map[parentMessageId]?.children!.push(questionNode)
map[parentMessageId].children!.push(questionNode)
}
}
}

View File

@ -21,6 +21,8 @@ let clientWidthSpy: { mockRestore: () => void } | null = null
let clientHeightSpy: { mockRestore: () => void } | null = null
let offsetWidthSpy: { mockRestore: () => void } | null = null
let offsetHeightSpy: { mockRestore: () => void } | null = null
let consoleErrorSpy: ReturnType<typeof vi.spyOn> | null = null
let consoleWarnSpy: ReturnType<typeof vi.spyOn> | null = null
type AudioContextCtor = new () => unknown
type WindowWithLegacyAudio = Window & {
@ -83,6 +85,8 @@ describe('CodeBlock', () => {
beforeEach(() => {
vi.clearAllMocks()
mockUseTheme.mockReturnValue({ theme: Theme.light })
consoleErrorSpy = vi.spyOn(console, 'error').mockImplementation(() => {})
consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {})
clientWidthSpy = vi.spyOn(HTMLElement.prototype, 'clientWidth', 'get').mockReturnValue(900)
clientHeightSpy = vi.spyOn(HTMLElement.prototype, 'clientHeight', 'get').mockReturnValue(400)
offsetWidthSpy = vi.spyOn(HTMLElement.prototype, 'offsetWidth', 'get').mockReturnValue(900)
@ -98,6 +102,10 @@ describe('CodeBlock', () => {
afterEach(() => {
vi.useRealTimers()
consoleErrorSpy?.mockRestore()
consoleWarnSpy?.mockRestore()
consoleErrorSpy = null
consoleWarnSpy = null
clientWidthSpy?.mockRestore()
clientHeightSpy?.mockRestore()
offsetWidthSpy?.mockRestore()

View File

@ -85,13 +85,30 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
const processedRef = useRef<boolean>(false) // Track if content was successfully processed
const isInitialRenderRef = useRef<boolean>(true) // Track if this is initial render
const chartInstanceRef = useRef<any>(null) // Direct reference to ECharts instance
const resizeTimerRef = useRef<NodeJS.Timeout | null>(null) // For debounce handling
const resizeTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null) // For debounce handling
const chartReadyTimerRef = useRef<ReturnType<typeof setTimeout> | null>(null)
const finishedEventCountRef = useRef<number>(0) // Track finished event trigger count
const match = /language-(\w+)/.exec(className || '')
const language = match?.[1]
const languageShowName = getCorrectCapitalizationLanguageName(language || '')
const isDarkMode = theme === Theme.dark
const clearResizeTimer = useCallback(() => {
if (!resizeTimerRef.current)
return
clearTimeout(resizeTimerRef.current)
resizeTimerRef.current = null
}, [])
const clearChartReadyTimer = useCallback(() => {
if (!chartReadyTimerRef.current)
return
clearTimeout(chartReadyTimerRef.current)
chartReadyTimerRef.current = null
}, [])
const echartsStyle = useMemo(() => ({
height: '350px',
width: '100%',
@ -104,26 +121,27 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
// Debounce resize operations
const debouncedResize = useCallback(() => {
if (resizeTimerRef.current)
clearTimeout(resizeTimerRef.current)
clearResizeTimer()
resizeTimerRef.current = setTimeout(() => {
if (chartInstanceRef.current)
chartInstanceRef.current.resize()
resizeTimerRef.current = null
}, 200)
}, [])
}, [clearResizeTimer])
// Handle ECharts instance initialization
const handleChartReady = useCallback((instance: any) => {
chartInstanceRef.current = instance
// Force resize to ensure timeline displays correctly
setTimeout(() => {
clearChartReadyTimer()
chartReadyTimerRef.current = setTimeout(() => {
if (chartInstanceRef.current)
chartInstanceRef.current.resize()
chartReadyTimerRef.current = null
}, 200)
}, [])
}, [clearChartReadyTimer])
// Store event handlers in useMemo to avoid recreating them
const echartsEvents = useMemo(() => ({
@ -157,10 +175,20 @@ const CodeBlock: any = memo(({ inline, className, children = '', ...props }: any
return () => {
window.removeEventListener('resize', handleResize)
if (resizeTimerRef.current)
clearTimeout(resizeTimerRef.current)
clearResizeTimer()
clearChartReadyTimer()
chartInstanceRef.current = null
}
}, [language, debouncedResize])
}, [language, debouncedResize, clearResizeTimer, clearChartReadyTimer])
useEffect(() => {
return () => {
clearResizeTimer()
clearChartReadyTimer()
chartInstanceRef.current = null
echartsRef.current = null
}
}, [clearResizeTimer, clearChartReadyTimer])
// Process chart data when content changes
useEffect(() => {
// Only process echarts content

View File

@ -1,10 +1,14 @@
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { useAutoDisabledDocuments } from '@/service/knowledge/use-document'
import AutoDisabledDocument from '../auto-disabled-document'
const { mockToastSuccess } = vi.hoisted(() => ({
mockToastSuccess: vi.fn(),
}))
type AutoDisabledDocumentsResponse = { document_ids: string[] }
const createMockQueryResult = (
@ -26,9 +30,9 @@ vi.mock('@/service/knowledge/use-document', () => ({
useInvalidDisabledDocument: vi.fn(() => mockInvalidDisabledDocument),
}))
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: vi.fn(),
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
success: mockToastSuccess,
},
}))
@ -134,10 +138,7 @@ describe('AutoDisabledDocument', () => {
fireEvent.click(actionButton)
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'success',
message: expect.any(String),
})
expect(toast.success).toHaveBeenCalledWith(expect.any(String))
})
})
})

View File

@ -3,7 +3,7 @@ import type { FC } from 'react'
import * as React from 'react'
import { useCallback } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { useAutoDisabledDocuments, useDocumentEnable, useInvalidDisabledDocument } from '@/service/knowledge/use-document'
import StatusWithAction from './status-with-action'
@ -23,7 +23,7 @@ const AutoDisabledDocument: FC<Props> = ({
const handleEnableDocuments = useCallback(async () => {
await enableDocument({ datasetId, documentIds })
invalidDisabledDocument()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
toast.success(t('actionMsg.modifiedSuccessfully', { ns: 'common' }))
}, [])
if (!hasDisabledDocument || isLoading)
return null

View File

@ -3,10 +3,14 @@ import type { FileEntity } from '../../types'
import { act, fireEvent, render, renderHook, screen, waitFor } from '@testing-library/react'
import * as React from 'react'
import { beforeEach, describe, expect, it, vi } from 'vitest'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { FileContextProvider } from '../../store'
import { useUpload } from '../use-upload'
const { mockToastError } = vi.hoisted(() => ({
mockToastError: vi.fn(),
}))
vi.mock('@/service/use-common', () => ({
useFileUploadConfig: vi.fn(() => ({
data: {
@ -17,9 +21,9 @@ vi.mock('@/service/use-common', () => ({
})),
}))
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: vi.fn(),
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
error: mockToastError,
},
}))
@ -177,10 +181,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
})
@ -204,13 +205,11 @@ describe('useUpload hook', () => {
result.current.fileChangeHandle(mockEvent)
})
// Should not show type error for valid image type
type ToastCall = [{ type: string, message: string }]
const mockNotify = vi.mocked(Toast.notify)
// Should not show file-extension error for valid image type
type ToastCall = [string]
const mockNotify = vi.mocked(toast.error)
const calls = mockNotify.mock.calls as ToastCall[]
const typeErrorCalls = calls.filter(
(call: ToastCall) => call[0].type === 'error' && call[0].message.includes('Extension'),
)
const typeErrorCalls = calls.filter(call => call[0].includes('common.fileUploader.fileExtensionNotSupport'))
expect(typeErrorCalls.length).toBe(0)
})
})
@ -261,7 +260,7 @@ describe('useUpload hook', () => {
})
// Should not throw and not show error
expect(Toast.notify).not.toHaveBeenCalled()
expect(toast.error).not.toHaveBeenCalled()
})
it('should handle null files', () => {
@ -314,10 +313,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
})
})
@ -419,10 +415,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: 'Upload error',
})
expect(toast.error).toHaveBeenCalledWith('Upload error')
})
})
})
@ -481,10 +474,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: 'Upload error',
})
expect(toast.error).toHaveBeenCalledWith('Upload error')
})
})
})
@ -522,10 +512,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
})
})
@ -610,10 +597,7 @@ describe('useUpload hook', () => {
})
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
// Restore original MockFileReader
@ -773,10 +757,7 @@ describe('useUpload hook', () => {
// Should show error toast for invalid file type
await waitFor(() => {
expect(Toast.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
})

View File

@ -4,7 +4,7 @@ import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { v4 as uuid4 } from 'uuid'
import { fileUpload, getFileUploadErrorMessage } from '@/app/components/base/file-uploader/utils'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { useFileUploadConfig } from '@/service/use-common'
import { ACCEPT_TYPES } from '../constants'
import { useFileStore } from '../store'
@ -54,9 +54,9 @@ export const useUpload = () => {
const showErrorMessage = useCallback((type: 'type' | 'size') => {
if (type === 'type')
Toast.notify({ type: 'error', message: t('fileUploader.fileExtensionNotSupport', { ns: 'common' }) })
toast.error(t('fileUploader.fileExtensionNotSupport', { ns: 'common' }))
else
Toast.notify({ type: 'error', message: t('imageUploader.fileSizeLimitExceeded', { ns: 'dataset', size: fileUploadConfig.imageFileSizeLimit }) })
toast.error(t('imageUploader.fileSizeLimitExceeded', { ns: 'dataset', size: fileUploadConfig.imageFileSizeLimit }))
}, [fileUploadConfig, t])
const getValidFiles = useCallback((files: File[]) => {
@ -146,7 +146,7 @@ export const useUpload = () => {
},
onErrorCallback: (error?: any) => {
const errorMessage = getFileUploadErrorMessage(error, t('fileUploader.uploadFromComputerUploadError', { ns: 'common' }), t)
Toast.notify({ type: 'error', message: errorMessage })
toast.error(errorMessage)
handleUpdateFile({ ...uploadingFile, progress: -1 })
},
})
@ -188,7 +188,7 @@ export const useUpload = () => {
},
onErrorCallback: (error?: any) => {
const errorMessage = getFileUploadErrorMessage(error, t('fileUploader.uploadFromComputerUploadError', { ns: 'common' }), t)
Toast.notify({ type: 'error', message: errorMessage })
toast.error(errorMessage)
handleUpdateFile({ ...uploadingFile, progress: -1 })
},
})
@ -198,7 +198,7 @@ export const useUpload = () => {
reader.addEventListener(
'error',
() => {
Toast.notify({ type: 'error', message: t('fileUploader.uploadFromComputerReadError', { ns: 'common' }) })
toast.error(t('fileUploader.uploadFromComputerReadError', { ns: 'common' }))
},
false,
)
@ -211,10 +211,7 @@ export const useUpload = () => {
if (newFiles.length === 0)
return
if (files.length + newFiles.length > singleChunkAttachmentLimit) {
Toast.notify({
type: 'error',
message: t('imageUploader.singleChunkAttachmentLimitTooltip', { ns: 'datasetHitTesting', limit: singleChunkAttachmentLimit }),
})
toast.error(t('imageUploader.singleChunkAttachmentLimitTooltip', { ns: 'datasetHitTesting', limit: singleChunkAttachmentLimit }))
return
}
for (const file of newFiles)

View File

@ -5,9 +5,9 @@ import { RETRIEVE_METHOD } from '@/types/app'
import RetrievalParamConfig from '../index'
const mockNotify = vi.fn()
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: (params: { type: string, message: string }) => mockNotify(params),
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
error: (message: string) => mockNotify(message),
},
}))
@ -260,10 +260,7 @@ describe('RetrievalParamConfig', () => {
fireEvent.click(screen.getByTestId('rerank-switch'))
expect(mockNotify).toHaveBeenCalledWith({
type: 'error',
message: 'workflow.errorMsg.rerankModelRequired',
})
expect(mockNotify).toHaveBeenCalledWith('workflow.errorMsg.rerankModelRequired')
})
it('should update reranking model on selection', () => {
@ -618,10 +615,7 @@ describe('RetrievalParamConfig', () => {
const rerankModelCard = radioCards.find(card => card.getAttribute('data-title') === 'common.modelProvider.rerankModel.key')
fireEvent.click(rerankModelCard!)
expect(mockNotify).toHaveBeenCalledWith({
type: 'error',
message: 'workflow.errorMsg.rerankModelRequired',
})
expect(mockNotify).toHaveBeenCalledWith('workflow.errorMsg.rerankModelRequired')
})
it('should update weights when WeightedScore changes', () => {

View File

@ -11,8 +11,8 @@ import ScoreThresholdItem from '@/app/components/base/param-item/score-threshold
import TopKItem from '@/app/components/base/param-item/top-k-item'
import RadioCard from '@/app/components/base/radio-card'
import Switch from '@/app/components/base/switch'
import Toast from '@/app/components/base/toast'
import Tooltip from '@/app/components/base/tooltip'
import { toast } from '@/app/components/base/ui/toast'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useCurrentProviderAndModel, useModelListAndDefaultModel } from '@/app/components/header/account-setting/model-provider-page/hooks'
import ModelSelector from '@/app/components/header/account-setting/model-provider-page/model-selector'
@ -59,7 +59,7 @@ const RetrievalParamConfig: FC<Props> = ({
const handleToggleRerankEnable = useCallback((enable: boolean) => {
if (enable && !currentModel)
Toast.notify({ type: 'error', message: t('errorMsg.rerankModelRequired', { ns: 'workflow' }) })
toast.error(t('errorMsg.rerankModelRequired', { ns: 'workflow' }))
onChange({
...value,
reranking_enable: enable,
@ -96,7 +96,7 @@ const RetrievalParamConfig: FC<Props> = ({
}
}
if (v === RerankingModeEnum.RerankingModel && !currentModel)
Toast.notify({ type: 'error', message: t('errorMsg.rerankModelRequired', { ns: 'workflow' }) })
toast.error(t('errorMsg.rerankModelRequired', { ns: 'workflow' }))
onChange(result)
}

View File

@ -5,11 +5,23 @@ import { renameDocumentName } from '@/service/datasets'
import RenameModal from '../rename-modal'
const { mockToastSuccess, mockToastError } = vi.hoisted(() => ({
mockToastSuccess: vi.fn(),
mockToastError: vi.fn(),
}))
// Mock the service
vi.mock('@/service/datasets', () => ({
renameDocumentName: vi.fn(),
}))
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
success: mockToastSuccess,
error: mockToastError,
},
}))
const mockRenameDocumentName = vi.mocked(renameDocumentName)
describe('RenameModal', () => {
@ -118,6 +130,7 @@ describe('RenameModal', () => {
await waitFor(() => {
expect(handleSaved).toHaveBeenCalledTimes(1)
expect(handleClose).toHaveBeenCalledTimes(1)
expect(mockToastSuccess).toHaveBeenCalledWith(expect.any(String))
})
})
})
@ -163,6 +176,7 @@ describe('RenameModal', () => {
// onSaved and onClose should not be called on error
expect(handleSaved).not.toHaveBeenCalled()
expect(handleClose).not.toHaveBeenCalled()
expect(mockToastError).toHaveBeenCalledWith('Error: API Error')
})
})
})

View File

@ -3,6 +3,11 @@ import { beforeEach, describe, expect, it, vi } from 'vitest'
import { DocumentActionType } from '@/models/datasets'
import { useDocumentActions } from '../use-document-actions'
const { mockToastSuccess, mockToastError } = vi.hoisted(() => ({
mockToastSuccess: vi.fn(),
mockToastError: vi.fn(),
}))
const mockArchive = vi.fn()
const mockSummary = vi.fn()
const mockEnable = vi.fn()
@ -22,9 +27,11 @@ vi.mock('@/service/knowledge/use-document', () => ({
useDocumentDownloadZip: () => ({ mutateAsync: mockDownloadZip, isPending: mockIsDownloadingZip }),
}))
const mockToastNotify = vi.fn()
vi.mock('@/app/components/base/toast', () => ({
default: { notify: (...args: unknown[]) => mockToastNotify(...args) },
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
success: mockToastSuccess,
error: mockToastError,
},
}))
const mockDownloadBlob = vi.fn()
@ -67,9 +74,7 @@ describe('useDocumentActions', () => {
datasetId: 'ds-1',
documentIds: ['doc-1', 'doc-2'],
})
expect(mockToastNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'success' }),
)
expect(mockToastSuccess).toHaveBeenCalledWith(expect.any(String))
expect(defaultOptions.onUpdate).toHaveBeenCalled()
})
@ -142,9 +147,7 @@ describe('useDocumentActions', () => {
await result.current.handleAction(DocumentActionType.archive)()
})
expect(mockToastNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
expect(mockToastError).toHaveBeenCalledWith(expect.any(String))
expect(defaultOptions.onUpdate).not.toHaveBeenCalled()
})
})
@ -174,9 +177,7 @@ describe('useDocumentActions', () => {
await result.current.handleBatchReIndex()
})
expect(mockToastNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
expect(mockToastError).toHaveBeenCalledWith(expect.any(String))
})
})
@ -210,9 +211,7 @@ describe('useDocumentActions', () => {
await result.current.handleBatchDownload()
})
expect(mockToastNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
expect(mockToastError).toHaveBeenCalledWith(expect.any(String))
})
it('should show error toast when blob is null', async () => {
@ -223,9 +222,7 @@ describe('useDocumentActions', () => {
await result.current.handleBatchDownload()
})
expect(mockToastNotify).toHaveBeenCalledWith(
expect.objectContaining({ type: 'error' }),
)
expect(mockToastError).toHaveBeenCalledWith(expect.any(String))
})
})
})

View File

@ -1,7 +1,7 @@
import type { CommonResponse } from '@/models/common'
import { useCallback, useMemo } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { DocumentActionType } from '@/models/datasets'
import {
useDocumentArchive,
@ -79,11 +79,11 @@ export const useDocumentActions = ({
if (!e) {
if (actionName === DocumentActionType.delete)
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
toast.success(t('actionMsg.modifiedSuccessfully', { ns: 'common' }))
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
toast.error(t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }))
}
}
}, [actionMutationMap, datasetId, selectedIds, onClearSelection, onUpdate, t])
@ -94,11 +94,11 @@ export const useDocumentActions = ({
)
if (!e) {
onClearSelection()
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
toast.success(t('actionMsg.modifiedSuccessfully', { ns: 'common' }))
onUpdate()
}
else {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
toast.error(t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }))
}
}, [retryIndexDocument, datasetId, selectedIds, onClearSelection, onUpdate, t])
@ -110,7 +110,7 @@ export const useDocumentActions = ({
requestDocumentsZip({ datasetId, documentIds: downloadableSelectedIds }),
)
if (e || !blob) {
Toast.notify({ type: 'error', message: t('actionMsg.downloadUnsuccessfully', { ns: 'common' }) })
toast.error(t('actionMsg.downloadUnsuccessfully', { ns: 'common' }))
return
}

View File

@ -7,7 +7,7 @@ import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import Input from '@/app/components/base/input'
import Modal from '@/app/components/base/modal'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { renameDocumentName } from '@/service/datasets'
type Props = {
@ -41,13 +41,13 @@ const RenameModal: FC<Props> = ({
documentId,
name: newName,
})
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
toast.success(t('actionMsg.modifiedSuccessfully', { ns: 'common' }))
onSaved()
onClose()
}
catch (error) {
if (error)
Toast.notify({ type: 'error', message: error.toString() })
toast.error(error.toString())
}
finally {
setSaveLoadingFalse()

View File

@ -35,7 +35,7 @@ vi.mock('../ExternalApiSelect', () => ({
<span data-testid="select-value">{value}</span>
<span data-testid="select-items-count">{items.length}</span>
{items.map((item: MockSelectItem) => (
<button key={item.value} data-testid={`select-${item.value}`} onClick={() => onSelect(item)}>
<button type="button" key={item.value} data-testid={`select-${item.value}`} onClick={() => onSelect(item)}>
{item.name}
</button>
))}

View File

@ -5,9 +5,15 @@ import { IndexingType } from '@/app/components/datasets/create/step-two'
import { ChunkingMode, DatasetPermission, DataSourceType } from '@/models/datasets'
import { useDatasetCardState } from '../use-dataset-card-state'
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: vi.fn(),
const { mockToastSuccess, mockToastError } = vi.hoisted(() => ({
mockToastSuccess: vi.fn(),
mockToastError: vi.fn(),
}))
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
success: mockToastSuccess,
error: mockToastError,
},
}))
@ -299,7 +305,7 @@ describe('useDatasetCardState', () => {
describe('Error Handling', () => {
it('should show error toast when export pipeline fails', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
mockExportPipeline.mockRejectedValue(new Error('Export failed'))
const dataset = createMockDataset({ pipeline_id: 'pipeline-1' })
@ -311,14 +317,11 @@ describe('useDatasetCardState', () => {
await result.current.handleExportPipeline()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
it('should handle Response error in detectIsUsedByApp', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
const mockResponse = new Response(JSON.stringify({ message: 'API Error' }), {
status: 400,
})
@ -333,14 +336,11 @@ describe('useDatasetCardState', () => {
await result.current.detectIsUsedByApp()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.stringContaining('API Error'),
})
expect(toast.error).toHaveBeenCalledWith(expect.stringContaining('API Error'))
})
it('should handle generic Error in detectIsUsedByApp', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
mockCheckUsage.mockRejectedValue(new Error('Network error'))
const dataset = createMockDataset()
@ -352,14 +352,11 @@ describe('useDatasetCardState', () => {
await result.current.detectIsUsedByApp()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: 'Network error',
})
expect(toast.error).toHaveBeenCalledWith('Network error')
})
it('should handle error without message in detectIsUsedByApp', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
mockCheckUsage.mockRejectedValue({})
const dataset = createMockDataset()
@ -371,10 +368,7 @@ describe('useDatasetCardState', () => {
await result.current.detectIsUsedByApp()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: 'Unknown error',
})
expect(toast.error).toHaveBeenCalledWith('dataset.unknownError')
})
it('should handle exporting state correctly', async () => {

View File

@ -2,7 +2,7 @@ import type { Tag } from '@/app/components/base/tag-management/constant'
import type { DataSet } from '@/models/datasets'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { useCheckDatasetUsage, useDeleteDataset } from '@/service/use-dataset-card'
import { useExportPipelineDSL } from '@/service/use-pipeline'
import { downloadBlob } from '@/utils/download'
@ -70,7 +70,7 @@ export const useDatasetCardState = ({ dataset, onSuccess }: UseDatasetCardStateO
downloadBlob({ data: file, fileName: `${name}.pipeline` })
}
catch {
Toast.notify({ type: 'error', message: t('exportFailed', { ns: 'app' }) })
toast.error(t('exportFailed', { ns: 'app' }))
}
finally {
setExporting(false)
@ -93,10 +93,10 @@ export const useDatasetCardState = ({ dataset, onSuccess }: UseDatasetCardStateO
catch (e: unknown) {
if (e instanceof Response) {
const res = await e.json()
Toast.notify({ type: 'error', message: res?.message || 'Unknown error' })
toast.error(res?.message || t('unknownError', { ns: 'dataset' }))
}
else {
Toast.notify({ type: 'error', message: (e as Error)?.message || 'Unknown error' })
toast.error((e as Error)?.message || t('unknownError', { ns: 'dataset' }))
}
}
}, [dataset.id, checkUsage, t])
@ -104,7 +104,7 @@ export const useDatasetCardState = ({ dataset, onSuccess }: UseDatasetCardStateO
const onConfirmDelete = useCallback(async () => {
try {
await deleteDatasetMutation(dataset.id)
Toast.notify({ type: 'success', message: t('datasetDeleted', { ns: 'dataset' }) })
toast.success(t('datasetDeleted', { ns: 'dataset' }))
onSuccess?.()
}
finally {

View File

@ -6,6 +6,10 @@ import { RETRIEVE_METHOD } from '@/types/app'
import { IndexingType } from '../../../create/step-two'
import Form from '../index'
const { mockToastError } = vi.hoisted(() => ({
mockToastError: vi.fn(),
}))
// Mock contexts
const mockMutateDatasets = vi.fn()
const mockInvalidDatasetList = vi.fn()
@ -189,9 +193,10 @@ vi.mock('@/app/components/datasets/common/check-rerank-model', () => ({
isReRankModelSelected: () => true,
}))
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: vi.fn(),
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
error: mockToastError,
success: vi.fn(),
},
}))
@ -391,7 +396,7 @@ describe('Form', () => {
})
it('should show error when trying to save with empty name', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
render(<Form />)
// Clear the name
@ -403,10 +408,7 @@ describe('Form', () => {
fireEvent.click(saveButton)
await waitFor(() => {
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
})

View File

@ -6,6 +6,11 @@ import { RETRIEVE_METHOD } from '@/types/app'
import { IndexingType } from '../../../../create/step-two'
import { useFormState } from '../use-form-state'
const { mockToastSuccess, mockToastError } = vi.hoisted(() => ({
mockToastSuccess: vi.fn(),
mockToastError: vi.fn(),
}))
// Mock contexts
const mockMutateDatasets = vi.fn()
const mockInvalidDatasetList = vi.fn()
@ -122,9 +127,10 @@ vi.mock('@/app/components/datasets/common/check-rerank-model', () => ({
isReRankModelSelected: () => true,
}))
vi.mock('@/app/components/base/toast', () => ({
default: {
notify: vi.fn(),
vi.mock('@/app/components/base/ui/toast', () => ({
toast: {
success: mockToastSuccess,
error: mockToastError,
},
}))
@ -423,7 +429,7 @@ describe('useFormState', () => {
describe('handleSave', () => {
it('should show error toast when name is empty', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
const { result } = renderHook(() => useFormState())
act(() => {
@ -434,14 +440,11 @@ describe('useFormState', () => {
await result.current.handleSave()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
it('should show error toast when name is whitespace only', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
const { result } = renderHook(() => useFormState())
act(() => {
@ -452,10 +455,7 @@ describe('useFormState', () => {
await result.current.handleSave()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
it('should call updateDatasetSetting with correct params', async () => {
@ -477,7 +477,7 @@ describe('useFormState', () => {
})
it('should show success toast on successful save', async () => {
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
const { result } = renderHook(() => useFormState())
await act(async () => {
@ -485,10 +485,7 @@ describe('useFormState', () => {
})
await waitFor(() => {
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'success',
message: expect.any(String),
})
expect(toast.success).toHaveBeenCalledWith(expect.any(String))
})
})
@ -553,7 +550,7 @@ describe('useFormState', () => {
it('should show error toast on save failure', async () => {
const { updateDatasetSetting } = await import('@/service/datasets')
const Toast = await import('@/app/components/base/toast')
const { toast } = await import('@/app/components/base/ui/toast')
vi.mocked(updateDatasetSetting).mockRejectedValueOnce(new Error('Network error'))
const { result } = renderHook(() => useFormState())
@ -562,10 +559,7 @@ describe('useFormState', () => {
await result.current.handleSave()
})
expect(Toast.default.notify).toHaveBeenCalledWith({
type: 'error',
message: expect.any(String),
})
expect(toast.error).toHaveBeenCalledWith(expect.any(String))
})
it('should include partial_member_list when permission is partialMembers', async () => {

View File

@ -6,7 +6,7 @@ import type { IconInfo, SummaryIndexSetting as SummaryIndexSettingType } from '@
import type { RetrievalConfig } from '@/types/app'
import { useCallback, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { toast } from '@/app/components/base/ui/toast'
import { isReRankModelSelected } from '@/app/components/datasets/common/check-rerank-model'
import { ModelTypeEnum } from '@/app/components/header/account-setting/model-provider-page/declarations'
import { useModelList } from '@/app/components/header/account-setting/model-provider-page/hooks'
@ -123,12 +123,12 @@ export const useFormState = () => {
return
if (!name?.trim()) {
Toast.notify({ type: 'error', message: t('form.nameError', { ns: 'datasetSettings' }) })
toast.error(t('form.nameError', { ns: 'datasetSettings' }))
return
}
if (!isReRankModelSelected({ rerankModelList, retrievalConfig, indexMethod })) {
Toast.notify({ type: 'error', message: t('datasetConfig.rerankModelRequired', { ns: 'appDebug' }) })
toast.error(t('datasetConfig.rerankModelRequired', { ns: 'appDebug' }))
return
}
@ -176,7 +176,7 @@ export const useFormState = () => {
}
await updateDatasetSetting({ datasetId: currentDataset!.id, body })
Toast.notify({ type: 'success', message: t('actionMsg.modifiedSuccessfully', { ns: 'common' }) })
toast.success(t('actionMsg.modifiedSuccessfully', { ns: 'common' }))
if (mutateDatasets) {
await mutateDatasets()
@ -184,7 +184,7 @@ export const useFormState = () => {
}
}
catch {
Toast.notify({ type: 'error', message: t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }) })
toast.error(t('actionMsg.modifiedUnsuccessfully', { ns: 'common' }))
}
finally {
setLoading(false)

View File

@ -1,3 +1,4 @@
import type { ComponentProps } from 'react'
import type { Banner } from '@/models/app'
import { cleanup, fireEvent, render, screen } from '@testing-library/react'
import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
@ -5,6 +6,11 @@ import { BannerItem } from '../banner-item'
const mockScrollTo = vi.fn()
const mockSlideNodes = vi.fn()
const mockTrackEvent = vi.fn()
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: (...args: unknown[]) => mockTrackEvent(...args),
}))
vi.mock('@/app/components/base/carousel', () => ({
useCarousel: () => ({
@ -48,19 +54,34 @@ class MockResizeObserver {
}
}
const renderBannerItem = (
banner: Banner = createMockBanner(),
props: Partial<ComponentProps<typeof BannerItem>> = {},
) => {
return render(
<BannerItem
banner={banner}
autoplayDelay={5000}
sort={1}
language="en-US"
{...props}
/>,
)
}
describe('BannerItem', () => {
let mockWindowOpen: ReturnType<typeof vi.spyOn>
beforeEach(() => {
mockWindowOpen = vi.spyOn(window, 'open').mockImplementation(() => null)
mockSlideNodes.mockReturnValue([{}, {}, {}]) // 3 slides
mockSlideNodes.mockReturnValue([{}, {}, {}])
vi.stubGlobal('ResizeObserver', MockResizeObserver)
Object.defineProperty(window, 'innerWidth', {
writable: true,
configurable: true,
value: 1400, // Above RESPONSIVE_BREAKPOINT (1200)
value: 1400,
})
})
@ -73,81 +94,51 @@ describe('BannerItem', () => {
describe('basic rendering', () => {
it('renders banner category', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('Featured')).toBeInTheDocument()
})
it('renders banner title', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('Test Banner Title')).toBeInTheDocument()
})
it('renders banner description', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('Test banner description text')).toBeInTheDocument()
})
it('renders banner image with correct src and alt', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
const image = screen.getByRole('img')
expect(image).toHaveAttribute('src', 'https://example.com/image.png')
expect(image).toHaveAttribute('alt', 'Test Banner Title')
})
it('renders view more text', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('explore.banner.viewMore')).toBeInTheDocument()
})
})
describe('click handling', () => {
it('opens banner link in new tab when clicked', () => {
it('opens banner link in new tab and tracks click when clicked', () => {
const banner = createMockBanner({ link: 'https://test-link.com' })
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem(banner, { sort: 2, language: 'zh-Hans', accountId: 'account-123' })
const bannerElement = screen.getByText('Test Banner Title').closest('div[class*="cursor-pointer"]')
fireEvent.click(bannerElement!)
expect(mockTrackEvent).toHaveBeenCalledWith('explore_banner_click', expect.objectContaining({
banner_id: 'banner-1',
title: 'Test Banner Title',
sort: 2,
link: 'https://test-link.com',
page: 'explore',
language: 'zh-Hans',
account_id: 'account-123',
event_time: expect.any(Number),
}))
expect(mockWindowOpen).toHaveBeenCalledWith(
'https://test-link.com',
'_blank',
@ -155,18 +146,16 @@ describe('BannerItem', () => {
)
})
it('does not open window when banner has no link', () => {
it('tracks click even when banner has no link', () => {
const banner = createMockBanner({ link: '' })
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem(banner)
const bannerElement = screen.getByText('Test Banner Title').closest('div[class*="cursor-pointer"]')
fireEvent.click(bannerElement!)
expect(mockTrackEvent).toHaveBeenCalledWith('explore_banner_click', expect.objectContaining({
link: '',
}))
expect(mockWindowOpen).not.toHaveBeenCalled()
})
})
@ -174,28 +163,13 @@ describe('BannerItem', () => {
describe('slide indicators', () => {
it('renders correct number of indicator buttons', () => {
mockSlideNodes.mockReturnValue([{}, {}, {}])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const buttons = screen.getAllByRole('button')
expect(buttons).toHaveLength(3)
renderBannerItem()
expect(screen.getAllByRole('button')).toHaveLength(3)
})
it('renders indicator buttons with correct numbers', () => {
mockSlideNodes.mockReturnValue([{}, {}, {}])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('01')).toBeInTheDocument()
expect(screen.getByText('02')).toBeInTheDocument()
expect(screen.getByText('03')).toBeInTheDocument()
@ -203,13 +177,7 @@ describe('BannerItem', () => {
it('calls scrollTo when indicator is clicked', () => {
mockSlideNodes.mockReturnValue([{}, {}, {}])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
const secondIndicator = screen.getByText('02').closest('button')
fireEvent.click(secondIndicator!)
@ -219,81 +187,39 @@ describe('BannerItem', () => {
it('renders no indicators when no slides', () => {
mockSlideNodes.mockReturnValue([])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.queryByRole('button')).not.toBeInTheDocument()
})
})
describe('isPaused prop', () => {
it('defaults isPaused to false', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('Test Banner Title')).toBeInTheDocument()
})
it('accepts isPaused prop', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
isPaused={true}
/>,
)
renderBannerItem(createMockBanner(), { isPaused: true })
expect(screen.getByText('Test Banner Title')).toBeInTheDocument()
})
})
describe('responsive behavior', () => {
it('sets up ResizeObserver on mount', () => {
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(mockResizeObserverObserve).toHaveBeenCalled()
})
it('adds resize event listener on mount', () => {
const addEventListenerSpy = vi.spyOn(window, 'addEventListener')
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(addEventListenerSpy).toHaveBeenCalledWith('resize', expect.any(Function))
addEventListenerSpy.mockRestore()
})
it('removes resize event listener on unmount', () => {
const removeEventListenerSpy = vi.spyOn(window, 'removeEventListener')
const banner = createMockBanner()
const { unmount } = render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const { unmount } = renderBannerItem()
unmount()
@ -308,14 +234,7 @@ describe('BannerItem', () => {
value: 1000,
})
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('Test Banner Title')).toBeInTheDocument()
})
@ -326,14 +245,7 @@ describe('BannerItem', () => {
value: 800,
})
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem()
expect(screen.getByText('explore.banner.viewMore')).toBeInTheDocument()
})
})
@ -348,13 +260,8 @@ describe('BannerItem', () => {
'img-src': 'https://example.com/img.png',
},
} as Partial<Banner>)
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem(banner)
expect(screen.getByText('Very Long Category Name')).toBeInTheDocument()
})
@ -367,13 +274,8 @@ describe('BannerItem', () => {
'img-src': 'https://example.com/img.png',
},
} as Partial<Banner>)
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem(banner)
const titleElement = screen.getByText('A Very Long Title That Should Be Truncated Eventually')
expect(titleElement).toHaveClass('line-clamp-2')
})
@ -387,13 +289,8 @@ describe('BannerItem', () => {
'img-src': 'https://example.com/img.png',
},
} as Partial<Banner>)
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
renderBannerItem(banner)
const descriptionElement = screen.getByText(/A very long description/)
expect(descriptionElement).toHaveClass('line-clamp-4')
})
@ -402,56 +299,26 @@ describe('BannerItem', () => {
describe('slide calculation', () => {
it('calculates next index correctly for first slide', () => {
mockSlideNodes.mockReturnValue([{}, {}, {}])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const buttons = screen.getAllByRole('button')
expect(buttons).toHaveLength(3)
renderBannerItem()
expect(screen.getAllByRole('button')).toHaveLength(3)
})
it('handles single slide case', () => {
mockSlideNodes.mockReturnValue([{}])
const banner = createMockBanner()
render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const buttons = screen.getAllByRole('button')
expect(buttons).toHaveLength(1)
renderBannerItem()
expect(screen.getAllByRole('button')).toHaveLength(1)
})
})
describe('wrapper styling', () => {
it('has cursor-pointer class', () => {
const banner = createMockBanner()
const { container } = render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const { container } = renderBannerItem()
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('cursor-pointer')
})
it('has rounded-2xl class', () => {
const banner = createMockBanner()
const { container } = render(
<BannerItem
banner={banner}
autoplayDelay={5000}
/>,
)
const { container } = renderBannerItem()
const wrapper = container.firstChild as HTMLElement
expect(wrapper).toHaveClass('rounded-2xl')
})

View File

@ -6,6 +6,8 @@ import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'
import Banner from '../banner'
const mockUseGetBanners = vi.fn()
const mockUseSelector = vi.fn()
const mockTrackEvent = vi.fn()
vi.mock('@/service/use-explore', () => ({
useGetBanners: (...args: unknown[]) => mockUseGetBanners(...args),
@ -15,6 +17,14 @@ vi.mock('@/context/i18n', () => ({
useLocale: () => 'en-US',
}))
vi.mock('@/context/app-context', () => ({
useSelector: (...args: unknown[]) => mockUseSelector(...args),
}))
vi.mock('@/app/components/base/amplitude', () => ({
trackEvent: (...args: unknown[]) => mockTrackEvent(...args),
}))
vi.mock('@/app/components/base/carousel', () => ({
Carousel: Object.assign(
({ children, onMouseEnter, onMouseLeave, className }: {
@ -54,9 +64,12 @@ vi.mock('@/app/components/base/carousel', () => ({
}))
vi.mock('../banner-item', () => ({
BannerItem: ({ banner, autoplayDelay, isPaused }: {
BannerItem: ({ banner, autoplayDelay, isPaused, sort, language, accountId }: {
banner: BannerType
autoplayDelay: number
sort: number
language: string
accountId?: string
isPaused?: boolean
}) => (
<div
@ -64,6 +77,9 @@ vi.mock('../banner-item', () => ({
data-banner-id={banner.id}
data-autoplay-delay={autoplayDelay}
data-is-paused={isPaused}
data-sort={sort}
data-language={language}
data-account-id={accountId}
>
BannerItem:
{' '}
@ -87,6 +103,11 @@ const createMockBanner = (id: string, status: string = 'enabled', title: string
describe('Banner', () => {
beforeEach(() => {
vi.useFakeTimers()
mockUseSelector.mockImplementation(selector => selector({
userProfile: {
id: 'account-123',
},
}))
})
afterEach(() => {
@ -235,6 +256,59 @@ describe('Banner', () => {
expect(screen.getByTestId('carousel')).toHaveClass('rounded-2xl')
})
it('tracks enabled banner impressions with expected payload', () => {
mockUseGetBanners.mockReturnValue({
data: [
createMockBanner('1', 'enabled', 'Enabled Banner 1'),
createMockBanner('2', 'disabled', 'Disabled Banner'),
createMockBanner('3', 'enabled', 'Enabled Banner 2'),
],
isLoading: false,
isError: false,
})
render(<Banner />)
expect(mockTrackEvent).toHaveBeenCalledTimes(2)
expect(mockTrackEvent).toHaveBeenNthCalledWith(1, 'explore_banner_impression', expect.objectContaining({
banner_id: '1',
title: 'Enabled Banner 1',
sort: 1,
link: 'https://example.com',
page: 'explore',
language: 'en-US',
account_id: 'account-123',
event_time: expect.any(Number),
}))
expect(mockTrackEvent).toHaveBeenNthCalledWith(2, 'explore_banner_impression', expect.objectContaining({
banner_id: '3',
title: 'Enabled Banner 2',
sort: 2,
link: 'https://example.com',
page: 'explore',
language: 'en-US',
account_id: 'account-123',
event_time: expect.any(Number),
}))
})
it('does not track impressions when account id is unavailable', () => {
mockUseSelector.mockImplementation(selector => selector({
userProfile: {
id: '',
},
}))
mockUseGetBanners.mockReturnValue({
data: [createMockBanner('1', 'enabled', 'Enabled Banner 1')],
isLoading: false,
isError: false,
})
render(<Banner />)
expect(mockTrackEvent).not.toHaveBeenCalled()
})
})
describe('hover behavior', () => {
@ -435,8 +509,25 @@ describe('Banner', () => {
const bannerItems = screen.getAllByTestId('banner-item')
expect(bannerItems[0]).toHaveAttribute('data-banner-id', '1')
expect(bannerItems[0]).toHaveAttribute('data-sort', '1')
expect(bannerItems[1]).toHaveAttribute('data-banner-id', '2')
expect(bannerItems[1]).toHaveAttribute('data-sort', '2')
expect(bannerItems[2]).toHaveAttribute('data-banner-id', '3')
expect(bannerItems[2]).toHaveAttribute('data-sort', '3')
})
it('passes tracking context to banner item', () => {
mockUseGetBanners.mockReturnValue({
data: [createMockBanner('1', 'enabled', 'Banner 1')],
isLoading: false,
isError: false,
})
render(<Banner />)
const bannerItem = screen.getByTestId('banner-item')
expect(bannerItem).toHaveAttribute('data-language', 'en-US')
expect(bannerItem).toHaveAttribute('data-account-id', 'account-123')
})
})

View File

@ -4,6 +4,7 @@ import type { Banner } from '@/models/app'
import { RiArrowRightLine } from '@remixicon/react'
import { useCallback, useEffect, useMemo, useRef, useState } from 'react'
import { useTranslation } from 'react-i18next'
import { trackEvent } from '@/app/components/base/amplitude'
import { useCarousel } from '@/app/components/base/carousel'
import { cn } from '@/utils/classnames'
import { IndicatorButton } from './indicator-button'
@ -11,6 +12,9 @@ import { IndicatorButton } from './indicator-button'
type BannerItemProps = {
banner: Banner
autoplayDelay: number
sort: number
language: string
accountId?: string
isPaused?: boolean
}
@ -20,7 +24,14 @@ const INDICATOR_WIDTH = 20
const INDICATOR_GAP = 8
const MIN_VIEW_MORE_WIDTH = 480
export const BannerItem: FC<BannerItemProps> = ({ banner, autoplayDelay, isPaused = false }) => {
export const BannerItem: FC<BannerItemProps> = ({
banner,
autoplayDelay,
sort,
language,
accountId,
isPaused = false,
}) => {
const { t } = useTranslation()
const { api, selectedIndex } = useCarousel()
const { category, title, description, 'img-src': imgSrc } = banner.content
@ -91,9 +102,21 @@ export const BannerItem: FC<BannerItemProps> = ({ banner, autoplayDelay, isPause
const handleBannerClick = useCallback(() => {
incrementResetKey()
trackEvent('explore_banner_click', {
banner_id: banner.id,
title: banner.content.title,
sort,
link: banner.link,
page: 'explore',
language,
account_id: accountId,
event_time: Date.now(),
})
if (banner.link)
window.open(banner.link, '_blank', 'noopener,noreferrer')
}, [banner.link, incrementResetKey])
}, [accountId, banner, incrementResetKey, language, sort])
const handleIndicatorClick = useCallback((index: number) => {
incrementResetKey()

View File

@ -1,7 +1,9 @@
import type { FC } from 'react'
import * as React from 'react'
import { useEffect, useMemo, useRef, useState } from 'react'
import { trackEvent } from '@/app/components/base/amplitude'
import { Carousel } from '@/app/components/base/carousel'
import { useSelector } from '@/context/app-context'
import { useLocale } from '@/context/i18n'
import { useGetBanners } from '@/service/use-explore'
import Loading from '../../base/loading'
@ -23,9 +25,11 @@ const LoadingState: FC = () => (
const Banner: FC = () => {
const locale = useLocale()
const { data: banners, isLoading, isError } = useGetBanners(locale)
const accountId = useSelector(s => s.userProfile.id)
const [isHovered, setIsHovered] = useState(false)
const [isResizing, setIsResizing] = useState(false)
const resizeTimerRef = useRef<NodeJS.Timeout | null>(null)
const trackedBannerIdsRef = useRef<Set<string>>(new Set())
const enabledBanners = useMemo(
() => banners?.filter(banner => banner.status === 'enabled') ?? [],
@ -56,6 +60,28 @@ const Banner: FC = () => {
}
}, [])
useEffect(() => {
if (!accountId)
return
enabledBanners.forEach((banner, index) => {
if (trackedBannerIdsRef.current.has(banner.id))
return
trackEvent('explore_banner_impression', {
banner_id: banner.id,
title: banner.content.title,
sort: index + 1,
link: banner.link,
page: 'explore',
language: locale,
account_id: accountId,
event_time: Date.now(),
})
trackedBannerIdsRef.current.add(banner.id)
})
}, [accountId, enabledBanners, locale])
if (isLoading)
return <LoadingState />
@ -77,12 +103,15 @@ const Banner: FC = () => {
onMouseLeave={() => setIsHovered(false)}
>
<Carousel.Content>
{enabledBanners.map(banner => (
{enabledBanners.map((banner, index) => (
<Carousel.Item key={banner.id}>
<BannerItem
banner={banner}
autoplayDelay={AUTOPLAY_DELAY}
isPaused={isPaused}
sort={index + 1}
language={locale}
accountId={accountId}
/>
</Carousel.Item>
))}

View File

@ -78,6 +78,7 @@ const ApiBasedExtensionModal: FC<ApiBasedExtensionModalProps> = ({
<Modal
isShow
onClose={noop}
wrapperClassName="z-[1002]"
className="!w-[640px] !max-w-none !p-8 !pb-6"
>
<div className="mb-2 text-xl font-semibold text-text-primary">

View File

@ -69,7 +69,7 @@ const ApiBasedExtensionSelector: FC<ApiBasedExtensionSelectorProps> = ({
)
}
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className="z-[102] w-[calc(100%-32px)] max-w-[576px]">
<PortalToFollowElemContent className="z-[1002] w-[calc(100%-32px)] max-w-[576px]">
<div className="z-10 w-full rounded-lg border-[0.5px] border-components-panel-border bg-components-panel-bg shadow-lg">
<div className="p-1">
<div className="flex items-center justify-between px-3 pb-1 pt-2">

View File

@ -84,7 +84,7 @@ const Configure = ({
{t('dataSource.configure', { ns: 'common' })}
</Button>
</PortalToFollowElemTrigger>
<PortalToFollowElemContent className="z-[61]">
<PortalToFollowElemContent className="z-[1002]">
<div className="w-[240px] space-y-1.5 rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur p-2 shadow-lg">
{
!!canOAuth && (
@ -104,7 +104,7 @@ const Configure = ({
}
{
!!canApiKey && !!canOAuth && (
<div className="system-2xs-medium-uppercase flex h-4 items-center p-2 text-text-quaternary">
<div className="flex h-4 items-center p-2 text-text-quaternary system-2xs-medium-uppercase">
<div className="mr-2 h-[1px] grow bg-gradient-to-l from-[rgba(16,24,40,0.08)]" />
OR
<div className="ml-2 h-[1px] grow bg-gradient-to-r from-[rgba(16,24,40,0.08)]" />

View File

@ -39,7 +39,7 @@ const Operator = ({
text: (
<div className="flex items-center">
<RiHome9Line className="mr-2 h-4 w-4 text-text-tertiary" />
<div className="system-sm-semibold text-text-secondary">{t('auth.setDefault', { ns: 'plugin' })}</div>
<div className="text-text-secondary system-sm-semibold">{t('auth.setDefault', { ns: 'plugin' })}</div>
</div>
),
},
@ -51,7 +51,7 @@ const Operator = ({
text: (
<div className="flex items-center">
<RiEditLine className="mr-2 h-4 w-4 text-text-tertiary" />
<div className="system-sm-semibold text-text-secondary">{t('operation.rename', { ns: 'common' })}</div>
<div className="text-text-secondary system-sm-semibold">{t('operation.rename', { ns: 'common' })}</div>
</div>
),
},
@ -66,7 +66,7 @@ const Operator = ({
text: (
<div className="flex items-center">
<RiEqualizer2Line className="mr-2 h-4 w-4 text-text-tertiary" />
<div className="system-sm-semibold text-text-secondary">{t('operation.edit', { ns: 'common' })}</div>
<div className="text-text-secondary system-sm-semibold">{t('operation.edit', { ns: 'common' })}</div>
</div>
),
},
@ -81,7 +81,7 @@ const Operator = ({
text: (
<div className="flex items-center">
<RiStickyNoteAddLine className="mr-2 h-4 w-4 text-text-tertiary" />
<div className="system-sm-semibold mb-1 text-text-secondary">{t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}</div>
<div className="mb-1 text-text-secondary system-sm-semibold">{t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}</div>
</div>
),
},
@ -98,7 +98,7 @@ const Operator = ({
text: (
<div className="flex items-center">
<RiDeleteBinLine className="mr-2 h-4 w-4 text-text-tertiary" />
<div className="system-sm-semibold text-text-secondary">
<div className="text-text-secondary system-sm-semibold">
{t('operation.remove', { ns: 'common' })}
</div>
</div>
@ -122,7 +122,7 @@ const Operator = ({
items={items}
secondItems={secondItems}
onSelect={handleSelect}
popupClassName="z-[61]"
popupClassName="z-[1002]"
triggerProps={{
size: 'l',
}}

View File

@ -1,462 +0,0 @@
import type { UseQueryResult } from '@tanstack/react-query'
import type { AppContextValue } from '@/context/app-context'
import type { DataSourceNotion as TDataSourceNotion } from '@/models/common'
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
import { useAppContext } from '@/context/app-context'
import { useDataSourceIntegrates, useInvalidDataSourceIntegrates, useNotionConnection } from '@/service/use-common'
import DataSourceNotion from '../index'
/**
* DataSourceNotion Component Tests
* Using Unit approach with real Panel and sibling components to test Notion integration logic.
*/
type MockQueryResult<T> = UseQueryResult<T, Error>
// Mock dependencies
vi.mock('@/context/app-context', () => ({
useAppContext: vi.fn(),
}))
vi.mock('@/service/common', () => ({
syncDataSourceNotion: vi.fn(),
updateDataSourceNotionAction: vi.fn(),
}))
vi.mock('@/service/use-common', () => ({
useDataSourceIntegrates: vi.fn(),
useNotionConnection: vi.fn(),
useInvalidDataSourceIntegrates: vi.fn(),
}))
describe('DataSourceNotion Component', () => {
const mockWorkspaces: TDataSourceNotion[] = [
{
id: 'ws-1',
provider: 'notion',
is_bound: true,
source_info: {
workspace_name: 'Workspace 1',
workspace_icon: 'https://example.com/icon-1.png',
workspace_id: 'notion-ws-1',
total: 10,
pages: [],
},
},
]
const baseAppContext: AppContextValue = {
userProfile: { id: 'test-user-id', name: 'test-user', email: 'test@example.com', avatar: '', avatar_url: '', is_password_set: true },
mutateUserProfile: vi.fn(),
currentWorkspace: { id: 'ws-id', name: 'Workspace', plan: 'basic', status: 'normal', created_at: 0, role: 'owner', providers: [], trial_credits: 0, trial_credits_used: 0, next_credit_reset_date: 0 },
isCurrentWorkspaceManager: true,
isCurrentWorkspaceOwner: true,
isCurrentWorkspaceEditor: true,
isCurrentWorkspaceDatasetOperator: false,
mutateCurrentWorkspace: vi.fn(),
langGeniusVersionInfo: { current_version: '0.1.0', latest_version: '0.1.1', version: '0.1.1', release_date: '', release_notes: '', can_auto_update: false, current_env: 'test' },
useSelector: vi.fn(),
isLoadingCurrentWorkspace: false,
isValidatingCurrentWorkspace: false,
}
/* eslint-disable-next-line ts/no-explicit-any */
const mockQuerySuccess = <T,>(data: T): MockQueryResult<T> => ({ data, isSuccess: true, isError: false, isLoading: false, isPending: false, status: 'success', error: null, fetchStatus: 'idle' } as any)
/* eslint-disable-next-line ts/no-explicit-any */
const mockQueryPending = <T,>(): MockQueryResult<T> => ({ data: undefined, isSuccess: false, isError: false, isLoading: true, isPending: true, status: 'pending', error: null, fetchStatus: 'fetching' } as any)
const originalLocation = window.location
beforeEach(() => {
vi.clearAllMocks()
vi.mocked(useAppContext).mockReturnValue(baseAppContext)
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [] }))
vi.mocked(useNotionConnection).mockReturnValue(mockQueryPending())
vi.mocked(useInvalidDataSourceIntegrates).mockReturnValue(vi.fn())
const locationMock = { href: '', assign: vi.fn() }
Object.defineProperty(window, 'location', { value: locationMock, writable: true, configurable: true })
// Clear document body to avoid toast leaks between tests
document.body.innerHTML = ''
})
afterEach(() => {
Object.defineProperty(window, 'location', { value: originalLocation, writable: true, configurable: true })
})
const getWorkspaceItem = (name: string) => {
const nameEl = screen.getByText(name)
return (nameEl.closest('div[class*="workspace-item"]') || nameEl.parentElement) as HTMLElement
}
describe('Rendering', () => {
it('should render with no workspaces initially and call integration hook', () => {
// Act
render(<DataSourceNotion />)
// Assert
expect(screen.getByText('common.dataSource.notion.title')).toBeInTheDocument()
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined })
})
it('should render with provided workspaces and pass initialData to hook', () => {
// Arrange
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces }))
// Act
render(<DataSourceNotion workspaces={mockWorkspaces} />)
// Assert
expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument()
expect(screen.getByText('Workspace 1')).toBeInTheDocument()
expect(screen.getByText('common.dataSource.notion.connected')).toBeInTheDocument()
expect(screen.getByAltText('workspace icon')).toHaveAttribute('src', 'https://example.com/icon-1.png')
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: mockWorkspaces } })
})
it('should handle workspaces prop being an empty array', () => {
// Act
render(<DataSourceNotion workspaces={[]} />)
// Assert
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: [] } })
})
it('should handle optional workspaces configurations', () => {
// Branch: workspaces passed as undefined
const { rerender } = render(<DataSourceNotion workspaces={undefined} />)
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined })
// Branch: workspaces passed as null
/* eslint-disable-next-line ts/no-explicit-any */
rerender(<DataSourceNotion workspaces={null as any} />)
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: undefined })
// Branch: workspaces passed as []
rerender(<DataSourceNotion workspaces={[]} />)
expect(useDataSourceIntegrates).toHaveBeenCalledWith({ initialData: { data: [] } })
})
it('should handle cases where integrates data is loading or broken', () => {
// Act (Loading)
const { rerender } = render(<DataSourceNotion />)
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQueryPending())
rerender(<DataSourceNotion />)
// Assert
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
// Act (Broken)
const brokenData = {} as { data: TDataSourceNotion[] }
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess(brokenData))
rerender(<DataSourceNotion />)
// Assert
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
})
it('should handle integrates being nullish', () => {
/* eslint-disable-next-line ts/no-explicit-any */
vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: undefined, isSuccess: true } as any)
render(<DataSourceNotion />)
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
})
it('should handle integrates data being nullish', () => {
/* eslint-disable-next-line ts/no-explicit-any */
vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: { data: null }, isSuccess: true } as any)
render(<DataSourceNotion />)
expect(screen.queryByText('common.dataSource.notion.connectedWorkspace')).not.toBeInTheDocument()
})
it('should handle integrates data being valid', () => {
/* eslint-disable-next-line ts/no-explicit-any */
vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: { data: [{ id: '1', is_bound: true, source_info: { workspace_name: 'W', workspace_icon: 'https://example.com/i.png', total: 1, pages: [] } }] }, isSuccess: true } as any)
render(<DataSourceNotion />)
expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument()
})
it('should cover all possible falsy/nullish branches for integrates and workspaces', () => {
/* eslint-disable-next-line ts/no-explicit-any */
const { rerender } = render(<DataSourceNotion workspaces={null as any} />)
const integratesCases = [
undefined,
null,
{},
{ data: null },
{ data: undefined },
{ data: [] },
{ data: [mockWorkspaces[0]] },
{ data: false },
{ data: 0 },
{ data: '' },
123,
'string',
false,
]
integratesCases.forEach((val) => {
/* eslint-disable-next-line ts/no-explicit-any */
vi.mocked(useDataSourceIntegrates).mockReturnValue({ data: val, isSuccess: true } as any)
/* eslint-disable-next-line ts/no-explicit-any */
rerender(<DataSourceNotion workspaces={null as any} />)
})
expect(useDataSourceIntegrates).toHaveBeenCalled()
})
})
describe('User Permissions', () => {
it('should pass readOnly as false when user is a manager', () => {
// Arrange
vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: true })
// Act
render(<DataSourceNotion />)
// Assert
expect(screen.getByText('common.dataSource.notion.title').closest('div')).not.toHaveClass('grayscale')
})
it('should pass readOnly as true when user is NOT a manager', () => {
// Arrange
vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: false })
// Act
render(<DataSourceNotion />)
// Assert
expect(screen.getByText('common.dataSource.connect')).toHaveClass('opacity-50', 'grayscale')
})
})
describe('Configure and Auth Actions', () => {
it('should handle configure action when user is workspace manager', () => {
// Arrange
render(<DataSourceNotion />)
// Act
fireEvent.click(screen.getByText('common.dataSource.connect'))
// Assert
expect(useNotionConnection).toHaveBeenCalledWith(true)
})
it('should block configure action when user is NOT workspace manager', () => {
// Arrange
vi.mocked(useAppContext).mockReturnValue({ ...baseAppContext, isCurrentWorkspaceManager: false })
render(<DataSourceNotion />)
// Act
fireEvent.click(screen.getByText('common.dataSource.connect'))
// Assert
expect(useNotionConnection).toHaveBeenCalledWith(false)
})
it('should redirect if auth URL is available when "Auth Again" is clicked', async () => {
// Arrange
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces }))
vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http://auth-url' }))
render(<DataSourceNotion />)
// Act
const workspaceItem = getWorkspaceItem('Workspace 1')
const actionBtn = within(workspaceItem).getByRole('button')
fireEvent.click(actionBtn)
const authAgainBtn = await screen.findByText('common.dataSource.notion.changeAuthorizedPages')
fireEvent.click(authAgainBtn)
// Assert
expect(window.location.href).toBe('http://auth-url')
})
it('should trigger connection flow if URL is missing when "Auth Again" is clicked', async () => {
// Arrange
vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces }))
render(<DataSourceNotion />)
// Act
const workspaceItem = getWorkspaceItem('Workspace 1')
const actionBtn = within(workspaceItem).getByRole('button')
fireEvent.click(actionBtn)
const authAgainBtn = await screen.findByText('common.dataSource.notion.changeAuthorizedPages')
fireEvent.click(authAgainBtn)
// Assert
expect(useNotionConnection).toHaveBeenCalledWith(true)
})
})
// Covers the component's useEffect reaction to the connection hook's payload:
// an http(s) URL triggers a redirect, the literal string 'internal' raises an
// info toast, and every other shape is ignored.
describe('Side Effects (Redirection and Toast)', () => {
  it('should redirect automatically when connection data returns an http URL', async () => {
    // Arrange
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http://redirect-url' }))
    // Act
    render(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('http://redirect-url')
    })
  })
  it('should show toast notification when connection data is "internal"', async () => {
    // Arrange
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'internal' }))
    // Act
    render(<DataSourceNotion />)
    // Assert
    expect(await screen.findByText('common.dataSource.notion.integratedAlert')).toBeInTheDocument()
  })
  it('should handle various data types and missing properties in connection data correctly', async () => {
    // Arrange & Act (Unknown string) — rerender is reused so each shape flows
    // through the same mounted component instance.
    const { rerender } = render(<DataSourceNotion />)
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'unknown' }))
    rerender(<DataSourceNotion />)
    // Assert: neither redirect nor toast for an unrecognized string
    await waitFor(() => {
      expect(window.location.href).toBe('')
      expect(screen.queryByText('common.dataSource.notion.integratedAlert')).not.toBeInTheDocument()
    })
    // Act (Broken object)
    /* eslint-disable-next-line ts/no-explicit-any */
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({} as any))
    rerender(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('')
    })
    // Act (Non-string)
    /* eslint-disable-next-line ts/no-explicit-any */
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 123 } as any))
    rerender(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('')
    })
  })
  it('should redirect if data starts with "http" even if it is just "http"', async () => {
    // Arrange — the component only checks startsWith('http'), so the bare
    // prefix still triggers the redirect branch.
    vi.mocked(useNotionConnection).mockReturnValue(mockQuerySuccess({ data: 'http' }))
    // Act
    render(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('http')
    })
  })
  it('should skip side effect logic if connection data is an object but missing the "data" property', async () => {
    // Arrange
    /* eslint-disable-next-line ts/no-explicit-any */
    vi.mocked(useNotionConnection).mockReturnValue({} as any)
    // Act
    render(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('')
    })
  })
  it('should skip side effect logic if data.data is falsy', async () => {
    // Arrange
    /* eslint-disable-next-line ts/no-explicit-any */
    vi.mocked(useNotionConnection).mockReturnValue({ data: { data: null } } as any)
    // Act
    render(<DataSourceNotion />)
    // Assert
    await waitFor(() => {
      expect(window.location.href).toBe('')
    })
  })
})
describe('Additional Action Edge Cases', () => {
  // Exhaustively walks every payload shape the connection hook may return so
  // all handleAuthAgain branches get exercised at least once.
  it.each([
    undefined,
    null,
    {},
    { data: undefined },
    { data: null },
    { data: '' },
    { data: 0 },
    { data: false },
    { data: 'http' },
    { data: 'internal' },
    { data: 'unknown' },
  ])('should cover connection data branch: %s', async (branchValue) => {
    vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: mockWorkspaces }))
    /* eslint-disable-next-line ts/no-explicit-any */
    vi.mocked(useNotionConnection).mockReturnValue({ data: branchValue, isSuccess: true } as any)
    render(<DataSourceNotion />)
    // Drive handleAuthAgain through the workspace menu for this payload shape
    const row = getWorkspaceItem('Workspace 1')
    fireEvent.click(within(row).getByRole('button'))
    fireEvent.click(await screen.findByText('common.dataSource.notion.changeAuthorizedPages'))
    expect(useNotionConnection).toHaveBeenCalled()
  })
})
describe('Edge Cases in Workspace Data', () => {
  it('should render correctly with missing source_info optional fields', async () => {
    // Given a workspace whose source_info omits the optional `total` field
    const sparseWorkspace: TDataSourceNotion = {
      id: 'ws-2',
      provider: 'notion',
      is_bound: false,
      source_info: { workspace_name: 'Workspace 2', workspace_id: 'notion-ws-2', workspace_icon: null, pages: [] },
    }
    vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [sparseWorkspace] }))
    render(<DataSourceNotion />)
    // The workspace still renders and the authorized-pages count falls back to 0
    expect(screen.getByText('Workspace 2')).toBeInTheDocument()
    const row = getWorkspaceItem('Workspace 2')
    fireEvent.click(within(row).getByRole('button'))
    expect(await screen.findByText('0 common.dataSource.notion.pagesAuthorized')).toBeInTheDocument()
  })
  it('should display inactive status correctly for unbound workspaces', () => {
    // Given a workspace that is not bound to the current account
    const unboundWorkspace: TDataSourceNotion = {
      id: 'ws-3',
      provider: 'notion',
      is_bound: false,
      source_info: { workspace_name: 'Workspace 3', workspace_icon: 'https://example.com/icon-3.png', workspace_id: 'notion-ws-3', total: 5, pages: [] },
    }
    vi.mocked(useDataSourceIntegrates).mockReturnValue(mockQuerySuccess({ data: [unboundWorkspace] }))
    render(<DataSourceNotion />)
    // The disconnected badge is shown for it
    expect(screen.getByText('common.dataSource.notion.disconnected')).toBeInTheDocument()
  })
})
})

View File

@ -1,103 +0,0 @@
'use client'
import type { FC } from 'react'
import type { DataSourceNotion as TDataSourceNotion } from '@/models/common'
import { noop } from 'es-toolkit/function'
import * as React from 'react'
import { useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import NotionIcon from '@/app/components/base/notion-icon'
import Toast from '@/app/components/base/toast'
import { useAppContext } from '@/context/app-context'
import { useDataSourceIntegrates, useNotionConnection } from '@/service/use-common'
import Panel from '../panel'
import { DataSourceType } from '../panel/types'
const Icon: FC<{
src: string
name: string
className: string
}> = ({ src, name, className }) => {
return (
<NotionIcon
src={src}
name={name}
className={className}
/>
)
}
type Props = {
  // Optional server-provided workspace list used to seed the query cache.
  workspaces?: TDataSourceNotion[]
}

/**
 * Data-source settings card for Notion.
 *
 * Reads the integrated workspace list via `useDataSourceIntegrates` (seeded
 * with the optional `workspaces` prop as initial data) and renders it through
 * the shared <Panel>. Connecting is driven by flipping `canConnectNotion`,
 * which enables the `useNotionConnection` query; its response is interpreted
 * by the effect below.
 */
const DataSourceNotion: FC<Props> = ({
  workspaces,
}) => {
  const { isCurrentWorkspaceManager } = useAppContext()
  // Gate for the connection query; the hook does nothing until this is true.
  const [canConnectNotion, setCanConnectNotion] = useState(false)
  const { data: integrates } = useDataSourceIntegrates({
    initialData: workspaces ? { data: workspaces } : undefined,
  })
  const { data } = useNotionConnection(canConnectNotion)
  const { t } = useTranslation()
  const resolvedWorkspaces = integrates?.data ?? []
  const connected = !!resolvedWorkspaces.length
  // Only workspace managers may start the connect flow.
  const handleConnectNotion = () => {
    if (!isCurrentWorkspaceManager)
      return
    setCanConnectNotion(true)
  }
  // Re-authorization: reuse a previously fetched value when present, otherwise
  // enable the connection query to fetch one.
  // NOTE(review): any truthy `data.data` is assigned to location.href here,
  // including the 'internal' sentinel handled in the effect below — confirm
  // that is intended.
  const handleAuthAgain = () => {
    if (data?.data)
      window.location.href = data.data
    else
      setCanConnectNotion(true)
  }
  // Interpret the connection response: an 'http…' string means "redirect to
  // the authorization URL"; the literal 'internal' means the integration was
  // completed without a redirect, so just inform the user. Other shapes are
  // ignored.
  useEffect(() => {
    if (data && 'data' in data) {
      if (data.data && typeof data.data === 'string' && data.data.startsWith('http')) {
        window.location.href = data.data
      }
      else if (data.data === 'internal') {
        Toast.notify({
          type: 'info',
          message: t('dataSource.notion.integratedAlert', { ns: 'common' }),
        })
      }
    }
  }, [data, t])
  return (
    <Panel
      type={DataSourceType.notion}
      isConfigured={connected}
      onConfigure={handleConnectNotion}
      readOnly={!isCurrentWorkspaceManager}
      isSupportList
      configuredList={resolvedWorkspaces.map(workspace => ({
        id: workspace.id,
        logo: ({ className }: { className: string }) => (
          <Icon
            src={workspace.source_info.workspace_icon!}
            name={workspace.source_info.workspace_name}
            className={className}
          />
        ),
        name: workspace.source_info.workspace_name,
        isActive: workspace.is_bound,
        notionConfig: {
          total: workspace.source_info.total || 0,
        },
      }))}
      onRemove={noop} // handled in operation/index.tsx
      notionActions={{
        onChangeAuthorizedPage: handleAuthAgain,
      }}
    />
  )
}

export default React.memo(DataSourceNotion)

View File

@ -1,137 +0,0 @@
import { fireEvent, render, screen, waitFor, within } from '@testing-library/react'
import { syncDataSourceNotion, updateDataSourceNotionAction } from '@/service/common'
import { useInvalidDataSourceIntegrates } from '@/service/use-common'
import Operate from '../index'
/**
* Operate Component (Notion) Tests
* This component provides actions like Sync, Change Pages, and Remove for Notion data sources.
*/
// Mock services and toast
vi.mock('@/service/common', () => ({
syncDataSourceNotion: vi.fn(),
updateDataSourceNotionAction: vi.fn(),
}))
vi.mock('@/service/use-common', () => ({
useInvalidDataSourceIntegrates: vi.fn(),
}))
describe('Operate Component (Notion)', () => {
  // Shared fixtures: a minimal payload plus spies for the auth callback and
  // the query-cache invalidator returned by useInvalidDataSourceIntegrates.
  const mockPayload = {
    id: 'test-notion-id',
    total: 5,
  }
  const mockOnAuthAgain = vi.fn()
  const mockInvalidate = vi.fn()
  beforeEach(() => {
    vi.clearAllMocks()
    vi.mocked(useInvalidDataSourceIntegrates).mockReturnValue(mockInvalidate)
    vi.mocked(syncDataSourceNotion).mockResolvedValue({ result: 'success' })
    vi.mocked(updateDataSourceNotionAction).mockResolvedValue({ result: 'success' })
  })
  describe('Rendering', () => {
    it('should render the menu button initially', () => {
      // Act
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      // Assert: menu trigger exists and is not in its "open" visual state
      const menuButton = within(container).getByRole('button')
      expect(menuButton).toBeInTheDocument()
      expect(menuButton).not.toHaveClass('bg-state-base-hover')
    })
    it('should open the menu and show all options when clicked', async () => {
      // Arrange
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      const menuButton = within(container).getByRole('button')
      // Act
      fireEvent.click(menuButton)
      // Assert: all three actions plus the authorized-pages count are visible
      expect(await screen.findByText('common.dataSource.notion.changeAuthorizedPages')).toBeInTheDocument()
      expect(screen.getByText('common.dataSource.notion.sync')).toBeInTheDocument()
      expect(screen.getByText('common.dataSource.notion.remove')).toBeInTheDocument()
      expect(screen.getByText(/5/)).toBeInTheDocument()
      expect(screen.getByText(/common.dataSource.notion.pagesAuthorized/)).toBeInTheDocument()
      expect(menuButton).toHaveClass('bg-state-base-hover')
    })
  })
  describe('Menu Actions', () => {
    it('should call onAuthAgain when Change Authorized Pages is clicked', async () => {
      // Arrange
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      fireEvent.click(within(container).getByRole('button'))
      const option = await screen.findByText('common.dataSource.notion.changeAuthorizedPages')
      // Act
      fireEvent.click(option)
      // Assert
      expect(mockOnAuthAgain).toHaveBeenCalledTimes(1)
    })
    it('should call handleSync, show success toast, and invalidate cache when Sync is clicked', async () => {
      // Arrange
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      fireEvent.click(within(container).getByRole('button'))
      const syncBtn = await screen.findByText('common.dataSource.notion.sync')
      // Act
      fireEvent.click(syncBtn)
      // Assert: sync endpoint hit with the payload id, then toast + invalidation
      await waitFor(() => {
        expect(syncDataSourceNotion).toHaveBeenCalledWith({
          url: `/oauth/data-source/notion/${mockPayload.id}/sync`,
        })
      })
      expect((await screen.findAllByText('common.api.success')).length).toBeGreaterThan(0)
      expect(mockInvalidate).toHaveBeenCalledTimes(1)
    })
    it('should call handleRemove, show success toast, and invalidate cache when Remove is clicked', async () => {
      // Arrange
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      fireEvent.click(within(container).getByRole('button'))
      const removeBtn = await screen.findByText('common.dataSource.notion.remove')
      // Act
      fireEvent.click(removeBtn)
      // Assert: disable endpoint hit with the payload id, then toast + invalidation
      await waitFor(() => {
        expect(updateDataSourceNotionAction).toHaveBeenCalledWith({
          url: `/data-source/integrates/${mockPayload.id}/disable`,
        })
      })
      expect((await screen.findAllByText('common.api.success')).length).toBeGreaterThan(0)
      expect(mockInvalidate).toHaveBeenCalledTimes(1)
    })
  })
  describe('State Transitions', () => {
    it('should toggle the open class on the button based on menu visibility', async () => {
      // Arrange
      const { container } = render(<Operate payload={mockPayload} onAuthAgain={mockOnAuthAgain} />)
      const menuButton = within(container).getByRole('button')
      // Act (Open)
      fireEvent.click(menuButton)
      // Assert
      expect(menuButton).toHaveClass('bg-state-base-hover')
      // Act (Close - click again)
      fireEvent.click(menuButton)
      // Assert
      await waitFor(() => {
        expect(menuButton).not.toHaveClass('bg-state-base-hover')
      })
    })
  })
})

View File

@ -1,103 +0,0 @@
'use client'
import { Menu, MenuButton, MenuItem, MenuItems, Transition } from '@headlessui/react'
import {
RiDeleteBinLine,
RiLoopLeftLine,
RiMoreFill,
RiStickyNoteAddLine,
} from '@remixicon/react'
import { Fragment } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import { syncDataSourceNotion, updateDataSourceNotionAction } from '@/service/common'
import { useInvalidDataSourceIntegrates } from '@/service/use-common'
import { cn } from '@/utils/classnames'
type OperateProps = {
  payload: {
    // Integration id used to build the sync/disable endpoint URLs.
    id: string
    // Number of authorized Notion pages, shown in the menu.
    total: number
  }
  // Invoked when the user picks "Change Authorized Pages".
  onAuthAgain: () => void
}

/**
 * Kebab menu for a bound Notion workspace: change authorized pages, re-sync
 * pages, or disable (remove) the integration.
 *
 * Sync and remove both show a success toast and invalidate the cached
 * integrates list so the surrounding panel refetches.
 */
export default function Operate({
  payload,
  onAuthAgain,
}: OperateProps) {
  const { t } = useTranslation()
  const invalidateDataSourceIntegrates = useInvalidDataSourceIntegrates()
  // Shared post-action handling: success toast + cache invalidation.
  const updateIntegrates = () => {
    Toast.notify({
      type: 'success',
      message: t('api.success', { ns: 'common' }),
    })
    invalidateDataSourceIntegrates()
  }
  const handleSync = async () => {
    await syncDataSourceNotion({ url: `/oauth/data-source/notion/${payload.id}/sync` })
    updateIntegrates()
  }
  const handleRemove = async () => {
    await updateDataSourceNotionAction({ url: `/data-source/integrates/${payload.id}/disable` })
    updateIntegrates()
  }
  return (
    <Menu as="div" className="relative inline-block text-left">
      {/* Render-prop form so the trigger can reflect the menu's open state */}
      {
        ({ open }) => (
          <>
            <MenuButton className={cn('flex h-8 w-8 items-center justify-center rounded-lg hover:bg-state-base-hover', open && 'bg-state-base-hover')}>
              <RiMoreFill className="h-4 w-4 text-text-secondary" />
            </MenuButton>
            <Transition
              as={Fragment}
              enter="transition ease-out duration-100"
              enterFrom="transform opacity-0 scale-95"
              enterTo="transform opacity-100 scale-100"
              leave="transition ease-in duration-75"
              leaveFrom="transform opacity-100 scale-100"
              leaveTo="transform opacity-0 scale-95"
            >
              <MenuItems className="absolute right-0 top-9 w-60 max-w-80 origin-top-right rounded-xl border-[0.5px] border-components-panel-border bg-components-panel-bg-blur shadow-lg backdrop-blur-sm">
                <div className="px-1 py-1">
                  <MenuItem>
                    <div
                      className="flex cursor-pointer rounded-lg px-3 py-2 hover:bg-state-base-hover"
                      onClick={onAuthAgain}
                    >
                      <RiStickyNoteAddLine className="mr-2 mt-[2px] h-4 w-4 text-text-tertiary" />
                      <div>
                        <div className="system-sm-semibold text-text-secondary">{t('dataSource.notion.changeAuthorizedPages', { ns: 'common' })}</div>
                        <div className="system-xs-regular text-text-tertiary">
                          {payload.total}
                          {' '}
                          {t('dataSource.notion.pagesAuthorized', { ns: 'common' })}
                        </div>
                      </div>
                    </div>
                  </MenuItem>
                  <MenuItem>
                    <div className="flex cursor-pointer rounded-lg px-3 py-2 hover:bg-state-base-hover" onClick={handleSync}>
                      <RiLoopLeftLine className="mr-2 mt-[2px] h-4 w-4 text-text-tertiary" />
                      <div className="system-sm-semibold text-text-secondary">{t('dataSource.notion.sync', { ns: 'common' })}</div>
                    </div>
                  </MenuItem>
                </div>
                {/* Destructive action is visually separated below a divider */}
                <MenuItem>
                  <div className="border-t border-divider-subtle p-1">
                    <div className="flex cursor-pointer rounded-lg px-3 py-2 hover:bg-state-base-hover" onClick={handleRemove}>
                      <RiDeleteBinLine className="mr-2 mt-[2px] h-4 w-4 text-text-tertiary" />
                      <div className="system-sm-semibold text-text-secondary">{t('dataSource.notion.remove', { ns: 'common' })}</div>
                    </div>
                  </div>
                </MenuItem>
              </MenuItems>
            </Transition>
          </>
        )
      }
    </Menu>
  )
}

View File

@ -1,204 +0,0 @@
import type { CommonResponse } from '@/models/common'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
import ConfigFirecrawlModal from '../config-firecrawl-modal'
/**
* ConfigFirecrawlModal Component Tests
* Tests validation, save logic, and basic rendering for the Firecrawl configuration modal.
*/
vi.mock('@/service/datasets', () => ({
createDataSourceApiKeyBinding: vi.fn(),
}))
describe('ConfigFirecrawlModal Component', () => {
  // Callback spies handed to the modal under test.
  const mockOnCancel = vi.fn()
  const mockOnSaved = vi.fn()
  beforeEach(() => {
    vi.clearAllMocks()
  })
  describe('Initial Rendering', () => {
    it('should render the modal with all fields and buttons', () => {
      // Act
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Assert: title, both inputs, action buttons, and the get-API-key link
      expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument()
      expect(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder')).toBeInTheDocument()
      expect(screen.getByPlaceholderText('https://api.firecrawl.dev')).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /common\.operation\.save/i })).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /common\.operation\.cancel/i })).toBeInTheDocument()
      expect(screen.getByRole('link', { name: /datasetCreation\.firecrawl\.getApiKeyLinkText/i })).toHaveAttribute('href', 'https://www.firecrawl.dev/account')
    })
  })
  describe('Form Interactions', () => {
    it('should update state when input fields change', async () => {
      // Arrange
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      const apiKeyInput = screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder')
      const baseUrlInput = screen.getByPlaceholderText('https://api.firecrawl.dev')
      // Act
      fireEvent.change(apiKeyInput, { target: { value: 'firecrawl-key' } })
      fireEvent.change(baseUrlInput, { target: { value: 'https://custom.firecrawl.dev' } })
      // Assert
      expect(apiKeyInput).toHaveValue('firecrawl-key')
      expect(baseUrlInput).toHaveValue('https://custom.firecrawl.dev')
    })
    it('should call onCancel when cancel button is clicked', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
      // Assert
      expect(mockOnCancel).toHaveBeenCalled()
    })
  })
  describe('Validation', () => {
    it('should show error when saving without API Key', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert: required-field error shown and the service is never called
      await waitFor(() => {
        expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument()
      })
      expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled()
    })
    it('should show error for invalid Base URL format', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      const baseUrlInput = screen.getByPlaceholderText('https://api.firecrawl.dev')
      // Act: non-http(s) scheme is rejected client-side
      await user.type(baseUrlInput, 'ftp://invalid-url.com')
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert
      await waitFor(() => {
        expect(screen.getByText('common.errorMsg.urlError')).toBeInTheDocument()
      })
      expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled()
    })
  })
  describe('Saving Logic', () => {
    it('should save successfully with valid API Key and custom URL', async () => {
      const user = userEvent.setup()
      // Arrange
      vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'valid-key')
      await user.type(screen.getByPlaceholderText('https://api.firecrawl.dev'), 'http://my-firecrawl.com')
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert: exact binding payload, including Firecrawl's bearer auth type
      await waitFor(() => {
        expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({
          category: 'website',
          provider: 'firecrawl',
          credentials: {
            auth_type: 'bearer',
            config: {
              api_key: 'valid-key',
              base_url: 'http://my-firecrawl.com',
            },
          },
        })
      })
      await waitFor(() => {
        expect(screen.getByText('common.api.success')).toBeInTheDocument()
        expect(mockOnSaved).toHaveBeenCalled()
      })
    })
    it('should use default Base URL if none is provided during save', async () => {
      const user = userEvent.setup()
      // Arrange
      vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act: only the API key is filled in
      await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key')
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert: the modal falls back to the default Firecrawl endpoint
      await waitFor(() => {
        expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({
          credentials: expect.objectContaining({
            config: expect.objectContaining({
              base_url: 'https://api.firecrawl.dev',
            }),
          }),
        }))
      })
    })
    it('should ignore multiple save clicks while saving is in progress', async () => {
      const user = userEvent.setup()
      // Arrange: keep the save pending so the second click lands mid-save
      let resolveSave: (value: CommonResponse) => void
      const savePromise = new Promise<CommonResponse>((resolve) => {
        resolveSave = resolve
      })
      vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise)
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key')
      const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i })
      // Act
      await user.click(saveBtn)
      await user.click(saveBtn)
      // Assert: second click is ignored while the first is in flight
      expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1)
      // Cleanup: resolve the pending save so the component settles
      resolveSave!({ result: 'success' })
      await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1))
    })
    it('should accept base_url starting with https://', async () => {
      const user = userEvent.setup()
      // Arrange
      vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
      render(<ConfigFirecrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.type(screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder'), 'test-key')
      await user.type(screen.getByPlaceholderText('https://api.firecrawl.dev'), 'https://secure-firecrawl.com')
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert
      await waitFor(() => {
        expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({
          credentials: expect.objectContaining({
            config: expect.objectContaining({
              base_url: 'https://secure-firecrawl.com',
            }),
          }),
        }))
      })
    })
  })
})

View File

@ -1,179 +0,0 @@
import { render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { DataSourceProvider } from '@/models/common'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
import ConfigJinaReaderModal from '../config-jina-reader-modal'
/**
* ConfigJinaReaderModal Component Tests
* Tests validation, save logic, and basic rendering for the Jina Reader configuration modal.
*/
vi.mock('@/service/datasets', () => ({
createDataSourceApiKeyBinding: vi.fn(),
}))
describe('ConfigJinaReaderModal Component', () => {
  // Callback spies handed to the modal under test.
  const mockOnCancel = vi.fn()
  const mockOnSaved = vi.fn()
  beforeEach(() => {
    vi.clearAllMocks()
  })
  describe('Initial Rendering', () => {
    it('should render the modal with API Key field and buttons', () => {
      // Act
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Assert: title, key input, action buttons, and the get-API-key link
      expect(screen.getByText('datasetCreation.jinaReader.configJinaReader')).toBeInTheDocument()
      expect(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder')).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /common\.operation\.save/i })).toBeInTheDocument()
      expect(screen.getByRole('button', { name: /common\.operation\.cancel/i })).toBeInTheDocument()
      expect(screen.getByRole('link', { name: /datasetCreation\.jinaReader\.getApiKeyLinkText/i })).toHaveAttribute('href', 'https://jina.ai/reader/')
    })
  })
  describe('Form Interactions', () => {
    it('should update state when API Key field changes', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder')
      // Act
      await user.type(apiKeyInput, 'jina-test-key')
      // Assert
      expect(apiKeyInput).toHaveValue('jina-test-key')
    })
    it('should call onCancel when cancel button is clicked', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
      // Assert
      expect(mockOnCancel).toHaveBeenCalled()
    })
  })
  describe('Validation', () => {
    it('should show error when saving without API Key', async () => {
      const user = userEvent.setup()
      // Arrange
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Act
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert: required-field error shown and the service is never called
      await waitFor(() => {
        expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument()
      })
      expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled()
    })
  })
  describe('Saving Logic', () => {
    it('should save successfully with valid API Key', async () => {
      const user = userEvent.setup()
      // Arrange
      vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder')
      // Act
      await user.type(apiKeyInput, 'valid-jina-key')
      await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert: exact binding payload for the Jina Reader provider
      await waitFor(() => {
        expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({
          category: 'website',
          provider: DataSourceProvider.jinaReader,
          credentials: {
            auth_type: 'bearer',
            config: {
              api_key: 'valid-jina-key',
            },
          },
        })
      })
      await waitFor(() => {
        expect(screen.getByText('common.api.success')).toBeInTheDocument()
        expect(mockOnSaved).toHaveBeenCalled()
      })
    })
    it('should ignore multiple save clicks while saving is in progress', async () => {
      const user = userEvent.setup()
      // Arrange: keep the save pending so the second click lands mid-save
      let resolveSave: (value: { result: 'success' }) => void
      const savePromise = new Promise<{ result: 'success' }>((resolve) => {
        resolveSave = resolve
      })
      vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise)
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      await user.type(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder'), 'test-key')
      const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i })
      // Act
      await user.click(saveBtn)
      await user.click(saveBtn)
      // Assert: second click is ignored while the first is in flight
      expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1)
      // Cleanup: resolve the pending save so the component settles
      resolveSave!({ result: 'success' })
      await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1))
    })
    it('should show encryption info and external link in the modal', async () => {
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      // Verify PKCS1_OAEP link exists
      const pkcsLink = screen.getByText('PKCS1_OAEP')
      expect(pkcsLink.closest('a')).toHaveAttribute('href', 'https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html')
      // Verify the Jina Reader external link
      const jinaLink = screen.getByRole('link', { name: /datasetCreation\.jinaReader\.getApiKeyLinkText/i })
      expect(jinaLink).toHaveAttribute('target', '_blank')
    })
    it('should return early when save is clicked while already saving (isSaving guard)', async () => {
      const user = userEvent.setup()
      // Arrange - a save that never resolves so isSaving stays true
      let resolveFirst: (value: { result: 'success' }) => void
      const neverResolves = new Promise<{ result: 'success' }>((resolve) => {
        resolveFirst = resolve
      })
      vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(neverResolves)
      render(<ConfigJinaReaderModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
      const apiKeyInput = screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder')
      await user.type(apiKeyInput, 'valid-key')
      const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i })
      // First click - starts saving, isSaving becomes true
      await user.click(saveBtn)
      expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1)
      // Second click using fireEvent bypasses disabled check - hits isSaving guard
      const { fireEvent: fe } = await import('@testing-library/react')
      fe.click(saveBtn)
      // Still only called once because isSaving=true returns early
      expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1)
      // Cleanup
      resolveFirst!({ result: 'success' })
      await waitFor(() => expect(mockOnSaved).toHaveBeenCalled())
    })
  })
})

View File

@ -1,204 +0,0 @@
import type { CommonResponse } from '@/models/common'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import userEvent from '@testing-library/user-event'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
import ConfigWatercrawlModal from '../config-watercrawl-modal'
/**
* ConfigWatercrawlModal Component Tests
* Tests validation, save logic, and basic rendering for the Watercrawl configuration modal.
*/
vi.mock('@/service/datasets', () => ({
createDataSourceApiKeyBinding: vi.fn(),
}))
describe('ConfigWatercrawlModal Component', () => {
const mockOnCancel = vi.fn()
const mockOnSaved = vi.fn()
beforeEach(() => {
vi.clearAllMocks()
})
describe('Initial Rendering', () => {
  it('should render the modal with all fields and buttons', () => {
    render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
    // Title and both inputs are present
    expect(screen.getByText('datasetCreation.watercrawl.configWatercrawl')).toBeInTheDocument()
    expect(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder')).toBeInTheDocument()
    expect(screen.getByPlaceholderText('https://app.watercrawl.dev')).toBeInTheDocument()
    // Both action buttons are rendered
    const saveButton = screen.getByRole('button', { name: /common\.operation\.save/i })
    const cancelButton = screen.getByRole('button', { name: /common\.operation\.cancel/i })
    expect(saveButton).toBeInTheDocument()
    expect(cancelButton).toBeInTheDocument()
    // The get-API-key link points at the Watercrawl console
    const apiKeyLink = screen.getByRole('link', { name: /datasetCreation\.watercrawl\.getApiKeyLinkText/i })
    expect(apiKeyLink).toHaveAttribute('href', 'https://app.watercrawl.dev/')
  })
})
describe('Form Interactions', () => {
  it('should update state when input fields change', async () => {
    render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
    const keyField = screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder')
    const urlField = screen.getByPlaceholderText('https://app.watercrawl.dev')
    // Typing into each field is reflected in its value
    fireEvent.change(keyField, { target: { value: 'water-key' } })
    fireEvent.change(urlField, { target: { value: 'https://custom.watercrawl.dev' } })
    expect(keyField).toHaveValue('water-key')
    expect(urlField).toHaveValue('https://custom.watercrawl.dev')
  })
  it('should call onCancel when cancel button is clicked', async () => {
    const user = userEvent.setup()
    render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
    // Clicking cancel forwards straight to the onCancel prop
    await user.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
    expect(mockOnCancel).toHaveBeenCalled()
  })
})
describe('Validation', () => {
  it('should show error when saving without API Key', async () => {
    const user = userEvent.setup()
    render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
    // Saving with an empty key surfaces the required-field error...
    await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
    await waitFor(() => {
      expect(screen.getByText('common.errorMsg.fieldRequired:{"field":"API Key"}')).toBeInTheDocument()
    })
    // ...and never reaches the binding service
    expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled()
  })
  it('should show error for invalid Base URL format', async () => {
    const user = userEvent.setup()
    render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
    const urlField = screen.getByPlaceholderText('https://app.watercrawl.dev')
    // A non-http(s) scheme is rejected client-side...
    await user.type(urlField, 'ftp://invalid-url.com')
    await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
    await waitFor(() => {
      expect(screen.getByText('common.errorMsg.urlError')).toBeInTheDocument()
    })
    // ...and the binding service is never called
    expect(createDataSourceApiKeyBinding).not.toHaveBeenCalled()
  })
})
describe('Saving Logic', () => {
it('should save successfully with valid API Key and custom URL', async () => {
  const user = userEvent.setup()
  // Arrange
  vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
  render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
  // Act
  await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'valid-key')
  await user.type(screen.getByPlaceholderText('https://app.watercrawl.dev'), 'http://my-watercrawl.com')
  await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
  // Assert: exact binding payload, including Watercrawl's x-api-key auth type
  await waitFor(() => {
    expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith({
      category: 'website',
      provider: 'watercrawl',
      credentials: {
        auth_type: 'x-api-key',
        config: {
          api_key: 'valid-key',
          base_url: 'http://my-watercrawl.com',
        },
      },
    })
  })
  // Success toast is shown and the parent is notified
  await waitFor(() => {
    expect(screen.getByText('common.api.success')).toBeInTheDocument()
    expect(mockOnSaved).toHaveBeenCalled()
  })
})
it('should use default Base URL if none is provided during save', async () => {
const user = userEvent.setup()
// Arrange
vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
// Act
await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key')
await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
// Assert
await waitFor(() => {
expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({
credentials: expect.objectContaining({
config: expect.objectContaining({
base_url: 'https://app.watercrawl.dev',
}),
}),
}))
})
})
it('should ignore multiple save clicks while saving is in progress', async () => {
const user = userEvent.setup()
// Arrange
let resolveSave: (value: CommonResponse) => void
const savePromise = new Promise<CommonResponse>((resolve) => {
resolveSave = resolve
})
vi.mocked(createDataSourceApiKeyBinding).mockReturnValue(savePromise)
render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key')
const saveBtn = screen.getByRole('button', { name: /common\.operation\.save/i })
// Act
await user.click(saveBtn)
await user.click(saveBtn)
// Assert
expect(createDataSourceApiKeyBinding).toHaveBeenCalledTimes(1)
// Cleanup
resolveSave!({ result: 'success' })
await waitFor(() => expect(mockOnSaved).toHaveBeenCalledTimes(1))
})
it('should accept base_url starting with https://', async () => {
const user = userEvent.setup()
// Arrange
vi.mocked(createDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' })
render(<ConfigWatercrawlModal onCancel={mockOnCancel} onSaved={mockOnSaved} />)
// Act
await user.type(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), 'test-api-key')
await user.type(screen.getByPlaceholderText('https://app.watercrawl.dev'), 'https://secure-watercrawl.com')
await user.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
// Assert
await waitFor(() => {
expect(createDataSourceApiKeyBinding).toHaveBeenCalledWith(expect.objectContaining({
credentials: expect.objectContaining({
config: expect.objectContaining({
base_url: 'https://secure-watercrawl.com',
}),
}),
}))
})
})
})
})

View File

@ -1,251 +0,0 @@
import type { AppContextValue } from '@/context/app-context'
import type { CommonResponse } from '@/models/common'
import { fireEvent, render, screen, waitFor } from '@testing-library/react'
import { useAppContext } from '@/context/app-context'
import { DataSourceProvider } from '@/models/common'
import { fetchDataSources, removeDataSourceApiKeyBinding } from '@/service/datasets'
import DataSourceWebsite from '../index'
/**
* DataSourceWebsite Component Tests
* Tests integration of multiple website scraping providers (Firecrawl, WaterCrawl, Jina Reader).
*/
type DataSourcesResponse = CommonResponse & {
sources: Array<{ id: string, provider: DataSourceProvider }>
}
// Mock App Context
vi.mock('@/context/app-context', () => ({
useAppContext: vi.fn(),
}))
// Mock Service calls
vi.mock('@/service/datasets', () => ({
fetchDataSources: vi.fn(),
removeDataSourceApiKeyBinding: vi.fn(),
createDataSourceApiKeyBinding: vi.fn(),
}))
describe('DataSourceWebsite Component', () => {
  // One configured binding per supported provider, keyed by fixture index below.
  const mockSources = [
    { id: '1', provider: DataSourceProvider.fireCrawl },
    { id: '2', provider: DataSourceProvider.waterCrawl },
    { id: '3', provider: DataSourceProvider.jinaReader },
  ]
  beforeEach(() => {
    vi.clearAllMocks()
    // Default: workspace manager with no configured sources; tests override as needed.
    vi.mocked(useAppContext).mockReturnValue({ isCurrentWorkspaceManager: true } as unknown as AppContextValue)
    vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [] } as DataSourcesResponse)
  })
  // Helper to render and wait for initial fetch to complete
  const renderAndWait = async (provider: DataSourceProvider) => {
    const result = render(<DataSourceWebsite provider={provider} />)
    await waitFor(() => expect(fetchDataSources).toHaveBeenCalled())
    return result
  }
  describe('Data Initialization', () => {
    it('should fetch data sources on mount and reflect configured status', async () => {
      // Arrange
      vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: mockSources } as DataSourcesResponse)
      // Act
      await renderAndWait(DataSourceProvider.fireCrawl)
      // Assert: the "configured crawlers" section only renders when a binding exists
      expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument()
    })
    it('should pass readOnly status based on workspace manager permissions', async () => {
      // Arrange: non-manager users get a read-only panel
      vi.mocked(useAppContext).mockReturnValue({ isCurrentWorkspaceManager: false } as unknown as AppContextValue)
      // Act
      await renderAndWait(DataSourceProvider.fireCrawl)
      // Assert: configure action is rendered but not clickable
      expect(screen.getByText('common.dataSource.configure')).toHaveClass('cursor-default')
    })
  })
  describe('Provider Specific Rendering', () => {
    it('should render correct logo and name for Firecrawl', async () => {
      // Arrange
      vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[0]] } as DataSourcesResponse)
      // Act
      await renderAndWait(DataSourceProvider.fireCrawl)
      // Assert: Firecrawl uses the flame-emoji logo
      expect(await screen.findByText('Firecrawl')).toBeInTheDocument()
      expect(screen.getByText('🔥')).toBeInTheDocument()
    })
    it('should render correct logo and name for WaterCrawl', async () => {
      // Arrange
      vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[1]] } as DataSourcesResponse)
      // Act
      await renderAndWait(DataSourceProvider.waterCrawl)
      // Assert: the name may appear in both panel header and configured list
      const elements = await screen.findAllByText('WaterCrawl')
      expect(elements.length).toBeGreaterThanOrEqual(1)
    })
    it('should render correct logo and name for Jina Reader', async () => {
      // Arrange
      vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[2]] } as DataSourcesResponse)
      // Act
      await renderAndWait(DataSourceProvider.jinaReader)
      // Assert
      const elements = await screen.findAllByText('Jina Reader')
      expect(elements.length).toBeGreaterThanOrEqual(1)
    })
  })
  describe('Modal Interactions', () => {
    it('should manage opening and closing of configuration modals', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.fireCrawl)
      // Act (Open)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      // Assert
      expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument()
      // Act (Cancel)
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
      // Assert: modal unmounts on cancel
      expect(screen.queryByText('datasetCreation.firecrawl.configFirecrawl')).not.toBeInTheDocument()
    })
    it('should re-fetch sources after saving configuration (Watercrawl)', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.waterCrawl)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      // Clear so the assertion below sees only the post-save re-fetch
      vi.mocked(fetchDataSources).mockClear()
      // Act
      fireEvent.change(screen.getByPlaceholderText('datasetCreation.watercrawl.apiKeyPlaceholder'), { target: { value: 'test-key' } })
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert
      await waitFor(() => {
        expect(fetchDataSources).toHaveBeenCalled()
        expect(screen.queryByText('datasetCreation.watercrawl.configWatercrawl')).not.toBeInTheDocument()
      })
    })
    it('should re-fetch sources after saving configuration (Jina Reader)', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.jinaReader)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      vi.mocked(fetchDataSources).mockClear()
      // Act
      fireEvent.change(screen.getByPlaceholderText('datasetCreation.jinaReader.apiKeyPlaceholder'), { target: { value: 'test-key' } })
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert
      await waitFor(() => {
        expect(fetchDataSources).toHaveBeenCalled()
        expect(screen.queryByText('datasetCreation.jinaReader.configJinaReader')).not.toBeInTheDocument()
      })
    })
  })
  describe('Management Actions', () => {
    it('should handle successful data source removal with toast notification', async () => {
      // Arrange
      vi.mocked(fetchDataSources).mockResolvedValue({ result: 'success', sources: [mockSources[0]] } as DataSourcesResponse)
      vi.mocked(removeDataSourceApiKeyBinding).mockResolvedValue({ result: 'success' } as CommonResponse)
      await renderAndWait(DataSourceProvider.fireCrawl)
      await waitFor(() => expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument())
      // Act: the remove control is the icon next to the provider name (no test id available)
      const removeBtn = screen.getByText('Firecrawl').parentElement?.querySelector('svg')?.parentElement
      if (removeBtn)
        fireEvent.click(removeBtn)
      // Assert: binding '1' is removed and the configured section disappears
      await waitFor(() => {
        expect(removeDataSourceApiKeyBinding).toHaveBeenCalledWith('1')
        expect(screen.getByText('common.api.remove')).toBeInTheDocument()
      })
      expect(screen.queryByText('common.dataSource.website.configuredCrawlers')).not.toBeInTheDocument()
    })
    it('should skip removal API call if no data source ID is present', async () => {
      // Arrange: no sources configured, so no binding id exists
      await renderAndWait(DataSourceProvider.fireCrawl)
      // Act
      const removeBtn = screen.queryByText('Firecrawl')?.parentElement?.querySelector('svg')?.parentElement
      if (removeBtn)
        fireEvent.click(removeBtn)
      // Assert
      expect(removeDataSourceApiKeyBinding).not.toHaveBeenCalled()
    })
  })
  describe('Firecrawl Save Flow', () => {
    it('should re-fetch sources after saving Firecrawl configuration', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.fireCrawl)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      expect(screen.getByText('datasetCreation.firecrawl.configFirecrawl')).toBeInTheDocument()
      vi.mocked(fetchDataSources).mockClear()
      // Act - fill in required API key field and save
      const apiKeyInput = screen.getByPlaceholderText('datasetCreation.firecrawl.apiKeyPlaceholder')
      fireEvent.change(apiKeyInput, { target: { value: 'test-key' } })
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.save/i }))
      // Assert
      await waitFor(() => {
        expect(fetchDataSources).toHaveBeenCalled()
        expect(screen.queryByText('datasetCreation.firecrawl.configFirecrawl')).not.toBeInTheDocument()
      })
    })
  })
  describe('Cancel Flow', () => {
    it('should close watercrawl modal when cancel is clicked', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.waterCrawl)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      expect(screen.getByText('datasetCreation.watercrawl.configWatercrawl')).toBeInTheDocument()
      // Act
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
      // Assert - modal closed
      await waitFor(() => {
        expect(screen.queryByText('datasetCreation.watercrawl.configWatercrawl')).not.toBeInTheDocument()
      })
    })
    it('should close jina reader modal when cancel is clicked', async () => {
      // Arrange
      await renderAndWait(DataSourceProvider.jinaReader)
      fireEvent.click(screen.getByText('common.dataSource.configure'))
      expect(screen.getByText('datasetCreation.jinaReader.configJinaReader')).toBeInTheDocument()
      // Act
      fireEvent.click(screen.getByRole('button', { name: /common\.operation\.cancel/i }))
      // Assert - modal closed
      await waitFor(() => {
        expect(screen.queryByText('datasetCreation.jinaReader.configJinaReader')).not.toBeInTheDocument()
      })
    })
  })
})

View File

@ -1,165 +0,0 @@
'use client'
import type { FC } from 'react'
import type { FirecrawlConfig } from '@/models/common'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general'
import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security'
import {
PortalToFollowElem,
PortalToFollowElemContent,
} from '@/app/components/base/portal-to-follow-elem'
import Toast from '@/app/components/base/toast'
import Field from '@/app/components/datasets/create/website/base/field'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
type Props = {
  onCancel: () => void
  onSaved: () => void
}

const I18N_PREFIX = 'firecrawl'
// Fallback endpoint used when the Base URL field is left empty.
const DEFAULT_BASE_URL = 'https://api.firecrawl.dev'

/**
 * Modal for binding Firecrawl credentials (API key + optional base URL).
 *
 * Validates the form locally, persists the binding through
 * `createDataSourceApiKeyBinding`, shows a success toast, then calls
 * `onSaved`. `onCancel` dismisses without saving.
 */
const ConfigFirecrawlModal: FC<Props> = ({
  onCancel,
  onSaved,
}) => {
  const { t } = useTranslation()
  const [isSaving, setIsSaving] = useState(false)
  const [config, setConfig] = useState<FirecrawlConfig>({
    api_key: '',
    base_url: '',
  })
  // Returns a change handler bound to a single config field.
  const handleConfigChange = useCallback((key: string) => {
    return (value: string | number) => {
      setConfig(prev => ({ ...prev, [key]: value as string }))
    }
  }, [])
  const handleSave = useCallback(async () => {
    // Guard against re-entrant clicks while a save request is in flight.
    if (isSaving)
      return
    // Validate: base URL (when provided) must be http(s); API key is required.
    let errorMsg = ''
    if (config.base_url && !(config.base_url.startsWith('http://') || config.base_url.startsWith('https://')))
      errorMsg = t('errorMsg.urlError', { ns: 'common' })
    if (!errorMsg && !config.api_key) {
      errorMsg = t('errorMsg.fieldRequired', {
        ns: 'common',
        field: 'API Key',
      })
    }
    if (errorMsg) {
      Toast.notify({
        type: 'error',
        message: errorMsg,
      })
      return
    }
    const postData = {
      category: 'website',
      provider: 'firecrawl',
      credentials: {
        auth_type: 'bearer',
        config: {
          api_key: config.api_key,
          base_url: config.base_url || DEFAULT_BASE_URL,
        },
      },
    }
    try {
      setIsSaving(true)
      await createDataSourceApiKeyBinding(postData)
      Toast.notify({
        type: 'success',
        message: t('api.success', { ns: 'common' }),
      })
    }
    finally {
      // Always release the saving flag; a rejected request propagates and skips onSaved.
      setIsSaving(false)
    }
    onSaved()
  }, [config.api_key, config.base_url, onSaved, t, isSaving])
  return (
    <PortalToFollowElem open>
      <PortalToFollowElemContent className="z-[60] h-full w-full">
        <div className="fixed inset-0 flex items-center justify-center bg-background-overlay">
          <div className="mx-2 max-h-[calc(100vh-120px)] w-[640px] overflow-y-auto rounded-2xl bg-components-panel-bg shadow-xl">
            <div className="px-8 pt-8">
              <div className="mb-4 flex items-center justify-between">
                <div className="system-xl-semibold text-text-primary">{t(`${I18N_PREFIX}.configFirecrawl`, { ns: 'datasetCreation' })}</div>
              </div>
              <div className="space-y-4">
                <Field
                  label="API Key"
                  labelClassName="!text-sm"
                  isRequired
                  value={config.api_key}
                  onChange={handleConfigChange('api_key')}
                  placeholder={t(`${I18N_PREFIX}.apiKeyPlaceholder`, { ns: 'datasetCreation' })!}
                />
                <Field
                  label="Base URL"
                  labelClassName="!text-sm"
                  value={config.base_url}
                  onChange={handleConfigChange('base_url')}
                  placeholder={DEFAULT_BASE_URL}
                />
              </div>
              <div className="my-8 flex h-8 items-center justify-between">
                {/* rel guards against reverse tabnabbing on target="_blank" */}
                <a className="flex items-center space-x-1 text-xs font-normal leading-[18px] text-text-accent" target="_blank" rel="noopener noreferrer" href="https://www.firecrawl.dev/account">
                  <span>{t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })}</span>
                  <LinkExternal02 className="h-3 w-3" />
                </a>
                <div className="flex">
                  <Button
                    size="large"
                    className="mr-2"
                    onClick={onCancel}
                  >
                    {t('operation.cancel', { ns: 'common' })}
                  </Button>
                  <Button
                    variant="primary"
                    size="large"
                    onClick={handleSave}
                    loading={isSaving}
                  >
                    {t('operation.save', { ns: 'common' })}
                  </Button>
                </div>
              </div>
            </div>
            <div className="border-t-[0.5px] border-t-divider-regular">
              <div className="flex items-center justify-center bg-background-section-burn py-3 text-xs text-text-tertiary">
                <Lock01 className="mr-1 h-3 w-3 text-text-tertiary" />
                {t('modelProvider.encrypted.front', { ns: 'common' })}
                <a
                  className="mx-1 text-text-accent"
                  target="_blank"
                  rel="noopener noreferrer"
                  href="https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html"
                >
                  PKCS1_OAEP
                </a>
                {t('modelProvider.encrypted.back', { ns: 'common' })}
              </div>
            </div>
          </div>
        </div>
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  )
}
export default React.memo(ConfigFirecrawlModal)

View File

@ -1,144 +0,0 @@
'use client'
import type { FC } from 'react'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general'
import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security'
import {
PortalToFollowElem,
PortalToFollowElemContent,
} from '@/app/components/base/portal-to-follow-elem'
import Toast from '@/app/components/base/toast'
import Field from '@/app/components/datasets/create/website/base/field'
import { DataSourceProvider } from '@/models/common'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
type Props = {
  onCancel: () => void
  onSaved: () => void
}

const I18N_PREFIX = 'jinaReader'

/**
 * Modal for binding a Jina Reader API key.
 *
 * Unlike the crawler modals there is no base URL field — only the key is
 * required. On success the binding is stored via
 * `createDataSourceApiKeyBinding` and `onSaved` is invoked.
 */
const ConfigJinaReaderModal: FC<Props> = ({
  onCancel,
  onSaved,
}) => {
  const { t } = useTranslation()
  const [isSaving, setIsSaving] = useState(false)
  const [apiKey, setApiKey] = useState('')
  const handleSave = useCallback(async () => {
    // Guard against re-entrant clicks while a save request is in flight.
    if (isSaving)
      return
    // Validate: the API key is the only required field.
    let errorMsg = ''
    if (!apiKey) {
      errorMsg = t('errorMsg.fieldRequired', {
        ns: 'common',
        field: 'API Key',
      })
    }
    if (errorMsg) {
      Toast.notify({
        type: 'error',
        message: errorMsg,
      })
      return
    }
    const postData = {
      category: 'website',
      provider: DataSourceProvider.jinaReader,
      credentials: {
        auth_type: 'bearer',
        config: {
          api_key: apiKey,
        },
      },
    }
    try {
      setIsSaving(true)
      await createDataSourceApiKeyBinding(postData)
      Toast.notify({
        type: 'success',
        message: t('api.success', { ns: 'common' }),
      })
    }
    finally {
      // Always release the saving flag; a rejected request propagates and skips onSaved.
      setIsSaving(false)
    }
    onSaved()
  }, [apiKey, onSaved, t, isSaving])
  return (
    <PortalToFollowElem open>
      <PortalToFollowElemContent className="z-[60] h-full w-full">
        <div className="fixed inset-0 flex items-center justify-center bg-background-overlay">
          <div className="mx-2 max-h-[calc(100vh-120px)] w-[640px] overflow-y-auto rounded-2xl bg-components-panel-bg shadow-xl">
            <div className="px-8 pt-8">
              <div className="mb-4 flex items-center justify-between">
                <div className="system-xl-semibold text-text-primary">{t(`${I18N_PREFIX}.configJinaReader`, { ns: 'datasetCreation' })}</div>
              </div>
              <div className="space-y-4">
                <Field
                  label="API Key"
                  labelClassName="!text-sm"
                  isRequired
                  value={apiKey}
                  onChange={(value: string | number) => setApiKey(value as string)}
                  placeholder={t(`${I18N_PREFIX}.apiKeyPlaceholder`, { ns: 'datasetCreation' })!}
                />
              </div>
              <div className="my-8 flex h-8 items-center justify-between">
                {/* rel guards against reverse tabnabbing on target="_blank" */}
                <a className="flex items-center space-x-1 text-xs font-normal leading-[18px] text-text-accent" target="_blank" rel="noopener noreferrer" href="https://jina.ai/reader/">
                  <span>{t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })}</span>
                  <LinkExternal02 className="h-3 w-3" />
                </a>
                <div className="flex">
                  <Button
                    size="large"
                    className="mr-2"
                    onClick={onCancel}
                  >
                    {t('operation.cancel', { ns: 'common' })}
                  </Button>
                  <Button
                    variant="primary"
                    size="large"
                    onClick={handleSave}
                    loading={isSaving}
                  >
                    {t('operation.save', { ns: 'common' })}
                  </Button>
                </div>
              </div>
            </div>
            <div className="border-t-[0.5px] border-t-divider-regular">
              <div className="flex items-center justify-center bg-background-section-burn py-3 text-xs text-text-tertiary">
                <Lock01 className="mr-1 h-3 w-3 text-text-tertiary" />
                {t('modelProvider.encrypted.front', { ns: 'common' })}
                <a
                  className="mx-1 text-text-accent"
                  target="_blank"
                  rel="noopener noreferrer"
                  href="https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html"
                >
                  PKCS1_OAEP
                </a>
                {t('modelProvider.encrypted.back', { ns: 'common' })}
              </div>
            </div>
          </div>
        </div>
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  )
}
export default React.memo(ConfigJinaReaderModal)

View File

@ -1,165 +0,0 @@
'use client'
import type { FC } from 'react'
import type { WatercrawlConfig } from '@/models/common'
import * as React from 'react'
import { useCallback, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { LinkExternal02 } from '@/app/components/base/icons/src/vender/line/general'
import { Lock01 } from '@/app/components/base/icons/src/vender/solid/security'
import {
PortalToFollowElem,
PortalToFollowElemContent,
} from '@/app/components/base/portal-to-follow-elem'
import Toast from '@/app/components/base/toast'
import Field from '@/app/components/datasets/create/website/base/field'
import { createDataSourceApiKeyBinding } from '@/service/datasets'
type Props = {
  onCancel: () => void
  onSaved: () => void
}

const I18N_PREFIX = 'watercrawl'
// Fallback endpoint used when the Base URL field is left empty.
const DEFAULT_BASE_URL = 'https://app.watercrawl.dev'

/**
 * Modal for binding WaterCrawl credentials (API key + optional base URL).
 *
 * Validates the form locally, persists the binding through
 * `createDataSourceApiKeyBinding` (WaterCrawl authenticates via the
 * `x-api-key` header, unlike Firecrawl's bearer auth), then calls `onSaved`.
 */
const ConfigWatercrawlModal: FC<Props> = ({
  onCancel,
  onSaved,
}) => {
  const { t } = useTranslation()
  const [isSaving, setIsSaving] = useState(false)
  const [config, setConfig] = useState<WatercrawlConfig>({
    api_key: '',
    base_url: '',
  })
  // Returns a change handler bound to a single config field.
  const handleConfigChange = useCallback((key: string) => {
    return (value: string | number) => {
      setConfig(prev => ({ ...prev, [key]: value as string }))
    }
  }, [])
  const handleSave = useCallback(async () => {
    // Guard against re-entrant clicks while a save request is in flight.
    if (isSaving)
      return
    // Validate: base URL (when provided) must be http(s); API key is required.
    let errorMsg = ''
    if (config.base_url && !(config.base_url.startsWith('http://') || config.base_url.startsWith('https://')))
      errorMsg = t('errorMsg.urlError', { ns: 'common' })
    if (!errorMsg && !config.api_key) {
      errorMsg = t('errorMsg.fieldRequired', {
        ns: 'common',
        field: 'API Key',
      })
    }
    if (errorMsg) {
      Toast.notify({
        type: 'error',
        message: errorMsg,
      })
      return
    }
    const postData = {
      category: 'website',
      provider: 'watercrawl',
      credentials: {
        auth_type: 'x-api-key',
        config: {
          api_key: config.api_key,
          base_url: config.base_url || DEFAULT_BASE_URL,
        },
      },
    }
    try {
      setIsSaving(true)
      await createDataSourceApiKeyBinding(postData)
      Toast.notify({
        type: 'success',
        message: t('api.success', { ns: 'common' }),
      })
    }
    finally {
      // Always release the saving flag; a rejected request propagates and skips onSaved.
      setIsSaving(false)
    }
    onSaved()
  }, [config.api_key, config.base_url, onSaved, t, isSaving])
  return (
    <PortalToFollowElem open>
      <PortalToFollowElemContent className="z-[60] h-full w-full">
        <div className="fixed inset-0 flex items-center justify-center bg-background-overlay">
          <div className="mx-2 max-h-[calc(100vh-120px)] w-[640px] overflow-y-auto rounded-2xl bg-components-panel-bg shadow-xl">
            <div className="px-8 pt-8">
              <div className="mb-4 flex items-center justify-between">
                <div className="system-xl-semibold text-text-primary">{t(`${I18N_PREFIX}.configWatercrawl`, { ns: 'datasetCreation' })}</div>
              </div>
              <div className="space-y-4">
                <Field
                  label="API Key"
                  labelClassName="!text-sm"
                  isRequired
                  value={config.api_key}
                  onChange={handleConfigChange('api_key')}
                  placeholder={t(`${I18N_PREFIX}.apiKeyPlaceholder`, { ns: 'datasetCreation' })!}
                />
                <Field
                  label="Base URL"
                  labelClassName="!text-sm"
                  value={config.base_url}
                  onChange={handleConfigChange('base_url')}
                  placeholder={DEFAULT_BASE_URL}
                />
              </div>
              <div className="my-8 flex h-8 items-center justify-between">
                {/* rel guards against reverse tabnabbing on target="_blank" */}
                <a className="flex items-center space-x-1 text-xs font-normal leading-[18px] text-text-accent" target="_blank" rel="noopener noreferrer" href="https://app.watercrawl.dev/">
                  <span>{t(`${I18N_PREFIX}.getApiKeyLinkText`, { ns: 'datasetCreation' })}</span>
                  <LinkExternal02 className="h-3 w-3" />
                </a>
                <div className="flex">
                  <Button
                    size="large"
                    className="mr-2"
                    onClick={onCancel}
                  >
                    {t('operation.cancel', { ns: 'common' })}
                  </Button>
                  <Button
                    variant="primary"
                    size="large"
                    onClick={handleSave}
                    loading={isSaving}
                  >
                    {t('operation.save', { ns: 'common' })}
                  </Button>
                </div>
              </div>
            </div>
            <div className="border-t-[0.5px] border-t-divider-regular">
              <div className="flex items-center justify-center bg-background-section-burn py-3 text-xs text-text-tertiary">
                <Lock01 className="mr-1 h-3 w-3 text-text-tertiary" />
                {t('modelProvider.encrypted.front', { ns: 'common' })}
                <a
                  className="mx-1 text-text-accent"
                  target="_blank"
                  rel="noopener noreferrer"
                  href="https://pycryptodome.readthedocs.io/en/latest/src/cipher/oaep.html"
                >
                  PKCS1_OAEP
                </a>
                {t('modelProvider.encrypted.back', { ns: 'common' })}
              </div>
            </div>
          </div>
        </div>
      </PortalToFollowElemContent>
    </PortalToFollowElem>
  )
}
export default React.memo(ConfigWatercrawlModal)

View File

@ -1,137 +0,0 @@
'use client'
import type { FC } from 'react'
import type { DataSourceItem } from '@/models/common'
import * as React from 'react'
import { useCallback, useEffect, useState } from 'react'
import { useTranslation } from 'react-i18next'
import Toast from '@/app/components/base/toast'
import s from '@/app/components/datasets/create/website/index.module.css'
import { useAppContext } from '@/context/app-context'
import { DataSourceProvider } from '@/models/common'
import { fetchDataSources, removeDataSourceApiKeyBinding } from '@/service/datasets'
import { cn } from '@/utils/classnames'
import Panel from '../panel'
import { DataSourceType } from '../panel/types'
import ConfigFirecrawlModal from './config-firecrawl-modal'
import ConfigJinaReaderModal from './config-jina-reader-modal'
import ConfigWatercrawlModal from './config-watercrawl-modal'
type Props = {
  provider: DataSourceProvider
}

/**
 * Panel listing the configured website-crawler binding for one provider
 * (Firecrawl / WaterCrawl / Jina Reader), plus the matching config modal.
 *
 * Fetches the workspace's data-source bindings on mount; configure/remove
 * actions are disabled for non-manager users via the panel's readOnly flag.
 */
const DataSourceWebsite: FC<Props> = ({ provider }) => {
  const { t } = useTranslation()
  const { isCurrentWorkspaceManager } = useAppContext()
  const [sources, setSources] = useState<DataSourceItem[]>([])
  // Refresh the list of configured data-source bindings from the backend.
  const checkSetApiKey = useCallback(async () => {
    const res = await fetchDataSources() as any
    const list = res.sources
    setSources(list)
  }, [])
  useEffect(() => {
    checkSetApiKey()
    // checkSetApiKey is stable (useCallback with no deps), so this runs once on mount.
  }, [checkSetApiKey])
  const [configTarget, setConfigTarget] = useState<DataSourceProvider | null>(null)
  const showConfig = useCallback((provider: DataSourceProvider) => {
    setConfigTarget(provider)
  }, [setConfigTarget])
  const hideConfig = useCallback(() => {
    setConfigTarget(null)
  }, [setConfigTarget])
  // After a successful save: re-fetch bindings, then close the modal.
  const handleAdded = useCallback(() => {
    checkSetApiKey()
    hideConfig()
  }, [checkSetApiKey, hideConfig])
  const getIdByProvider = (provider: DataSourceProvider): string | undefined => {
    const source = sources.find(item => item.provider === provider)
    return source?.id
  }
  const getProviderName = (provider: DataSourceProvider): string => {
    if (provider === DataSourceProvider.fireCrawl)
      return 'Firecrawl'
    if (provider === DataSourceProvider.waterCrawl)
      return 'WaterCrawl'
    return 'Jina Reader'
  }
  const handleRemove = useCallback((provider: DataSourceProvider) => {
    return async () => {
      const dataSourceId = getIdByProvider(provider)
      // No binding for this provider means nothing to remove — skip the API call.
      if (dataSourceId) {
        await removeDataSourceApiKeyBinding(dataSourceId)
        // Functional update so state changed while the request was in flight isn't clobbered.
        setSources(prev => prev.filter(item => item.provider !== provider))
        Toast.notify({
          type: 'success',
          message: t('api.remove', { ns: 'common' }),
        })
      }
    }
  }, [sources, t])
  return (
    <>
      <Panel
        type={DataSourceType.website}
        provider={provider}
        isConfigured={sources.find(item => item.provider === provider) !== undefined}
        onConfigure={() => showConfig(provider)}
        readOnly={!isCurrentWorkspaceManager}
        configuredList={sources.filter(item => item.provider === provider).map(item => ({
          id: item.id,
          logo: ({ className }: { className: string }) => {
            if (item.provider === DataSourceProvider.fireCrawl) {
              return (
                <div
                  className={cn(className, 'ml-3 flex h-5 w-5 items-center justify-center rounded border border-divider-subtle !bg-background-default text-xs font-medium text-text-tertiary')}
                >
                  🔥
                </div>
              )
            }
            if (item.provider === DataSourceProvider.waterCrawl) {
              return (
                <div
                  className={cn(className, 'ml-3 flex h-5 w-5 items-center justify-center rounded border border-divider-subtle !bg-background-default text-xs font-medium text-text-tertiary')}
                >
                  <span className={s.watercrawlLogo} />
                </div>
              )
            }
            return (
              <div
                className={cn(className, 'ml-3 flex h-5 w-5 items-center justify-center rounded border border-divider-subtle !bg-background-default text-xs font-medium text-text-tertiary')}
              >
                <span className={s.jinaLogo} />
              </div>
            )
          },
          name: getProviderName(item.provider),
          isActive: true,
        }))}
        onRemove={handleRemove(provider)}
      />
      {configTarget === DataSourceProvider.fireCrawl && (
        <ConfigFirecrawlModal onSaved={handleAdded} onCancel={hideConfig} />
      )}
      {configTarget === DataSourceProvider.waterCrawl && (
        <ConfigWatercrawlModal onSaved={handleAdded} onCancel={hideConfig} />
      )}
      {configTarget === DataSourceProvider.jinaReader && (
        <ConfigJinaReaderModal onSaved={handleAdded} onCancel={hideConfig} />
      )}
    </>
  )
}
export default React.memo(DataSourceWebsite)

View File

@ -1,213 +0,0 @@
import type { ConfigItemType } from '../config-item'
import { fireEvent, render, screen } from '@testing-library/react'
import ConfigItem from '../config-item'
import { DataSourceType } from '../types'
/**
* ConfigItem Component Tests
* Tests rendering of individual configuration items for Notion and Website data sources.
*/
// Mock Operate component to isolate ConfigItem unit tests.
vi.mock('../../data-source-notion/operate', () => ({
default: ({ onAuthAgain, payload }: { onAuthAgain: () => void, payload: { id: string, total: number } }) => (
<div data-testid="mock-operate">
<button onClick={onAuthAgain} data-testid="operate-auth-btn">Auth Again</button>
<span data-testid="operate-payload">{JSON.stringify(payload)}</span>
</div>
),
}))
describe('ConfigItem Component', () => {
  const mockOnRemove = vi.fn()
  const mockOnChangeAuthorizedPage = vi.fn()
  const MockLogo = (props: React.SVGProps<SVGSVGElement>) => <svg data-testid="mock-logo" {...props} />
  // Notion payload carries a page count via notionConfig; website payloads do not.
  const baseNotionPayload: ConfigItemType = {
    id: 'notion-1',
    logo: MockLogo,
    name: 'Notion Workspace',
    isActive: true,
    notionConfig: { total: 5 },
  }
  const baseWebsitePayload: ConfigItemType = {
    id: 'website-1',
    logo: MockLogo,
    name: 'My Website',
    isActive: true,
  }
  afterEach(() => {
    vi.clearAllMocks()
  })
  describe('Notion Configuration', () => {
    it('should render active Notion config item with connected status and operator', () => {
      // Act
      render(
        <ConfigItem
          type={DataSourceType.notion}
          payload={baseNotionPayload}
          onRemove={mockOnRemove}
          notionActions={{ onChangeAuthorizedPage: mockOnChangeAuthorizedPage }}
          readOnly={false}
        />,
      )
      // Assert: connected state uses the green status class and forwards id/total to Operate
      expect(screen.getByTestId('mock-logo')).toBeInTheDocument()
      expect(screen.getByText('Notion Workspace')).toBeInTheDocument()
      const statusText = screen.getByText('common.dataSource.notion.connected')
      expect(statusText).toHaveClass('text-util-colors-green-green-600')
      expect(screen.getByTestId('operate-payload')).toHaveTextContent(JSON.stringify({ id: 'notion-1', total: 5 }))
    })
    it('should render inactive Notion config item with disconnected status', () => {
      // Arrange
      const inactivePayload = { ...baseNotionPayload, isActive: false }
      // Act
      render(
        <ConfigItem
          type={DataSourceType.notion}
          payload={inactivePayload}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Assert: disconnected state uses the warning status class
      const statusText = screen.getByText('common.dataSource.notion.disconnected')
      expect(statusText).toHaveClass('text-util-colors-warning-warning-600')
    })
    it('should handle auth action through the Operate component', () => {
      // Arrange
      render(
        <ConfigItem
          type={DataSourceType.notion}
          payload={baseNotionPayload}
          onRemove={mockOnRemove}
          notionActions={{ onChangeAuthorizedPage: mockOnChangeAuthorizedPage }}
          readOnly={false}
        />,
      )
      // Act: the mocked Operate surfaces onAuthAgain as a button
      fireEvent.click(screen.getByTestId('operate-auth-btn'))
      // Assert
      expect(mockOnChangeAuthorizedPage).toHaveBeenCalled()
    })
    it('should fallback to 0 total if notionConfig is missing', () => {
      // Arrange
      const payloadNoConfig = { ...baseNotionPayload, notionConfig: undefined }
      // Act
      render(
        <ConfigItem
          type={DataSourceType.notion}
          payload={payloadNoConfig}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Assert
      expect(screen.getByTestId('operate-payload')).toHaveTextContent(JSON.stringify({ id: 'notion-1', total: 0 }))
    })
    it('should handle missing notionActions safely without crashing', () => {
      // Arrange
      render(
        <ConfigItem
          type={DataSourceType.notion}
          payload={baseNotionPayload}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Act & Assert: clicking auth with no notionActions supplied must be a no-op
      expect(() => fireEvent.click(screen.getByTestId('operate-auth-btn'))).not.toThrow()
    })
  })
  describe('Website Configuration', () => {
    it('should render active Website config item and hide operator', () => {
      // Act
      render(
        <ConfigItem
          type={DataSourceType.website}
          payload={baseWebsitePayload}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Assert: website items never render the Notion Operate menu
      expect(screen.getByText('common.dataSource.website.active')).toBeInTheDocument()
      expect(screen.queryByTestId('mock-operate')).not.toBeInTheDocument()
    })
    it('should render inactive Website config item', () => {
      // Arrange
      const inactivePayload = { ...baseWebsitePayload, isActive: false }
      // Act
      render(
        <ConfigItem
          type={DataSourceType.website}
          payload={inactivePayload}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Assert
      const statusText = screen.getByText('common.dataSource.website.inactive')
      expect(statusText).toHaveClass('text-util-colors-warning-warning-600')
    })
    it('should show remove button and trigger onRemove when clicked (not read-only)', () => {
      // Arrange
      const { container } = render(
        <ConfigItem
          type={DataSourceType.website}
          payload={baseWebsitePayload}
          onRemove={mockOnRemove}
          readOnly={false}
        />,
      )
      // Note: This selector is brittle but necessary since the delete button lacks
      // accessible attributes (data-testid, aria-label). Ideally, the component should
      // be updated to include proper accessibility attributes.
      const deleteBtn = container.querySelector('div[class*="cursor-pointer"]') as HTMLElement
      // Act
      fireEvent.click(deleteBtn)
      // Assert
      expect(mockOnRemove).toHaveBeenCalled()
    })
    it('should hide remove button in read-only mode', () => {
      // Arrange
      const { container } = render(
        <ConfigItem
          type={DataSourceType.website}
          payload={baseWebsitePayload}
          onRemove={mockOnRemove}
          readOnly={true}
        />,
      )
      // Assert: read-only panels render no delete affordance at all
      const deleteBtn = container.querySelector('div[class*="cursor-pointer"]')
      expect(deleteBtn).not.toBeInTheDocument()
    })
  })
})

View File

@ -1,226 +0,0 @@
import type { ConfigItemType } from '../config-item'
import { fireEvent, render, screen } from '@testing-library/react'
import { DataSourceProvider } from '@/models/common'
import Panel from '../index'
import { DataSourceType } from '../types'
/**
* Panel Component Tests
* Tests layout, conditional rendering, and interactions for data source panels (Notion and Website).
*/
// Replace the Notion Operate widget with a lightweight stub so Panel tests
// don't depend on its internals.
vi.mock('../../data-source-notion/operate', () => ({
  default: () => <div data-testid="mock-operate" />,
}))
describe('Panel Component', () => {
  const onConfigure = vi.fn()
  const onRemove = vi.fn()
  // Two entries covering both indicator states (active / inactive).
  const mockConfiguredList: ConfigItemType[] = [
    { id: '1', name: 'Item 1', isActive: true, logo: () => null },
    { id: '2', name: 'Item 2', isActive: false, logo: () => null },
  ]
  beforeEach(() => {
    vi.clearAllMocks()
  })
  // Notion panels: connect entry point, configured list, read-only handling.
  describe('Notion Panel Rendering', () => {
    it('should render Notion panel when not configured and isSupportList is true', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.notion}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={[]}
          onRemove={onRemove}
          isSupportList={true}
        />,
      )
      // Assert
      expect(screen.getByText('common.dataSource.notion.title')).toBeInTheDocument()
      expect(screen.getByText('common.dataSource.notion.description')).toBeInTheDocument()
      const connectBtn = screen.getByText('common.dataSource.connect')
      expect(connectBtn).toBeInTheDocument()
      // Act: clicking connect should invoke the configure callback
      fireEvent.click(connectBtn)
      // Assert
      expect(onConfigure).toHaveBeenCalled()
    })
    it('should render Notion panel in readOnly mode when not configured', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.notion}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={true}
          configuredList={[]}
          onRemove={onRemove}
          isSupportList={true}
        />,
      )
      // Assert: read-only styling disables the connect affordance visually
      const connectBtn = screen.getByText('common.dataSource.connect')
      expect(connectBtn).toHaveClass('cursor-default opacity-50 grayscale')
    })
    it('should render Notion panel when configured with list of items', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.notion}
          isConfigured={true}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={mockConfiguredList}
          onRemove={onRemove}
        />,
      )
      // Assert: configured panels show the workspace header and each item
      expect(screen.getByRole('button', { name: 'common.dataSource.configure' })).toBeInTheDocument()
      expect(screen.getByText('common.dataSource.notion.connectedWorkspace')).toBeInTheDocument()
      expect(screen.getByText('Item 1')).toBeInTheDocument()
      expect(screen.getByText('Item 2')).toBeInTheDocument()
    })
    it('should hide connect button for Notion if isSupportList is false', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.notion}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={[]}
          onRemove={onRemove}
          isSupportList={false}
        />,
      )
      // Assert
      expect(screen.queryByText('common.dataSource.connect')).not.toBeInTheDocument()
    })
    it('should disable Notion configure button in readOnly mode (configured state)', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.notion}
          isConfigured={true}
          onConfigure={onConfigure}
          readOnly={true}
          configuredList={mockConfiguredList}
          onRemove={onRemove}
        />,
      )
      // Assert: configured + read-only renders a truly disabled button
      const btn = screen.getByRole('button', { name: 'common.dataSource.configure' })
      expect(btn).toBeDisabled()
    })
  })
  // Website panels: provider naming, configure flow, read-only guard.
  describe('Website Panel Rendering', () => {
    it('should show correct provider names and handle configuration when not configured', () => {
      // Arrange: rerender the same mount with each provider in turn —
      // the final assertions run against the last (Jina Reader) render.
      const { rerender } = render(
        <Panel
          type={DataSourceType.website}
          provider={DataSourceProvider.fireCrawl}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={[]}
          onRemove={onRemove}
        />,
      )
      // Assert Firecrawl
      expect(screen.getByText('🔥 Firecrawl')).toBeInTheDocument()
      // Rerender for WaterCrawl
      rerender(
        <Panel
          type={DataSourceType.website}
          provider={DataSourceProvider.waterCrawl}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={[]}
          onRemove={onRemove}
        />,
      )
      expect(screen.getByText('WaterCrawl')).toBeInTheDocument()
      // Rerender for Jina Reader
      rerender(
        <Panel
          type={DataSourceType.website}
          provider={DataSourceProvider.jinaReader}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={[]}
          onRemove={onRemove}
        />,
      )
      expect(screen.getByText('Jina Reader')).toBeInTheDocument()
      // Act
      const configBtn = screen.getByText('common.dataSource.configure')
      fireEvent.click(configBtn)
      // Assert
      expect(onConfigure).toHaveBeenCalled()
    })
    it('should handle readOnly mode for Website configuration button', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.website}
          isConfigured={false}
          onConfigure={onConfigure}
          readOnly={true}
          configuredList={[]}
          onRemove={onRemove}
        />,
      )
      // Assert: read-only styling applied...
      const configBtn = screen.getByText('common.dataSource.configure')
      expect(configBtn).toHaveClass('cursor-default opacity-50 grayscale')
      // Act
      fireEvent.click(configBtn)
      // Assert: ...and the click handler is actually disarmed
      expect(onConfigure).not.toHaveBeenCalled()
    })
    it('should render Website panel correctly when configured with crawlers', () => {
      // Act
      render(
        <Panel
          type={DataSourceType.website}
          isConfigured={true}
          onConfigure={onConfigure}
          readOnly={false}
          configuredList={mockConfiguredList}
          onRemove={onRemove}
        />,
      )
      // Assert
      expect(screen.getByText('common.dataSource.website.configuredCrawlers')).toBeInTheDocument()
      expect(screen.getByText('Item 1')).toBeInTheDocument()
      expect(screen.getByText('Item 2')).toBeInTheDocument()
    })
  })
})

View File

@ -1,85 +0,0 @@
'use client'
import type { FC } from 'react'
import {
RiDeleteBinLine,
} from '@remixicon/react'
import { noop } from 'es-toolkit/function'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import { cn } from '@/utils/classnames'
import Indicator from '../../../indicator'
import Operate from '../data-source-notion/operate'
import s from './style.module.css'
import { DataSourceType } from './types'
// Shape of a single configured data-source entry rendered by ConfigItem.
export type ConfigItemType = {
  id: string
  // Rendered inline as a JSX component receiving `className`
  // (see `<payload.logo className=... />` below); typed loosely as `any`.
  logo: any
  name: string
  // Drives the green/yellow indicator and the connected/disconnected label.
  isActive: boolean
  // Present for Notion workspaces; `total` is forwarded to Operate
  // (falls back to 0 when absent).
  notionConfig?: {
    total: number
  }
}
type Props = {
  type: DataSourceType
  payload: ConfigItemType
  // Invoked when the delete affordance of a website item is clicked.
  onRemove: () => void
  // Optional Notion-specific callbacks; ConfigItem substitutes a noop when absent.
  notionActions?: {
    onChangeAuthorizedPage: () => void
  }
  readOnly: boolean
}
const ConfigItem: FC<Props> = ({
type,
payload,
onRemove,
notionActions,
readOnly,
}) => {
const { t } = useTranslation()
const isNotion = type === DataSourceType.notion
const isWebsite = type === DataSourceType.website
const onChangeAuthorizedPage = notionActions?.onChangeAuthorizedPage || noop
return (
<div className={cn(s['workspace-item'], 'mb-1 flex items-center rounded-lg bg-components-panel-on-panel-item-bg py-1 pr-1')} key={payload.id}>
<payload.logo className="ml-3 mr-1.5" />
<div className="system-sm-medium grow truncate py-[7px] text-text-secondary" title={payload.name}>{payload.name}</div>
{
payload.isActive
? <Indicator className="mr-[6px] shrink-0" color="green" />
: <Indicator className="mr-[6px] shrink-0" color="yellow" />
}
<div className={`system-xs-semibold-uppercase mr-3 shrink-0 ${payload.isActive ? 'text-util-colors-green-green-600' : 'text-util-colors-warning-warning-600'}`}>
{
payload.isActive
? t(isNotion ? 'dataSource.notion.connected' : 'dataSource.website.active', { ns: 'common' })
: t(isNotion ? 'dataSource.notion.disconnected' : 'dataSource.website.inactive', { ns: 'common' })
}
</div>
<div className="mr-2 h-3 w-[1px] bg-divider-regular" />
{isNotion && (
<Operate
payload={{
id: payload.id,
total: payload.notionConfig?.total || 0,
}}
onAuthAgain={onChangeAuthorizedPage}
/>
)}
{
isWebsite && !readOnly && (
<div className="cursor-pointer rounded-md p-2 text-text-tertiary hover:bg-state-base-hover" onClick={onRemove}>
<RiDeleteBinLine className="h-4 w-4" />
</div>
)
}
</div>
)
}
export default React.memo(ConfigItem)

View File

@ -1,151 +0,0 @@
'use client'
import type { FC } from 'react'
import type { ConfigItemType } from './config-item'
import { RiAddLine } from '@remixicon/react'
import * as React from 'react'
import { useTranslation } from 'react-i18next'
import Button from '@/app/components/base/button'
import { DataSourceProvider } from '@/models/common'
import { cn } from '@/utils/classnames'
import ConfigItem from './config-item'
import s from './style.module.css'
import { DataSourceType } from './types'
// Props for the data-source settings Panel.
type Props = {
  type: DataSourceType
  // Website crawler backing the panel; only read for website panels
  // (provider badge / getProviderName).
  provider?: DataSourceProvider
  // Whether at least one workspace/crawler is already connected.
  isConfigured: boolean
  onConfigure: () => void
  readOnly: boolean
  // Notion only: whether to offer the "connect" entry point at all.
  isSupportList?: boolean
  // Connected entries rendered with ConfigItem once configured.
  configuredList: ConfigItemType[]
  // Forwarded to each ConfigItem's delete affordance.
  onRemove: () => void
  // Notion-specific callbacks forwarded to ConfigItem.
  notionActions?: {
    onChangeAuthorizedPage: () => void
  }
}
/**
 * Data-source settings panel for a single source type (Notion or Website).
 *
 * Renders the source header (with a provider badge for website panels),
 * a connect/configure entry point, and — once configured — the list of
 * connected workspaces/crawlers via ConfigItem. All click entry points
 * respect `readOnly`.
 */
const Panel: FC<Props> = ({
  type,
  provider,
  isConfigured,
  onConfigure,
  readOnly,
  configuredList,
  isSupportList,
  onRemove,
  notionActions,
}) => {
  const { t } = useTranslation()
  const isNotion = type === DataSourceType.notion
  const isWebsite = type === DataSourceType.website
  // Display name of the website crawler provider; defaults to Jina Reader
  // when the provider is unset or unrecognized.
  const getProviderName = (): string => {
    if (provider === DataSourceProvider.fireCrawl)
      return '🔥 Firecrawl'
    if (provider === DataSourceProvider.waterCrawl)
      return 'WaterCrawl'
    return 'Jina Reader'
  }
  return (
    <div className="mb-2 rounded-xl bg-background-section-burn">
      <div className="flex items-center px-3 py-[9px]">
        <div className={cn(s[`${type}-icon`], 'mr-3 h-8 w-8 rounded-lg border border-divider-subtle !bg-background-default')} />
        <div className="grow">
          <div className="flex h-5 items-center">
            <div className="text-sm font-medium text-text-primary">{t(`dataSource.${type}.title`, { ns: 'common' })}</div>
            {isWebsite && (
              <div className="ml-1 rounded-md bg-components-badge-white-to-dark px-1.5 text-xs font-medium leading-[18px] text-text-secondary">
                <span className="text-text-tertiary">{t('dataSource.website.with', { ns: 'common' })}</span>
                {' '}
                {getProviderName()}
              </div>
            )}
          </div>
          {
            !isConfigured && (
              <div className="system-xs-medium text-text-tertiary">
                {t(`dataSource.${type}.description`, { ns: 'common' })}
              </div>
            )
          }
        </div>
        {isNotion && (
          <>
            {
              isConfigured
                ? (
                  <Button
                    disabled={readOnly}
                    className="ml-3"
                    onClick={onConfigure}
                  >
                    {t('dataSource.configure', { ns: 'common' })}
                  </Button>
                )
                : (
                  <>
                    {isSupportList && (
                      <div
                        className={
                          `system-sm-medium flex min-h-7 items-center rounded-md border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg px-3 py-1 text-components-button-secondary-accent-text
                          ${!readOnly ? 'cursor-pointer' : 'cursor-default opacity-50 grayscale'}`
                        }
                        // Fix: previously this fired onConfigure even when readOnly;
                        // guard the handler like the website branch below.
                        onClick={!readOnly ? onConfigure : undefined}
                      >
                        <RiAddLine className="mr-[5px] h-4 w-4 text-components-button-secondary-accent-text" />
                        {t('dataSource.connect', { ns: 'common' })}
                      </div>
                    )}
                  </>
                )
            }
          </>
        )}
        {isWebsite && !isConfigured && (
          <div
            className={
              `ml-3 flex h-7 items-center rounded-md border-[0.5px] border-components-button-secondary-border bg-components-button-secondary-bg
              px-3 text-xs font-medium text-components-button-secondary-accent-text
              ${!readOnly ? 'cursor-pointer' : 'cursor-default opacity-50 grayscale'}`
            }
            onClick={!readOnly ? onConfigure : undefined}
          >
            {t('dataSource.configure', { ns: 'common' })}
          </div>
        )}
      </div>
      {
        isConfigured && (
          <>
            <div className="flex h-[18px] items-center px-3">
              <div className="system-xs-medium text-text-tertiary">
                {isNotion ? t('dataSource.notion.connectedWorkspace', { ns: 'common' }) : t('dataSource.website.configuredCrawlers', { ns: 'common' })}
              </div>
              <div className="ml-3 grow border-t border-t-divider-subtle" />
            </div>
            <div className="px-3 pb-3 pt-2">
              {
                configuredList.map(item => (
                  <ConfigItem
                    key={item.id}
                    type={type}
                    payload={item}
                    onRemove={onRemove}
                    notionActions={notionActions}
                    readOnly={readOnly}
                  />
                ))
              }
            </div>
          </>
        )
      }
    </div>
  )
}
export default React.memo(Panel)

View File

@ -1,17 +0,0 @@
/* Notion source icon: white tile with the Notion logo centered. */
.notion-icon {
  background: #ffffff url(../../../assets/notion.svg) center center no-repeat;
  background-size: 20px 20px;
}

/* Website source icon: white tile with the web glyph centered. */
.website-icon {
  background: #ffffff url(../../../../datasets/create/assets/web.svg) center center no-repeat;
  background-size: 20px 20px;
}

/* Card row for a single connected workspace/crawler (ConfigItem root). */
.workspace-item {
  box-shadow: 0px 1px 2px rgba(16, 24, 40, 0.05);
}

/* Drop the bottom gap (mb-1 on each row) for the last row of the list. */
.workspace-item:last-of-type {
  margin-bottom: 0;
}

View File

@ -1,4 +0,0 @@
// Data-source categories supported by the settings panel.
export enum DataSourceType {
  notion = 'notion',
  website = 'website',
}

Some files were not shown because too many files have changed in this diff Show More