Merge branch 'main' into fix/draft-variable-desc-length
commit afeb8d3e68

@@ -1,3 +1,5 @@
+from typing import Any, cast
+
 from controllers.common import fields
 from controllers.console import console_ns
 from controllers.console.app.error import AppUnavailableError
@@ -23,14 +25,14 @@ class AppParameterApi(InstalledAppResource):
             if workflow is None:
                 raise AppUnavailableError()

-            features_dict = workflow.features_dict
+            features_dict: dict[str, Any] = workflow.features_dict
             user_input_form = workflow.user_input_form(to_old_structure=True)
         else:
             app_model_config = app_model.app_model_config
             if app_model_config is None:
                 raise AppUnavailableError()

-            features_dict = app_model_config.to_dict()
+            features_dict = cast(dict[str, Any], app_model_config.to_dict())

             user_input_form = features_dict.get("user_input_form", [])

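Note: the hunk above is the first of several controller/invocation sites in this PR that apply the same typing pattern: annotate the workflow branch and cast the legacy-config branch so both assignments unify to `dict[str, Any]`. A minimal sketch of the idea, with illustrative stand-in objects rather than the real Dify models:

```python
from typing import Any, cast


def load_features(workflow, app_model_config) -> dict[str, Any]:
    # Both branches must settle on one static type, otherwise the checker
    # infers a union and flags every downstream .get() call.
    if workflow is not None:
        features: dict[str, Any] = workflow.features_dict  # annotate the untyped property
    else:
        # to_dict() is loosely typed upstream; cast() narrows it with no runtime cost.
        features = cast(dict[str, Any], app_model_config.to_dict())
    return features
```
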
@@ -185,4 +185,4 @@ class AnnotationUpdateDeleteApi(Resource):
     def delete(self, app_model: App, annotation_id: str):
         """Delete an annotation."""
         AppAnnotationService.delete_app_annotation(app_model.id, annotation_id)
-        return {"result": "success"}, 204
+        return "", 204

@@ -1,3 +1,5 @@
+from typing import Any, cast
+
 from flask_restx import Resource

 from controllers.common.fields import Parameters
@@ -33,14 +35,14 @@ class AppParameterApi(Resource):
             if workflow is None:
                 raise AppUnavailableError()

-            features_dict = workflow.features_dict
+            features_dict: dict[str, Any] = workflow.features_dict
             user_input_form = workflow.user_input_form(to_old_structure=True)
         else:
             app_model_config = app_model.app_model_config
             if app_model_config is None:
                 raise AppUnavailableError()

-            features_dict = app_model_config.to_dict()
+            features_dict = cast(dict[str, Any], app_model_config.to_dict())

             user_input_form = features_dict.get("user_input_form", [])

@@ -14,7 +14,6 @@ from controllers.service_api.wraps import FetchUserArg, WhereisUserArg, validate
 from core.app.entities.app_invoke_entities import InvokeFrom
 from extensions.ext_database import db
 from fields.conversation_fields import (
-    ConversationDelete,
     ConversationInfiniteScrollPagination,
     SimpleConversation,
 )
@@ -163,7 +162,7 @@ class ConversationDetailApi(Resource):
             ConversationService.delete(app_model, conversation_id, end_user)
         except services.errors.conversation.ConversationNotExistsError:
             raise NotFound("Conversation Not Exists.")
-        return ConversationDelete(result="success").model_dump(mode="json"), 204
+        return "", 204


 @service_api_ns.route("/conversations/<uuid:c_id>/name")

@@ -132,6 +132,8 @@ class WorkflowRunDetailApi(Resource):
             app_id=app_model.id,
             run_id=workflow_run_id,
         )
+        if not workflow_run:
+            raise NotFound("Workflow run not found.")
         return workflow_run

@@ -1,4 +1,5 @@
 import logging
+from typing import Any, cast

 from flask import request
 from flask_restx import Resource
@@ -57,14 +58,14 @@ class AppParameterApi(WebApiResource):
             if workflow is None:
                 raise AppUnavailableError()

-            features_dict = workflow.features_dict
+            features_dict: dict[str, Any] = workflow.features_dict
             user_input_form = workflow.user_input_form(to_old_structure=True)
         else:
             app_model_config = app_model.app_model_config
             if app_model_config is None:
                 raise AppUnavailableError()

-            features_dict = app_model_config.to_dict()
+            features_dict = cast(dict[str, Any], app_model_config.to_dict())

             user_input_form = features_dict.get("user_input_form", [])

@@ -1,10 +1,13 @@
+from collections.abc import Mapping
+from typing import Any
+
 from core.app.app_config.entities import SensitiveWordAvoidanceEntity
 from core.moderation.factory import ModerationFactory


 class SensitiveWordAvoidanceConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> SensitiveWordAvoidanceEntity | None:
+    def convert(cls, config: Mapping[str, Any]) -> SensitiveWordAvoidanceEntity | None:
         sensitive_word_avoidance_dict = config.get("sensitive_word_avoidance")
         if not sensitive_word_avoidance_dict:
             return None
@@ -12,7 +15,7 @@ class SensitiveWordAvoidanceConfigManager:
         if sensitive_word_avoidance_dict.get("enabled"):
             return SensitiveWordAvoidanceEntity(
                 type=sensitive_word_avoidance_dict.get("type"),
-                config=sensitive_word_avoidance_dict.get("config"),
+                config=sensitive_word_avoidance_dict.get("config", {}),
             )
         else:
             return None

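Note: this converter now takes `Mapping[str, Any]` instead of `dict`. A short, self-contained illustration of why that is the friendlier signature for a read-only argument; the function name is made up:

```python
from collections.abc import Mapping
from typing import Any


def read_flag(config: Mapping[str, Any]) -> bool:
    # Mapping exposes only read operations, so accidental mutation of the
    # caller's config is a type error rather than a silent side effect.
    return bool(config.get("enabled", False))


# Mapping is covariant in its value type, so narrower dicts are accepted as-is:
assert read_flag({"enabled": True}) is True
```
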
@@ -1,10 +1,13 @@
+from typing import Any, cast
+
 from core.agent.entities import AgentEntity, AgentPromptEntity, AgentToolEntity
 from core.agent.prompt.template import REACT_PROMPT_TEMPLATES
+from models.model import AppModelConfigDict


 class AgentConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> AgentEntity | None:
+    def convert(cls, config: AppModelConfigDict) -> AgentEntity | None:
         """
         Convert model config to model config
@@ -28,17 +31,17 @@ class AgentConfigManager:

         agent_tools = []
         for tool in agent_dict.get("tools", []):
-            keys = tool.keys()
-            if len(keys) >= 4:
-                if "enabled" not in tool or not tool["enabled"]:
+            tool_dict = cast(dict[str, Any], tool)
+            if len(tool_dict) >= 4:
+                if "enabled" not in tool_dict or not tool_dict["enabled"]:
                     continue

                 agent_tool_properties = {
-                    "provider_type": tool["provider_type"],
-                    "provider_id": tool["provider_id"],
-                    "tool_name": tool["tool_name"],
-                    "tool_parameters": tool.get("tool_parameters", {}),
-                    "credential_id": tool.get("credential_id", None),
+                    "provider_type": tool_dict["provider_type"],
+                    "provider_id": tool_dict["provider_id"],
+                    "tool_name": tool_dict["tool_name"],
+                    "tool_parameters": tool_dict.get("tool_parameters", {}),
+                    "credential_id": tool_dict.get("credential_id", None),
                 }

                 agent_tools.append(AgentToolEntity.model_validate(agent_tool_properties))
@@ -47,7 +50,8 @@ class AgentConfigManager:
             "react_router",
             "router",
         }:
-            agent_prompt = agent_dict.get("prompt", None) or {}
+            agent_prompt_raw = agent_dict.get("prompt", None)
+            agent_prompt: dict[str, Any] = agent_prompt_raw if isinstance(agent_prompt_raw, dict) else {}
             # check model mode
             model_mode = config.get("model", {}).get("mode", "completion")
             if model_mode == "completion":
@@ -75,7 +79,7 @@ class AgentConfigManager:
                 strategy=strategy,
                 prompt=agent_prompt_entity,
                 tools=agent_tools,
-                max_iteration=agent_dict.get("max_iteration", 10),
+                max_iteration=cast(int, agent_dict.get("max_iteration", 10)),
             )

         return None

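Note: the third hunk replaces `agent_dict.get("prompt", None) or {}` with an explicit `isinstance` check. The difference matters both to the checker and at runtime, since a truthy non-dict value (say, a prompt stored as a string) now falls back to `{}` instead of propagating. A contrived demonstration:

```python
from typing import Any


def narrow_to_dict(value: Any) -> dict[str, Any]:
    # `value or {}` keeps the static type Any; isinstance() genuinely narrows it.
    return value if isinstance(value, dict) else {}


assert narrow_to_dict({"first_prompt": "..."}) == {"first_prompt": "..."}
assert narrow_to_dict("not a dict") == {}
```
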
@@ -1,5 +1,5 @@
 import uuid
-from typing import Literal, cast
+from typing import Any, Literal, cast

 from core.app.app_config.entities import (
     DatasetEntity,
@@ -8,13 +8,13 @@ from core.app.app_config.entities import (
     ModelConfig,
 )
 from core.entities.agent_entities import PlanningStrategy
-from models.model import AppMode
+from models.model import AppMode, AppModelConfigDict
 from services.dataset_service import DatasetService


 class DatasetConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> DatasetEntity | None:
+    def convert(cls, config: AppModelConfigDict) -> DatasetEntity | None:
         """
         Convert model config to model config
@@ -25,11 +25,15 @@ class DatasetConfigManager:
         datasets = config.get("dataset_configs", {}).get("datasets", {"strategy": "router", "datasets": []})

         for dataset in datasets.get("datasets", []):
+            if not isinstance(dataset, dict):
+                continue
             keys = list(dataset.keys())
             if len(keys) == 0 or keys[0] != "dataset":
                 continue

             dataset = dataset["dataset"]
+            if not isinstance(dataset, dict):
+                continue

             if "enabled" not in dataset or not dataset["enabled"]:
                 continue
@@ -47,15 +51,14 @@ class DatasetConfigManager:
         agent_dict = config.get("agent_mode", {})

         for tool in agent_dict.get("tools", []):
-            keys = tool.keys()
-            if len(keys) == 1:
+            if len(tool) == 1:
                 # old standard
                 key = list(tool.keys())[0]

                 if key != "dataset":
                     continue

-                tool_item = tool[key]
+                tool_item = cast(dict[str, Any], tool)[key]

                 if "enabled" not in tool_item or not tool_item["enabled"]:
                     continue

@@ -5,12 +5,13 @@ from core.app.app_config.entities import ModelConfigEntity
 from core.provider_manager import ProviderManager
 from dify_graph.model_runtime.entities.model_entities import ModelPropertyKey, ModelType
 from dify_graph.model_runtime.model_providers.model_provider_factory import ModelProviderFactory
+from models.model import AppModelConfigDict
 from models.provider_ids import ModelProviderID


 class ModelConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> ModelConfigEntity:
+    def convert(cls, config: AppModelConfigDict) -> ModelConfigEntity:
         """
         Convert model config to model config
@@ -22,7 +23,7 @@ class ModelConfigManager:
         if not model_config:
             raise ValueError("model is required")

-        completion_params = model_config.get("completion_params")
+        completion_params = model_config.get("completion_params") or {}
         stop = []
         if "stop" in completion_params:
             stop = completion_params["stop"]

@@ -1,3 +1,5 @@
+from typing import Any
+
 from core.app.app_config.entities import (
     AdvancedChatMessageEntity,
     AdvancedChatPromptTemplateEntity,
@@ -6,12 +8,12 @@ from core.app.app_config.entities import (
 )
 from core.prompt.simple_prompt_transform import ModelMode
 from dify_graph.model_runtime.entities.message_entities import PromptMessageRole
-from models.model import AppMode
+from models.model import AppMode, AppModelConfigDict


 class PromptTemplateConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> PromptTemplateEntity:
+    def convert(cls, config: AppModelConfigDict) -> PromptTemplateEntity:
         if not config.get("prompt_type"):
             raise ValueError("prompt_type is required")
@@ -40,14 +42,15 @@ class PromptTemplateConfigManager:
             advanced_completion_prompt_template = None
             completion_prompt_config = config.get("completion_prompt_config", {})
             if completion_prompt_config:
-                completion_prompt_template_params = {
+                completion_prompt_template_params: dict[str, Any] = {
                     "prompt": completion_prompt_config["prompt"]["text"],
                 }

-                if "conversation_histories_role" in completion_prompt_config:
+                conv_role = completion_prompt_config.get("conversation_histories_role")
+                if conv_role:
                     completion_prompt_template_params["role_prefix"] = {
-                        "user": completion_prompt_config["conversation_histories_role"]["user_prefix"],
-                        "assistant": completion_prompt_config["conversation_histories_role"]["assistant_prefix"],
+                        "user": conv_role["user_prefix"],
+                        "assistant": conv_role["assistant_prefix"],
                     }

                 advanced_completion_prompt_template = AdvancedCompletionPromptTemplateEntity(

@@ -1,8 +1,10 @@
 import re
+from typing import cast

 from core.app.app_config.entities import ExternalDataVariableEntity
 from core.external_data_tool.factory import ExternalDataToolFactory
 from dify_graph.variables.input_entities import VariableEntity, VariableEntityType
+from models.model import AppModelConfigDict

 _ALLOWED_VARIABLE_ENTITY_TYPE = frozenset(
     [
@@ -18,7 +20,7 @@ _ALLOWED_VARIABLE_ENTITY_TYPE = frozenset(

 class BasicVariablesConfigManager:
     @classmethod
-    def convert(cls, config: dict) -> tuple[list[VariableEntity], list[ExternalDataVariableEntity]]:
+    def convert(cls, config: AppModelConfigDict) -> tuple[list[VariableEntity], list[ExternalDataVariableEntity]]:
         """
         Convert model config to model config
@@ -51,7 +53,9 @@ class BasicVariablesConfigManager:

                 external_data_variables.append(
                     ExternalDataVariableEntity(
-                        variable=variable["variable"], type=variable["type"], config=variable["config"]
+                        variable=variable["variable"],
+                        type=variable.get("type", ""),
+                        config=variable.get("config", {}),
                     )
                 )
             elif variable_type in {
@@ -64,10 +68,10 @@ class BasicVariablesConfigManager:
                 variable = variables[variable_type]
                 variable_entities.append(
                     VariableEntity(
-                        type=variable_type,
-                        variable=variable.get("variable"),
+                        type=cast(VariableEntityType, variable_type),
+                        variable=variable["variable"],
                         description=variable.get("description") or "",
-                        label=variable.get("label"),
+                        label=variable["label"],
                         required=variable.get("required", False),
                         max_length=variable.get("max_length"),
                         options=variable.get("options") or [],

@@ -281,7 +281,7 @@ class EasyUIBasedAppConfig(AppConfig):

     app_model_config_from: EasyUIBasedAppModelConfigFrom
     app_model_config_id: str
-    app_model_config_dict: dict
+    app_model_config_dict: dict[str, Any]
     model: ModelConfigEntity
     prompt_template: PromptTemplateEntity
     dataset: DatasetEntity | None = None

@@ -20,7 +20,7 @@ from core.app.app_config.features.suggested_questions_after_answer.manager impor
 )
 from core.app.app_config.features.text_to_speech.manager import TextToSpeechConfigManager
 from core.entities.agent_entities import PlanningStrategy
-from models.model import App, AppMode, AppModelConfig, Conversation
+from models.model import App, AppMode, AppModelConfig, AppModelConfigDict, Conversation

 OLD_TOOLS = ["dataset", "google_search", "web_reader", "wikipedia", "current_datetime"]

@@ -40,7 +40,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
         app_model: App,
         app_model_config: AppModelConfig,
         conversation: Conversation | None = None,
-        override_config_dict: dict | None = None,
+        override_config_dict: AppModelConfigDict | None = None,
     ) -> AgentChatAppConfig:
         """
         Convert app model config to agent chat app config
@@ -61,7 +61,9 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
             app_model_config_dict = app_model_config.to_dict()
             config_dict = app_model_config_dict.copy()
         else:
-            config_dict = override_config_dict or {}
+            if not override_config_dict:
+                raise Exception("override_config_dict is required when config_from is ARGS")
+            config_dict = override_config_dict

         app_mode = AppMode.value_of(app_model.mode)
         app_config = AgentChatAppConfig(
@@ -70,7 +72,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
             app_mode=app_mode,
             app_model_config_from=config_from,
             app_model_config_id=app_model_config.id,
-            app_model_config_dict=config_dict,
+            app_model_config_dict=cast(dict[str, Any], config_dict),
             model=ModelConfigManager.convert(config=config_dict),
             prompt_template=PromptTemplateConfigManager.convert(config=config_dict),
             sensitive_word_avoidance=SensitiveWordAvoidanceConfigManager.convert(config=config_dict),
@@ -86,7 +88,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
         return app_config

     @classmethod
-    def config_validate(cls, tenant_id: str, config: Mapping[str, Any]):
+    def config_validate(cls, tenant_id: str, config: Mapping[str, Any]) -> AppModelConfigDict:
         """
         Validate for agent chat app model config
@@ -157,7 +159,7 @@ class AgentChatAppConfigManager(BaseAppConfigManager):
         # Filter out extra parameters
         filtered_config = {key: config.get(key) for key in related_config_keys}

-        return filtered_config
+        return cast(AppModelConfigDict, filtered_config)

     @classmethod
     def validate_agent_mode_and_set_defaults(

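Note: the ARGS branch used to fall back to `override_config_dict or {}`, quietly feeding an empty config into every converter below it; it now fails fast. A reduced sketch of the guard-clause shape (names are illustrative, and the real code raises a bare `Exception`):

```python
def resolve_config(override: dict | None, *, from_args: bool) -> dict:
    if from_args:
        # Fail loudly at the boundary instead of letting an empty config
        # surface later as an obscure converter error.
        if not override:
            raise ValueError("override config is required when loading from ARGS")
        return override
    return {}
```
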
@@ -1,3 +1,5 @@
+from typing import Any, cast
+
 from core.app.app_config.base_app_config_manager import BaseAppConfigManager
 from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
 from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -13,7 +15,7 @@ from core.app.app_config.features.suggested_questions_after_answer.manager impor
     SuggestedQuestionsAfterAnswerConfigManager,
 )
 from core.app.app_config.features.text_to_speech.manager import TextToSpeechConfigManager
-from models.model import App, AppMode, AppModelConfig, Conversation
+from models.model import App, AppMode, AppModelConfig, AppModelConfigDict, Conversation


 class ChatAppConfig(EasyUIBasedAppConfig):
@@ -31,7 +33,7 @@ class ChatAppConfigManager(BaseAppConfigManager):
         app_model: App,
         app_model_config: AppModelConfig,
         conversation: Conversation | None = None,
-        override_config_dict: dict | None = None,
+        override_config_dict: AppModelConfigDict | None = None,
     ) -> ChatAppConfig:
         """
         Convert app model config to chat app config
@@ -64,7 +66,7 @@ class ChatAppConfigManager(BaseAppConfigManager):
             app_mode=app_mode,
             app_model_config_from=config_from,
             app_model_config_id=app_model_config.id,
-            app_model_config_dict=config_dict,
+            app_model_config_dict=cast(dict[str, Any], config_dict),
             model=ModelConfigManager.convert(config=config_dict),
             prompt_template=PromptTemplateConfigManager.convert(config=config_dict),
             sensitive_word_avoidance=SensitiveWordAvoidanceConfigManager.convert(config=config_dict),
@@ -79,7 +81,7 @@ class ChatAppConfigManager(BaseAppConfigManager):
         return app_config

     @classmethod
-    def config_validate(cls, tenant_id: str, config: dict):
+    def config_validate(cls, tenant_id: str, config: dict) -> AppModelConfigDict:
         """
         Validate for chat app model config
@@ -145,4 +147,4 @@ class ChatAppConfigManager(BaseAppConfigManager):
         # Filter out extra parameters
         filtered_config = {key: config.get(key) for key in related_config_keys}

-        return filtered_config
+        return cast(AppModelConfigDict, filtered_config)

@@ -173,8 +173,10 @@ class ChatAppRunner(AppRunner):
             memory=memory,
             message_id=message.id,
             inputs=inputs,
-            vision_enabled=application_generate_entity.app_config.app_model_config_dict.get("file_upload", {}).get(
-                "enabled", False
+            vision_enabled=bool(
+                application_generate_entity.app_config.app_model_config_dict.get("file_upload", {})
+                .get("image", {})
+                .get("enabled", False)
             ),
         )
         context_files = retrieved_files or []

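Note: this is a behavior fix, not just typing. The old expression looked for `file_upload.enabled`, a key that does not exist at that level, so the flag read as `False`; it actually lives at `file_upload.image.enabled`. The same fix appears in the completion runner below. A reduced reproduction:

```python
config = {"file_upload": {"image": {"enabled": True, "number_limits": 3}}}

# Old lookup: "enabled" is not a top-level key of file_upload, so this was always False.
old = config.get("file_upload", {}).get("enabled", False)

# New lookup walks the real structure; bool() normalizes whatever is stored there.
new = bool(config.get("file_upload", {}).get("image", {}).get("enabled", False))

assert old is False and new is True
```
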
@@ -1,3 +1,5 @@
+from typing import Any, cast
+
 from core.app.app_config.base_app_config_manager import BaseAppConfigManager
 from core.app.app_config.common.sensitive_word_avoidance.manager import SensitiveWordAvoidanceConfigManager
 from core.app.app_config.easy_ui_based_app.dataset.manager import DatasetConfigManager
@@ -8,7 +10,7 @@ from core.app.app_config.entities import EasyUIBasedAppConfig, EasyUIBasedAppMod
 from core.app.app_config.features.file_upload.manager import FileUploadConfigManager
 from core.app.app_config.features.more_like_this.manager import MoreLikeThisConfigManager
 from core.app.app_config.features.text_to_speech.manager import TextToSpeechConfigManager
-from models.model import App, AppMode, AppModelConfig
+from models.model import App, AppMode, AppModelConfig, AppModelConfigDict


 class CompletionAppConfig(EasyUIBasedAppConfig):
@@ -22,7 +24,7 @@ class CompletionAppConfig(EasyUIBasedAppConfig):
 class CompletionAppConfigManager(BaseAppConfigManager):
     @classmethod
     def get_app_config(
-        cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: dict | None = None
+        cls, app_model: App, app_model_config: AppModelConfig, override_config_dict: AppModelConfigDict | None = None
     ) -> CompletionAppConfig:
         """
         Convert app model config to completion app config
@@ -40,7 +42,9 @@ class CompletionAppConfigManager(BaseAppConfigManager):
             app_model_config_dict = app_model_config.to_dict()
             config_dict = app_model_config_dict.copy()
         else:
-            config_dict = override_config_dict or {}
+            if not override_config_dict:
+                raise Exception("override_config_dict is required when config_from is ARGS")
+            config_dict = override_config_dict

         app_mode = AppMode.value_of(app_model.mode)
         app_config = CompletionAppConfig(
@@ -49,7 +53,7 @@ class CompletionAppConfigManager(BaseAppConfigManager):
             app_mode=app_mode,
             app_model_config_from=config_from,
             app_model_config_id=app_model_config.id,
-            app_model_config_dict=config_dict,
+            app_model_config_dict=cast(dict[str, Any], config_dict),
             model=ModelConfigManager.convert(config=config_dict),
             prompt_template=PromptTemplateConfigManager.convert(config=config_dict),
             sensitive_word_avoidance=SensitiveWordAvoidanceConfigManager.convert(config=config_dict),
@@ -64,7 +68,7 @@ class CompletionAppConfigManager(BaseAppConfigManager):
         return app_config

     @classmethod
-    def config_validate(cls, tenant_id: str, config: dict):
+    def config_validate(cls, tenant_id: str, config: dict) -> AppModelConfigDict:
         """
         Validate for completion app model config
@@ -116,4 +120,4 @@ class CompletionAppConfigManager(BaseAppConfigManager):
         # Filter out extra parameters
         filtered_config = {key: config.get(key) for key in related_config_keys}

-        return filtered_config
+        return cast(AppModelConfigDict, filtered_config)

@@ -275,7 +275,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator):
             raise ValueError("Message app_model_config is None")
         override_model_config_dict = app_model_config.to_dict()
         model_dict = override_model_config_dict["model"]
-        completion_params = model_dict.get("completion_params")
+        completion_params = model_dict.get("completion_params", {})
         completion_params["temperature"] = 0.9
         model_dict["completion_params"] = completion_params
         override_model_config_dict["model"] = model_dict

@@ -132,8 +132,10 @@ class CompletionAppRunner(AppRunner):
             hit_callback=hit_callback,
             message_id=message.id,
             inputs=inputs,
-            vision_enabled=application_generate_entity.app_config.app_model_config_dict.get("file_upload", {}).get(
-                "enabled", False
+            vision_enabled=bool(
+                application_generate_entity.app_config.app_model_config_dict.get("file_upload", {})
+                .get("image", {})
+                .get("enabled", False)
             ),
         )
         context_files = retrieved_files or []

@@ -2,7 +2,7 @@ import logging
 import time
 from collections.abc import Generator
 from threading import Thread
-from typing import Union, cast
+from typing import Any, Union, cast

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -219,14 +219,14 @@ class EasyUIBasedGenerateTaskPipeline(BasedGenerateTaskPipeline):
         tenant_id = self._application_generate_entity.app_config.tenant_id
         task_id = self._application_generate_entity.task_id
         publisher = None
-        text_to_speech_dict = self._app_config.app_model_config_dict.get("text_to_speech")
+        text_to_speech_dict = cast(dict[str, Any], self._app_config.app_model_config_dict.get("text_to_speech"))
         if (
             text_to_speech_dict
             and text_to_speech_dict.get("autoPlay") == "enabled"
             and text_to_speech_dict.get("enabled")
         ):
             publisher = AppGeneratorTTSPublisher(
-                tenant_id, text_to_speech_dict.get("voice", None), text_to_speech_dict.get("language", None)
+                tenant_id, text_to_speech_dict.get("voice", ""), text_to_speech_dict.get("language", None)
             )
         for response in self._process_stream_response(publisher=publisher, trace_manager=trace_manager):
             while True:

@@ -7,7 +7,7 @@ import uuid
 from collections import deque
 from collections.abc import Sequence
 from datetime import datetime
-from typing import Final, cast
+from typing import Final
 from urllib.parse import urljoin

 import httpx
@@ -201,7 +201,7 @@ def convert_to_trace_id(uuid_v4: str | None) -> int:
         raise ValueError("UUID cannot be None")
     try:
         uuid_obj = uuid.UUID(uuid_v4)
-        return cast(int, uuid_obj.int)
+        return uuid_obj.int
     except ValueError as e:
         raise ValueError(f"Invalid UUID input: {uuid_v4}") from e

@@ -6,7 +6,6 @@ import hashlib
 import random
 import uuid
 from datetime import datetime
-from typing import cast

 from opentelemetry.trace import Link, SpanContext, TraceFlags

@@ -23,7 +22,7 @@ class TencentTraceUtils:
             uuid_obj = uuid.UUID(uuid_v4) if uuid_v4 else uuid.uuid4()
         except Exception as e:
             raise ValueError(f"Invalid UUID input: {e}")
-        return cast(int, uuid_obj.int)
+        return uuid_obj.int

     @staticmethod
     def convert_to_span_id(uuid_v4: str | None, span_type: str) -> int:
@@ -52,9 +51,9 @@ class TencentTraceUtils:
     @staticmethod
     def create_link(trace_id_str: str) -> Link:
         try:
-            trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else cast(int, uuid.UUID(trace_id_str).int)
+            trace_id = int(trace_id_str, 16) if len(trace_id_str) == 32 else uuid.UUID(trace_id_str).int
         except (ValueError, TypeError):
-            trace_id = cast(int, uuid.uuid4().int)
+            trace_id = uuid.uuid4().int

         span_context = SpanContext(
             trace_id=trace_id,

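Note: the deleted `cast(int, ...)` calls in the two tracing files were no-ops: `uuid.UUID.int` is already declared as `int` in the stdlib stubs, so the checker needs no help. Quick confirmation:

```python
import uuid

trace_id = uuid.uuid4().int  # inferred as int; no cast() required
assert isinstance(trace_id, int)
assert trace_id.bit_length() <= 128  # a UUID is a 128-bit integer
```
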
@@ -1,6 +1,6 @@
 import uuid
 from collections.abc import Generator, Mapping
-from typing import Union
+from typing import Any, Union, cast

 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -34,14 +34,14 @@ class PluginAppBackwardsInvocation(BaseBackwardsInvocation):
             if workflow is None:
                 raise ValueError("unexpected app type")

-            features_dict = workflow.features_dict
+            features_dict: dict[str, Any] = workflow.features_dict
             user_input_form = workflow.user_input_form(to_old_structure=True)
         else:
             app_model_config = app.app_model_config
             if app_model_config is None:
                 raise ValueError("unexpected app type")

-            features_dict = app_model_config.to_dict()
+            features_dict = cast(dict[str, Any], app_model_config.to_dict())

             user_input_form = features_dict.get("user_input_form", [])

@@ -0,0 +1,37 @@
+"""add partial indexes on conversations for app_id with created_at and updated_at
+
+Revision ID: e288952f2994
+Revises: fce013ca180e
+Create Date: 2026-02-26 13:36:45.928922
+
+"""
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = 'e288952f2994'
+down_revision = 'fce013ca180e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    with op.batch_alter_table('conversations', schema=None) as batch_op:
+        batch_op.create_index(
+            'conversation_app_created_at_idx',
+            ['app_id', sa.literal_column('created_at DESC')],
+            unique=False,
+            postgresql_where=sa.text('is_deleted IS false'),
+        )
+        batch_op.create_index(
+            'conversation_app_updated_at_idx',
+            ['app_id', sa.literal_column('updated_at DESC')],
+            unique=False,
+            postgresql_where=sa.text('is_deleted IS false'),
+        )
+
+
+def downgrade():
+    with op.batch_alter_table('conversations', schema=None) as batch_op:
+        batch_op.drop_index('conversation_app_updated_at_idx')
+        batch_op.drop_index('conversation_app_created_at_idx')

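Note: each `create_index` above compiles to a Postgres partial composite index; a query benefits only when its filter implies the `is_deleted IS false` predicate and its ordering matches. A hedged sketch of the conversation-listing shape this should serve; the query itself is assumed, not taken from this PR:

```python
import sqlalchemy as sa

conversations = sa.table(
    "conversations",
    sa.column("app_id"),
    sa.column("created_at"),
    sa.column("is_deleted"),
)

# Matches conversation_app_created_at_idx: equality on app_id, the partial
# predicate is_deleted IS false, and ORDER BY created_at DESC.
stmt = (
    sa.select(conversations)
    .where(conversations.c.app_id == "some-app-id")
    .where(conversations.c.is_deleted.is_(False))
    .order_by(conversations.c.created_at.desc())
    .limit(20)
)
```
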
@@ -7,7 +7,7 @@ from collections.abc import Mapping, Sequence
 from datetime import datetime
 from decimal import Decimal
 from enum import StrEnum, auto
-from typing import TYPE_CHECKING, Any, Literal, cast
+from typing import TYPE_CHECKING, Any, Literal, NotRequired, cast
 from uuid import uuid4

 import sqlalchemy as sa
@@ -15,6 +15,7 @@ from flask import request
 from flask_login import UserMixin  # type: ignore[import-untyped]
 from sqlalchemy import BigInteger, Float, Index, PrimaryKeyConstraint, String, exists, func, select, text
 from sqlalchemy.orm import Mapped, Session, mapped_column
+from typing_extensions import TypedDict

 from configs import dify_config
 from constants import DEFAULT_FILE_NUMBER_LIMITS

@@ -36,6 +37,259 @@ if TYPE_CHECKING:
     from .workflow import Workflow


+# --- TypedDict definitions for structured dict return types ---
+
+
+class EnabledConfig(TypedDict):
+    enabled: bool
+
+
+class EmbeddingModelInfo(TypedDict):
+    embedding_provider_name: str
+    embedding_model_name: str
+
+
+class AnnotationReplyDisabledConfig(TypedDict):
+    enabled: Literal[False]
+
+
+class AnnotationReplyEnabledConfig(TypedDict):
+    id: str
+    enabled: Literal[True]
+    score_threshold: float
+    embedding_model: EmbeddingModelInfo
+
+
+AnnotationReplyConfig = AnnotationReplyEnabledConfig | AnnotationReplyDisabledConfig
+
+
+class SensitiveWordAvoidanceConfig(TypedDict):
+    enabled: bool
+    type: str
+    config: dict[str, Any]
+
+
+class AgentToolConfig(TypedDict):
+    provider_type: str
+    provider_id: str
+    tool_name: str
+    tool_parameters: dict[str, Any]
+    plugin_unique_identifier: NotRequired[str | None]
+    credential_id: NotRequired[str | None]
+
+
+class AgentModeConfig(TypedDict):
+    enabled: bool
+    strategy: str | None
+    tools: list[AgentToolConfig | dict[str, Any]]
+    prompt: str | None
+
+
+class ImageUploadConfig(TypedDict):
+    enabled: bool
+    number_limits: int
+    detail: str
+    transfer_methods: list[str]
+
+
+class FileUploadConfig(TypedDict):
+    image: ImageUploadConfig
+
+
+class DeletedToolInfo(TypedDict):
+    type: str
+    tool_name: str
+    provider_id: str
+
+
+class ExternalDataToolConfig(TypedDict):
+    enabled: bool
+    variable: str
+    type: str
+    config: dict[str, Any]
+
+
+class UserInputFormItemConfig(TypedDict):
+    variable: str
+    label: str
+    description: NotRequired[str]
+    required: NotRequired[bool]
+    max_length: NotRequired[int]
+    options: NotRequired[list[str]]
+    default: NotRequired[str]
+    type: NotRequired[str]
+    config: NotRequired[dict[str, Any]]
+
+
+# Each item is a single-key dict, e.g. {"text-input": UserInputFormItemConfig}
+UserInputFormItem = dict[str, UserInputFormItemConfig]
+
+
+class DatasetConfigs(TypedDict):
+    retrieval_model: str
+    datasets: NotRequired[dict[str, Any]]
+    top_k: NotRequired[int]
+    score_threshold: NotRequired[float]
+    score_threshold_enabled: NotRequired[bool]
+    reranking_model: NotRequired[dict[str, Any] | None]
+    weights: NotRequired[dict[str, Any] | None]
+    reranking_enabled: NotRequired[bool]
+    reranking_mode: NotRequired[str]
+    metadata_filtering_mode: NotRequired[str]
+    metadata_model_config: NotRequired[dict[str, Any] | None]
+    metadata_filtering_conditions: NotRequired[dict[str, Any] | None]
+
+
+class ChatPromptMessage(TypedDict):
+    text: str
+    role: str
+
+
+class ChatPromptConfig(TypedDict, total=False):
+    prompt: list[ChatPromptMessage]
+
+
+class CompletionPromptText(TypedDict):
+    text: str
+
+
+class ConversationHistoriesRole(TypedDict):
+    user_prefix: str
+    assistant_prefix: str
+
+
+class CompletionPromptConfig(TypedDict):
+    prompt: CompletionPromptText
+    conversation_histories_role: NotRequired[ConversationHistoriesRole]
+
+
+class ModelConfig(TypedDict):
+    provider: str
+    name: str
+    mode: str
+    completion_params: NotRequired[dict[str, Any]]
+
+
+class AppModelConfigDict(TypedDict):
+    opening_statement: str | None
+    suggested_questions: list[str]
+    suggested_questions_after_answer: EnabledConfig
+    speech_to_text: EnabledConfig
+    text_to_speech: EnabledConfig
+    retriever_resource: EnabledConfig
+    annotation_reply: AnnotationReplyConfig
+    more_like_this: EnabledConfig
+    sensitive_word_avoidance: SensitiveWordAvoidanceConfig
+    external_data_tools: list[ExternalDataToolConfig]
+    model: ModelConfig
+    user_input_form: list[UserInputFormItem]
+    dataset_query_variable: str | None
+    pre_prompt: str | None
+    agent_mode: AgentModeConfig
+    prompt_type: str
+    chat_prompt_config: ChatPromptConfig
+    completion_prompt_config: CompletionPromptConfig
+    dataset_configs: DatasetConfigs
+    file_upload: FileUploadConfig
+    # Added dynamically in Conversation.model_config
+    model_id: NotRequired[str | None]
+    provider: NotRequired[str | None]
+
+
+class ConversationDict(TypedDict):
+    id: str
+    app_id: str
+    app_model_config_id: str | None
+    model_provider: str | None
+    override_model_configs: str | None
+    model_id: str | None
+    mode: str
+    name: str
+    summary: str | None
+    inputs: dict[str, Any]
+    introduction: str | None
+    system_instruction: str | None
+    system_instruction_tokens: int
+    status: str
+    invoke_from: str | None
+    from_source: str
+    from_end_user_id: str | None
+    from_account_id: str | None
+    read_at: datetime | None
+    read_account_id: str | None
+    dialogue_count: int
+    created_at: datetime
+    updated_at: datetime
+
+
+class MessageDict(TypedDict):
+    id: str
+    app_id: str
+    conversation_id: str
+    model_id: str | None
+    inputs: dict[str, Any]
+    query: str
+    total_price: Decimal | None
+    message: dict[str, Any]
+    answer: str
+    status: str
+    error: str | None
+    message_metadata: dict[str, Any]
+    from_source: str
+    from_end_user_id: str | None
+    from_account_id: str | None
+    created_at: str
+    updated_at: str
+    agent_based: bool
+    workflow_run_id: str | None
+
+
+class MessageFeedbackDict(TypedDict):
+    id: str
+    app_id: str
+    conversation_id: str
+    message_id: str
+    rating: str
+    content: str | None
+    from_source: str
+    from_end_user_id: str | None
+    from_account_id: str | None
+    created_at: str
+    updated_at: str
+
+
+class MessageFileInfo(TypedDict, total=False):
+    belongs_to: str | None
+    upload_file_id: str | None
+    id: str
+    tenant_id: str
+    type: str
+    transfer_method: str
+    remote_url: str | None
+    related_id: str | None
+    filename: str | None
+    extension: str | None
+    mime_type: str | None
+    size: int
+    dify_model_identity: str
+    url: str | None
+
+
+class ExtraContentDict(TypedDict, total=False):
+    type: str
+    workflow_run_id: str
+
+
+class TraceAppConfigDict(TypedDict):
+    id: str
+    app_id: str
+    tracing_provider: str | None
+    tracing_config: dict[str, Any]
+    is_active: bool
+    created_at: str | None
+    updated_at: str | None
+
+
 class DifySetup(TypeBase):
     __tablename__ = "dify_setups"
     __table_args__ = (sa.PrimaryKeyConstraint("version", name="dify_setup_pkey"),)

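Note: the TypedDict block above is the backbone of the PR: payloads that used to be `dict[str, Any]` now carry precise shapes. Two features do most of the work, `NotRequired` (a key may be absent entirely) and `Literal` discriminants (as in the annotation-reply union). A small standalone illustration with made-up keys:

```python
from typing import Literal, NotRequired, TypedDict


class ReplyEnabled(TypedDict):
    enabled: Literal[True]
    id: str
    score_threshold: NotRequired[float]  # may be omitted, unlike Optional


reply: ReplyEnabled = {"enabled": True, "id": "abc"}  # ok without score_threshold
# reply = {"enabled": False, "id": "abc"}  # rejected: Literal[True] pins the discriminant
```
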
@@ -176,7 +430,7 @@ class App(Base):
         return str(self.mode)

     @property
-    def deleted_tools(self) -> list[dict[str, str]]:
+    def deleted_tools(self) -> list[DeletedToolInfo]:
         from core.tools.tool_manager import ToolManager, ToolProviderType
         from services.plugin.plugin_service import PluginService

@@ -257,7 +511,7 @@ class App(Base):
             provider_id.provider_name: existence[i] for i, provider_id in enumerate(builtin_provider_ids)
         }

-        deleted_tools: list[dict[str, str]] = []
+        deleted_tools: list[DeletedToolInfo] = []

         for tool in tools:
             keys = list(tool.keys())
@@ -364,35 +618,38 @@ class AppModelConfig(TypeBase):
         return app

     @property
-    def model_dict(self) -> dict[str, Any]:
-        return json.loads(self.model) if self.model else {}
+    def model_dict(self) -> ModelConfig:
+        return cast(ModelConfig, json.loads(self.model) if self.model else {})

     @property
     def suggested_questions_list(self) -> list[str]:
         return json.loads(self.suggested_questions) if self.suggested_questions else []

     @property
-    def suggested_questions_after_answer_dict(self) -> dict[str, Any]:
-        return (
+    def suggested_questions_after_answer_dict(self) -> EnabledConfig:
+        return cast(
+            EnabledConfig,
             json.loads(self.suggested_questions_after_answer)
             if self.suggested_questions_after_answer
-            else {"enabled": False}
+            else {"enabled": False},
         )

     @property
-    def speech_to_text_dict(self) -> dict[str, Any]:
-        return json.loads(self.speech_to_text) if self.speech_to_text else {"enabled": False}
+    def speech_to_text_dict(self) -> EnabledConfig:
+        return cast(EnabledConfig, json.loads(self.speech_to_text) if self.speech_to_text else {"enabled": False})

     @property
-    def text_to_speech_dict(self) -> dict[str, Any]:
-        return json.loads(self.text_to_speech) if self.text_to_speech else {"enabled": False}
+    def text_to_speech_dict(self) -> EnabledConfig:
+        return cast(EnabledConfig, json.loads(self.text_to_speech) if self.text_to_speech else {"enabled": False})

     @property
-    def retriever_resource_dict(self) -> dict[str, Any]:
-        return json.loads(self.retriever_resource) if self.retriever_resource else {"enabled": True}
+    def retriever_resource_dict(self) -> EnabledConfig:
+        return cast(
+            EnabledConfig, json.loads(self.retriever_resource) if self.retriever_resource else {"enabled": True}
+        )

     @property
-    def annotation_reply_dict(self) -> dict[str, Any]:
+    def annotation_reply_dict(self) -> AnnotationReplyConfig:
         annotation_setting = (
             db.session.query(AppAnnotationSetting).where(AppAnnotationSetting.app_id == self.app_id).first()
         )
@@ -415,56 +672,62 @@ class AppModelConfig(TypeBase):
         return {"enabled": False}

     @property
-    def more_like_this_dict(self) -> dict[str, Any]:
-        return json.loads(self.more_like_this) if self.more_like_this else {"enabled": False}
+    def more_like_this_dict(self) -> EnabledConfig:
+        return cast(EnabledConfig, json.loads(self.more_like_this) if self.more_like_this else {"enabled": False})

     @property
-    def sensitive_word_avoidance_dict(self) -> dict[str, Any]:
-        return (
+    def sensitive_word_avoidance_dict(self) -> SensitiveWordAvoidanceConfig:
+        return cast(
+            SensitiveWordAvoidanceConfig,
             json.loads(self.sensitive_word_avoidance)
             if self.sensitive_word_avoidance
-            else {"enabled": False, "type": "", "configs": []}
+            else {"enabled": False, "type": "", "config": {}},
         )

     @property
-    def external_data_tools_list(self) -> list[dict[str, Any]]:
+    def external_data_tools_list(self) -> list[ExternalDataToolConfig]:
         return json.loads(self.external_data_tools) if self.external_data_tools else []

     @property
-    def user_input_form_list(self) -> list[dict[str, Any]]:
+    def user_input_form_list(self) -> list[UserInputFormItem]:
         return json.loads(self.user_input_form) if self.user_input_form else []

     @property
-    def agent_mode_dict(self) -> dict[str, Any]:
-        return (
+    def agent_mode_dict(self) -> AgentModeConfig:
+        return cast(
+            AgentModeConfig,
             json.loads(self.agent_mode)
             if self.agent_mode
-            else {"enabled": False, "strategy": None, "tools": [], "prompt": None}
+            else {"enabled": False, "strategy": None, "tools": [], "prompt": None},
         )

     @property
-    def chat_prompt_config_dict(self) -> dict[str, Any]:
-        return json.loads(self.chat_prompt_config) if self.chat_prompt_config else {}
+    def chat_prompt_config_dict(self) -> ChatPromptConfig:
+        return cast(ChatPromptConfig, json.loads(self.chat_prompt_config) if self.chat_prompt_config else {})

     @property
-    def completion_prompt_config_dict(self) -> dict[str, Any]:
-        return json.loads(self.completion_prompt_config) if self.completion_prompt_config else {}
+    def completion_prompt_config_dict(self) -> CompletionPromptConfig:
+        return cast(
+            CompletionPromptConfig,
+            json.loads(self.completion_prompt_config) if self.completion_prompt_config else {},
+        )

     @property
-    def dataset_configs_dict(self) -> dict[str, Any]:
+    def dataset_configs_dict(self) -> DatasetConfigs:
         if self.dataset_configs:
-            dataset_configs: dict[str, Any] = json.loads(self.dataset_configs)
+            dataset_configs = json.loads(self.dataset_configs)
             if "retrieval_model" not in dataset_configs:
                 return {"retrieval_model": "single"}
-            else:
-                return dataset_configs
+            return cast(DatasetConfigs, dataset_configs)
         return {
             "retrieval_model": "multiple",
         }

     @property
-    def file_upload_dict(self) -> dict[str, Any]:
-        return (
+    def file_upload_dict(self) -> FileUploadConfig:
+        return cast(
+            FileUploadConfig,
             json.loads(self.file_upload)
             if self.file_upload
             else {
@@ -474,10 +737,10 @@ class AppModelConfig(TypeBase):
                     "detail": "high",
                     "transfer_methods": ["remote_url", "local_file"],
                 }
-            }
+            },
         )

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> AppModelConfigDict:
         return {
             "opening_statement": self.opening_statement,
             "suggested_questions": self.suggested_questions_list,
@@ -501,36 +764,42 @@ class AppModelConfig(TypeBase):
             "file_upload": self.file_upload_dict,
         }

-    def from_model_config_dict(self, model_config: Mapping[str, Any]):
+    def from_model_config_dict(self, model_config: AppModelConfigDict):
         self.opening_statement = model_config.get("opening_statement")
         self.suggested_questions = (
-            json.dumps(model_config["suggested_questions"]) if model_config.get("suggested_questions") else None
+            json.dumps(model_config.get("suggested_questions")) if model_config.get("suggested_questions") else None
         )
         self.suggested_questions_after_answer = (
-            json.dumps(model_config["suggested_questions_after_answer"])
+            json.dumps(model_config.get("suggested_questions_after_answer"))
             if model_config.get("suggested_questions_after_answer")
             else None
         )
-        self.speech_to_text = json.dumps(model_config["speech_to_text"]) if model_config.get("speech_to_text") else None
-        self.text_to_speech = json.dumps(model_config["text_to_speech"]) if model_config.get("text_to_speech") else None
-        self.more_like_this = json.dumps(model_config["more_like_this"]) if model_config.get("more_like_this") else None
+        self.speech_to_text = (
+            json.dumps(model_config.get("speech_to_text")) if model_config.get("speech_to_text") else None
+        )
+        self.text_to_speech = (
+            json.dumps(model_config.get("text_to_speech")) if model_config.get("text_to_speech") else None
+        )
+        self.more_like_this = (
+            json.dumps(model_config.get("more_like_this")) if model_config.get("more_like_this") else None
+        )
         self.sensitive_word_avoidance = (
-            json.dumps(model_config["sensitive_word_avoidance"])
+            json.dumps(model_config.get("sensitive_word_avoidance"))
            if model_config.get("sensitive_word_avoidance")
             else None
         )
         self.external_data_tools = (
-            json.dumps(model_config["external_data_tools"]) if model_config.get("external_data_tools") else None
+            json.dumps(model_config.get("external_data_tools")) if model_config.get("external_data_tools") else None
         )
-        self.model = json.dumps(model_config["model"]) if model_config.get("model") else None
+        self.model = json.dumps(model_config.get("model")) if model_config.get("model") else None
         self.user_input_form = (
-            json.dumps(model_config["user_input_form"]) if model_config.get("user_input_form") else None
+            json.dumps(model_config.get("user_input_form")) if model_config.get("user_input_form") else None
         )
         self.dataset_query_variable = model_config.get("dataset_query_variable")
-        self.pre_prompt = model_config["pre_prompt"]
-        self.agent_mode = json.dumps(model_config["agent_mode"]) if model_config.get("agent_mode") else None
+        self.pre_prompt = model_config.get("pre_prompt")
+        self.agent_mode = json.dumps(model_config.get("agent_mode")) if model_config.get("agent_mode") else None
         self.retriever_resource = (
-            json.dumps(model_config["retriever_resource"]) if model_config.get("retriever_resource") else None
+            json.dumps(model_config.get("retriever_resource")) if model_config.get("retriever_resource") else None
         )
         self.prompt_type = model_config.get("prompt_type", "simple")
         self.chat_prompt_config = (

@@ -711,6 +980,18 @@ class Conversation(Base):
     __table_args__ = (
         sa.PrimaryKeyConstraint("id", name="conversation_pkey"),
         sa.Index("conversation_app_from_user_idx", "app_id", "from_source", "from_end_user_id"),
+        sa.Index(
+            "conversation_app_created_at_idx",
+            "app_id",
+            sa.text("created_at DESC"),
+            postgresql_where=sa.text("is_deleted IS false"),
+        ),
+        sa.Index(
+            "conversation_app_updated_at_idx",
+            "app_id",
+            sa.text("updated_at DESC"),
+            postgresql_where=sa.text("is_deleted IS false"),
+        ),
     )

     id: Mapped[str] = mapped_column(StringUUID, default=lambda: str(uuid4()))
@@ -811,24 +1092,26 @@ class Conversation(Base):
         self._inputs = inputs

     @property
-    def model_config(self):
-        model_config = {}
+    def model_config(self) -> AppModelConfigDict:
+        model_config = cast(AppModelConfigDict, {})
         app_model_config: AppModelConfig | None = None

         if self.mode == AppMode.ADVANCED_CHAT:
             if self.override_model_configs:
                 override_model_configs = json.loads(self.override_model_configs)
-                model_config = override_model_configs
+                model_config = cast(AppModelConfigDict, override_model_configs)
         else:
             if self.override_model_configs:
                 override_model_configs = json.loads(self.override_model_configs)

                 if "model" in override_model_configs:
                     # where is app_id?
-                    app_model_config = AppModelConfig(app_id=self.app_id).from_model_config_dict(override_model_configs)
+                    app_model_config = AppModelConfig(app_id=self.app_id).from_model_config_dict(
+                        cast(AppModelConfigDict, override_model_configs)
+                    )
                     model_config = app_model_config.to_dict()
                 else:
-                    model_config["configs"] = override_model_configs
+                    model_config["configs"] = override_model_configs  # type: ignore[typeddict-unknown-key]
             else:
                 app_model_config = (
                     db.session.query(AppModelConfig).where(AppModelConfig.id == self.app_model_config_id).first()
@@ -1003,7 +1286,7 @@ class Conversation(Base):
     def in_debug_mode(self) -> bool:
         return self.override_model_configs is not None

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> ConversationDict:
         return {
             "id": self.id,
             "app_id": self.app_id,
@@ -1283,7 +1566,7 @@ class Message(Base):
         return self.message_metadata_dict.get("retriever_resources") if self.message_metadata else []

     @property
-    def message_files(self) -> list[dict[str, Any]]:
+    def message_files(self) -> list[MessageFileInfo]:
         from factories import file_factory

         message_files = db.session.scalars(select(MessageFile).where(MessageFile.message_id == self.id)).all()
@@ -1338,10 +1621,13 @@ class Message(Base):
             )
             files.append(file)

-        result: list[dict[str, Any]] = [
-            {"belongs_to": message_file.belongs_to, "upload_file_id": message_file.upload_file_id, **file.to_dict()}
-            for (file, message_file) in zip(files, message_files)
-        ]
+        result = cast(
+            list[MessageFileInfo],
+            [
+                {"belongs_to": message_file.belongs_to, "upload_file_id": message_file.upload_file_id, **file.to_dict()}
+                for (file, message_file) in zip(files, message_files)
+            ],
+        )

         db.session.commit()
         return result
@@ -1351,7 +1637,7 @@ class Message(Base):
         self._extra_contents = list(contents)

     @property
-    def extra_contents(self) -> list[dict[str, Any]]:
+    def extra_contents(self) -> list[ExtraContentDict]:
         return getattr(self, "_extra_contents", [])

     @property
@@ -1367,7 +1653,7 @@ class Message(Base):

         return None

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> MessageDict:
         return {
             "id": self.id,
             "app_id": self.app_id,
@@ -1391,7 +1677,7 @@ class Message(Base):
         }

     @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> Message:
+    def from_dict(cls, data: MessageDict) -> Message:
         return cls(
             id=data["id"],
             app_id=data["app_id"],
@@ -1451,7 +1737,7 @@ class MessageFeedback(TypeBase):
         account = db.session.query(Account).where(Account.id == self.from_account_id).first()
         return account

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> MessageFeedbackDict:
         return {
             "id": str(self.id),
             "app_id": str(self.app_id),
@@ -1714,8 +2000,8 @@ class AppMCPServer(TypeBase):
         return result

     @property
-    def parameters_dict(self) -> dict[str, Any]:
-        return cast(dict[str, Any], json.loads(self.parameters))
+    def parameters_dict(self) -> dict[str, str]:
+        return cast(dict[str, str], json.loads(self.parameters))


 class Site(Base):
@@ -2155,7 +2441,7 @@ class TraceAppConfig(TypeBase):
     def tracing_config_str(self) -> str:
         return json.dumps(self.tracing_config_dict)

-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> TraceAppConfigDict:
         return {
             "id": self.id,
             "app_id": self.app_id,

@@ -35,7 +35,7 @@ dependencies = [
     "jsonschema>=4.25.1",
     "langfuse~=2.51.3",
     "langsmith~=0.1.77",
-    "markdown~=3.5.1",
+    "markdown~=3.8.1",
     "mlflow-skinny>=3.0.0",
     "numpy~=1.26.4",
     "openpyxl~=3.1.5",
@@ -113,7 +113,7 @@ dev = [
     "dotenv-linter~=0.5.0",
    "faker~=38.2.0",
     "lxml-stubs~=0.5.1",
-    "basedpyright~=1.31.0",
+    "basedpyright~=1.38.2",
     "ruff~=0.14.0",
     "pytest~=8.3.2",
     "pytest-benchmark~=4.0.0",
@@ -167,12 +167,12 @@ dev = [
     "import-linter>=2.3",
     "types-redis>=4.6.0.20241004",
     "celery-types>=0.23.0",
-    "mypy~=1.17.1",
+    "mypy~=1.19.1",
     # "locust>=2.40.4", # Temporarily removed due to compatibility issues. Uncomment when resolved.
     "sseclient-py>=1.8.0",
     "pytest-timeout>=2.4.0",
     "pytest-xdist>=3.8.0",
-    "pyrefly>=0.54.0",
+    "pyrefly>=0.55.0",
 ]

 ############################################################

@@ -4,6 +4,7 @@ import logging
 import uuid
 from collections.abc import Mapping
 from enum import StrEnum
+from typing import cast
 from urllib.parse import urlparse
 from uuid import uuid4

@@ -32,7 +33,7 @@ from extensions.ext_redis import redis_client
 from factories import variable_factory
 from libs.datetime_utils import naive_utc_now
 from models import Account, App, AppMode
-from models.model import AppModelConfig, IconType
+from models.model import AppModelConfig, AppModelConfigDict, IconType
 from models.workflow import Workflow
 from services.plugin.dependencies_analysis import DependenciesAnalysisService
 from services.workflow_draft_variable_service import WorkflowDraftVariableService
@@ -523,7 +524,7 @@ class AppDslService:
         if not app.app_model_config:
             app_model_config = AppModelConfig(
                 app_id=app.id, created_by=account.id, updated_by=account.id
-            ).from_model_config_dict(model_config)
+            ).from_model_config_dict(cast(AppModelConfigDict, model_config))
             app_model_config.id = str(uuid4())
             app.app_model_config_id = app_model_config.id

@@ -1,12 +1,12 @@
 from core.app.apps.agent_chat.app_config_manager import AgentChatAppConfigManager
 from core.app.apps.chat.app_config_manager import ChatAppConfigManager
 from core.app.apps.completion.app_config_manager import CompletionAppConfigManager
-from models.model import AppMode
+from models.model import AppMode, AppModelConfigDict


 class AppModelConfigService:
     @classmethod
-    def validate_configuration(cls, tenant_id: str, config: dict, app_mode: AppMode):
+    def validate_configuration(cls, tenant_id: str, config: dict, app_mode: AppMode) -> AppModelConfigDict:
         if app_mode == AppMode.CHAT:
             return ChatAppConfigManager.config_validate(tenant_id, config)
         elif app_mode == AppMode.AGENT_CHAT:

@@ -1,6 +1,6 @@
 import json
 import logging
-from typing import TypedDict, cast
+from typing import Any, TypedDict, cast

 import sqlalchemy as sa
 from flask_sqlalchemy.pagination import Pagination
@@ -187,7 +187,7 @@ class AppService:
         for tool in agent_mode.get("tools") or []:
             if not isinstance(tool, dict) or len(tool.keys()) <= 3:
                 continue
-            agent_tool_entity = AgentToolEntity(**tool)
+            agent_tool_entity = AgentToolEntity(**cast(dict[str, Any], tool))
             # get tool
             try:
                 tool_runtime = ToolManager.get_agent_tool_runtime(
@@ -388,7 +388,7 @@ class AppService:
         agent_config = app_model_config.agent_mode_dict

         # get all tools
-        tools = agent_config.get("tools", [])
+        tools = cast(list[dict[str, Any]], agent_config.get("tools", []))

         url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/"

@@ -2,6 +2,7 @@ import io
import logging
import uuid
from collections.abc import Generator
from typing import cast

from flask import Response, stream_with_context
from werkzeug.datastructures import FileStorage
@@ -106,7 +107,7 @@ class AudioService:
        if not text_to_speech_dict.get("enabled"):
            raise ValueError("TTS is not enabled")

        voice = text_to_speech_dict.get("voice")
        voice = cast(str | None, text_to_speech_dict.get("voice"))

        model_manager = ModelManager()
        model_instance = model_manager.get_default_model_instance(

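Worth noting for the cast() calls introduced in these hunks: typing.cast only informs the static checker and performs no runtime conversion or validation. A minimal demonstration:

from typing import cast

value: object = 123
text = cast(str, value)  # accepted by the checker, but nothing happens at runtime
print(type(text))        # <class 'int'> -- cast did not convert the value
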
@@ -0,0 +1,497 @@
"""
Container-backed integration tests for dataset permission services on the real SQL path.

This module exercises persisted DatasetPermission rows and dataset permission
checks with testcontainers-backed infrastructure instead of database-chain mocks.
"""

from uuid import uuid4

import pytest

from extensions.ext_database import db
from models import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import (
    Dataset,
    DatasetPermission,
    DatasetPermissionEnum,
)
from services.dataset_service import DatasetPermissionService, DatasetService
from services.errors.account import NoPermissionError


class DatasetPermissionTestDataFactory:
    """Create persisted entities and request payloads for dataset permission integration tests."""

    @staticmethod
    def create_account_with_tenant(
        role: TenantAccountRole = TenantAccountRole.NORMAL,
        tenant: Tenant | None = None,
    ) -> tuple[Account, Tenant]:
        """Create a real account and tenant with specified role."""
        account = Account(
            email=f"{uuid4()}@example.com",
            name=f"user-{uuid4()}",
            interface_language="en-US",
            status="active",
        )
        if tenant is None:
            tenant = Tenant(name=f"tenant-{uuid4()}", status="normal")
            db.session.add_all([account, tenant])
        else:
            db.session.add(account)

        db.session.flush()

        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=role,
            current=True,
        )
        db.session.add(join)
        db.session.commit()

        account.current_tenant = tenant
        return account, tenant

    @staticmethod
    def create_dataset(
        tenant_id: str,
        created_by: str,
        permission: DatasetPermissionEnum = DatasetPermissionEnum.ONLY_ME,
        name: str = "Test Dataset",
    ) -> Dataset:
        """Create a real dataset with specified attributes."""
        dataset = Dataset(
            tenant_id=tenant_id,
            name=name,
            description="desc",
            data_source_type="upload_file",
            indexing_technique="high_quality",
            created_by=created_by,
            permission=permission,
            provider="vendor",
            retrieval_model={"top_k": 2},
        )
        db.session.add(dataset)
        db.session.commit()
        return dataset

    @staticmethod
    def create_dataset_permission(
        dataset_id: str,
        account_id: str,
        tenant_id: str,
        has_permission: bool = True,
    ) -> DatasetPermission:
        """Create a real DatasetPermission instance."""
        permission = DatasetPermission(
            dataset_id=dataset_id,
            account_id=account_id,
            tenant_id=tenant_id,
            has_permission=has_permission,
        )
        db.session.add(permission)
        db.session.commit()
        return permission

    @staticmethod
    def build_user_list_payload(user_ids: list[str]) -> list[dict[str, str]]:
        """Build the request payload shape used by partial-member list updates."""
        return [{"user_id": user_id} for user_id in user_ids]


class TestDatasetPermissionServiceGetPartialMemberList:
    """Verify partial-member list reads against persisted DatasetPermission rows."""

    def test_get_dataset_partial_member_list_with_members(self, db_session_with_containers):
        """
        Test retrieving partial member list with multiple members.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        user_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        user_3, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)

        expected_account_ids = [user_1.id, user_2.id, user_3.id]
        for account_id in expected_account_ids:
            DatasetPermissionTestDataFactory.create_dataset_permission(dataset.id, account_id, tenant.id)

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)

        # Assert
        assert set(result) == set(expected_account_ids)
        assert len(result) == 3

    def test_get_dataset_partial_member_list_with_single_member(self, db_session_with_containers):
        """
        Test retrieving partial member list with single member.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)

        expected_account_ids = [user.id]
        DatasetPermissionTestDataFactory.create_dataset_permission(dataset.id, user.id, tenant.id)

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)

        # Assert
        assert set(result) == set(expected_account_ids)
        assert len(result) == 1

    def test_get_dataset_partial_member_list_empty(self, db_session_with_containers):
        """
        Test retrieving partial member list when no members exist.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)

        # Assert
        assert result == []
        assert len(result) == 0


class TestDatasetPermissionServiceUpdatePartialMemberList:
    """Verify partial-member list updates against persisted DatasetPermission rows."""

    def test_update_partial_member_list_add_new_members(self, db_session_with_containers):
        """
        Test adding new partial members to a dataset.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)
        user_list = DatasetPermissionTestDataFactory.build_user_list_payload([member_1.id, member_2.id])

        # Act
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, user_list)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert set(result) == {member_1.id, member_2.id}

    def test_update_partial_member_list_replace_existing(self, db_session_with_containers):
        """
        Test replacing existing partial members with new ones.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        old_member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        old_member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        new_member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        new_member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)

        old_users = DatasetPermissionTestDataFactory.build_user_list_payload([old_member_1.id, old_member_2.id])
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, old_users)

        new_users = DatasetPermissionTestDataFactory.build_user_list_payload([new_member_1.id, new_member_2.id])

        # Act
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, new_users)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert set(result) == {new_member_1.id, new_member_2.id}

    def test_update_partial_member_list_empty_list(self, db_session_with_containers):
        """
        Test updating with empty member list (clearing all members).
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)
        users = DatasetPermissionTestDataFactory.build_user_list_payload([member_1.id, member_2.id])
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, users)

        # Act
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, [])

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert result == []

    def test_update_partial_member_list_database_error_rollback(self, db_session_with_containers):
        """
        Test error handling and rollback on database error.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        existing_member, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        replacement_member, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)
        DatasetPermissionService.update_partial_member_list(
            tenant.id,
            dataset.id,
            DatasetPermissionTestDataFactory.build_user_list_payload([existing_member.id]),
        )
        user_list = DatasetPermissionTestDataFactory.build_user_list_payload([replacement_member.id])
        rollback_called = {"count": 0}
        original_rollback = db.session.rollback

        # Act / Assert
        with pytest.MonkeyPatch.context() as mp:

            def _raise_commit():
                raise Exception("Database connection error")

            def _rollback_and_mark():
                rollback_called["count"] += 1
                original_rollback()

            mp.setattr("services.dataset_service.db.session.commit", _raise_commit)
            mp.setattr("services.dataset_service.db.session.rollback", _rollback_and_mark)
            with pytest.raises(Exception, match="Database connection error"):
                DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, user_list)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert rollback_called["count"] == 1
        assert result == [existing_member.id]
        assert db_session_with_containers.query(DatasetPermission).filter_by(dataset_id=dataset.id).count() == 1


class TestDatasetPermissionServiceClearPartialMemberList:
    """Verify partial-member clearing against persisted DatasetPermission rows."""

    def test_clear_partial_member_list_success(self, db_session_with_containers):
        """
        Test successful clearing of partial member list.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)
        users = DatasetPermissionTestDataFactory.build_user_list_payload([member_1.id, member_2.id])
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, users)

        # Act
        DatasetPermissionService.clear_partial_member_list(dataset.id)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert result == []

    def test_clear_partial_member_list_empty_list(self, db_session_with_containers):
        """
        Test clearing partial member list when no members exist.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)

        # Act
        DatasetPermissionService.clear_partial_member_list(dataset.id)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert result == []

    def test_clear_partial_member_list_database_error_rollback(self, db_session_with_containers):
        """
        Test error handling and rollback on database error.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        member_1, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        member_2, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )
        dataset = DatasetPermissionTestDataFactory.create_dataset(tenant.id, owner.id)
        users = DatasetPermissionTestDataFactory.build_user_list_payload([member_1.id, member_2.id])
        DatasetPermissionService.update_partial_member_list(tenant.id, dataset.id, users)
        rollback_called = {"count": 0}
        original_rollback = db.session.rollback

        # Act / Assert
        with pytest.MonkeyPatch.context() as mp:

            def _raise_commit():
                raise Exception("Database connection error")

            def _rollback_and_mark():
                rollback_called["count"] += 1
                original_rollback()

            mp.setattr("services.dataset_service.db.session.commit", _raise_commit)
            mp.setattr("services.dataset_service.db.session.rollback", _rollback_and_mark)
            with pytest.raises(Exception, match="Database connection error"):
                DatasetPermissionService.clear_partial_member_list(dataset.id)

        # Assert
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert rollback_called["count"] == 1
        assert set(result) == {member_1.id, member_2.id}
        assert db_session_with_containers.query(DatasetPermission).filter_by(dataset_id=dataset.id).count() == 2


class TestDatasetServiceCheckDatasetPermission:
    """Verify dataset access checks against persisted partial-member permissions."""

    def test_check_dataset_permission_partial_members_with_permission_success(self, db_session_with_containers):
        """
        Test that user with explicit permission can access partial_members dataset.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )

        dataset = DatasetPermissionTestDataFactory.create_dataset(
            tenant.id,
            owner.id,
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
        )
        DatasetPermissionTestDataFactory.create_dataset_permission(dataset.id, user.id, tenant.id)

        # Act (should not raise)
        DatasetService.check_dataset_permission(dataset, user)

        # Assert
        permissions = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert user.id in permissions

    def test_check_dataset_permission_partial_members_without_permission_error(self, db_session_with_containers):
        """
        Test error when user without permission tries to access partial_members dataset.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )

        dataset = DatasetPermissionTestDataFactory.create_dataset(
            tenant.id,
            owner.id,
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
        )

        # Act & Assert
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_permission(dataset, user)


class TestDatasetServiceCheckDatasetOperatorPermission:
    """Verify operator permission checks against persisted partial-member permissions."""

    def test_check_dataset_operator_permission_partial_members_with_permission_success(
        self, db_session_with_containers
    ):
        """
        Test that user with explicit permission can access partial_members dataset.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )

        dataset = DatasetPermissionTestDataFactory.create_dataset(
            tenant.id,
            owner.id,
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
        )
        DatasetPermissionTestDataFactory.create_dataset_permission(dataset.id, user.id, tenant.id)

        # Act (should not raise)
        DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)

        # Assert
        permissions = DatasetPermissionService.get_dataset_partial_member_list(dataset.id)
        assert user.id in permissions

    def test_check_dataset_operator_permission_partial_members_without_permission_error(
        self, db_session_with_containers
    ):
        """
        Test error when user without permission tries to access partial_members dataset.
        """
        # Arrange
        owner, tenant = DatasetPermissionTestDataFactory.create_account_with_tenant(role=TenantAccountRole.OWNER)
        user, _ = DatasetPermissionTestDataFactory.create_account_with_tenant(
            role=TenantAccountRole.NORMAL,
            tenant=tenant,
        )

        dataset = DatasetPermissionTestDataFactory.create_dataset(
            tenant.id,
            owner.id,
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
        )

        # Act & Assert
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)

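The tests in the file above lean on a db_session_with_containers fixture defined elsewhere in the repository. As a rough sketch of what a testcontainers-backed session fixture of that shape could look like (assuming testcontainers-python and SQLAlchemy; the project's actual fixture may differ in scope and setup):

import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from testcontainers.postgres import PostgresContainer

@pytest.fixture(scope="session")
def db_session_with_containers():
    # Spin up a throwaway Postgres container for the whole test session.
    with PostgresContainer("postgres:15-alpine") as postgres:
        engine = create_engine(postgres.get_connection_url())
        session = sessionmaker(bind=engine)()
        yield session
        session.close()
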
@@ -0,0 +1,244 @@
"""Container-backed integration tests for DatasetService.delete_dataset real SQL paths."""

from unittest.mock import patch
from uuid import uuid4

from models.account import Account, Tenant, TenantAccountJoin, TenantAccountRole
from models.dataset import Dataset, Document
from services.dataset_service import DatasetService


class DatasetDeleteIntegrationDataFactory:
    """Create persisted entities used by delete_dataset integration tests."""

    @staticmethod
    def create_account_with_tenant(db_session_with_containers) -> tuple[Account, Tenant]:
        """Persist an owner account, tenant, and tenant join for dataset deletion tests."""
        account = Account(
            email=f"owner-{uuid4()}@example.com",
            name="Owner",
            interface_language="en-US",
            status="active",
        )
        db_session_with_containers.add(account)
        db_session_with_containers.commit()

        tenant = Tenant(
            name=f"tenant-{uuid4()}",
            status="normal",
        )
        db_session_with_containers.add(tenant)
        db_session_with_containers.commit()

        join = TenantAccountJoin(
            tenant_id=tenant.id,
            account_id=account.id,
            role=TenantAccountRole.OWNER,
            current=True,
        )
        db_session_with_containers.add(join)
        db_session_with_containers.commit()

        account.current_tenant = tenant
        return account, tenant

    @staticmethod
    def create_dataset(
        db_session_with_containers,
        tenant_id: str,
        created_by: str,
        *,
        indexing_technique: str | None,
        chunk_structure: str | None,
        index_struct: str | None = '{"type": "paragraph"}',
        collection_binding_id: str | None = None,
        pipeline_id: str | None = None,
    ) -> Dataset:
        """Persist a dataset with delete_dataset-relevant fields configured."""
        dataset = Dataset(
            tenant_id=tenant_id,
            name=f"dataset-{uuid4()}",
            data_source_type="upload_file",
            indexing_technique=indexing_technique,
            index_struct=index_struct,
            created_by=created_by,
            collection_binding_id=collection_binding_id,
            pipeline_id=pipeline_id,
            chunk_structure=chunk_structure,
        )
        db_session_with_containers.add(dataset)
        db_session_with_containers.commit()
        return dataset

    @staticmethod
    def create_document(
        db_session_with_containers,
        *,
        tenant_id: str,
        dataset_id: str,
        created_by: str,
        doc_form: str = "text_model",
    ) -> Document:
        """Persist a document so dataset.doc_form resolves through the real document path."""
        document = Document(
            tenant_id=tenant_id,
            dataset_id=dataset_id,
            position=1,
            data_source_type="upload_file",
            batch=f"batch-{uuid4()}",
            name="Document",
            created_from="upload_file",
            created_by=created_by,
            doc_form=doc_form,
        )
        db_session_with_containers.add(document)
        db_session_with_containers.commit()
        return document


class TestDatasetServiceDeleteDataset:
    """Integration coverage for DatasetService.delete_dataset using testcontainers."""

    def test_delete_dataset_with_documents_success(self, db_session_with_containers):
        """Delete a dataset with documents and dispatch cleanup through the real signal handler."""
        # Arrange
        owner, tenant = DatasetDeleteIntegrationDataFactory.create_account_with_tenant(db_session_with_containers)
        dataset = DatasetDeleteIntegrationDataFactory.create_dataset(
            db_session_with_containers,
            tenant_id=tenant.id,
            created_by=owner.id,
            indexing_technique="high_quality",
            chunk_structure=None,
            index_struct='{"type": "paragraph"}',
            collection_binding_id=str(uuid4()),
            pipeline_id=str(uuid4()),
        )
        DatasetDeleteIntegrationDataFactory.create_document(
            db_session_with_containers,
            tenant_id=tenant.id,
            dataset_id=dataset.id,
            created_by=owner.id,
            doc_form="text_model",
        )

        # Act
        with patch(
            "events.event_handlers.clean_when_dataset_deleted.clean_dataset_task.delay",
            autospec=True,
        ) as clean_dataset_delay:
            result = DatasetService.delete_dataset(dataset.id, owner)

        # Assert
        db_session_with_containers.expire_all()
        assert result is True
        assert db_session_with_containers.get(Dataset, dataset.id) is None
        clean_dataset_delay.assert_called_once_with(
            dataset.id,
            dataset.tenant_id,
            dataset.indexing_technique,
            dataset.index_struct,
            dataset.collection_binding_id,
            dataset.doc_form,
            dataset.pipeline_id,
        )

    def test_delete_empty_dataset_success(self, db_session_with_containers):
        """Delete an empty dataset without scheduling cleanup when both gating fields are absent."""
        # Arrange
        owner, tenant = DatasetDeleteIntegrationDataFactory.create_account_with_tenant(db_session_with_containers)
        dataset = DatasetDeleteIntegrationDataFactory.create_dataset(
            db_session_with_containers,
            tenant_id=tenant.id,
            created_by=owner.id,
            indexing_technique=None,
            chunk_structure=None,
            index_struct=None,
            collection_binding_id=None,
            pipeline_id=None,
        )

        # Act
        with patch(
            "events.event_handlers.clean_when_dataset_deleted.clean_dataset_task.delay",
            autospec=True,
        ) as clean_dataset_delay:
            result = DatasetService.delete_dataset(dataset.id, owner)

        # Assert
        db_session_with_containers.expire_all()
        assert result is True
        assert db_session_with_containers.get(Dataset, dataset.id) is None
        clean_dataset_delay.assert_not_called()

    def test_delete_dataset_with_partial_none_values(self, db_session_with_containers):
        """Delete a dataset without cleanup when indexing_technique is missing but doc_form resolves."""
        # Arrange
        owner, tenant = DatasetDeleteIntegrationDataFactory.create_account_with_tenant(db_session_with_containers)
        dataset = DatasetDeleteIntegrationDataFactory.create_dataset(
            db_session_with_containers,
            tenant_id=tenant.id,
            created_by=owner.id,
            indexing_technique=None,
            chunk_structure="text_model",
            index_struct='{"type": "paragraph"}',
            collection_binding_id=str(uuid4()),
            pipeline_id=str(uuid4()),
        )

        # Act
        with patch(
            "events.event_handlers.clean_when_dataset_deleted.clean_dataset_task.delay",
            autospec=True,
        ) as clean_dataset_delay:
            result = DatasetService.delete_dataset(dataset.id, owner)

        # Assert
        db_session_with_containers.expire_all()
        assert result is True
        assert db_session_with_containers.get(Dataset, dataset.id) is None
        clean_dataset_delay.assert_not_called()

    def test_delete_dataset_with_doc_form_none_indexing_technique_exists(self, db_session_with_containers):
        """Delete a dataset without cleanup when indexing exists but doc_form resolves to None."""
        # Arrange
        owner, tenant = DatasetDeleteIntegrationDataFactory.create_account_with_tenant(db_session_with_containers)
        dataset = DatasetDeleteIntegrationDataFactory.create_dataset(
            db_session_with_containers,
            tenant_id=tenant.id,
            created_by=owner.id,
            indexing_technique="high_quality",
            chunk_structure=None,
            index_struct='{"type": "paragraph"}',
            collection_binding_id=str(uuid4()),
            pipeline_id=str(uuid4()),
        )

        # Act
        with patch(
            "events.event_handlers.clean_when_dataset_deleted.clean_dataset_task.delay",
            autospec=True,
        ) as clean_dataset_delay:
            result = DatasetService.delete_dataset(dataset.id, owner)

        # Assert
        db_session_with_containers.expire_all()
        assert result is True
        assert db_session_with_containers.get(Dataset, dataset.id) is None
        clean_dataset_delay.assert_not_called()

    def test_delete_dataset_not_found(self, db_session_with_containers):
        """Return False without scheduling cleanup when the target dataset does not exist."""
        # Arrange
        owner, _ = DatasetDeleteIntegrationDataFactory.create_account_with_tenant(db_session_with_containers)
        missing_dataset_id = str(uuid4())

        # Act
        with patch(
            "events.event_handlers.clean_when_dataset_deleted.clean_dataset_task.delay",
            autospec=True,
        ) as clean_dataset_delay:
            result = DatasetService.delete_dataset(missing_dataset_id, owner)

        # Assert
        assert result is False
        clean_dataset_delay.assert_not_called()

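These tests patch the Celery task's .delay attribute with autospec=True, which makes the replacement mock enforce the real callable's signature instead of accepting anything. A small self-contained illustration of that difference (send() below is a stand-in, not project code):

from unittest.mock import patch

def send(dataset_id, tenant_id):
    """Stand-in for a task-like callable."""

with patch(f"{__name__}.send", autospec=True) as mocked:
    mocked("ds-1", "t-1")      # matches the signature: accepted
    try:
        mocked("ds-1")         # missing argument: autospec raises TypeError
    except TypeError as exc:
        print(exc)
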
@@ -258,323 +258,6 @@ class DatasetPermissionTestDataFactory:
        return [{"user_id": user_id} for user_id in user_ids]


# ============================================================================
# Tests for get_dataset_partial_member_list
# ============================================================================


class TestDatasetPermissionServiceGetPartialMemberList:
    """
    Comprehensive unit tests for DatasetPermissionService.get_dataset_partial_member_list method.

    This test class covers the retrieval of partial member lists for datasets,
    which returns a list of account IDs that have explicit permissions for
    a given dataset.

    The get_dataset_partial_member_list method:
    1. Queries DatasetPermission table for the dataset ID
    2. Selects account_id values
    3. Returns list of account IDs

    Test scenarios include:
    - Retrieving list with multiple members
    - Retrieving list with single member
    - Retrieving empty list (no partial members)
    - Database query validation
    """

    @pytest.fixture
    def mock_db_session(self):
        """
        Mock database session for testing.

        Provides a mocked database session that can be used to verify
        query construction and execution.
        """
        with patch("services.dataset_service.db.session") as mock_db:
            yield mock_db

    def test_get_dataset_partial_member_list_with_members(self, mock_db_session):
        """
        Test retrieving partial member list with multiple members.

        Verifies that when a dataset has multiple partial members, all
        account IDs are returned correctly.

        This test ensures:
        - Query is constructed correctly
        - All account IDs are returned
        - Database query is executed
        """
        # Arrange
        dataset_id = "dataset-123"
        expected_account_ids = ["user-456", "user-789", "user-012"]

        # Mock the scalars query to return account IDs
        mock_scalars_result = Mock()
        mock_scalars_result.all.return_value = expected_account_ids
        mock_db_session.scalars.return_value = mock_scalars_result

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset_id)

        # Assert
        assert result == expected_account_ids
        assert len(result) == 3

        # Verify query was executed
        mock_db_session.scalars.assert_called_once()

    def test_get_dataset_partial_member_list_with_single_member(self, mock_db_session):
        """
        Test retrieving partial member list with single member.

        Verifies that when a dataset has only one partial member, the
        single account ID is returned correctly.

        This test ensures:
        - Query works correctly for single member
        - Result is a list with one element
        - Database query is executed
        """
        # Arrange
        dataset_id = "dataset-123"
        expected_account_ids = ["user-456"]

        # Mock the scalars query to return single account ID
        mock_scalars_result = Mock()
        mock_scalars_result.all.return_value = expected_account_ids
        mock_db_session.scalars.return_value = mock_scalars_result

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset_id)

        # Assert
        assert result == expected_account_ids
        assert len(result) == 1

        # Verify query was executed
        mock_db_session.scalars.assert_called_once()

    def test_get_dataset_partial_member_list_empty(self, mock_db_session):
        """
        Test retrieving partial member list when no members exist.

        Verifies that when a dataset has no partial members, an empty
        list is returned.

        This test ensures:
        - Empty list is returned correctly
        - Query is executed even when no results
        - No errors are raised
        """
        # Arrange
        dataset_id = "dataset-123"

        # Mock the scalars query to return empty list
        mock_scalars_result = Mock()
        mock_scalars_result.all.return_value = []
        mock_db_session.scalars.return_value = mock_scalars_result

        # Act
        result = DatasetPermissionService.get_dataset_partial_member_list(dataset_id)

        # Assert
        assert result == []
        assert len(result) == 0

        # Verify query was executed
        mock_db_session.scalars.assert_called_once()


# ============================================================================
# Tests for update_partial_member_list
# ============================================================================


class TestDatasetPermissionServiceUpdatePartialMemberList:
    """
    Comprehensive unit tests for DatasetPermissionService.update_partial_member_list method.

    This test class covers the update of partial member lists for datasets,
    which replaces the existing partial member list with a new one.

    The update_partial_member_list method:
    1. Deletes all existing DatasetPermission records for the dataset
    2. Creates new DatasetPermission records for each user in the list
    3. Adds all new permissions to the session
    4. Commits the transaction
    5. Rolls back on error

    Test scenarios include:
    - Adding new partial members
    - Updating existing partial members
    - Replacing entire member list
    - Handling empty member list
    - Database transaction handling
    - Error handling and rollback
    """

    @pytest.fixture
    def mock_db_session(self):
        """
        Mock database session for testing.

        Provides a mocked database session that can be used to verify
        database operations including queries, adds, commits, and rollbacks.
        """
        with patch("services.dataset_service.db.session") as mock_db:
            yield mock_db

    def test_update_partial_member_list_add_new_members(self, mock_db_session):
        """
        Test adding new partial members to a dataset.

        Verifies that when updating with new members, the old members
        are deleted and new members are added correctly.

        This test ensures:
        - Old permissions are deleted
        - New permissions are created
        - All permissions are added to session
        - Transaction is committed
        """
        # Arrange
        tenant_id = "tenant-123"
        dataset_id = "dataset-123"
        user_list = DatasetPermissionTestDataFactory.create_user_list_mock(["user-456", "user-789"])

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Act
        DatasetPermissionService.update_partial_member_list(tenant_id, dataset_id, user_list)

        # Assert
        # Verify old permissions were deleted
        mock_db_session.query.assert_called()
        mock_query.where.assert_called()

        # Verify new permissions were added
        mock_db_session.add_all.assert_called_once()

        # Verify transaction was committed
        mock_db_session.commit.assert_called_once()

        # Verify no rollback occurred
        mock_db_session.rollback.assert_not_called()

    def test_update_partial_member_list_replace_existing(self, mock_db_session):
        """
        Test replacing existing partial members with new ones.

        Verifies that when updating with a different member list, the
        old members are removed and new members are added.

        This test ensures:
        - Old permissions are deleted
        - New permissions replace old ones
        - Transaction is committed successfully
        """
        # Arrange
        tenant_id = "tenant-123"
        dataset_id = "dataset-123"
        user_list = DatasetPermissionTestDataFactory.create_user_list_mock(["user-999", "user-888"])

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Act
        DatasetPermissionService.update_partial_member_list(tenant_id, dataset_id, user_list)

        # Assert
        # Verify old permissions were deleted
        mock_db_session.query.assert_called()

        # Verify new permissions were added
        mock_db_session.add_all.assert_called_once()

        # Verify transaction was committed
        mock_db_session.commit.assert_called_once()

    def test_update_partial_member_list_empty_list(self, mock_db_session):
        """
        Test updating with empty member list (clearing all members).

        Verifies that when updating with an empty list, all existing
        permissions are deleted and no new permissions are added.

        This test ensures:
        - Old permissions are deleted
        - No new permissions are added
        - Transaction is committed
        """
        # Arrange
        tenant_id = "tenant-123"
        dataset_id = "dataset-123"
        user_list = []

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Act
        DatasetPermissionService.update_partial_member_list(tenant_id, dataset_id, user_list)

        # Assert
        # Verify old permissions were deleted
        mock_db_session.query.assert_called()

        # Verify add_all was called with empty list
        mock_db_session.add_all.assert_called_once_with([])

        # Verify transaction was committed
        mock_db_session.commit.assert_called_once()

    def test_update_partial_member_list_database_error_rollback(self, mock_db_session):
        """
        Test error handling and rollback on database error.

        Verifies that when a database error occurs during the update,
        the transaction is rolled back and the error is re-raised.

        This test ensures:
        - Error is caught and handled
        - Transaction is rolled back
        - Error is re-raised
        - No commit occurs after error
        """
        # Arrange
        tenant_id = "tenant-123"
        dataset_id = "dataset-123"
        user_list = DatasetPermissionTestDataFactory.create_user_list_mock(["user-456"])

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Mock commit to raise an error
        database_error = Exception("Database connection error")
        mock_db_session.commit.side_effect = database_error

        # Act & Assert
        with pytest.raises(Exception, match="Database connection error"):
            DatasetPermissionService.update_partial_member_list(tenant_id, dataset_id, user_list)

        # Verify rollback was called
        mock_db_session.rollback.assert_called_once()


# ============================================================================
# Tests for check_permission
# ============================================================================

@@ -776,144 +459,6 @@ class TestDatasetPermissionServiceCheckPermission:
        mock_get_partial_member_list.assert_called_once_with(dataset.id)


# ============================================================================
# Tests for clear_partial_member_list
# ============================================================================


class TestDatasetPermissionServiceClearPartialMemberList:
    """
    Comprehensive unit tests for DatasetPermissionService.clear_partial_member_list method.

    This test class covers the clearing of partial member lists, which removes
    all DatasetPermission records for a given dataset.

    The clear_partial_member_list method:
    1. Deletes all DatasetPermission records for the dataset
    2. Commits the transaction
    3. Rolls back on error

    Test scenarios include:
    - Clearing list with existing members
    - Clearing empty list (no members)
    - Database transaction handling
    - Error handling and rollback
    """

    @pytest.fixture
    def mock_db_session(self):
        """
        Mock database session for testing.

        Provides a mocked database session that can be used to verify
        database operations including queries, deletes, commits, and rollbacks.
        """
        with patch("services.dataset_service.db.session") as mock_db:
            yield mock_db

    def test_clear_partial_member_list_success(self, mock_db_session):
        """
        Test successful clearing of partial member list.

        Verifies that when clearing a partial member list, all permissions
        are deleted and the transaction is committed.

        This test ensures:
        - All permissions are deleted
        - Transaction is committed
        - No errors are raised
        """
        # Arrange
        dataset_id = "dataset-123"

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Act
        DatasetPermissionService.clear_partial_member_list(dataset_id)

        # Assert
        # Verify query was executed
        mock_db_session.query.assert_called()

        # Verify delete was called
        mock_query.where.assert_called()
        mock_query.delete.assert_called_once()

        # Verify transaction was committed
        mock_db_session.commit.assert_called_once()

        # Verify no rollback occurred
        mock_db_session.rollback.assert_not_called()

    def test_clear_partial_member_list_empty_list(self, mock_db_session):
        """
        Test clearing partial member list when no members exist.

        Verifies that when clearing an already empty list, the operation
        completes successfully without errors.

        This test ensures:
        - Operation works correctly for empty lists
        - Transaction is committed
        - No errors are raised
        """
        # Arrange
        dataset_id = "dataset-123"

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Act
        DatasetPermissionService.clear_partial_member_list(dataset_id)

        # Assert
        # Verify query was executed
        mock_db_session.query.assert_called()

        # Verify transaction was committed
        mock_db_session.commit.assert_called_once()

    def test_clear_partial_member_list_database_error_rollback(self, mock_db_session):
        """
        Test error handling and rollback on database error.

        Verifies that when a database error occurs during clearing,
        the transaction is rolled back and the error is re-raised.

        This test ensures:
        - Error is caught and handled
        - Transaction is rolled back
        - Error is re-raised
        - No commit occurs after error
        """
        # Arrange
        dataset_id = "dataset-123"

        # Mock the query delete operation
        mock_query = Mock()
        mock_query.where.return_value = mock_query
        mock_query.delete.return_value = None
        mock_db_session.query.return_value = mock_query

        # Mock commit to raise an error
        database_error = Exception("Database connection error")
        mock_db_session.commit.side_effect = database_error

        # Act & Assert
        with pytest.raises(Exception, match="Database connection error"):
            DatasetPermissionService.clear_partial_member_list(dataset_id)

        # Verify rollback was called
        mock_db_session.rollback.assert_called_once()


# ============================================================================
# Tests for DatasetService.check_dataset_permission
# ============================================================================

@@ -1047,72 +592,6 @@ class TestDatasetServiceCheckDatasetPermission:
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_permission(dataset, user)

    def test_check_dataset_permission_partial_members_with_permission_success(self, mock_db_session):
        """
        Test that user with explicit permission can access partial_members dataset.

        Verifies that when a user has an explicit DatasetPermission record
        for a partial_members dataset, they can access it successfully.

        This test ensures:
        - Explicit permissions are checked correctly
        - Users with permissions can access
        - Database query is executed
        """
        # Arrange
        user = DatasetPermissionTestDataFactory.create_user_mock(user_id="user-123", role=TenantAccountRole.NORMAL)
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            tenant_id="tenant-123",
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
            created_by="other-user-456",  # Not the creator
        )

        # Mock permission query to return permission record
        mock_permission = DatasetPermissionTestDataFactory.create_dataset_permission_mock(
            dataset_id=dataset.id, account_id=user.id
        )
        mock_query = Mock()
        mock_query.filter_by.return_value = mock_query
        mock_query.first.return_value = mock_permission
        mock_db_session.query.return_value = mock_query

        # Act (should not raise)
        DatasetService.check_dataset_permission(dataset, user)

        # Assert
        # Verify permission query was executed
        mock_db_session.query.assert_called()

    def test_check_dataset_permission_partial_members_without_permission_error(self, mock_db_session):
        """
        Test error when user without permission tries to access partial_members dataset.

        Verifies that when a user does not have an explicit DatasetPermission
        record for a partial_members dataset, a NoPermissionError is raised.

        This test ensures:
        - Missing permissions are detected
        - Error message is clear
        - Error type is correct
        """
        # Arrange
        user = DatasetPermissionTestDataFactory.create_user_mock(user_id="user-123", role=TenantAccountRole.NORMAL)
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            tenant_id="tenant-123",
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
            created_by="other-user-456",  # Not the creator
        )

        # Mock permission query to return None (no permission)
        mock_query = Mock()
        mock_query.filter_by.return_value = mock_query
        mock_query.first.return_value = None  # No permission found
        mock_db_session.query.return_value = mock_query

        # Act & Assert
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_permission(dataset, user)

    def test_check_dataset_permission_partial_members_creator_success(self, mock_db_session):
        """
        Test that creator can access partial_members dataset without explicit permission.

@@ -1311,72 +790,6 @@ class TestDatasetServiceCheckDatasetOperatorPermission:
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)

    def test_check_dataset_operator_permission_partial_members_with_permission_success(self, mock_db_session):
        """
        Test that user with explicit permission can access partial_members dataset.

        Verifies that when a user has an explicit DatasetPermission record
        for a partial_members dataset, they can access it successfully.

        This test ensures:
        - Explicit permissions are checked correctly
        - Users with permissions can access
        - Database query is executed
        """
        # Arrange
        user = DatasetPermissionTestDataFactory.create_user_mock(user_id="user-123", role=TenantAccountRole.NORMAL)
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            tenant_id="tenant-123",
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
            created_by="other-user-456",  # Not the creator
        )

        # Mock permission query to return permission records
        mock_permission = DatasetPermissionTestDataFactory.create_dataset_permission_mock(
            dataset_id=dataset.id, account_id=user.id
        )
        mock_query = Mock()
        mock_query.filter_by.return_value = mock_query
        mock_query.all.return_value = [mock_permission]  # User has permission
        mock_db_session.query.return_value = mock_query

        # Act (should not raise)
        DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)

        # Assert
        # Verify permission query was executed
        mock_db_session.query.assert_called()

    def test_check_dataset_operator_permission_partial_members_without_permission_error(self, mock_db_session):
        """
        Test error when user without permission tries to access partial_members dataset.

        Verifies that when a user does not have an explicit DatasetPermission
        record for a partial_members dataset, a NoPermissionError is raised.

        This test ensures:
        - Missing permissions are detected
        - Error message is clear
        - Error type is correct
        """
        # Arrange
        user = DatasetPermissionTestDataFactory.create_user_mock(user_id="user-123", role=TenantAccountRole.NORMAL)
        dataset = DatasetPermissionTestDataFactory.create_dataset_mock(
            tenant_id="tenant-123",
            permission=DatasetPermissionEnum.PARTIAL_TEAM,
            created_by="other-user-456",  # Not the creator
        )

        # Mock permission query to return empty list (no permission)
        mock_query = Mock()
        mock_query.filter_by.return_value = mock_query
        mock_query.all.return_value = []  # No permissions found
        mock_db_session.query.return_value = mock_query

        # Act & Assert
        with pytest.raises(NoPermissionError, match="You do not have permission to access this dataset"):
            DatasetService.check_dataset_operator_permission(user=user, dataset=dataset)


# ============================================================================
# Additional Documentation and Notes

@@ -1,216 +0,0 @@
from unittest.mock import Mock, patch

import pytest

from models.account import Account, TenantAccountRole
from models.dataset import Dataset
from services.dataset_service import DatasetService


class DatasetDeleteTestDataFactory:
    """Factory class for creating test data and mock objects for dataset delete tests."""

    @staticmethod
    def create_dataset_mock(
        dataset_id: str = "dataset-123",
        tenant_id: str = "test-tenant-123",
        created_by: str = "creator-456",
        doc_form: str | None = None,
        indexing_technique: str | None = "high_quality",
        **kwargs,
    ) -> Mock:
        """Create a mock dataset with specified attributes."""
        dataset = Mock(spec=Dataset)
        dataset.id = dataset_id
        dataset.tenant_id = tenant_id
        dataset.created_by = created_by
        dataset.doc_form = doc_form
        dataset.indexing_technique = indexing_technique
        for key, value in kwargs.items():
            setattr(dataset, key, value)
        return dataset

    @staticmethod
    def create_user_mock(
        user_id: str = "user-789",
        tenant_id: str = "test-tenant-123",
        role: TenantAccountRole = TenantAccountRole.ADMIN,
        **kwargs,
    ) -> Mock:
        """Create a mock user with specified attributes."""
        user = Mock(spec=Account)
        user.id = user_id
        user.current_tenant_id = tenant_id
        user.current_role = role
        for key, value in kwargs.items():
            setattr(user, key, value)
        return user


class TestDatasetServiceDeleteDataset:
    """
    Comprehensive unit tests for the DatasetService.delete_dataset method.

    This test suite covers all deletion scenarios, including:
    - Normal dataset deletion with documents
    - Empty dataset deletion (no documents, doc_form is None)
    - Dataset deletion with missing indexing_technique
    - Permission checks
    - Event handling

    This test suite provides regression protection for issue #27073.
    """

    @pytest.fixture
    def mock_dataset_service_dependencies(self):
        """Common mock setup for dataset service dependencies."""
        with (
            patch("services.dataset_service.DatasetService.get_dataset") as mock_get_dataset,
            patch("services.dataset_service.DatasetService.check_dataset_permission") as mock_check_perm,
            patch("extensions.ext_database.db.session") as mock_db,
            patch("services.dataset_service.dataset_was_deleted") as mock_dataset_was_deleted,
        ):
            yield {
                "get_dataset": mock_get_dataset,
                "check_permission": mock_check_perm,
                "db_session": mock_db,
                "dataset_was_deleted": mock_dataset_was_deleted,
            }

    def test_delete_dataset_with_documents_success(self, mock_dataset_service_dependencies):
        """
        Test successful deletion of a dataset with documents.

        This test verifies that:
        - The dataset is retrieved correctly
        - The permission check is performed
        - The dataset_was_deleted event is sent
        - The dataset is deleted from the database
        - The method returns True
        """
        # Arrange
        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(
            doc_form="text_model", indexing_technique="high_quality"
        )
        user = DatasetDeleteTestDataFactory.create_user_mock()

        mock_dataset_service_dependencies["get_dataset"].return_value = dataset

        # Act
        result = DatasetService.delete_dataset(dataset.id, user)

        # Assert
        assert result is True
        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()

    def test_delete_empty_dataset_success(self, mock_dataset_service_dependencies):
        """
        Test successful deletion of an empty dataset (no documents, doc_form is None).

        This test verifies that:
        - Empty datasets can be deleted without errors
        - The dataset_was_deleted event is sent (the event handler skips cleanup if doc_form is None)
        - The dataset is deleted from the database
        - The method returns True

        This is the primary test for issue #27073, where deleting an empty dataset
        caused an internal server error due to an assertion failure in the event handlers.
        """
        # Arrange
        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique=None)
        user = DatasetDeleteTestDataFactory.create_user_mock()

        mock_dataset_service_dependencies["get_dataset"].return_value = dataset

        # Act
        result = DatasetService.delete_dataset(dataset.id, user)

        # Assert - Verify complete deletion flow
        assert result is True
        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()

    def test_delete_dataset_with_partial_none_values(self, mock_dataset_service_dependencies):
        """
        Test deletion of a dataset with partial None values.

        This test verifies that datasets with partial None values (e.g., doc_form exists
        but indexing_technique is None) can be deleted successfully. The event handler
        skips cleanup if any required field is None.

        Improvement based on a Gemini Code Assist suggestion: comprehensive assertions
        were added to verify that all core deletion operations are performed, not just
        event sending.
        """
        # Arrange
        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form="text_model", indexing_technique=None)
        user = DatasetDeleteTestDataFactory.create_user_mock()

        mock_dataset_service_dependencies["get_dataset"].return_value = dataset

        # Act
        result = DatasetService.delete_dataset(dataset.id, user)

        # Assert - Verify complete deletion flow (Gemini suggestion implemented)
        assert result is True
        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()

    def test_delete_dataset_with_doc_form_none_indexing_technique_exists(self, mock_dataset_service_dependencies):
        """
        Test deletion of a dataset where doc_form is None but indexing_technique exists.

        This edge case can occur in certain dataset configurations and should be handled
        gracefully by the event handler's conditional check.
        """
        # Arrange
        dataset = DatasetDeleteTestDataFactory.create_dataset_mock(doc_form=None, indexing_technique="high_quality")
        user = DatasetDeleteTestDataFactory.create_user_mock()

        mock_dataset_service_dependencies["get_dataset"].return_value = dataset

        # Act
        result = DatasetService.delete_dataset(dataset.id, user)

        # Assert - Verify complete deletion flow
        assert result is True
        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset.id)
        mock_dataset_service_dependencies["check_permission"].assert_called_once_with(dataset, user)
        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].delete.assert_called_once_with(dataset)
        mock_dataset_service_dependencies["db_session"].commit.assert_called_once()

    def test_delete_dataset_not_found(self, mock_dataset_service_dependencies):
        """
        Test a deletion attempt when the dataset doesn't exist.

        This test verifies that:
        - The method returns False when the dataset is not found
        - No deletion operations are performed
        - No events are sent
        """
        # Arrange
        dataset_id = "non-existent-dataset"
        user = DatasetDeleteTestDataFactory.create_user_mock()

        mock_dataset_service_dependencies["get_dataset"].return_value = None

        # Act
        result = DatasetService.delete_dataset(dataset_id, user)

        # Assert
        assert result is False
        mock_dataset_service_dependencies["get_dataset"].assert_called_once_with(dataset_id)
        mock_dataset_service_dependencies["check_permission"].assert_not_called()
        mock_dataset_service_dependencies["dataset_was_deleted"].send.assert_not_called()
        mock_dataset_service_dependencies["db_session"].delete.assert_not_called()
        mock_dataset_service_dependencies["db_session"].commit.assert_not_called()
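The deleted tests above document the regression behind issue #27073: an empty dataset has doc_form (and usually indexing_technique) set to None, and the dataset_was_deleted event handler used to fail an assertion on those fields, turning a routine delete into an internal server error. Below is a minimal sketch of the guard the tests expect, assuming a handler that receives both fields and skips index cleanup when either is missing; the function name and signature here are stand-ins, not the repository's actual handler.

from typing import Optional


def handle_dataset_was_deleted(doc_form: Optional[str], indexing_technique: Optional[str]) -> bool:
    """Return True if index cleanup ran, False if it was skipped."""
    # Guard for issue #27073: skip cleanup instead of asserting on None fields.
    if doc_form is None or indexing_technique is None:
        return False
    # ... a real handler would tear down the dataset's vector index here ...
    return True


# Mirrors the four scenarios covered by the tests above:
assert handle_dataset_was_deleted("text_model", "high_quality") is True
assert handle_dataset_was_deleted(None, None) is False
assert handle_dataset_was_deleted("text_model", None) is False
assert handle_dataset_was_deleted(None, "high_quality") is False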
api/uv.lock

@@ -505,14 +505,14 @@ wheels = [

[[package]]
name = "basedpyright"
version = "1.31.7"
version = "1.38.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "nodejs-wheel-binaries" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/ba/ed69e8df732a09c8ca469f592c8e08707fe29149735b834c276d94d4a3da/basedpyright-1.31.7.tar.gz", hash = "sha256:394f334c742a19bcc5905b2455c9f5858182866b7679a6f057a70b44b049bceb", size = 22710948, upload-time = "2025-10-11T05:12:48.3Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e4/a3/20aa7c4e83f2f614e0036300f3c352775dede0655c66814da16c37b661a9/basedpyright-1.38.2.tar.gz", hash = "sha256:b433b2b8ba745ed7520cdc79a29a03682f3fb00346d272ece5944e9e5e5daa92", size = 25277019, upload-time = "2026-02-26T11:18:43.594Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/f8/90/ce01ad2d0afdc1b82b8b5aaba27e60d2e138e39d887e71c35c55d8f1bfcd/basedpyright-1.31.7-py3-none-any.whl", hash = "sha256:7c54beb7828c9ed0028630aaa6904f395c27e5a9f5a313aa9e91fc1d11170831", size = 11817571, upload-time = "2025-10-11T05:12:45.432Z" },
    { url = "https://files.pythonhosted.org/packages/ac/12/736cab83626fea3fe65cdafb3ef3d2ee9480c56723f2fd33921537289a5e/basedpyright-1.38.2-py3-none-any.whl", hash = "sha256:153481d37fd19f9e3adedc8629d1d071b10c5f5e49321fb026b74444b7c70e24", size = 12312475, upload-time = "2026-02-26T11:18:40.373Z" },
]

[[package]]
@@ -1606,7 +1606,7 @@ requires-dist = [
    { name = "langfuse", specifier = "~=2.51.3" },
    { name = "langsmith", specifier = "~=0.1.77" },
    { name = "litellm", specifier = "==1.77.1" },
    { name = "markdown", specifier = "~=3.5.1" },
    { name = "markdown", specifier = "~=3.8.1" },
    { name = "mlflow-skinny", specifier = ">=3.0.0" },
    { name = "numpy", specifier = "~=1.26.4" },
    { name = "openpyxl", specifier = "~=3.1.5" },
@@ -1660,7 +1660,7 @@ requires-dist = [

[package.metadata.requires-dev]
dev = [
    { name = "basedpyright", specifier = "~=1.31.0" },
    { name = "basedpyright", specifier = "~=1.38.2" },
    { name = "boto3-stubs", specifier = ">=1.38.20" },
    { name = "celery-types", specifier = ">=0.23.0" },
    { name = "coverage", specifier = "~=7.2.4" },
@@ -1669,9 +1669,9 @@ dev = [
    { name = "hypothesis", specifier = ">=6.131.15" },
    { name = "import-linter", specifier = ">=2.3" },
    { name = "lxml-stubs", specifier = "~=0.5.1" },
    { name = "mypy", specifier = "~=1.17.1" },
    { name = "mypy", specifier = "~=1.19.1" },
    { name = "pandas-stubs", specifier = "~=2.2.3" },
    { name = "pyrefly", specifier = ">=0.54.0" },
    { name = "pyrefly", specifier = ">=0.55.0" },
    { name = "pytest", specifier = "~=8.3.2" },
    { name = "pytest-benchmark", specifier = "~=4.0.0" },
    { name = "pytest-cov", specifier = "~=4.1.0" },
@@ -3267,6 +3267,40 @@ wheels = [
    { url = "https://files.pythonhosted.org/packages/de/f0/63b06b99b730b9954f8709f6f7d9b8d076fa0a973e472efe278089bde42b/langsmith-0.1.147-py3-none-any.whl", hash = "sha256:7166fc23b965ccf839d64945a78e9f1157757add228b086141eb03a60d699a15", size = 311812, upload-time = "2024-11-27T17:32:39.569Z" },
]

[[package]]
name = "librt"
version = "0.8.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = "sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/1d/01/0e748af5e4fee180cf7cd12bd12b0513ad23b045dccb2a83191bde82d168/librt-0.8.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:681dc2451d6d846794a828c16c22dc452d924e9f700a485b7ecb887a30aad1fd", size = 65315, upload-time = "2026-02-17T16:11:25.152Z" },
    { url = "https://files.pythonhosted.org/packages/9d/4d/7184806efda571887c798d573ca4134c80ac8642dcdd32f12c31b939c595/librt-0.8.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a3b4350b13cc0e6f5bec8fa7caf29a8fb8cdc051a3bae45cfbfd7ce64f009965", size = 68021, upload-time = "2026-02-17T16:11:26.129Z" },
    { url = "https://files.pythonhosted.org/packages/ae/88/c3c52d2a5d5101f28d3dc89298444626e7874aa904eed498464c2af17627/librt-0.8.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ac1e7817fd0ed3d14fd7c5df91daed84c48e4c2a11ee99c0547f9f62fdae13da", size = 194500, upload-time = "2026-02-17T16:11:27.177Z" },
    { url = "https://files.pythonhosted.org/packages/d6/5d/6fb0a25b6a8906e85b2c3b87bee1d6ed31510be7605b06772f9374ca5cb3/librt-0.8.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:747328be0c5b7075cde86a0e09d7a9196029800ba75a1689332348e998fb85c0", size = 205622, upload-time = "2026-02-17T16:11:28.242Z" },
    { url = "https://files.pythonhosted.org/packages/b2/a6/8006ae81227105476a45691f5831499e4d936b1c049b0c1feb17c11b02d1/librt-0.8.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f0af2bd2bc204fa27f3d6711d0f360e6b8c684a035206257a81673ab924aa11e", size = 218304, upload-time = "2026-02-17T16:11:29.344Z" },
    { url = "https://files.pythonhosted.org/packages/ee/19/60e07886ad16670aae57ef44dada41912c90906a6fe9f2b9abac21374748/librt-0.8.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d480de377f5b687b6b1bc0c0407426da556e2a757633cc7e4d2e1a057aa688f3", size = 211493, upload-time = "2026-02-17T16:11:30.445Z" },
    { url = "https://files.pythonhosted.org/packages/9c/cf/f666c89d0e861d05600438213feeb818c7514d3315bae3648b1fc145d2b6/librt-0.8.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d0ee06b5b5291f609ddb37b9750985b27bc567791bc87c76a569b3feed8481ac", size = 219129, upload-time = "2026-02-17T16:11:32.021Z" },
    { url = "https://files.pythonhosted.org/packages/8f/ef/f1bea01e40b4a879364c031476c82a0dc69ce068daad67ab96302fed2d45/librt-0.8.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9e2c6f77b9ad48ce5603b83b7da9ee3e36b3ab425353f695cba13200c5d96596", size = 213113, upload-time = "2026-02-17T16:11:33.192Z" },
    { url = "https://files.pythonhosted.org/packages/9b/80/cdab544370cc6bc1b72ea369525f547a59e6938ef6863a11ab3cd24759af/librt-0.8.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:439352ba9373f11cb8e1933da194dcc6206daf779ff8df0ed69c5e39113e6a99", size = 212269, upload-time = "2026-02-17T16:11:34.373Z" },
    { url = "https://files.pythonhosted.org/packages/9d/9c/48d6ed8dac595654f15eceab2035131c136d1ae9a1e3548e777bb6dbb95d/librt-0.8.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:82210adabbc331dbb65d7868b105185464ef13f56f7f76688565ad79f648b0fe", size = 234673, upload-time = "2026-02-17T16:11:36.063Z" },
    { url = "https://files.pythonhosted.org/packages/16/01/35b68b1db517f27a01be4467593292eb5315def8900afad29fabf56304ba/librt-0.8.1-cp311-cp311-win32.whl", hash = "sha256:52c224e14614b750c0a6d97368e16804a98c684657c7518752c356834fff83bb", size = 54597, upload-time = "2026-02-17T16:11:37.544Z" },
    { url = "https://files.pythonhosted.org/packages/71/02/796fe8f02822235966693f257bf2c79f40e11337337a657a8cfebba5febc/librt-0.8.1-cp311-cp311-win_amd64.whl", hash = "sha256:c00e5c884f528c9932d278d5c9cbbea38a6b81eb62c02e06ae53751a83a4d52b", size = 61733, upload-time = "2026-02-17T16:11:38.691Z" },
    { url = "https://files.pythonhosted.org/packages/28/ad/232e13d61f879a42a4e7117d65e4984bb28371a34bb6fb9ca54ec2c8f54e/librt-0.8.1-cp311-cp311-win_arm64.whl", hash = "sha256:f7cdf7f26c2286ffb02e46d7bac56c94655540b26347673bea15fa52a6af17e9", size = 52273, upload-time = "2026-02-17T16:11:40.308Z" },
    { url = "https://files.pythonhosted.org/packages/95/21/d39b0a87ac52fc98f621fb6f8060efb017a767ebbbac2f99fbcbc9ddc0d7/librt-0.8.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a28f2612ab566b17f3698b0da021ff9960610301607c9a5e8eaca62f5e1c350a", size = 66516, upload-time = "2026-02-17T16:11:41.604Z" },
    { url = "https://files.pythonhosted.org/packages/69/f1/46375e71441c43e8ae335905e069f1c54febee63a146278bcee8782c84fd/librt-0.8.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:60a78b694c9aee2a0f1aaeaa7d101cf713e92e8423a941d2897f4fa37908dab9", size = 68634, upload-time = "2026-02-17T16:11:43.268Z" },
    { url = "https://files.pythonhosted.org/packages/0a/33/c510de7f93bf1fa19e13423a606d8189a02624a800710f6e6a0a0f0784b3/librt-0.8.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:758509ea3f1eba2a57558e7e98f4659d0ea7670bff49673b0dde18a3c7e6c0eb", size = 198941, upload-time = "2026-02-17T16:11:44.28Z" },
    { url = "https://files.pythonhosted.org/packages/dd/36/e725903416409a533d92398e88ce665476f275081d0d7d42f9c4951999e5/librt-0.8.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:039b9f2c506bd0ab0f8725aa5ba339c6f0cd19d3b514b50d134789809c24285d", size = 209991, upload-time = "2026-02-17T16:11:45.462Z" },
    { url = "https://files.pythonhosted.org/packages/30/7a/8d908a152e1875c9f8eac96c97a480df425e657cdb47854b9efaa4998889/librt-0.8.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5bb54f1205a3a6ab41a6fd71dfcdcbd278670d3a90ca502a30d9da583105b6f7", size = 224476, upload-time = "2026-02-17T16:11:46.542Z" },
    { url = "https://files.pythonhosted.org/packages/a8/b8/a22c34f2c485b8903a06f3fe3315341fe6876ef3599792344669db98fcff/librt-0.8.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:05bd41cdee35b0c59c259f870f6da532a2c5ca57db95b5f23689fcb5c9e42440", size = 217518, upload-time = "2026-02-17T16:11:47.746Z" },
    { url = "https://files.pythonhosted.org/packages/79/6f/5c6fea00357e4f82ba44f81dbfb027921f1ab10e320d4a64e1c408d035d9/librt-0.8.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adfab487facf03f0d0857b8710cf82d0704a309d8ffc33b03d9302b4c64e91a9", size = 225116, upload-time = "2026-02-17T16:11:49.298Z" },
    { url = "https://files.pythonhosted.org/packages/f2/a0/95ced4e7b1267fe1e2720a111685bcddf0e781f7e9e0ce59d751c44dcfe5/librt-0.8.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:153188fe98a72f206042be10a2c6026139852805215ed9539186312d50a8e972", size = 217751, upload-time = "2026-02-17T16:11:50.49Z" },
    { url = "https://files.pythonhosted.org/packages/93/c2/0517281cb4d4101c27ab59472924e67f55e375bc46bedae94ac6dc6e1902/librt-0.8.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dd3c41254ee98604b08bd5b3af5bf0a89740d4ee0711de95b65166bf44091921", size = 218378, upload-time = "2026-02-17T16:11:51.783Z" },
    { url = "https://files.pythonhosted.org/packages/43/e8/37b3ac108e8976888e559a7b227d0ceac03c384cfd3e7a1c2ee248dbae79/librt-0.8.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e0d138c7ae532908cbb342162b2611dbd4d90c941cd25ab82084aaf71d2c0bd0", size = 241199, upload-time = "2026-02-17T16:11:53.561Z" },
    { url = "https://files.pythonhosted.org/packages/4b/5b/35812d041c53967fedf551a39399271bbe4257e681236a2cf1a69c8e7fa1/librt-0.8.1-cp312-cp312-win32.whl", hash = "sha256:43353b943613c5d9c49a25aaffdba46f888ec354e71e3529a00cca3f04d66a7a", size = 54917, upload-time = "2026-02-17T16:11:54.758Z" },
    { url = "https://files.pythonhosted.org/packages/de/d1/fa5d5331b862b9775aaf2a100f5ef86854e5d4407f71bddf102f4421e034/librt-0.8.1-cp312-cp312-win_amd64.whl", hash = "sha256:ff8baf1f8d3f4b6b7257fcb75a501f2a5499d0dda57645baa09d4d0d34b19444", size = 62017, upload-time = "2026-02-17T16:11:55.748Z" },
    { url = "https://files.pythonhosted.org/packages/c7/7c/c614252f9acda59b01a66e2ddfd243ed1c7e1deab0293332dfbccf862808/librt-0.8.1-cp312-cp312-win_arm64.whl", hash = "sha256:0f2ae3725904f7377e11cc37722d5d401e8b3d5851fb9273d7f4fe04f6b3d37d", size = 52441, upload-time = "2026-02-17T16:11:56.801Z" },
]

[[package]]
name = "litellm"
version = "1.77.1"
@@ -3403,11 +3437,11 @@ wheels = [

[[package]]
name = "markdown"
version = "3.5.2"
version = "3.8.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/11/28/c5441a6642681d92de56063fa7984df56f783d3f1eba518dc3e7a253b606/Markdown-3.5.2.tar.gz", hash = "sha256:e1ac7b3dc550ee80e602e71c1d168002f062e49f1b11e26a36264dafd4df2ef8", size = 349398, upload-time = "2024-01-10T15:19:38.261Z" }
sdist = { url = "https://files.pythonhosted.org/packages/db/7c/0738e5ff0adccd0b4e02c66d0446c03a3c557e02bb49b7c263d7ab56c57d/markdown-3.8.1.tar.gz", hash = "sha256:a2e2f01cead4828ee74ecca9623045f62216aef2212a7685d6eb9163f590b8c1", size = 361280, upload-time = "2025-06-18T14:50:49.618Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/42/f4/f0031854de10a0bc7821ef9fca0b92ca0d7aa6fbfbf504c5473ba825e49c/Markdown-3.5.2-py3-none-any.whl", hash = "sha256:d43323865d89fc0cb9b20c75fc8ad313af307cc087e84b657d9eec768eddeadd", size = 103870, upload-time = "2024-01-10T15:19:36.071Z" },
    { url = "https://files.pythonhosted.org/packages/50/34/3d1ff0cb4843a33817d06800e9383a2b2a2df4d508e37f53a40e829905d9/markdown-3.8.1-py3-none-any.whl", hash = "sha256:46cc0c0f1e5211ab2e9d453582f0b28a1bfaf058a9f7d5c50386b99b588d8811", size = 106642, upload-time = "2025-06-18T14:50:48.52Z" },
]

[[package]]
@@ -3653,28 +3687,29 @@ wheels = [

[[package]]
name = "mypy"
version = "1.17.1"
version = "1.19.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "librt", marker = "platform_python_implementation != 'PyPy'" },
    { name = "mypy-extensions" },
    { name = "pathspec" },
    { name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8e/22/ea637422dedf0bf36f3ef238eab4e455e2a0dcc3082b5cc067615347ab8e/mypy-1.17.1.tar.gz", hash = "sha256:25e01ec741ab5bb3eec8ba9cdb0f769230368a22c959c4937360efb89b7e9f01", size = 3352570, upload-time = "2025-07-31T07:54:19.204Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/46/cf/eadc80c4e0a70db1c08921dcc220357ba8ab2faecb4392e3cebeb10edbfa/mypy-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad37544be07c5d7fba814eb370e006df58fed8ad1ef33ed1649cb1889ba6ff58", size = 10921009, upload-time = "2025-07-31T07:53:23.037Z" },
    { url = "https://files.pythonhosted.org/packages/5d/c1/c869d8c067829ad30d9bdae051046561552516cfb3a14f7f0347b7d973ee/mypy-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:064e2ff508e5464b4bd807a7c1625bc5047c5022b85c70f030680e18f37273a5", size = 10047482, upload-time = "2025-07-31T07:53:26.151Z" },
    { url = "https://files.pythonhosted.org/packages/98/b9/803672bab3fe03cee2e14786ca056efda4bb511ea02dadcedde6176d06d0/mypy-1.17.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:70401bbabd2fa1aa7c43bb358f54037baf0586f41e83b0ae67dd0534fc64edfd", size = 11832883, upload-time = "2025-07-31T07:53:47.948Z" },
    { url = "https://files.pythonhosted.org/packages/88/fb/fcdac695beca66800918c18697b48833a9a6701de288452b6715a98cfee1/mypy-1.17.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e92bdc656b7757c438660f775f872a669b8ff374edc4d18277d86b63edba6b8b", size = 12566215, upload-time = "2025-07-31T07:54:04.031Z" },
    { url = "https://files.pythonhosted.org/packages/7f/37/a932da3d3dace99ee8eb2043b6ab03b6768c36eb29a02f98f46c18c0da0e/mypy-1.17.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c1fdf4abb29ed1cb091cf432979e162c208a5ac676ce35010373ff29247bcad5", size = 12751956, upload-time = "2025-07-31T07:53:36.263Z" },
    { url = "https://files.pythonhosted.org/packages/8c/cf/6438a429e0f2f5cab8bc83e53dbebfa666476f40ee322e13cac5e64b79e7/mypy-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:ff2933428516ab63f961644bc49bc4cbe42bbffb2cd3b71cc7277c07d16b1a8b", size = 9507307, upload-time = "2025-07-31T07:53:59.734Z" },
    { url = "https://files.pythonhosted.org/packages/17/a2/7034d0d61af8098ec47902108553122baa0f438df8a713be860f7407c9e6/mypy-1.17.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:69e83ea6553a3ba79c08c6e15dbd9bfa912ec1e493bf75489ef93beb65209aeb", size = 11086295, upload-time = "2025-07-31T07:53:28.124Z" },
    { url = "https://files.pythonhosted.org/packages/14/1f/19e7e44b594d4b12f6ba8064dbe136505cec813549ca3e5191e40b1d3cc2/mypy-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b16708a66d38abb1e6b5702f5c2c87e133289da36f6a1d15f6a5221085c6403", size = 10112355, upload-time = "2025-07-31T07:53:21.121Z" },
    { url = "https://files.pythonhosted.org/packages/5b/69/baa33927e29e6b4c55d798a9d44db5d394072eef2bdc18c3e2048c9ed1e9/mypy-1.17.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:89e972c0035e9e05823907ad5398c5a73b9f47a002b22359b177d40bdaee7056", size = 11875285, upload-time = "2025-07-31T07:53:55.293Z" },
    { url = "https://files.pythonhosted.org/packages/90/13/f3a89c76b0a41e19490b01e7069713a30949d9a6c147289ee1521bcea245/mypy-1.17.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:03b6d0ed2b188e35ee6d5c36b5580cffd6da23319991c49ab5556c023ccf1341", size = 12737895, upload-time = "2025-07-31T07:53:43.623Z" },
    { url = "https://files.pythonhosted.org/packages/23/a1/c4ee79ac484241301564072e6476c5a5be2590bc2e7bfd28220033d2ef8f/mypy-1.17.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c837b896b37cd103570d776bda106eabb8737aa6dd4f248451aecf53030cdbeb", size = 12931025, upload-time = "2025-07-31T07:54:17.125Z" },
    { url = "https://files.pythonhosted.org/packages/89/b8/7409477be7919a0608900e6320b155c72caab4fef46427c5cc75f85edadd/mypy-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:665afab0963a4b39dff7c1fa563cc8b11ecff7910206db4b2e64dd1ba25aed19", size = 9584664, upload-time = "2025-07-31T07:54:12.842Z" },
    { url = "https://files.pythonhosted.org/packages/1d/f3/8fcd2af0f5b806f6cf463efaffd3c9548a28f84220493ecd38d127b6b66d/mypy-1.17.1-py3-none-any.whl", hash = "sha256:a9f52c0351c21fe24c21d8c0eb1f62967b262d6729393397b6f443c3b773c3b9", size = 2283411, upload-time = "2025-07-31T07:53:24.664Z" },
    { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" },
    { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" },
    { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" },
    { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" },
    { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" },
    { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" },
    { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" },
    { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" },
    { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" },
    { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" },
    { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" },
    { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" },
    { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" },
]

[[package]]
@@ -5140,18 +5175,18 @@ wheels = [

[[package]]
name = "pyrefly"
version = "0.54.0"
version = "0.55.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/81/44/c10b16a302fda90d0af1328f880b232761b510eab546616a7be2fdf35a57/pyrefly-0.54.0.tar.gz", hash = "sha256:c6663be64d492f0d2f2a411ada9f28a6792163d34133639378b7f3dd9a8dca94", size = 5098893, upload-time = "2026-02-23T15:44:35.111Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/c4/76e0797215e62d007f81f86c9c4fb5d6202685a3f5e70810f3fd94294f92/pyrefly-0.55.0.tar.gz", hash = "sha256:434c3282532dd4525c4840f2040ed0eb79b0ec8224fe18d957956b15471f2441", size = 5135682, upload-time = "2026-03-03T00:46:38.122Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/5f/99/8fdcdb4e55f0227fdd9f6abce36b619bab1ecb0662b83b66adc8cba3c788/pyrefly-0.54.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:58a3f092b6dc25ef79b2dc6c69a40f36784ca157c312bfc0baea463926a9db6d", size = 12223973, upload-time = "2026-02-23T15:44:14.278Z" },
    { url = "https://files.pythonhosted.org/packages/90/35/c2aaf87a76003ad27b286594d2e5178f811eaa15bfe3d98dba2b47d56dd1/pyrefly-0.54.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:615081414106dd95873bc39c3a4bed68754c6cc24a8177ac51d22f88f88d3eb3", size = 11785585, upload-time = "2026-02-23T15:44:17.468Z" },
    { url = "https://files.pythonhosted.org/packages/c4/4a/ced02691ed67e5a897714979196f08ad279ec7ec7f63c45e00a75a7f3c0e/pyrefly-0.54.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbcaf20f5fe585079079a95205c1f3cd4542d17228cdf1df560288880623b70", size = 33381977, upload-time = "2026-02-23T15:44:19.736Z" },
    { url = "https://files.pythonhosted.org/packages/0b/ce/72a117ed437c8f6950862181014b41e36f3c3997580e29b772b71e78d587/pyrefly-0.54.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d5da116c0d34acfbd66663addd3ca8aa78a636f6692a66e078126d3620a883", size = 35962821, upload-time = "2026-02-23T15:44:22.357Z" },
    { url = "https://files.pythonhosted.org/packages/85/de/89013f5ae0a35d2b6b01274a92a35ee91431ea001050edf0a16748d39875/pyrefly-0.54.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef3ac27f1a4baaf67aead64287d3163350844794aca6315ad1a9650b16ec26a", size = 38496689, upload-time = "2026-02-23T15:44:25.236Z" },
    { url = "https://files.pythonhosted.org/packages/9f/9a/33b097c7bf498b924742dca32dd5d9c6a3fa6c2b52b63a58eb9e1980ca89/pyrefly-0.54.0-py3-none-win32.whl", hash = "sha256:7d607d72200a8afbd2db10bfefb40160a7a5d709d207161c21649cedd5cfc09a", size = 11295268, upload-time = "2026-02-23T15:44:27.551Z" },
    { url = "https://files.pythonhosted.org/packages/d4/21/9263fd1144d2a3d7342b474f183f7785b3358a1565c864089b780110b933/pyrefly-0.54.0-py3-none-win_amd64.whl", hash = "sha256:fd416f04f89309385696f685bd5c9141011f18c8072f84d31ca20c748546e791", size = 12081810, upload-time = "2026-02-23T15:44:29.461Z" },
    { url = "https://files.pythonhosted.org/packages/ea/5b/fad062a196c064cbc8564de5b2f4d3cb6315f852e3b31e8a1ce74c69a1ea/pyrefly-0.54.0-py3-none-win_arm64.whl", hash = "sha256:f06ab371356c7b1925e0bffe193b738797e71e5dbbff7fb5a13f90ee7521211d", size = 11564930, upload-time = "2026-02-23T15:44:33.053Z" },
    { url = "https://files.pythonhosted.org/packages/39/b0/16e50cf716784513648e23e726a24f71f9544aa4f86103032dcaa5ff71a2/pyrefly-0.55.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:49aafcefe5e2dd4256147db93e5b0ada42bff7d9a60db70e03d1f7055338eec9", size = 12210073, upload-time = "2026-03-03T00:46:15.51Z" },
    { url = "https://files.pythonhosted.org/packages/3a/ad/89500c01bac3083383011600370289fbc67700c5be46e781787392628a3a/pyrefly-0.55.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:2827426e6b28397c13badb93c0ede0fb0f48046a7a89e3d774cda04e8e2067cd", size = 11767474, upload-time = "2026-03-03T00:46:18.003Z" },
    { url = "https://files.pythonhosted.org/packages/78/68/4c66b260f817f304ead11176ff13985625f7c269e653304b4bdb546551af/pyrefly-0.55.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7346b2d64dc575bd61aa3bca854fbf8b5a19a471cbdb45e0ca1e09861b63488c", size = 33260395, upload-time = "2026-03-03T00:46:20.509Z" },
    { url = "https://files.pythonhosted.org/packages/47/09/10bd48c9f860064f29f412954126a827d60f6451512224912c265e26bbe6/pyrefly-0.55.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:233b861b4cff008b1aff62f4f941577ed752e4d0060834229eb9b6826e6973c9", size = 35848269, upload-time = "2026-03-03T00:46:23.418Z" },
    { url = "https://files.pythonhosted.org/packages/a9/39/bc65cdd5243eb2dfea25dd1321f9a5a93e8d9c3a308501c4c6c05d011585/pyrefly-0.55.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5aa85657d76da1d25d081a49f0e33c8fc3ec91c1a0f185a8ed393a5a3d9e178", size = 38449820, upload-time = "2026-03-03T00:46:26.309Z" },
    { url = "https://files.pythonhosted.org/packages/e5/64/58b38963b011af91209e87f868cc85cfc762ec49a4568ce610c45e7a5f40/pyrefly-0.55.0-py3-none-win32.whl", hash = "sha256:23f786a78536a56fed331b245b7d10ec8945bebee7b723491c8d66fdbc155fe6", size = 11259415, upload-time = "2026-03-03T00:46:30.875Z" },
    { url = "https://files.pythonhosted.org/packages/7a/0b/a4aa519ff632a1ea69eec942566951670b870b99b5c08407e1387b85b6a4/pyrefly-0.55.0-py3-none-win_amd64.whl", hash = "sha256:d465b49e999b50eeb069ad23f0f5710651cad2576f9452a82991bef557df91ee", size = 12043581, upload-time = "2026-03-03T00:46:33.674Z" },
    { url = "https://files.pythonhosted.org/packages/f1/51/89017636fbe1ffd166ad478990c6052df615b926182fa6d3c0842b407e89/pyrefly-0.55.0-py3-none-win_arm64.whl", hash = "sha256:732ff490e0e863b296e7c0b2471e08f8ba7952f9fa6e9de09d8347fd67dde77f", size = 11548076, upload-time = "2026-03-03T00:46:36.193Z" },
]

[[package]]
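A short aside on the specifier bumps above (markdown ~=3.5.1 to ~=3.8.1, mypy ~=1.17.1 to ~=1.19.1): ~= is PEP 440's compatible-release operator, so ~=3.8.1 means ">=3.8.1, ==3.8.*", which is why uv.lock re-resolves markdown from 3.5.2 to 3.8.1. Here is a sketch using the packaging library (the versions are taken from the diff; packaging itself is not part of this changeset):

from packaging.specifiers import SpecifierSet
from packaging.version import Version

old, new = SpecifierSet("~=3.5.1"), SpecifierSet("~=3.8.1")
assert Version("3.5.2") in old and Version("3.8.1") not in old  # old range stays within 3.5.*
assert Version("3.8.1") in new and Version("3.9.0") not in new  # ~=3.8.1 == >=3.8.1, ==3.8.*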
@@ -99,7 +99,7 @@
    "cron-parser": "5.4.0",
    "dayjs": "1.11.19",
    "decimal.js": "10.6.0",
    "dompurify": "3.3.0",
    "dompurify": "3.3.2",
    "echarts": "5.6.0",
    "echarts-for-react": "3.0.5",
    "elkjs": "0.9.3",
@@ -169,8 +169,8 @@ importers:
        specifier: 10.6.0
        version: 10.6.0
      dompurify:
        specifier: 3.3.0
        version: 3.3.0
        specifier: 3.3.2
        version: 3.3.2
      echarts:
        specifier: 5.6.0
        version: 5.6.0
@@ -4492,8 +4492,9 @@ packages:
  dompurify@3.2.7:
    resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==}

  dompurify@3.3.0:
    resolution: {integrity: sha512-r+f6MYR1gGN1eJv0TVQbhA7if/U7P87cdPl3HN5rikqaBSBxLiCb/b9O+2eG0cxz0ghyU+mU1QkbsOwERMYlWQ==}
  dompurify@3.3.2:
    resolution: {integrity: sha512-6obghkliLdmKa56xdbLOpUZ43pAR6xFy1uOrxBaIDjT+yaRuuybLjGS9eVBoSR/UPU5fq3OXClEHLJNGvbxKpQ==}
    engines: {node: '>=20'}

  domutils@3.2.2:
    resolution: {integrity: sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==}
@@ -4657,7 +4658,7 @@ packages:
      eslint: '*'

  eslint-plugin-better-tailwindcss@https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@a520d15:
    resolution: {integrity: sha512-hbxpqInIW0Q5UIwXEuQxSBjrMd5bYttXeSPU6dfK2zpECKNIzGR+KXZZEdZaPagEMDJosSyQ9RKievmBcCAxfA==, tarball: https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@a520d15}
    resolution: {tarball: https://pkg.pr.new/hyoban/eslint-plugin-better-tailwindcss@a520d15}
    version: 4.3.1
    engines: {node: ^20.19.0 || ^22.12.0 || >=23.0.0}
    peerDependencies:
@@ -6548,9 +6549,6 @@ packages:
    resolution: {integrity: sha512-h36JMxKRqrAxVD8201FrCpyeNuUY9Y5zZwujr20fFO77tpUtGa6EZzfKw/3WaiBX95fq7+MpsuMLNdSnORAwSA==}
    engines: {node: '>=14.18.0'}

  randombytes@2.1.0:
    resolution: {integrity: sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==}

  rc@1.2.8:
    resolution: {integrity: sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==}
    hasBin: true
@@ -6946,9 +6944,6 @@ packages:
    engines: {node: '>=10'}
    hasBin: true

  serialize-javascript@6.0.2:
    resolution: {integrity: sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g==}

  seroval-plugins@1.5.0:
    resolution: {integrity: sha512-EAHqADIQondwRZIdeW2I636zgsODzoBDwb3PT/+7TLDWyw1Dy/Xv7iGUIEXXav7usHDE9HVhOU61irI3EnyyHA==}
    engines: {node: '>=10'}
@@ -7223,8 +7218,8 @@ packages:
    engines: {node: '>=18'}
    deprecated: Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me

  terser-webpack-plugin@5.3.16:
    resolution: {integrity: sha512-h9oBFCWrq78NyWWVcSwZarJkZ01c2AyGrzs1crmHZO3QUg9D61Wu4NPjBy69n7JqylFF5y+CsUZYmYEIZ3mR+Q==}
  terser-webpack-plugin@5.3.17:
    resolution: {integrity: sha512-YR7PtUp6GMU91BgSJmlaX/rS2lGDbAF7D+Wtq7hRO+MiljNmodYvqslzCFiYVAgW+Qoaaia/QUIP4lGXufjdZw==}
    engines: {node: '>= 10.13.0'}
    peerDependencies:
      '@swc/core': '*'
@@ -7567,7 +7562,7 @@ packages:
    resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}

  vinext@https://pkg.pr.new/hyoban/vinext@556a6d6:
    resolution: {integrity: sha512-Sz8RkTDsY6cnGrevlQi4nXgahu8okEGsdKY5m31d/L9tXo35bNETMHfVee5gaI2UKZS9LMcffWaTOxxINUgogQ==, tarball: https://pkg.pr.new/hyoban/vinext@556a6d6}
    resolution: {tarball: https://pkg.pr.new/hyoban/vinext@556a6d6}
    version: 0.0.5
    engines: {node: '>=22'}
    hasBin: true
@@ -12198,7 +12193,7 @@ snapshots:
    optionalDependencies:
      '@types/trusted-types': 2.0.7

  dompurify@3.3.0:
  dompurify@3.3.2:
    optionalDependencies:
      '@types/trusted-types': 2.0.7

@@ -13978,7 +13973,7 @@ snapshots:
      d3-sankey: 0.12.3
      dagre-d3-es: 7.0.11
      dayjs: 1.11.19
      dompurify: 3.3.0
      dompurify: 3.3.2
      katex: 0.16.25
      khroma: 2.1.0
      lodash-es: 4.17.23
@@ -14124,8 +14119,8 @@ snapshots:

  micromark-extension-mdxjs@3.0.0:
    dependencies:
      acorn: 8.16.0
      acorn-jsx: 5.3.2(acorn@8.16.0)
      acorn: 8.15.0
      acorn-jsx: 5.3.2(acorn@8.15.0)
      micromark-extension-mdx-expression: 3.0.1
      micromark-extension-mdx-jsx: 3.0.2
      micromark-extension-mdx-md: 2.0.0
@@ -14776,10 +14771,6 @@ snapshots:

  radash@12.1.1: {}

  randombytes@2.1.0:
    dependencies:
      safe-buffer: 5.2.1

  rc@1.2.8:
    dependencies:
      deep-extend: 0.6.0
@@ -15284,7 +15275,8 @@ snapshots:
    dependencies:
      tslib: 2.8.1

  safe-buffer@5.2.1: {}
  safe-buffer@5.2.1:
    optional: true

  sass@1.93.2:
    dependencies:
@@ -15335,10 +15327,6 @@ snapshots:

  semver@7.7.4: {}

  serialize-javascript@6.0.2:
    dependencies:
      randombytes: 2.1.0

  seroval-plugins@1.5.0(seroval@1.5.0):
    dependencies:
      seroval: 1.5.0
@@ -15681,12 +15669,11 @@ snapshots:
      minizlib: 3.1.0
      yallist: 5.0.0

  terser-webpack-plugin@5.3.16(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
  terser-webpack-plugin@5.3.17(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)):
    dependencies:
      '@jridgewell/trace-mapping': 0.3.31
      jest-worker: 27.5.1
      schema-utils: 4.3.3
      serialize-javascript: 6.0.2
      terser: 5.46.0
      webpack: 5.104.1(esbuild@0.27.2)(uglify-js@3.19.3)
    optionalDependencies:
@@ -16249,7 +16236,7 @@ snapshots:
      neo-async: 2.6.2
      schema-utils: 4.3.3
      tapable: 2.3.0
      terser-webpack-plugin: 5.3.16(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3))
      terser-webpack-plugin: 5.3.17(esbuild@0.27.2)(uglify-js@3.19.3)(webpack@5.104.1(esbuild@0.27.2)(uglify-js@3.19.3))
      watchpack: 2.5.1
      webpack-sources: 3.3.4
    transitivePeerDependencies: