diff --git a/api/AGENTS.md b/api/AGENTS.md
index 8e5d9f600d..a45c4fd3b4 100644
--- a/api/AGENTS.md
+++ b/api/AGENTS.md
@@ -51,6 +51,7 @@ This is the default standard for backend code in this repo. Follow it for new co
 
 - Use Ruff for formatting and linting (follow `.ruff.toml`).
 - Keep each line under 120 characters (including spaces).
+- Avoid using bare `# type: ignore` to ignore all type violations. Always specify an error code (e.g. `# type: ignore[operator, attr-defined]`).
 
 ### Naming Conventions
 
diff --git a/api/celery_entrypoint.py b/api/celery_entrypoint.py
index 28fa0972e8..11c2e39731 100644
--- a/api/celery_entrypoint.py
+++ b/api/celery_entrypoint.py
@@ -1,5 +1,5 @@
-import psycogreen.gevent as pscycogreen_gevent  # type: ignore
-from grpc.experimental import gevent as grpc_gevent  # type: ignore
+import psycogreen.gevent as pscycogreen_gevent  # type: ignore[reportMissingTypeStubs]
+from grpc.experimental import gevent as grpc_gevent  # type: ignore[reportMissingTypeStubs]
 
 # grpc gevent
 grpc_gevent.init_gevent()
diff --git a/api/configs/__init__.py b/api/configs/__init__.py
index 1932046322..fcb6df7237 100644
--- a/api/configs/__init__.py
+++ b/api/configs/__init__.py
@@ -1,3 +1,3 @@
 from .app_config import DifyConfig
 
-dify_config = DifyConfig()  # type: ignore
+dify_config = DifyConfig()  # type: ignore[call-arg, assignment]
diff --git a/api/context/flask_app_context.py b/api/context/flask_app_context.py
index 324a9ee8b4..cfe5758f60 100644
--- a/api/context/flask_app_context.py
+++ b/api/context/flask_app_context.py
@@ -72,7 +72,7 @@ def capture_flask_context(user: Any = None) -> IExecutionContext:
         RuntimeError: If called outside Flask context
     """
     # Get Flask app instance
-    flask_app = current_app._get_current_object()  # type: ignore
+    flask_app = current_app._get_current_object()  # type: ignore[attr-defined]
 
     # Save current user if available
     saved_user = user
diff --git a/api/controllers/common/fields.py b/api/controllers/common/fields.py
index ff5326dade..eff879c3ac 100644
--- a/api/controllers/common/fields.py
+++ b/api/controllers/common/fields.py
@@ -51,7 +51,7 @@ class Site(BaseModel):
     show_workflow_steps: bool
     use_icon_as_answer_icon: bool
 
-    @computed_field(return_type=str | None)  # type: ignore
+    @computed_field(return_type=str | None)  # type: ignore[prop-decorator]
     @property
     def icon_url(self) -> str | None:
         if self.icon and self.icon_type == IconType.IMAGE:
diff --git a/api/controllers/console/apikey.py b/api/controllers/console/apikey.py
index 783cb5c444..73c8c3915a 100644
--- a/api/controllers/console/apikey.py
+++ b/api/controllers/console/apikey.py
@@ -148,7 +148,7 @@ class AppApiKeyListResource(BaseApiKeyListResource):
     @console_ns.doc(description="Get all API keys for an app")
     @console_ns.doc(params={"resource_id": "App ID"})
     @console_ns.response(200, "Success", api_key_list_model)
-    def get(self, resource_id):  # type: ignore
+    def get(self, resource_id):  # type: ignore[misc]
         """Get all API keys for an app"""
         return super().get(resource_id)
 
@@ -157,7 +157,7 @@ class AppApiKeyListResource(BaseApiKeyListResource):
     @console_ns.doc(params={"resource_id": "App ID"})
     @console_ns.response(201, "API key created successfully", api_key_item_model)
     @console_ns.response(400, "Maximum keys exceeded")
-    def post(self, resource_id):  # type: ignore
+    def post(self, resource_id):  # type: ignore[misc]
         """Create a new API key for an app"""
         return super().post(resource_id)
 
@@ -188,7 +188,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
     @console_ns.doc(description="Get all API keys for a dataset")
     @console_ns.doc(params={"resource_id": "Dataset ID"})
     @console_ns.response(200, "Success", api_key_list_model)
-    def get(self, resource_id):  # type: ignore
+    def get(self, resource_id):  # type: ignore[misc]
         """Get all API keys for a dataset"""
         return super().get(resource_id)
 
@@ -197,7 +197,7 @@ class DatasetApiKeyListResource(BaseApiKeyListResource):
     @console_ns.doc(params={"resource_id": "Dataset ID"})
     @console_ns.response(201, "API key created successfully", api_key_item_model)
     @console_ns.response(400, "Maximum keys exceeded")
-    def post(self, resource_id):  # type: ignore
+    def post(self, resource_id):  # type: ignore[misc]
         """Create a new API key for a dataset"""
         return super().post(resource_id)
 
diff --git a/api/controllers/console/app/advanced_prompt_template.py b/api/controllers/console/app/advanced_prompt_template.py
index 3bd61feb44..1cfc7dce16 100644
--- a/api/controllers/console/app/advanced_prompt_template.py
+++ b/api/controllers/console/app/advanced_prompt_template.py
@@ -34,6 +34,6 @@ class AdvancedPromptTemplateList(Resource):
     @login_required
     @account_initialization_required
     def get(self):
-        args = AdvancedPromptTemplateQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
+        args = AdvancedPromptTemplateQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore[arg-type]
 
         return AdvancedPromptTemplateService.get_prompt(args.model_dump())
diff --git a/api/controllers/console/app/agent.py b/api/controllers/console/app/agent.py
index cfdb9cf417..d44291b7e0 100644
--- a/api/controllers/console/app/agent.py
+++ b/api/controllers/console/app/agent.py
@@ -44,6 +44,6 @@ class AgentLogApi(Resource):
     @get_app_model(mode=[AppMode.AGENT_CHAT])
     def get(self, app_model):
         """Get agent logs"""
-        args = AgentLogQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
+        args = AgentLogQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore[arg-type]
 
         return AgentService.get_agent_logs(app_model, args.conversation_id, args.message_id)
diff --git a/api/controllers/console/app/annotation.py b/api/controllers/console/app/annotation.py
index 9931bb5dd7..e57b7dc20b 100644
--- a/api/controllers/console/app/annotation.py
+++ b/api/controllers/console/app/annotation.py
@@ -206,7 +206,7 @@ class AnnotationApi(Resource):
     @account_initialization_required
     @edit_permission_required
     def get(self, app_id):
-        args = AnnotationListQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore
+        args = AnnotationListQuery.model_validate(request.args.to_dict(flat=True))  # type: ignore[arg-type]
         page = args.page
         limit = args.limit
         keyword = args.keyword
diff --git a/api/controllers/console/app/app.py b/api/controllers/console/app/app.py
index 5ac0e342e6..10a3773bba 100644
--- a/api/controllers/console/app/app.py
+++ b/api/controllers/console/app/app.py
@@ -299,7 +299,7 @@ class Site(ResponseModel):
     updated_by: str | None = None
     updated_at: int | None = None
 
-    @computed_field(return_type=str | None)  # type: ignore
+    @computed_field(return_type=str | None)  # type: ignore[prop-decorator]
     @property
     def icon_url(self) -> str | None:
         return _build_icon_url(self.icon_type, self.icon)
@@ -349,7 +349,7 @@ class AppPartial(ResponseModel):
     author_name: str | None = None
     has_draft_trigger: bool | None = None
 
-    @computed_field(return_type=str | None)  # type: ignore
+    @computed_field(return_type=str | None)  # type: ignore[prop-decorator]
     @property
     def icon_url(self) -> str | None:
         return _build_icon_url(self.icon_type, self.icon)
@@ -397,7 +397,7 @@
class AppDetailWithSite(AppDetail): deleted_tools: list[DeletedTool] = Field(default_factory=list) site: Site | None = None - @computed_field(return_type=str | None) # type: ignore + @computed_field(return_type=str | None) # type: ignore[prop-decorator] @property def icon_url(self) -> str | None: return _build_icon_url(self.icon_type, self.icon) @@ -473,7 +473,7 @@ class AppListApi(Resource): """Get app list""" current_user, current_tenant_id = current_account_with_tenant() - args = AppListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = AppListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] args_dict = args.model_dump() # get app list @@ -692,7 +692,7 @@ class AppExportApi(Resource): @edit_permission_required def get(self, app_model): """Export app""" - args = AppExportQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = AppExportQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] payload = AppExportResponse( data=AppDslService.export_dsl( diff --git a/api/controllers/console/app/audio.py b/api/controllers/console/app/audio.py index 2c5e8d29ee..0e382ca6e6 100644 --- a/api/controllers/console/app/audio.py +++ b/api/controllers/console/app/audio.py @@ -173,7 +173,7 @@ class TextModesApi(Resource): @account_initialization_required def get(self, app_model): try: - args = TextToSpeechVoiceQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = TextToSpeechVoiceQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] response = AudioService.transcript_tts_voices( tenant_id=app_model.tenant_id, diff --git a/api/controllers/console/app/conversation.py b/api/controllers/console/app/conversation.py index d329d22309..5dc0b113b2 100644 --- a/api/controllers/console/app/conversation.py +++ b/api/controllers/console/app/conversation.py @@ -342,7 +342,7 @@ class CompletionConversationApi(Resource): @edit_permission_required def get(self, app_model): current_user, _ = current_account_with_tenant() - args = CompletionConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = CompletionConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] query = sa.select(Conversation).where( Conversation.app_id == app_model.id, Conversation.mode == "completion", Conversation.is_deleted.is_(False) @@ -378,7 +378,7 @@ class CompletionConversationApi(Resource): if args.annotation_status == "annotated": query = ( query.options(selectinload(Conversation.message_annotations)) # type: ignore[arg-type] - .join( # type: ignore + .join( # type: ignore[arg-type] MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id ) .distinct() @@ -455,7 +455,7 @@ class ChatConversationApi(Resource): @edit_permission_required def get(self, app_model): current_user, _ = current_account_with_tenant() - args = ChatConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ChatConversationQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] subquery = ( sa.select(Conversation.id.label("conversation_id"), EndUser.session_id.label("from_end_user_session_id")) @@ -515,7 +515,7 @@ class ChatConversationApi(Resource): case "annotated": query = ( query.options(selectinload(Conversation.message_annotations)) # type: ignore[arg-type] - .join( # type: ignore + .join( # type: ignore[arg-type] MessageAnnotation, MessageAnnotation.conversation_id == Conversation.id ) 
.distinct() diff --git a/api/controllers/console/app/conversation_variables.py b/api/controllers/console/app/conversation_variables.py index 368a6112ba..f2d61c933f 100644 --- a/api/controllers/console/app/conversation_variables.py +++ b/api/controllers/console/app/conversation_variables.py @@ -55,7 +55,7 @@ class ConversationVariablesApi(Resource): @get_app_model(mode=AppMode.ADVANCED_CHAT) @marshal_with(paginated_conversation_variable_model) def get(self, app_model): - args = ConversationVariablesQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ConversationVariablesQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] stmt = ( select(ConversationVariable) diff --git a/api/controllers/console/app/ops_trace.py b/api/controllers/console/app/ops_trace.py index cbcf513162..7f140633e2 100644 --- a/api/controllers/console/app/ops_trace.py +++ b/api/controllers/console/app/ops_trace.py @@ -50,7 +50,7 @@ class TraceAppConfigApi(Resource): @login_required @account_initialization_required def get(self, app_id): - args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: trace_config = OpsService.get_tracing_app_config(app_id=app_id, tracing_provider=args.tracing_provider) @@ -121,7 +121,7 @@ class TraceAppConfigApi(Resource): @account_initialization_required def delete(self, app_id): """Delete an existing trace app configuration""" - args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = TraceProviderQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: result = OpsService.delete_tracing_app_config(app_id=app_id, tracing_provider=args.tracing_provider) diff --git a/api/controllers/console/app/statistic.py b/api/controllers/console/app/statistic.py index ffa28b1c95..4044cf27dc 100644 --- a/api/controllers/console/app/statistic.py +++ b/api/controllers/console/app/statistic.py @@ -54,7 +54,7 @@ class DailyMessageStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT @@ -111,7 +111,7 @@ class DailyConversationStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT @@ -167,7 +167,7 @@ class DailyTerminalsStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT @@ -224,7 +224,7 @@ class DailyTokenCostStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = 
StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT @@ -284,7 +284,7 @@ class AverageSessionInteractionStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("c.created_at") sql_query = f"""SELECT @@ -360,7 +360,7 @@ class UserSatisfactionRateStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("m.created_at") sql_query = f"""SELECT @@ -426,7 +426,7 @@ class AverageResponseTimeStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT @@ -482,7 +482,7 @@ class TokensPerSecondStatistic(Resource): @account_initialization_required def get(self, app_model): account, _ = current_account_with_tenant() - args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = StatisticTimeRangeQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] converted_created_at = convert_datetime_to_date("created_at") sql_query = f"""SELECT diff --git a/api/controllers/console/app/workflow.py b/api/controllers/console/app/workflow.py index d59aa44718..6b6ef2b83b 100644 --- a/api/controllers/console/app/workflow.py +++ b/api/controllers/console/app/workflow.py @@ -900,7 +900,7 @@ class DefaultBlockConfigApi(Resource): """ Get default block config """ - args = DefaultBlockConfigQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = DefaultBlockConfigQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] filters = None if args.q: @@ -968,7 +968,7 @@ class PublishedAllWorkflowApi(Resource): """ current_user, _ = current_account_with_tenant() - args = WorkflowListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] page = args.page limit = args.limit user_id = args.user_id diff --git a/api/controllers/console/app/workflow_app_log.py b/api/controllers/console/app/workflow_app_log.py index 9b148c3f18..bb28c4b6d6 100644 --- a/api/controllers/console/app/workflow_app_log.py +++ b/api/controllers/console/app/workflow_app_log.py @@ -41,7 +41,7 @@ class WorkflowAppLogQuery(BaseModel): def parse_datetime(cls, value: str | None) -> datetime | None: if value in (None, ""): return None - return isoparse(value) # type: ignore + return isoparse(value) # type: ignore[arg-type] @field_validator("detail", mode="before") @classmethod @@ -83,7 +83,7 @@ class WorkflowAppLogApi(Resource): """ Get workflow app logs """ - args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = 
WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] # get paginate workflow app logs workflow_app_service = WorkflowAppService() @@ -121,7 +121,7 @@ class WorkflowArchivedLogApi(Resource): """ Get workflow archived logs """ - args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowAppLogQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] workflow_app_service = WorkflowAppService() with Session(db.engine) as session: diff --git a/api/controllers/console/app/workflow_draft_variable.py b/api/controllers/console/app/workflow_draft_variable.py index b78d97a382..bfa66ad6e5 100644 --- a/api/controllers/console/app/workflow_draft_variable.py +++ b/api/controllers/console/app/workflow_draft_variable.py @@ -233,7 +233,7 @@ class WorkflowVariableCollectionApi(Resource): """ Get draft workflow """ - args = WorkflowDraftVariableListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowDraftVariableListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] # fetch draft workflow by app_model workflow_service = WorkflowService() diff --git a/api/controllers/console/app/workflow_run.py b/api/controllers/console/app/workflow_run.py index 7ac653395e..ee26c3b28c 100644 --- a/api/controllers/console/app/workflow_run.py +++ b/api/controllers/console/app/workflow_run.py @@ -195,7 +195,7 @@ class AdvancedChatAppWorkflowRunListApi(Resource): """ Get advanced chat app workflow run list """ - args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] args = args_model.model_dump(exclude_none=True) # Default to DEBUGGING if not specified @@ -293,7 +293,7 @@ class AdvancedChatAppWorkflowRunCountApi(Resource): """ Get advanced chat workflow runs count statistics """ - args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] args = args_model.model_dump(exclude_none=True) # Default to DEBUGGING if not specified @@ -337,7 +337,7 @@ class WorkflowRunListApi(Resource): """ Get workflow run list """ - args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args_model = WorkflowRunListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] args = args_model.model_dump(exclude_none=True) # Default to DEBUGGING for workflow if not specified (backward compatibility) @@ -385,7 +385,7 @@ class WorkflowRunCountApi(Resource): """ Get workflow runs count statistics """ - args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args_model = WorkflowRunCountQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] args = args_model.model_dump(exclude_none=True) # Default to DEBUGGING for workflow if not specified (backward compatibility) diff --git a/api/controllers/console/app/workflow_statistic.py b/api/controllers/console/app/workflow_statistic.py index e48cf42762..9b1c81b43e 100644 --- a/api/controllers/console/app/workflow_statistic.py +++ b/api/controllers/console/app/workflow_statistic.py @@ -53,7 +53,7 @@ class WorkflowDailyRunsStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = 
WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] assert account.timezone is not None @@ -93,7 +93,7 @@ class WorkflowDailyTerminalsStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] assert account.timezone is not None @@ -133,7 +133,7 @@ class WorkflowDailyTokenCostStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] assert account.timezone is not None @@ -173,7 +173,7 @@ class WorkflowAverageAppInteractionStatistic(Resource): def get(self, app_model): account, _ = current_account_with_tenant() - args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = WorkflowStatisticQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] assert account.timezone is not None diff --git a/api/controllers/console/app/workflow_trigger.py b/api/controllers/console/app/workflow_trigger.py index 8236e766ae..864d9149a8 100644 --- a/api/controllers/console/app/workflow_trigger.py +++ b/api/controllers/console/app/workflow_trigger.py @@ -60,7 +60,7 @@ class WebhookTriggerApi(Resource): @marshal_with(webhook_trigger_model) def get(self, app_model: App): """Get webhook trigger for a node""" - args = Parser.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = Parser.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] node_id = args.node_id @@ -114,9 +114,9 @@ class AppTriggersApi(Resource): url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/" for trigger in triggers: if trigger.trigger_type == "trigger-plugin": - trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore + trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore[attr-defined,operator] else: - trigger.icon = "" # type: ignore + trigger.icon = "" # type: ignore[attr-defined] return {"data": triggers} @@ -159,8 +159,8 @@ class AppTriggerEnableApi(Resource): # Add computed icon field url_prefix = dify_config.CONSOLE_API_URL + "/console/api/workspaces/current/tool-provider/builtin/" if trigger.trigger_type == "trigger-plugin": - trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore + trigger.icon = url_prefix + trigger.provider_name + "/icon" # type: ignore[attr-defined,operator] else: - trigger.icon = "" # type: ignore + trigger.icon = "" # type: ignore[attr-defined] return trigger diff --git a/api/controllers/console/auth/activate.py b/api/controllers/console/auth/activate.py index f741107b87..fb4baba055 100644 --- a/api/controllers/console/auth/activate.py +++ b/api/controllers/console/auth/activate.py @@ -60,7 +60,7 @@ class ActivateCheckApi(Resource): ), ) def get(self): - args = ActivateCheckQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ActivateCheckQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] workspaceId = args.workspace_id token = args.token diff --git 
a/api/controllers/console/billing/billing.py b/api/controllers/console/billing/billing.py index ac039f9c5d..0c741db3dc 100644 --- a/api/controllers/console/billing/billing.py +++ b/api/controllers/console/billing/billing.py @@ -36,7 +36,7 @@ class Subscription(Resource): @only_edition_cloud def get(self): current_user, current_tenant_id = current_account_with_tenant() - args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = SubscriptionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] BillingService.is_tenant_owner_or_admin(current_user) return BillingService.get_subscription(args.plan, args.interval, current_user.email, current_tenant_id) diff --git a/api/controllers/console/billing/compliance.py b/api/controllers/console/billing/compliance.py index afc5f92b68..67c59dd89f 100644 --- a/api/controllers/console/billing/compliance.py +++ b/api/controllers/console/billing/compliance.py @@ -31,7 +31,7 @@ class ComplianceApi(Resource): @only_edition_cloud def get(self): current_user, current_tenant_id = current_account_with_tenant() - args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ComplianceDownloadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] ip_address = extract_remote_ip(request) device_info = request.headers.get("User-Agent", "Unknown device") diff --git a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py index 7caf5b52ed..9dd45db648 100644 --- a/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py +++ b/api/controllers/console/datasets/rag_pipeline/datasource_content_preview.py @@ -1,5 +1,5 @@ -from flask_restx import ( # type: ignore - Resource, # type: ignore +from flask_restx import ( + Resource, ) from pydantic import BaseModel from werkzeug.exceptions import Forbidden diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py index af142b4646..0d8df4d32e 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_import.py @@ -1,5 +1,5 @@ from flask import request -from flask_restx import Resource, fields, marshal_with # type: ignore +from flask_restx import Resource, fields, marshal_with from pydantic import BaseModel, Field from sqlalchemy.orm import Session diff --git a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py index 3912cc73ca..8ff7892576 100644 --- a/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py +++ b/api/controllers/console/datasets/rag_pipeline/rag_pipeline_workflow.py @@ -3,7 +3,7 @@ import logging from typing import Any, Literal, cast from flask import abort, request -from flask_restx import Resource, marshal_with # type: ignore +from flask_restx import Resource, marshal_with from pydantic import BaseModel, Field from sqlalchemy.orm import Session from werkzeug.exceptions import BadRequest, Forbidden, InternalServerError, NotFound diff --git a/api/controllers/console/explore/recommended_app.py b/api/controllers/console/explore/recommended_app.py index c9920c97cf..dfae9c0099 100644 --- a/api/controllers/console/explore/recommended_app.py +++ b/api/controllers/console/explore/recommended_app.py 
@@ -63,7 +63,7 @@ class RecommendedAppListApi(Resource): @marshal_with(recommended_app_list_model) def get(self): # language args - args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = RecommendedAppsQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] language = args.language if language and language in languages: language_prefix = language diff --git a/api/controllers/console/extension.py b/api/controllers/console/extension.py index efa46c9779..d741655820 100644 --- a/api/controllers/console/extension.py +++ b/api/controllers/console/extension.py @@ -49,7 +49,7 @@ class CodeBasedExtensionAPI(Resource): @login_required @account_initialization_required def get(self): - query = CodeBasedExtensionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + query = CodeBasedExtensionQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] return {"module": query.module, "data": CodeBasedExtensionService.get_code_based_extension(query.module)} diff --git a/api/controllers/console/workspace/account.py b/api/controllers/console/workspace/account.py index 6f93ff1e70..54b4643da5 100644 --- a/api/controllers/console/workspace/account.py +++ b/api/controllers/console/workspace/account.py @@ -519,7 +519,7 @@ class EducationAutoCompleteApi(Resource): @cloud_edition_billing_enabled @marshal_with(data_fields) def get(self): - payload = request.args.to_dict(flat=True) # type: ignore + payload = request.args.to_dict(flat=True) # type: ignore[arg-type] args = EducationAutocompleteQuery.model_validate(payload) return BillingService.EducationIdentity.autocomplete(args.keywords, args.page, args.limit) diff --git a/api/controllers/console/workspace/endpoint.py b/api/controllers/console/workspace/endpoint.py index 538c5fb561..5faa22ff8a 100644 --- a/api/controllers/console/workspace/endpoint.py +++ b/api/controllers/console/workspace/endpoint.py @@ -138,7 +138,7 @@ class EndpointListApi(Resource): def get(self): user, tenant_id = current_account_with_tenant() - args = EndpointListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = EndpointListQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] page = args.page page_size = args.page_size @@ -171,7 +171,7 @@ class EndpointListForSinglePluginApi(Resource): def get(self): user, tenant_id = current_account_with_tenant() - args = EndpointListForPluginQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = EndpointListForPluginQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] page = args.page page_size = args.page_size diff --git a/api/controllers/console/workspace/model_providers.py b/api/controllers/console/workspace/model_providers.py index db3b02ae94..f64d4449d5 100644 --- a/api/controllers/console/workspace/model_providers.py +++ b/api/controllers/console/workspace/model_providers.py @@ -99,7 +99,7 @@ class ModelProviderListApi(Resource): _, current_tenant_id = current_account_with_tenant() tenant_id = current_tenant_id - payload = request.args.to_dict(flat=True) # type: ignore + payload = request.args.to_dict(flat=True) # type: ignore[arg-type] args = ParserModelList.model_validate(payload) model_provider_service = ModelProviderService() @@ -118,7 +118,7 @@ class ModelProviderCredentialApi(Resource): _, current_tenant_id = current_account_with_tenant() tenant_id = current_tenant_id # if credential_id is not provided, return current used credential - payload 
= request.args.to_dict(flat=True) # type: ignore + payload = request.args.to_dict(flat=True) # type: ignore[arg-type] args = ParserCredentialId.model_validate(payload) model_provider_service = ModelProviderService() diff --git a/api/controllers/console/workspace/models.py b/api/controllers/console/workspace/models.py index d7eceb656c..cabc0ae7b0 100644 --- a/api/controllers/console/workspace/models.py +++ b/api/controllers/console/workspace/models.py @@ -133,7 +133,7 @@ class DefaultModelApi(Resource): def get(self): _, tenant_id = current_account_with_tenant() - args = ParserGetDefault.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserGetDefault.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] model_provider_service = ModelProviderService() default_model_entity = model_provider_service.get_default_model_of_model_type( @@ -261,7 +261,7 @@ class ModelProviderModelCredentialApi(Resource): def get(self, provider: str): _, tenant_id = current_account_with_tenant() - args = ParserGetCredentials.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserGetCredentials.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] model_provider_service = ModelProviderService() current_credential = model_provider_service.get_model_credential( @@ -513,7 +513,7 @@ class ModelProviderModelParameterRuleApi(Resource): @login_required @account_initialization_required def get(self, provider: str): - args = ParserParameter.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserParameter.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] _, tenant_id = current_account_with_tenant() model_provider_service = ModelProviderService() diff --git a/api/controllers/console/workspace/plugin.py b/api/controllers/console/workspace/plugin.py index ee537367c7..e68ac48329 100644 --- a/api/controllers/console/workspace/plugin.py +++ b/api/controllers/console/workspace/plugin.py @@ -211,7 +211,7 @@ class PluginListApi(Resource): @account_initialization_required def get(self): _, tenant_id = current_account_with_tenant() - args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserList.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: plugins_with_total = PluginService.list_with_total(tenant_id, args.page, args.page_size) except PluginDaemonClientSideError as e: @@ -261,7 +261,7 @@ class PluginIconApi(Resource): @console_ns.expect(console_ns.models[ParserIcon.__name__]) @setup_required def get(self): - args = ParserIcon.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserIcon.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: icon_bytes, mimetype = PluginService.get_asset(args.tenant_id, args.filename) @@ -279,7 +279,7 @@ class PluginAssetApi(Resource): @login_required @account_initialization_required def get(self): - args = ParserAsset.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserAsset.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] _, tenant_id = current_account_with_tenant() try: @@ -421,7 +421,7 @@ class PluginFetchMarketplacePkgApi(Resource): @plugin_permission_required(install_required=True) def get(self): _, tenant_id = current_account_with_tenant() - args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = 
ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: return jsonable_encoder( @@ -446,7 +446,7 @@ class PluginFetchManifestApi(Resource): def get(self): _, tenant_id = current_account_with_tenant() - args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserPluginIdentifierQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: return jsonable_encoder( @@ -466,7 +466,7 @@ class PluginFetchInstallTasksApi(Resource): def get(self): _, tenant_id = current_account_with_tenant() - args = ParserTasks.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserTasks.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: return jsonable_encoder({"tasks": PluginService.fetch_install_tasks(tenant_id, args.page, args.page_size)}) @@ -660,7 +660,7 @@ class PluginFetchDynamicSelectOptionsApi(Resource): current_user, tenant_id = current_account_with_tenant() user_id = current_user.id - args = ParserDynamicOptions.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserDynamicOptions.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: options = PluginParameterService.get_dynamic_select_options( @@ -822,7 +822,7 @@ class PluginReadmeApi(Resource): @account_initialization_required def get(self): _, tenant_id = current_account_with_tenant() - args = ParserReadme.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = ParserReadme.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] return jsonable_encoder( {"readme": PluginService.fetch_plugin_readme(tenant_id, args.plugin_unique_identifier, args.language)} ) diff --git a/api/controllers/console/workspace/tool_providers.py b/api/controllers/console/workspace/tool_providers.py index b38f05795a..e17527ebaa 100644 --- a/api/controllers/console/workspace/tool_providers.py +++ b/api/controllers/console/workspace/tool_providers.py @@ -267,7 +267,7 @@ class ToolProviderListApi(Resource): raw_args = request.args.to_dict() query = ToolProviderListQuery.model_validate(raw_args) - return ToolCommonService.list_tool_providers(user_id, tenant_id, query.type) # type: ignore + return ToolCommonService.list_tool_providers(user_id, tenant_id, query.type) # type: ignore[arg-type, operator] @console_ns.route("/workspaces/current/tool-provider/builtin//tools") diff --git a/api/controllers/console/workspace/workspace.py b/api/controllers/console/workspace/workspace.py index 88fd2c010f..d48d2f5646 100644 --- a/api/controllers/console/workspace/workspace.py +++ b/api/controllers/console/workspace/workspace.py @@ -155,7 +155,7 @@ class WorkspaceListApi(Resource): @setup_required @admin_required def get(self): - payload = request.args.to_dict(flat=True) # type: ignore + payload = request.args.to_dict(flat=True) # type: ignore[arg-type] args = WorkspaceListQuery.model_validate(payload) stmt = select(Tenant).order_by(Tenant.created_at.desc()) diff --git a/api/controllers/console/wraps.py b/api/controllers/console/wraps.py index 6785ba0c34..3cbb874d79 100644 --- a/api/controllers/console/wraps.py +++ b/api/controllers/console/wraps.py @@ -313,7 +313,7 @@ def edit_permission_required(f: Callable[P, R]): from libs.login import current_user from models import Account - user = current_user._get_current_object() # type: ignore + user = current_user._get_current_object() # type: ignore[attr-defined] if not isinstance(user, Account): 
raise Forbidden() if not current_user.has_edit_permission: diff --git a/api/controllers/files/image_preview.py b/api/controllers/files/image_preview.py index a91e745f80..854824e6c6 100644 --- a/api/controllers/files/image_preview.py +++ b/api/controllers/files/image_preview.py @@ -58,7 +58,7 @@ class ImagePreviewApi(Resource): def get(self, file_id): file_id = str(file_id) - args = FileSignatureQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = FileSignatureQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] timestamp = args.timestamp nonce = args.nonce sign = args.sign @@ -100,7 +100,7 @@ class FilePreviewApi(Resource): def get(self, file_id): file_id = str(file_id) - args = FilePreviewQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = FilePreviewQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] try: generator, upload_file = FileService(db.engine).get_file_generator_by_file_id( diff --git a/api/controllers/files/upload.py b/api/controllers/files/upload.py index 52690a12e1..8150f7b0f5 100644 --- a/api/controllers/files/upload.py +++ b/api/controllers/files/upload.py @@ -69,7 +69,7 @@ class PluginUploadFileApi(Resource): FileTooLargeError: File exceeds size limit UnsupportedFileTypeError: File type not supported """ - args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore + args = PluginUploadQuery.model_validate(request.args.to_dict(flat=True)) # type: ignore[arg-type] file = request.files.get("file") if file is None: diff --git a/api/controllers/inner_api/mail.py b/api/controllers/inner_api/mail.py index 885ab7b78d..bcfa23dfb7 100644 --- a/api/controllers/inner_api/mail.py +++ b/api/controllers/inner_api/mail.py @@ -32,7 +32,7 @@ class BaseMail(Resource): to=args.to, subject=args.subject, body=args.body, - substitutions=args.substitutions, # type: ignore + substitutions=args.substitutions, # type: ignore[arg-type] ) return {"message": "success"}, 200 diff --git a/api/controllers/inner_api/plugin/wraps.py b/api/controllers/inner_api/plugin/wraps.py index d6e3ebfbcd..bc665f2bc8 100644 --- a/api/controllers/inner_api/plugin/wraps.py +++ b/api/controllers/inner_api/plugin/wraps.py @@ -89,8 +89,8 @@ def get_user_tenant(view_func: Callable[P, R]): user = get_user(tenant_id, user_id) kwargs["user_model"] = user - current_app.login_manager._update_request_context_with_user(user) # type: ignore - user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore + current_app.login_manager._update_request_context_with_user(user) # type: ignore[attr-defined] + user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore[attr-defined] return view_func(*args, **kwargs) diff --git a/api/controllers/service_api/app/file.py b/api/controllers/service_api/app/file.py index 6f6dadf768..778d3a48cf 100644 --- a/api/controllers/service_api/app/file.py +++ b/api/controllers/service_api/app/file.py @@ -34,7 +34,7 @@ class FileApi(Resource): 415: "Unsupported file type", } ) - @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) # type: ignore + @validate_app_token(fetch_user_arg=FetchUserArg(fetch_from=WhereisUserArg.FORM)) # type: ignore[misc] @service_api_ns.response(HTTPStatus.CREATED, "File uploaded", service_api_ns.models[FileResponse.__name__]) def post(self, app_model: App, end_user: EndUser): """Upload a file for use in conversations. 
diff --git a/api/controllers/service_api/wraps.py b/api/controllers/service_api/wraps.py index 7aa5b2f092..0f051f254d 100644 --- a/api/controllers/service_api/wraps.py +++ b/api/controllers/service_api/wraps.py @@ -101,8 +101,8 @@ def validate_app_token( kwargs["end_user"] = end_user # Set EndUser as current logged-in user for flask_login.current_user - current_app.login_manager._update_request_context_with_user(end_user) # type: ignore - user_logged_in.send(current_app._get_current_object(), user=end_user) # type: ignore + current_app.login_manager._update_request_context_with_user(end_user) # type: ignore[attr-defined] + user_logged_in.send(current_app._get_current_object(), user=end_user) # type: ignore[attr-defined] else: # For service API without end-user context, ensure an Account is logged in # so services relying on current_account_with_tenant() work correctly. @@ -121,8 +121,8 @@ def validate_app_token( if tenant_owner_info: tenant_model, account = tenant_owner_info account.current_tenant = tenant_model - current_app.login_manager._update_request_context_with_user(account) # type: ignore - user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore + current_app.login_manager._update_request_context_with_user(account) # type: ignore[attr-defined] + user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore[attr-defined] else: raise Unauthorized("Tenant owner account not found or tenant is not active.") @@ -303,8 +303,8 @@ def validate_dataset_token( # Login admin if account: account.current_tenant = tenant - current_app.login_manager._update_request_context_with_user(account) # type: ignore - user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore + current_app.login_manager._update_request_context_with_user(account) # type: ignore[attr-defined] + user_logged_in.send(current_app._get_current_object(), user=current_user) # type: ignore[attr-defined] else: raise Unauthorized("Tenant owner account does not exist.") else: diff --git a/api/core/app/apps/advanced_chat/app_generator.py b/api/core/app/apps/advanced_chat/app_generator.py index 5d974335ff..ad225dc3d4 100644 --- a/api/core/app/apps/advanced_chat/app_generator.py +++ b/api/core/app/apps/advanced_chat/app_generator.py @@ -171,7 +171,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): if invoke_from == InvokeFrom.DEBUGGER: # always enable retriever resource in debugger mode - app_config.additional_features.show_retrieve_source = True # type: ignore + app_config.additional_features.show_retrieve_source = True # type: ignore[attr-defined] # init application generate entity application_generate_entity = AdvancedChatAppGenerateEntity( @@ -504,7 +504,7 @@ class AdvancedChatAppGenerator(MessageBasedAppGenerator): worker_thread = threading.Thread( target=self._generate_worker, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "application_generate_entity": application_generate_entity, "queue_manager": queue_manager, "conversation_id": conversation.id, diff --git a/api/core/app/apps/agent_chat/app_generator.py b/api/core/app/apps/agent_chat/app_generator.py index 76a067d7b6..477f4ff5c9 100644 --- a/api/core/app/apps/agent_chat/app_generator.py +++ b/api/core/app/apps/agent_chat/app_generator.py @@ -191,7 +191,7 @@ class AgentChatAppGenerator(MessageBasedAppGenerator): worker_thread = threading.Thread( target=self._generate_worker, 
kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "context": context, "application_generate_entity": application_generate_entity, "queue_manager": queue_manager, diff --git a/api/core/app/apps/chat/app_generator.py b/api/core/app/apps/chat/app_generator.py index 91cf54c774..f7acd4ad82 100644 --- a/api/core/app/apps/chat/app_generator.py +++ b/api/core/app/apps/chat/app_generator.py @@ -181,7 +181,7 @@ class ChatAppGenerator(MessageBasedAppGenerator): @copy_current_request_context def worker_with_context(): return self._generate_worker( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] application_generate_entity=application_generate_entity, queue_manager=queue_manager, conversation_id=conversation.id, diff --git a/api/core/app/apps/completion/app_generator.py b/api/core/app/apps/completion/app_generator.py index 002b914ef1..66d0810b6d 100644 --- a/api/core/app/apps/completion/app_generator.py +++ b/api/core/app/apps/completion/app_generator.py @@ -164,7 +164,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator): @copy_current_request_context def worker_with_context(): return self._generate_worker( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] application_generate_entity=application_generate_entity, queue_manager=queue_manager, message_id=message.id, @@ -327,7 +327,7 @@ class CompletionAppGenerator(MessageBasedAppGenerator): @copy_current_request_context def worker_with_context(): return self._generate_worker( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] application_generate_entity=application_generate_entity, queue_manager=queue_manager, message_id=message.id, diff --git a/api/core/app/apps/pipeline/pipeline_generator.py b/api/core/app/apps/pipeline/pipeline_generator.py index 19d67eb108..0d0450e35e 100644 --- a/api/core/app/apps/pipeline/pipeline_generator.py +++ b/api/core/app/apps/pipeline/pipeline_generator.py @@ -217,7 +217,7 @@ class PipelineGenerator(BaseAppGenerator): ) if invoke_from == InvokeFrom.DEBUGGER or is_retry: return self._generate( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] context=contextvars.copy_context(), pipeline=pipeline, workflow_id=workflow.id, @@ -315,7 +315,7 @@ class PipelineGenerator(BaseAppGenerator): worker_thread = threading.Thread( target=self._generate_worker, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "context": context, "queue_manager": queue_manager, "application_generate_entity": application_generate_entity, @@ -428,7 +428,7 @@ class PipelineGenerator(BaseAppGenerator): ) return self._generate( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] pipeline=pipeline, workflow_id=workflow.id, user=user, @@ -524,7 +524,7 @@ class PipelineGenerator(BaseAppGenerator): ) return self._generate( - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] pipeline=pipeline, workflow_id=workflow.id, user=user, diff --git 
a/api/core/app/apps/workflow/app_generator.py b/api/core/app/apps/workflow/app_generator.py index 6fbe19a3b2..7b6f29ac19 100644 --- a/api/core/app/apps/workflow/app_generator.py +++ b/api/core/app/apps/workflow/app_generator.py @@ -320,7 +320,7 @@ class WorkflowAppGenerator(BaseAppGenerator): worker_thread = threading.Thread( target=self._generate_worker, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "application_generate_entity": application_generate_entity, "queue_manager": queue_manager, "context": context, diff --git a/api/core/app/entities/app_invoke_entities.py b/api/core/app/entities/app_invoke_entities.py index ecbb1cf2f3..56b94efaac 100644 --- a/api/core/app/entities/app_invoke_entities.py +++ b/api/core/app/entities/app_invoke_entities.py @@ -138,7 +138,7 @@ class EasyUIBasedAppGenerateEntity(AppGenerateEntity): """ # app config - app_config: EasyUIBasedAppConfig = None # type: ignore + app_config: EasyUIBasedAppConfig = None # type: ignore[assignment] model_conf: ModelConfigWithCredentialsEntity query: str = "" @@ -202,7 +202,7 @@ class AdvancedChatAppGenerateEntity(ConversationAppGenerateEntity): """ # app config - app_config: WorkflowUIBasedAppConfig = None # type: ignore + app_config: WorkflowUIBasedAppConfig = None # type: ignore[assignment] workflow_run_id: str | None = None query: str @@ -234,7 +234,7 @@ class WorkflowAppGenerateEntity(AppGenerateEntity): """ # app config - app_config: WorkflowUIBasedAppConfig = None # type: ignore + app_config: WorkflowUIBasedAppConfig = None # type: ignore[assignment] workflow_execution_id: str class SingleIterationRunEntity(BaseModel): diff --git a/api/core/app/layers/timeslice_layer.py b/api/core/app/layers/timeslice_layer.py index d7ca45f209..e2a76fe15e 100644 --- a/api/core/app/layers/timeslice_layer.py +++ b/api/core/app/layers/timeslice_layer.py @@ -2,7 +2,7 @@ import logging import uuid from typing import ClassVar -from apscheduler.schedulers.background import BackgroundScheduler # type: ignore +from apscheduler.schedulers.background import BackgroundScheduler # type: ignore[reportMissingTypeStubs] from dify_graph.graph_engine.entities.commands import CommandType, GraphEngineCommand from dify_graph.graph_engine.layers.base import GraphEngineLayer diff --git a/api/core/app/task_pipeline/message_cycle_manager.py b/api/core/app/task_pipeline/message_cycle_manager.py index 62f27060b4..6d93951339 100644 --- a/api/core/app/task_pipeline/message_cycle_manager.py +++ b/api/core/app/task_pipeline/message_cycle_manager.py @@ -100,7 +100,7 @@ class MessageCycleManager: 1, self._generate_conversation_name_worker, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "conversation_id": conversation_id, "query": query, }, diff --git a/api/core/datasource/local_file/local_file_provider.py b/api/core/datasource/local_file/local_file_provider.py index b2b6f51dd3..b7bc0bc98d 100644 --- a/api/core/datasource/local_file/local_file_provider.py +++ b/api/core/datasource/local_file/local_file_provider.py @@ -31,7 +31,7 @@ class LocalFileDatasourcePluginProviderController(DatasourcePluginProviderContro """ pass - def get_datasource(self, datasource_name: str) -> LocalFileDatasourcePlugin: # type: ignore + def get_datasource(self, datasource_name: str) -> LocalFileDatasourcePlugin: # type: ignore[return-value] """ return datasource with given name """ 
diff --git a/api/core/datasource/online_document/online_document_provider.py b/api/core/datasource/online_document/online_document_provider.py
index a128b479f4..dc81ab28be 100644
--- a/api/core/datasource/online_document/online_document_provider.py
+++ b/api/core/datasource/online_document/online_document_provider.py
@@ -23,7 +23,7 @@ class OnlineDocumentDatasourcePluginProviderController(DatasourcePluginProviderC
         """
         return DatasourceProviderType.ONLINE_DOCUMENT
 
-    def get_datasource(self, datasource_name: str) -> OnlineDocumentDatasourcePlugin:  # type: ignore
+    def get_datasource(self, datasource_name: str) -> OnlineDocumentDatasourcePlugin:  # type: ignore[return-value]
         """
         return datasource with given name
         """
diff --git a/api/core/datasource/online_drive/online_drive_provider.py b/api/core/datasource/online_drive/online_drive_provider.py
index d0923ed807..60f9f564fc 100644
--- a/api/core/datasource/online_drive/online_drive_provider.py
+++ b/api/core/datasource/online_drive/online_drive_provider.py
@@ -23,7 +23,7 @@ class OnlineDriveDatasourcePluginProviderController(DatasourcePluginProviderCont
         """
         return DatasourceProviderType.ONLINE_DRIVE
 
-    def get_datasource(self, datasource_name: str) -> OnlineDriveDatasourcePlugin:  # type: ignore
+    def get_datasource(self, datasource_name: str) -> OnlineDriveDatasourcePlugin:  # type: ignore[return-value]
         """
         return datasource with given name
         """
diff --git a/api/core/datasource/website_crawl/website_crawl_provider.py b/api/core/datasource/website_crawl/website_crawl_provider.py
index 8c0f20ce2d..8a84a1ba37 100644
--- a/api/core/datasource/website_crawl/website_crawl_provider.py
+++ b/api/core/datasource/website_crawl/website_crawl_provider.py
@@ -27,7 +27,7 @@ class WebsiteCrawlDatasourcePluginProviderCon
         """
         return DatasourceProviderType.WEBSITE_CRAWL
 
-    def get_datasource(self, datasource_name: str) -> WebsiteCrawlDatasourcePlugin:  # type: ignore
+    def get_datasource(self, datasource_name: str) -> WebsiteCrawlDatasourcePlugin:  # type: ignore[return-value]
         """
         return datasource with given name
         """
diff --git a/api/core/external_data_tool/external_data_fetch.py b/api/core/external_data_tool/external_data_fetch.py
index 86bbb7060c..becf73458f 100644
--- a/api/core/external_data_tool/external_data_fetch.py
+++ b/api/core/external_data_tool/external_data_fetch.py
@@ -37,7 +37,7 @@ class ExternalDataFetch:
             for tool in external_data_tools:
                 future: Future[tuple[str | None, str | None]] = executor.submit(
                     self._query_external_data_tool,
-                    current_app._get_current_object(),  # type: ignore
+                    current_app._get_current_object(),  # type: ignore[attr-defined]
                     tenant_id,
                     app_id,
                     tool,
diff --git a/api/core/external_data_tool/factory.py b/api/core/external_data_tool/factory.py
index 6c542d681b..88bb349daf 100644
--- a/api/core/external_data_tool/factory.py
+++ b/api/core/external_data_tool/factory.py
@@ -24,7 +24,7 @@ class ExternalDataToolFactory:
         """
         extension_class = code_based_extension.extension_class(ExtensionModule.EXTERNAL_DATA_TOOL, name)
         # FIXME mypy issue here, figure out how to fix it
-        extension_class.validate_config(tenant_id, config)  # type: ignore
+        extension_class.validate_config(tenant_id, config)  # type: ignore[attr-defined, operator]
 
     def query(self, inputs: Mapping[str, Any], query: str | None = None) -> str:
         """
diff --git a/api/core/indexing_runner.py b/api/core/indexing_runner.py
index 52776ee626..53b6abe116 100644
--- a/api/core/indexing_runner.py
+++ b/api/core/indexing_runner.py
@@ -592,7 +592,7 @@ class IndexingRunner:
         # create keyword index
         create_keyword_thread = threading.Thread(
             target=self._process_keyword_index,
-            args=(current_app._get_current_object(), dataset.id, dataset_document.id, documents),  # type: ignore
+            args=(current_app._get_current_object(), dataset.id, dataset_document.id, documents),  # type: ignore[attr-defined]
         )
         create_keyword_thread.start()
 
@@ -615,7 +615,7 @@ class IndexingRunner:
                 futures.append(
                     executor.submit(
                         self._process_chunk,
-                        current_app._get_current_object(),  # type: ignore
+                        current_app._get_current_object(),  # type: ignore[attr-defined]
                         index_processor,
                         chunk_documents,
                         dataset,
@@ -742,7 +742,7 @@ class IndexingRunner:
         if extra_update_params:
             update_params.update(extra_update_params)
 
-        db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params)  # type: ignore
+        db.session.query(DatasetDocument).filter_by(id=document_id).update(update_params)  # type: ignore[arg-type, operator]
         db.session.commit()
 
     @staticmethod
diff --git a/api/core/moderation/factory.py b/api/core/moderation/factory.py
index c2c8be6d6d..43c62d4858 100644
--- a/api/core/moderation/factory.py
+++ b/api/core/moderation/factory.py
@@ -22,7 +22,7 @@ class ModerationFactory:
         """
         extension_class = code_based_extension.extension_class(ExtensionModule.MODERATION, name)
         # FIXME: mypy error, try to fix it instead of using type: ignore
-        extension_class.validate_config(tenant_id, config)  # type: ignore
+        extension_class.validate_config(tenant_id, config)  # type: ignore[attr-defined, operator]
 
     def moderation_for_inputs(self, inputs: dict, query: str = "") -> ModerationInputsResult:
         """
diff --git a/api/core/moderation/output_moderation.py b/api/core/moderation/output_moderation.py
index a97e3d4253..24482583f3 100644
--- a/api/core/moderation/output_moderation.py
+++ b/api/core/moderation/output_moderation.py
@@ -75,7 +75,7 @@ class OutputModeration(BaseModel):
         thread = threading.Thread(
             target=self.worker,
             kwargs={
-                "flask_app": current_app._get_current_object(),  # type: ignore
+                "flask_app": current_app._get_current_object(),  # type: ignore[attr-defined]
                 "buffer_size": buffer_size if buffer_size > 0 else dify_config.MODERATION_BUFFER_SIZE,
             },
         )
diff --git a/api/core/ops/ops_trace_manager.py b/api/core/ops/ops_trace_manager.py
index 9ac753240b..ff9608d85a 100644
--- a/api/core/ops/ops_trace_manager.py
+++ b/api/core/ops/ops_trace_manager.py
@@ -936,7 +936,7 @@ class TraceQueueManager:
         self.app_id = app_id
         self.user_id = user_id
         self.trace_instance = OpsTraceManager.get_ops_trace_instance(app_id)
-        self.flask_app = current_app._get_current_object()  # type: ignore
+        self.flask_app = current_app._get_current_object()  # type: ignore[attr-defined]
 
         if trace_manager_timer is None:
             self.start_timer()
@@ -995,4 +995,4 @@ class TraceQueueManager:
                     "file_id": file_id,
                     "app_id": task.app_id,
                 }
-                process_trace_tasks.delay(file_info)  # type: ignore
+                process_trace_tasks.delay(file_info)  # type: ignore[operator]
diff --git a/api/core/plugin/impl/base.py b/api/core/plugin/impl/base.py
index 737d204105..e2c0516e80 100644
--- a/api/core/plugin/impl/base.py
+++ b/api/core/plugin/impl/base.py
@@ -198,7 +198,7 @@ class BasePluginClient:
         Make a stream request to the plugin daemon inner API and yield the response as a model.
         """
         for line in self._stream_request(method, path, params, headers, data, files):
-            yield type_(**json.loads(line))  # type: ignore
+            yield type_(**json.loads(line))  # type: ignore[misc, operator]
 
     def _request_with_model(
         self,
@@ -246,7 +246,7 @@ class BasePluginClient:
                 if transformer:
                     json_response = transformer(json_response)
                 # https://stackoverflow.com/questions/59634937/variable-foo-class-is-not-valid-as-type-but-why
-                rep = PluginDaemonBasicResponse[type_].model_validate(json_response)  # type: ignore
+                rep = PluginDaemonBasicResponse[type_].model_validate(json_response)  # type: ignore[valid-type, operator]
             except Exception:
                 msg = (
                     f"Failed to parse response from plugin daemon to PluginDaemonBasicResponse [{str(type_.__name__)}],"
@@ -283,7 +283,7 @@ class BasePluginClient:
         """
         for line in self._stream_request(method, path, params, headers, data, files):
             try:
-                rep = PluginDaemonBasicResponse[type_].model_validate_json(line)  # type: ignore
+                rep = PluginDaemonBasicResponse[type_].model_validate_json(line)  # type: ignore[valid-type, operator]
             except (ValueError, TypeError):
                 # TODO modify this when line_data has code and message
                 try:
diff --git a/api/core/plugin/utils/chunk_merger.py b/api/core/plugin/utils/chunk_merger.py
index 28cb70f96a..7b67f057cb 100644
--- a/api/core/plugin/utils/chunk_merger.py
+++ b/api/core/plugin/utils/chunk_merger.py
@@ -88,7 +88,7 @@ def merge_blob_chunks(
                     meta=resp.meta,
                 )
                 assert isinstance(merged_message, (ToolInvokeMessage, AgentInvokeMessage))
-                yield merged_message  # type: ignore
+                yield merged_message  # type: ignore[misc]
                 # Clean up the buffer
                 del files[chunk_id]
             else:
diff --git a/api/core/provider_manager.py b/api/core/provider_manager.py
index 6d2be0ab7a..ad5ac873fd 100644
--- a/api/core/provider_manager.py
+++ b/api/core/provider_manager.py
@@ -629,7 +629,7 @@ class ProviderManager:
                         provider_name=ModelProviderID(provider_name).provider_name,
                         provider_type=ProviderType.SYSTEM,
                         quota_type=quota.quota_type,
-                        quota_limit=0,  # type: ignore
+                        quota_limit=0,  # type: ignore[assignment]
                         quota_used=0,
                         is_valid=True,
                     )
diff --git a/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py b/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
index 57a60e6970..6e57ef003f 100644
--- a/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
+++ b/api/core/rag/datasource/keyword/jieba/jieba_keyword_table_handler.py
@@ -44,7 +44,7 @@ class JiebaKeywordTableHandler:
         │ & cache to default │    │  _SimpleTFIDF   │
         └────────────────────────┘    └─────────────────┘
         """
-        import jieba.analyse  # type: ignore
+        import jieba.analyse
 
         tfidf = getattr(jieba.analyse, "default_tfidf", None)
         if tfidf is not None:
@@ -53,7 +53,7 @@ class JiebaKeywordTableHandler:
         tfidf_class = getattr(jieba.analyse, "TFIDF", None)
         if tfidf_class is None:
             try:
-                from jieba.analyse.tfidf import TFIDF  # type: ignore
+                from jieba.analyse.tfidf import TFIDF
 
                 tfidf_class = TFIDF
             except Exception:
@@ -69,7 +69,7 @@ class JiebaKeywordTableHandler:
     @staticmethod
    def _build_fallback_tfidf():
        """Fallback lightweight TFIDF for environments missing jieba's TFIDF."""
-        import jieba  # type: ignore
+        import jieba
 
         from core.rag.datasource.keyword.jieba.stopwords import STOPWORDS
 
diff --git a/api/core/rag/datasource/retrieval_service.py b/api/core/rag/datasource/retrieval_service.py
index 713319ab9d..8215545a53 100644
--- a/api/core/rag/datasource/retrieval_service.py
+++ b/api/core/rag/datasource/retrieval_service.py
@@ -115,14 +115,14 @@ class RetrievalService:
         exceptions:
list[str] = [] # Optimize multithreading with thread pools - with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore + with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore[operator] futures = [] retrieval_service = RetrievalService() if query: futures.append( executor.submit( retrieval_service._retrieve, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] retrieval_method=retrieval_method, dataset=dataset, query=query, @@ -142,7 +142,7 @@ class RetrievalService: futures.append( executor.submit( retrieval_service._retrieve, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] retrieval_method=retrieval_method, dataset=dataset, query=None, @@ -541,7 +541,7 @@ class RetrievalService: DocumentSegment.status == "completed", DocumentSegment.index_node_id.in_(index_node_ids), ) - index_node_segments = session.execute(document_segment_stmt).scalars().all() # type: ignore + index_node_segments = session.execute(document_segment_stmt).scalars().all() # type: ignore[assignment, operator] for index_node_segment in index_node_segments: doc_segment_map[index_node_segment.id] = [index_node_segment.index_node_id] @@ -551,7 +551,7 @@ class RetrievalService: DocumentSegment.status == "completed", DocumentSegment.id.in_(segment_ids), ) - segments = session.execute(document_segment_stmt).scalars().all() # type: ignore + segments = session.execute(document_segment_stmt).scalars().all() # type: ignore[assignment, operator] if index_node_segments: segments.extend(index_node_segments) @@ -564,7 +564,7 @@ class RetrievalService: DocumentSegment.status == "completed", DocumentSegment.id.in_(summary_segment_ids_list), ) - summary_segments = session.execute(summary_segment_stmt).scalars().all() # type: ignore + summary_segments = session.execute(summary_segment_stmt).scalars().all() # type: ignore[operator] segments.extend(summary_segments) # Add summary segment IDs to segment_ids for summary query for seg in summary_segments: @@ -747,13 +747,13 @@ class RetrievalService: with flask_app.app_context(): all_documents_item: list[Document] = [] # Optimize multithreading with thread pools - with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore + with ThreadPoolExecutor(max_workers=dify_config.RETRIEVAL_SERVICE_EXECUTORS) as executor: # type: ignore[operator] futures = [] if retrieval_method == RetrievalMethod.KEYWORD_SEARCH and query: futures.append( executor.submit( self.keyword_search, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] dataset_id=dataset.id, query=query, top_k=top_k, @@ -767,7 +767,7 @@ class RetrievalService: futures.append( executor.submit( self.embedding_search, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] dataset_id=dataset.id, query=query, top_k=top_k, @@ -784,7 +784,7 @@ class RetrievalService: futures.append( executor.submit( self.embedding_search, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] dataset_id=dataset.id, query=attachment_id, top_k=top_k, @@ -801,7 +801,7 @@ class RetrievalService: futures.append( 
executor.submit( self.full_text_index_search, - flask_app=current_app._get_current_object(), # type: ignore + flask_app=current_app._get_current_object(), # type: ignore[attr-defined] dataset_id=dataset.id, query=query, top_k=top_k, diff --git a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py index 702200e0ac..baa531f755 100644 --- a/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py +++ b/api/core/rag/datasource/vdb/analyticdb/analyticdb_vector_openapi.py @@ -55,8 +55,8 @@ class AnalyticdbVectorOpenAPIConfig(BaseModel): class AnalyticdbVectorOpenAPI: def __init__(self, collection_name: str, config: AnalyticdbVectorOpenAPIConfig): try: - from alibabacloud_gpdb20160503.client import Client # type: ignore - from alibabacloud_tea_openapi import models as open_api_models # type: ignore + from alibabacloud_gpdb20160503.client import Client + from alibabacloud_tea_openapi import models as open_api_models except: raise ImportError(_import_err_msg) self._collection_name = collection_name.lower() @@ -77,7 +77,7 @@ class AnalyticdbVectorOpenAPI: redis_client.set(database_exist_cache_key, 1, ex=3600) def _initialize_vector_database(self): - from alibabacloud_gpdb20160503 import models as gpdb_20160503_models # type: ignore + from alibabacloud_gpdb20160503 import models as gpdb_20160503_models request = gpdb_20160503_models.InitVectorDatabaseRequest( dbinstance_id=self.config.instance_id, @@ -89,7 +89,7 @@ class AnalyticdbVectorOpenAPI: def _create_namespace_if_not_exists(self): from alibabacloud_gpdb20160503 import models as gpdb_20160503_models - from Tea.exceptions import TeaException # type: ignore + from Tea.exceptions import TeaException try: request = gpdb_20160503_models.DescribeNamespaceRequest( diff --git a/api/core/rag/datasource/vdb/baidu/baidu_vector.py b/api/core/rag/datasource/vdb/baidu/baidu_vector.py index 9f5842e449..e31b6c1e4d 100644 --- a/api/core/rag/datasource/vdb/baidu/baidu_vector.py +++ b/api/core/rag/datasource/vdb/baidu/baidu_vector.py @@ -6,12 +6,19 @@ from typing import Any import numpy as np from pydantic import BaseModel, model_validator -from pymochow import MochowClient # type: ignore -from pymochow.auth.bce_credentials import BceCredentials # type: ignore -from pymochow.configuration import Configuration # type: ignore -from pymochow.exception import ServerError # type: ignore +from pymochow import MochowClient +from pymochow.auth.bce_credentials import BceCredentials +from pymochow.configuration import Configuration +from pymochow.exception import ServerError from pymochow.model.database import Database -from pymochow.model.enum import FieldType, IndexState, IndexType, MetricType, ServerErrCode, TableState # type: ignore +from pymochow.model.enum import ( + FieldType, + IndexState, + IndexType, + MetricType, + ServerErrCode, + TableState, +) from pymochow.model.schema import ( AutoBuildRowCountIncrement, Field, @@ -24,8 +31,14 @@ from pymochow.model.schema import ( InvertedIndexParseMode, Schema, VectorIndex, -) # type: ignore -from pymochow.model.table import AnnSearch, BM25SearchRequest, HNSWSearchParams, Partition, Row # type: ignore +) +from pymochow.model.table import ( + AnnSearch, + BM25SearchRequest, + HNSWSearchParams, + Partition, + Row, +) from configs import dify_config from core.rag.datasource.vdb.field import Field as VDBField diff --git a/api/core/rag/datasource/vdb/chroma/chroma_vector.py b/api/core/rag/datasource/vdb/chroma/chroma_vector.py 
index cbc846f716..aabce134da 100644 --- a/api/core/rag/datasource/vdb/chroma/chroma_vector.py +++ b/api/core/rag/datasource/vdb/chroma/chroma_vector.py @@ -72,13 +72,13 @@ class ChromaVector(BaseVector): collection = self._client.get_or_create_collection(self._collection_name) # FIXME: chromadb using numpy array, fix the type error later - collection.upsert(ids=uuids, documents=texts, embeddings=embeddings, metadatas=metadatas) # type: ignore + collection.upsert(ids=uuids, documents=texts, embeddings=embeddings, metadatas=metadatas) # type: ignore[arg-type, operator] return uuids def delete_by_metadata_field(self, key: str, value: str): collection = self._client.get_or_create_collection(self._collection_name) # FIXME: fix the type error later - collection.delete(where={key: {"$eq": value}}) # type: ignore + collection.delete(where={key: {"$eq": value}}) # type: ignore[arg-type, dict-item] def delete(self): self._client.delete_collection(self._collection_name) @@ -101,10 +101,10 @@ class ChromaVector(BaseVector): results: QueryResult = collection.query( query_embeddings=query_vector, n_results=kwargs.get("top_k", 4), - where={"document_id": {"$in": document_ids_filter}}, # type: ignore + where={"document_id": {"$in": document_ids_filter}}, # type: ignore[arg-type, dict-item] ) else: - results: QueryResult = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4)) # type: ignore + results: QueryResult = collection.query(query_embeddings=query_vector, n_results=kwargs.get("top_k", 4)) # type: ignore[arg-type, no-redef] score_threshold = float(kwargs.get("score_threshold") or 0.0) # Check if results contain data diff --git a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py index 8e8120fc10..4a3e0e670c 100644 --- a/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py +++ b/api/core/rag/datasource/vdb/clickzetta/clickzetta_vector.py @@ -10,11 +10,11 @@ import time import uuid from typing import TYPE_CHECKING, Any -import clickzetta # type: ignore +import clickzetta from pydantic import BaseModel, model_validator if TYPE_CHECKING: - from clickzetta.connector.v0.connection import Connection # type: ignore + from clickzetta.connector.v0.connection import Connection from configs import dify_config from core.rag.datasource.vdb.field import Field diff --git a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py index 9a4a65cf6f..69606a0464 100644 --- a/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py +++ b/api/core/rag/datasource/vdb/couchbase/couchbase_vector.py @@ -5,14 +5,14 @@ import uuid from datetime import timedelta from typing import Any -from couchbase import search # type: ignore -from couchbase.auth import PasswordAuthenticator # type: ignore -from couchbase.cluster import Cluster # type: ignore -from couchbase.management.search import SearchIndex # type: ignore +from couchbase import search +from couchbase.auth import PasswordAuthenticator +from couchbase.cluster import Cluster +from couchbase.management.search import SearchIndex # needed for options -- cluster, timeout, SQL++ (N1QL) query, etc. 
-from couchbase.options import ClusterOptions, SearchOptions # type: ignore -from couchbase.vector_search import VectorQuery, VectorSearch # type: ignore +from couchbase.options import ClusterOptions, SearchOptions +from couchbase.vector_search import VectorQuery, VectorSearch from flask import current_app from pydantic import BaseModel, model_validator diff --git a/api/core/rag/datasource/vdb/hologres/hologres_vector.py b/api/core/rag/datasource/vdb/hologres/hologres_vector.py index 36b259e494..fc5e44ae86 100644 --- a/api/core/rag/datasource/vdb/hologres/hologres_vector.py +++ b/api/core/rag/datasource/vdb/hologres/hologres_vector.py @@ -3,7 +3,7 @@ import logging import time from typing import Any -import holo_search_sdk as holo # type: ignore +import holo_search_sdk as holo from holo_search_sdk.types import BaseQuantizationType, DistanceType, TokenizerType from psycopg import sql as psql from pydantic import BaseModel, model_validator diff --git a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py index bfcb620618..4b8c8f8e28 100644 --- a/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py +++ b/api/core/rag/datasource/vdb/lindorm/lindorm_vector.py @@ -268,7 +268,7 @@ class LindormVectorStore(BaseVector): try: params = {"timeout": self._client_config.request_timeout} if self._using_ugc: - params["routing"] = self._routing # type: ignore + params["routing"] = self._routing # type: ignore[arg-type, assignment] response = self._client.search(index=self._collection_name, body=search_query, params=params) except Exception: logger.exception("Error executing vector search, query: %s", search_query) diff --git a/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py index 14955c8d7c..c71789e472 100644 --- a/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py +++ b/api/core/rag/datasource/vdb/matrixone/matrixone_vector.py @@ -5,7 +5,7 @@ from collections.abc import Callable from functools import wraps from typing import Any, Concatenate, ParamSpec, TypeVar -from mo_vector.client import MoVectorClient # type: ignore +from mo_vector.client import MoVectorClient from pydantic import BaseModel, model_validator from configs import dify_config diff --git a/api/core/rag/datasource/vdb/milvus/milvus_vector.py b/api/core/rag/datasource/vdb/milvus/milvus_vector.py index 96eb465401..dc25c7315c 100644 --- a/api/core/rag/datasource/vdb/milvus/milvus_vector.py +++ b/api/core/rag/datasource/vdb/milvus/milvus_vector.py @@ -4,8 +4,8 @@ from typing import Any from packaging import version from pydantic import BaseModel, model_validator -from pymilvus import MilvusClient, MilvusException # type: ignore -from pymilvus.milvus_client import IndexParams # type: ignore +from pymilvus import MilvusClient, MilvusException +from pymilvus.milvus_client import IndexParams from configs import dify_config from core.rag.datasource.vdb.field import Field @@ -304,8 +304,14 @@ class MilvusVector(BaseVector): return # Grab the existing collection if it exists if not self._client.has_collection(self._collection_name): - from pymilvus import CollectionSchema, DataType, FieldSchema, Function, FunctionType # type: ignore - from pymilvus.orm.types import infer_dtype_bydata # type: ignore + from pymilvus import ( + CollectionSchema, + DataType, + FieldSchema, + Function, + FunctionType, + ) + from pymilvus.orm.types import infer_dtype_bydata # Determine embedding dim dim = len(embeddings[0]) diff --git 
a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py index 86c1e65f47..ac9629df97 100644 --- a/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py +++ b/api/core/rag/datasource/vdb/oceanbase/oceanbase_vector.py @@ -4,7 +4,13 @@ import re from typing import Any, Literal from pydantic import BaseModel, model_validator -from pyobvector import VECTOR, ObVecClient, cosine_distance, inner_product, l2_distance # type: ignore +from pyobvector import ( + VECTOR, + ObVecClient, + cosine_distance, + inner_product, + l2_distance, +) from sqlalchemy import JSON, Column, String from sqlalchemy.dialects.mysql import LONGTEXT from sqlalchemy.exc import SQLAlchemyError diff --git a/api/core/rag/datasource/vdb/oracle/oraclevector.py b/api/core/rag/datasource/vdb/oracle/oraclevector.py index cb05c22b55..d7e78ee1bd 100644 --- a/api/core/rag/datasource/vdb/oracle/oraclevector.py +++ b/api/core/rag/datasource/vdb/oracle/oraclevector.py @@ -5,7 +5,7 @@ import re import uuid from typing import Any -import jieba.posseg as pseg # type: ignore +import jieba.posseg as pseg import numpy import oracledb from oracledb.connection import Connection @@ -271,8 +271,8 @@ class OracleVector(BaseVector): def search_by_full_text(self, query: str, **kwargs: Any) -> list[Document]: # lazy import - import nltk # type: ignore - from nltk.corpus import stopwords # type: ignore + import nltk + from nltk.corpus import stopwords # Validate and sanitize top_k to prevent SQL injection top_k = kwargs.get("top_k", 5) diff --git a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py index 90d9173409..c9fb3e7113 100644 --- a/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py +++ b/api/core/rag/datasource/vdb/pgvecto_rs/pgvecto_rs.py @@ -4,7 +4,7 @@ from typing import Any from uuid import UUID, uuid4 from numpy import ndarray -from pgvecto_rs.sqlalchemy import VECTOR # type: ignore +from pgvecto_rs.sqlalchemy import VECTOR from pydantic import BaseModel, model_validator from sqlalchemy import Float, create_engine, insert, select, text from sqlalchemy import text as sql_text diff --git a/api/core/rag/datasource/vdb/relyt/relyt_vector.py b/api/core/rag/datasource/vdb/relyt/relyt_vector.py index e486375ec2..2abda159fa 100644 --- a/api/core/rag/datasource/vdb/relyt/relyt_vector.py +++ b/api/core/rag/datasource/vdb/relyt/relyt_vector.py @@ -108,7 +108,7 @@ class RelytVector(BaseVector): redis_client.set(collection_exist_cache_key, 1, ex=3600) def add_texts(self, documents: list[Document], embeddings: list[list[float]], **kwargs): - from pgvecto_rs.sqlalchemy import VECTOR # type: ignore + from pgvecto_rs.sqlalchemy import VECTOR ids = [str(uuid.uuid1()) for _ in documents] metadatas = [d.metadata for d in documents if d.metadata is not None] diff --git a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py index f2156afa59..a9dbdc00bf 100644 --- a/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py +++ b/api/core/rag/datasource/vdb/tablestore/tablestore_vector.py @@ -4,7 +4,7 @@ import math from collections.abc import Iterable from typing import Any -import tablestore # type: ignore +import tablestore from pydantic import BaseModel, model_validator from tablestore import BatchGetRowRequest, TableInBatchGetRowItem diff --git a/api/core/rag/datasource/vdb/tencent/tencent_vector.py b/api/core/rag/datasource/vdb/tencent/tencent_vector.py index 
291d047c04..e81cedc108 100644 --- a/api/core/rag/datasource/vdb/tencent/tencent_vector.py +++ b/api/core/rag/datasource/vdb/tencent/tencent_vector.py @@ -4,11 +4,16 @@ import math from typing import Any from pydantic import BaseModel -from tcvdb_text.encoder import BM25Encoder # type: ignore -from tcvectordb import RPCVectorDBClient, VectorDBException # type: ignore -from tcvectordb.model import document, enum # type: ignore -from tcvectordb.model import index as vdb_index # type: ignore -from tcvectordb.model.document import AnnSearch, Filter, KeywordSearch, WeightedRerank # type: ignore +from tcvdb_text.encoder import BM25Encoder +from tcvectordb import RPCVectorDBClient, VectorDBException +from tcvectordb.model import document, enum +from tcvectordb.model import index as vdb_index +from tcvectordb.model.document import ( + AnnSearch, + Filter, + KeywordSearch, + WeightedRerank, +) from configs import dify_config from core.rag.datasource.vdb.vector_base import BaseVector diff --git a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py index 27ae038a06..9d4792f628 100644 --- a/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py +++ b/api/core/rag/datasource/vdb/tidb_vector/tidb_vector.py @@ -50,7 +50,7 @@ class TiDBVector(BaseVector): return VectorType.TIDB_VECTOR def _table(self, dim: int) -> Table: - from tidb_vector.sqlalchemy import VectorType # type: ignore + from tidb_vector.sqlalchemy import VectorType return Table( self._collection_name, diff --git a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py index e5feecf2bc..fee14813ab 100644 --- a/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py +++ b/api/core/rag/datasource/vdb/vikingdb/vikingdb_vector.py @@ -2,7 +2,7 @@ import json from typing import Any from pydantic import BaseModel -from volcengine.viking_db import ( # type: ignore +from volcengine.viking_db import ( Data, DistanceType, Field, @@ -126,7 +126,7 @@ class VikingDBVector(BaseVector): # FIXME: fix the type of metadata later doc = Data( { - vdb_Field.PRIMARY_KEY: metadatas[i]["doc_id"], # type: ignore + vdb_Field.PRIMARY_KEY: metadatas[i]["doc_id"], # type: ignore[operator] vdb_Field.VECTOR: embeddings[i] if embeddings else None, vdb_Field.CONTENT_KEY: page_content, vdb_Field.METADATA_KEY: json.dumps(metadata), diff --git a/api/core/rag/embedding/cached_embedding.py b/api/core/rag/embedding/cached_embedding.py index 6d1b65a055..9afacdfbd4 100644 --- a/api/core/rag/embedding/cached_embedding.py +++ b/api/core/rag/embedding/cached_embedding.py @@ -71,7 +71,7 @@ class CacheEmbedding(Embeddings): for vector in embedding_result.embeddings: try: # FIXME: type ignore for numpy here - normalized_embedding = (vector / np.linalg.norm(vector)).tolist() # type: ignore + normalized_embedding = (vector / np.linalg.norm(vector)).tolist() # type: ignore[call-overload, operator] # stackoverflow best way: https://stackoverflow.com/questions/20319813/how-to-check-list-containing-nan if np.isnan(normalized_embedding).any(): # for issue #11827 float values are not json compliant @@ -154,7 +154,7 @@ class CacheEmbedding(Embeddings): for vector in embedding_result.embeddings: try: # FIXME: type ignore for numpy here - normalized_embedding = (vector / np.linalg.norm(vector)).tolist() # type: ignore + normalized_embedding = (vector / np.linalg.norm(vector)).tolist() # type: ignore[call-overload, operator] # stackoverflow best way: 
https://stackoverflow.com/questions/20319813/how-to-check-list-containing-nan if np.isnan(normalized_embedding).any(): # for issue #11827 float values are not json compliant @@ -207,7 +207,7 @@ class CacheEmbedding(Embeddings): embedding_results = embedding_result.embeddings[0] # FIXME: type ignore for numpy here - embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() # type: ignore + embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() # type: ignore[call-overload, operator] if np.isnan(embedding_results).any(): raise ValueError("Normalized embedding is nan please try again") except Exception as ex: @@ -231,7 +231,7 @@ class CacheEmbedding(Embeddings): ) raise ex - return embedding_results # type: ignore + return embedding_results # type: ignore[return-value] def embed_multimodal_query(self, multimodel_document: dict) -> list[float]: """Embed multimodal documents.""" @@ -250,7 +250,7 @@ class CacheEmbedding(Embeddings): embedding_results = embedding_result.embeddings[0] # FIXME: type ignore for numpy here - embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() # type: ignore + embedding_results = (embedding_results / np.linalg.norm(embedding_results)).tolist() # type: ignore[call-overload, operator] if np.isnan(embedding_results).any(): raise ValueError("Normalized embedding is nan please try again") except Exception as ex: @@ -274,4 +274,4 @@ class CacheEmbedding(Embeddings): ) raise ex - return embedding_results # type: ignore + return embedding_results # type: ignore[return-value] diff --git a/api/core/rag/extractor/extract_processor.py b/api/core/rag/extractor/extract_processor.py index 449be6a448..d7ca7d8145 100644 --- a/api/core/rag/extractor/extract_processor.py +++ b/api/core/rag/extractor/extract_processor.py @@ -75,7 +75,7 @@ class ExtractProcessor: suffix = "" # https://stackoverflow.com/questions/26541416/generate-temporary-file-names-without-creating-actual-file-in-python#comment90414256_26541521 # Generate a temporary filename under the created temp_dir and ensure the directory exists - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore[attr-defined, operator] Path(file_path).write_bytes(response.content) extract_setting = ExtractSetting(datasource_type=DatasourceType.FILE, document_model="text_model") if return_text: @@ -100,7 +100,7 @@ class ExtractProcessor: upload_file: UploadFile = extract_setting.upload_file suffix = Path(upload_file.key).suffix # FIXME mypy: Cannot determine type of 'tempfile._get_candidate_names' better not use it here - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore[attr-defined, operator] storage.download(upload_file.key, file_path) input_file = Path(file_path) file_extension = input_file.suffix.lower() diff --git a/api/core/rag/extractor/notion_extractor.py b/api/core/rag/extractor/notion_extractor.py index 372af8fd94..af1a5e8832 100644 --- a/api/core/rag/extractor/notion_extractor.py +++ b/api/core/rag/extractor/notion_extractor.py @@ -348,7 +348,7 @@ class NotionExtractor(BaseExtractor): db.session.query(DocumentModel).filter_by(id=document_model.id).update( {DocumentModel.data_source_info: json.dumps(data_source_info)} - ) # type: ignore + ) # type: ignore[operator] db.session.commit() def 
get_notion_last_edited_time(self) -> str: diff --git a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py index 3061d957ac..575a596fbf 100644 --- a/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py +++ b/api/core/rag/extractor/unstructured/unstructured_epub_extractor.py @@ -1,6 +1,6 @@ import logging -import pypandoc # type: ignore +import pypandoc from configs import dify_config from core.rag.extractor.extractor_base import BaseExtractor diff --git a/api/core/rag/index_processor/index_processor.py b/api/core/rag/index_processor/index_processor.py index d9145023ac..55c8f9ee59 100644 --- a/api/core/rag/index_processor/index_processor.py +++ b/api/core/rag/index_processor/index_processor.py @@ -175,7 +175,7 @@ class IndexProcessor: flask_app = None try: - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] except RuntimeError: logger.warning("No Flask application context available, summary generation may fail") diff --git a/api/core/rag/index_processor/index_processor_base.py b/api/core/rag/index_processor/index_processor_base.py index a435dfc46a..9253f4931e 100644 --- a/api/core/rag/index_processor/index_processor_base.py +++ b/api/core/rag/index_processor/index_processor_base.py @@ -147,7 +147,7 @@ class BaseIndexProcessor(ABC): embedding_model_instance=embedding_model_instance, ) - return character_splitter # type: ignore + return character_splitter # type: ignore[return-value] def _get_content_files(self, document: Document, current_user: Account | None = None) -> list[AttachmentDocument]: """ diff --git a/api/core/rag/index_processor/processor/paragraph_index_processor.py b/api/core/rag/index_processor/processor/paragraph_index_processor.py index 80163b1707..ec36bd87fa 100644 --- a/api/core/rag/index_processor/processor/paragraph_index_processor.py +++ b/api/core/rag/index_processor/processor/paragraph_index_processor.py @@ -294,7 +294,7 @@ class ParagraphIndexProcessor(BaseIndexProcessor): # Capture Flask app context for worker threads flask_app = None try: - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] except RuntimeError: logger.warning("No Flask application context available, summary generation may fail") diff --git a/api/core/rag/index_processor/processor/parent_child_index_processor.py b/api/core/rag/index_processor/processor/parent_child_index_processor.py index df0761ca73..996ebe05c0 100644 --- a/api/core/rag/index_processor/processor/parent_child_index_processor.py +++ b/api/core/rag/index_processor/processor/parent_child_index_processor.py @@ -379,7 +379,7 @@ class ParentChildIndexProcessor(BaseIndexProcessor): # Capture Flask app context for worker threads flask_app = None try: - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] except RuntimeError: logger.warning("No Flask application context available, summary generation may fail") diff --git a/api/core/rag/index_processor/processor/qa_index_processor.py b/api/core/rag/index_processor/processor/qa_index_processor.py index 62f88b7760..c691d8a11b 100644 --- a/api/core/rag/index_processor/processor/qa_index_processor.py +++ b/api/core/rag/index_processor/processor/qa_index_processor.py @@ -87,8 +87,8 @@ class QAIndexProcessor(BaseIndexProcessor): 
all_documents.extend(split_documents) if preview: self._format_qa_document( - current_app._get_current_object(), # type: ignore - kwargs.get("tenant_id"), # type: ignore + current_app._get_current_object(), # type: ignore[attr-defined] + kwargs.get("tenant_id"), # type: ignore[arg-type] all_documents[0], all_qa_documents, kwargs.get("doc_language", "English"), @@ -101,8 +101,8 @@ class QAIndexProcessor(BaseIndexProcessor): document_format_thread = threading.Thread( target=self._format_qa_document, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore - "tenant_id": kwargs.get("tenant_id"), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] + "tenant_id": kwargs.get("tenant_id"), # type: ignore[arg-type] "document_node": doc, "all_qa_documents": all_qa_documents, "document_language": kwargs.get("doc_language", "English"), @@ -121,7 +121,7 @@ class QAIndexProcessor(BaseIndexProcessor): try: # Skip the first row - df = pd.read_csv(file) # type: ignore + df = pd.read_csv(file) # type: ignore[misc] text_docs = [] for _, row in df.iterrows(): data = Document(page_content=row.iloc[0], metadata={"answer": row.iloc[1]}) diff --git a/api/core/rag/retrieval/dataset_retrieval.py b/api/core/rag/retrieval/dataset_retrieval.py index 78a97f79a5..144e5c236f 100644 --- a/api/core/rag/retrieval/dataset_retrieval.py +++ b/api/core/rag/retrieval/dataset_retrieval.py @@ -417,8 +417,8 @@ class DatasetRetrieval: query, tenant_id, user_id, - retrieve_config.metadata_filtering_mode, # type: ignore - retrieve_config.metadata_model_config, # type: ignore + retrieve_config.metadata_filtering_mode, # type: ignore[arg-type] + retrieve_config.metadata_model_config, # type: ignore[arg-type] retrieve_config.metadata_filtering_conditions, inputs, ) @@ -535,11 +535,11 @@ class DatasetRetrieval: DatasetDocument.enabled == True, DatasetDocument.archived == False, ) - documents = db.session.execute(dataset_document_stmt).scalars().all() # type: ignore + documents = db.session.execute(dataset_document_stmt).scalars().all() # type: ignore[operator] dataset_stmt = select(Dataset).where( Dataset.id.in_(dataset_ids), ) - datasets = db.session.execute(dataset_stmt).scalars().all() # type: ignore + datasets = db.session.execute(dataset_stmt).scalars().all() # type: ignore[operator] dataset_map = {i.id: i for i in datasets} document_map = {i.id: i for i in documents} for record in records: @@ -709,7 +709,7 @@ class DatasetRetrieval: thread = threading.Thread( target=self._on_retrieval_end, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "documents": results, "message_id": message_id, "timer": timer, @@ -783,7 +783,7 @@ class DatasetRetrieval: query_thread = threading.Thread( target=self._multiple_retrieve_thread, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "available_datasets": available_datasets, "metadata_condition": metadata_condition, "metadata_filter_document_ids": metadata_filter_document_ids, @@ -809,7 +809,7 @@ class DatasetRetrieval: attachment_thread = threading.Thread( target=self._multiple_retrieve_thread, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "available_datasets": available_datasets, "metadata_condition": metadata_condition, 
"metadata_filter_document_ids": metadata_filter_document_ids, @@ -850,7 +850,7 @@ class DatasetRetrieval: retrieval_end_thread = threading.Thread( target=self._on_retrieval_end, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "documents": all_documents, "message_id": message_id, "timer": timer, @@ -1313,7 +1313,7 @@ class DatasetRetrieval: DatasetDocument.enabled == True, DatasetDocument.archived == False, ) - filters = [] # type: ignore + filters = [] # type: ignore[var-annotated] metadata_condition = None if metadata_filtering_mode == "disabled": return None, None @@ -1326,28 +1326,28 @@ class DatasetRetrieval: for sequence, filter in enumerate(automatic_metadata_filters): self.process_metadata_filter_func( sequence, - filter.get("condition"), # type: ignore - filter.get("metadata_name"), # type: ignore + filter.get("condition"), # type: ignore[arg-type] + filter.get("metadata_name"), # type: ignore[arg-type] filter.get("value"), - filters, # type: ignore + filters, # type: ignore[arg-type] ) conditions.append( Condition( - name=filter.get("metadata_name"), # type: ignore - comparison_operator=filter.get("condition"), # type: ignore + name=filter.get("metadata_name"), # type: ignore[arg-type] + comparison_operator=filter.get("condition"), # type: ignore[arg-type] value=filter.get("value"), ) ) metadata_condition = MetadataCondition( logical_operator=metadata_filtering_conditions.logical_operator if metadata_filtering_conditions - else "or", # type: ignore + else "or", # type: ignore[assignment] conditions=conditions, ) elif metadata_filtering_mode == "manual": if metadata_filtering_conditions: conditions = [] - for sequence, condition in enumerate(metadata_filtering_conditions.conditions): # type: ignore + for sequence, condition in enumerate(metadata_filtering_conditions.conditions): # type: ignore[arg-type] metadata_name = condition.name expected_value = condition.value if expected_value is not None and condition.comparison_operator not in ("empty", "not empty"): @@ -1374,15 +1374,15 @@ class DatasetRetrieval: else: raise ValueError("Invalid metadata filtering mode") if filters: - if metadata_filtering_conditions and metadata_filtering_conditions.logical_operator == "and": # type: ignore + if metadata_filtering_conditions and metadata_filtering_conditions.logical_operator == "and": # type: ignore[assignment] document_query = document_query.where(and_(*filters)) else: document_query = document_query.where(or_(*filters)) documents = document_query.all() # group by dataset_id - metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore + metadata_filter_document_ids = defaultdict(list) if documents else None # type: ignore[var-annotated] for document in documents: - metadata_filter_document_ids[document.dataset_id].append(document.id) # type: ignore + metadata_filter_document_ids[document.dataset_id].append(document.id) # type: ignore[index] return metadata_filter_document_ids, metadata_condition def _replace_metadata_filter_value(self, text: str, inputs: dict) -> str: diff --git a/api/core/repositories/celery_workflow_execution_repository.py b/api/core/repositories/celery_workflow_execution_repository.py index 57764574d7..1176294286 100644 --- a/api/core/repositories/celery_workflow_execution_repository.py +++ b/api/core/repositories/celery_workflow_execution_repository.py @@ -108,7 +108,7 @@ class 
CeleryWorkflowExecutionRepository(WorkflowExecutionRepository): execution_data = execution.model_dump() # Queue the save operation as a Celery task (fire and forget) - save_workflow_execution_task.delay( # type: ignore + save_workflow_execution_task.delay( # type: ignore[operator] execution_data=execution_data, tenant_id=self._tenant_id, app_id=self._app_id or "", diff --git a/api/core/tools/builtin_tool/provider.py b/api/core/tools/builtin_tool/provider.py index 20cdb3e57f..94f2d7e6ba 100644 --- a/api/core/tools/builtin_tool/provider.py +++ b/api/core/tools/builtin_tool/provider.py @@ -162,7 +162,7 @@ class BuiltinToolProviderController(ToolProviderController): """ return self._get_builtin_tools() - def get_tool(self, tool_name: str) -> BuiltinTool | None: # type: ignore + def get_tool(self, tool_name: str) -> BuiltinTool | None: # type: ignore[override, return-value] """ returns the tool that the provider can provide """ diff --git a/api/core/tools/builtin_tool/providers/audio/tools/asr.py b/api/core/tools/builtin_tool/providers/audio/tools/asr.py index dacc49c746..382245a997 100644 --- a/api/core/tools/builtin_tool/providers/audio/tools/asr.py +++ b/api/core/tools/builtin_tool/providers/audio/tools/asr.py @@ -23,12 +23,12 @@ class ASRTool(BuiltinTool): message_id: str | None = None, ) -> Generator[ToolInvokeMessage, None, None]: file = tool_parameters.get("audio_file") - if file.type != FileType.AUDIO: # type: ignore + if file.type != FileType.AUDIO: # type: ignore[attr-defined, union-attr] yield self.create_text_message("not a valid audio file") return - audio_binary = io.BytesIO(download(file)) # type: ignore + audio_binary = io.BytesIO(download(file)) # type: ignore[arg-type] audio_binary.name = "temp.mp3" - provider, model = tool_parameters.get("model").split("#") # type: ignore + provider, model = tool_parameters.get("model").split("#") # type: ignore[operator, union-attr] model_manager = ModelManager() model_instance = model_manager.get_model_instance( tenant_id=self.runtime.tenant_id, diff --git a/api/core/tools/builtin_tool/providers/audio/tools/tts.py b/api/core/tools/builtin_tool/providers/audio/tools/tts.py index 7818bff0ab..37c7e4344b 100644 --- a/api/core/tools/builtin_tool/providers/audio/tools/tts.py +++ b/api/core/tools/builtin_tool/providers/audio/tools/tts.py @@ -20,7 +20,7 @@ class TTSTool(BuiltinTool): app_id: str | None = None, message_id: str | None = None, ) -> Generator[ToolInvokeMessage, None, None]: - provider, model = tool_parameters.get("model").split("#") # type: ignore + provider, model = tool_parameters.get("model").split("#") # type: ignore[operator, union-attr] voice = tool_parameters.get(f"voice#{provider}#{model}") model_manager = ModelManager() if not self.runtime: @@ -40,7 +40,7 @@ class TTSTool(BuiltinTool): else: raise ValueError("Sorry, no voice available.") tts = model_instance.invoke_tts( - content_text=tool_parameters.get("text"), # type: ignore + content_text=tool_parameters.get("text"), # type: ignore[arg-type] user=user_id, tenant_id=self.runtime.tenant_id, voice=voice, diff --git a/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py b/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py index d0a41b940f..68b342c126 100644 --- a/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py +++ b/api/core/tools/builtin_tool/providers/time/tools/localtime_to_timestamp.py @@ -27,7 +27,7 @@ class LocaltimeToTimestampTool(BuiltinTool): timezone = None time_format = "%Y-%m-%d %H:%M:%S" - 
timestamp = self.localtime_to_timestamp(localtime, time_format, timezone) # type: ignore + timestamp = self.localtime_to_timestamp(localtime, time_format, timezone) # type: ignore[arg-type, misc] if not timestamp: yield self.create_text_message(f"Invalid localtime: {localtime}") return @@ -40,11 +40,11 @@ class LocaltimeToTimestampTool(BuiltinTool): try: local_time = datetime.strptime(localtime, time_format) if local_tz is None: - localtime = local_time.astimezone() # type: ignore + localtime = local_time.astimezone() # type: ignore[assignment, operator] elif isinstance(local_tz, str): local_tz = pytz.timezone(local_tz) - localtime = local_tz.localize(local_time) # type: ignore - timestamp = int(localtime.timestamp()) # type: ignore + localtime = local_tz.localize(local_time) # type: ignore[operator] + timestamp = int(localtime.timestamp()) # type: ignore[attr-defined, operator] return timestamp except Exception as e: raise ToolInvokeError(str(e)) diff --git a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py index e23ae3b001..b4730f1005 100644 --- a/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py +++ b/api/core/tools/builtin_tool/providers/time/tools/timezone_conversion.py @@ -24,7 +24,7 @@ class TimezoneConversionTool(BuiltinTool): current_time = tool_parameters.get("current_time") current_timezone = tool_parameters.get("current_timezone", "Asia/Shanghai") target_timezone = tool_parameters.get("target_timezone", "Asia/Tokyo") - target_time = self.timezone_convert(current_time, current_timezone, target_timezone) # type: ignore + target_time = self.timezone_convert(current_time, current_timezone, target_timezone) # type: ignore[arg-type, misc] if not target_time: yield self.create_text_message( f"Invalid datetime and timezone: {current_time},{current_timezone},{target_timezone}" diff --git a/api/core/tools/plugin_tool/provider.py b/api/core/tools/plugin_tool/provider.py index 3fbbd4c9e5..aebe3e3852 100644 --- a/api/core/tools/plugin_tool/provider.py +++ b/api/core/tools/plugin_tool/provider.py @@ -44,7 +44,7 @@ class PluginToolProviderController(BuiltinToolProviderController): ): raise ToolProviderCredentialValidationError("Invalid credentials") - def get_tool(self, tool_name: str) -> PluginTool: # type: ignore + def get_tool(self, tool_name: str) -> PluginTool: # type: ignore[override, return-value] """ return tool with given name """ @@ -63,7 +63,7 @@ class PluginToolProviderController(BuiltinToolProviderController): plugin_unique_identifier=self.plugin_unique_identifier, ) - def get_tools(self) -> list[PluginTool]: # type: ignore + def get_tools(self) -> list[PluginTool]: # type: ignore[override, return-value] """ get all tools """ diff --git a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py index c2b520fa99..831acc201c 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_multi_retriever_tool.py @@ -53,7 +53,7 @@ class DatasetMultiRetrieverTool(DatasetRetrieverBaseTool): retrieval_thread = threading.Thread( target=self._retriever, kwargs={ - "flask_app": current_app._get_current_object(), # type: ignore + "flask_app": current_app._get_current_object(), # type: ignore[attr-defined] "dataset_id": dataset_id, "query": query, "all_documents": all_documents, diff --git 
a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py index 429b7e6622..28446471b1 100644 --- a/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py +++ b/api/core/tools/utils/dataset_retriever/dataset_retriever_tool.py @@ -244,8 +244,8 @@ class DatasetRetrieverTool(DatasetRetrieverBaseTool): key=lambda x: x.score or 0.0, reverse=True, ) - for position, item in enumerate(retrieval_resource_list, start=1): # type: ignore - item.position = position # type: ignore + for position, item in enumerate(retrieval_resource_list, start=1): # type: ignore[assignment, operator] + item.position = position # type: ignore[assignment, attr-defined] for hit_callback in self.hit_callbacks: hit_callback.return_retriever_resource_info(retrieval_resource_list) if document_context_list: diff --git a/api/core/tools/workflow_as_tool/provider.py b/api/core/tools/workflow_as_tool/provider.py index aef8b3f779..83c8d8012a 100644 --- a/api/core/tools/workflow_as_tool/provider.py +++ b/api/core/tools/workflow_as_tool/provider.py @@ -238,7 +238,7 @@ class WorkflowToolProviderController(ToolProviderController): return self.tools - def get_tool(self, tool_name: str) -> WorkflowTool | None: # type: ignore + def get_tool(self, tool_name: str) -> WorkflowTool | None: # type: ignore[override, return-value] """ get tool by name diff --git a/api/core/tools/workflow_as_tool/tool.py b/api/core/tools/workflow_as_tool/tool.py index 9b9aa7a741..e31ee44b97 100644 --- a/api/core/tools/workflow_as_tool/tool.py +++ b/api/core/tools/workflow_as_tool/tool.py @@ -113,9 +113,9 @@ class WorkflowTool(Tool): if outputs is None: outputs = {} else: - outputs, files = self._extract_files(outputs) # type: ignore + outputs, files = self._extract_files(outputs) # type: ignore[assignment] for file in files: - yield self.create_file_message(file) # type: ignore + yield self.create_file_message(file) # type: ignore[arg-type, misc] # traverse `outputs` field and create variable messages for key, value in outputs.items(): diff --git a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py index 4ea9091c5b..1aafeb0231 100644 --- a/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py +++ b/api/core/workflow/nodes/knowledge_index/knowledge_index_node.py @@ -41,7 +41,7 @@ class KnowledgeIndexNode(Node[KnowledgeIndexNodeData]): self.index_processor = IndexProcessor() self.summary_index_service = SummaryIndex() - def _run(self) -> NodeRunResult: # type: ignore + def _run(self) -> NodeRunResult: # type: ignore[misc] node_data = self.node_data variable_pool = self.graph_runtime_state.variable_pool diff --git a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py index 80f59140be..245d48cde4 100644 --- a/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py +++ b/api/core/workflow/nodes/knowledge_retrieval/knowledge_retrieval_node.py @@ -120,7 +120,7 @@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node[KnowledgeRetrievalNodeD status=WorkflowNodeExecutionStatus.SUCCEEDED, inputs=variables, process_data={"usage": jsonable_encoder(usage)}, - outputs=outputs, # type: ignore + outputs=outputs, # type: ignore[assignment] metadata={ WorkflowNodeExecutionMetadataKey.TOTAL_TOKENS: usage.total_tokens, WorkflowNodeExecutionMetadataKey.TOTAL_PRICE: usage.total_price, @@ -282,7 +282,7 
@@ class KnowledgeRetrievalNode(LLMUsageTrackingMixin, Node[KnowledgeRetrievalNodeD resolved_value = segment_group.text elif isinstance(value, Sequence) and all(isinstance(v, str) for v in value): resolved_values = [] - for v in value: # type: ignore + for v in value: # type: ignore[misc] segment_group = variable_pool.convert_template(v) if len(segment_group.value) == 1: resolved_values.append(segment_group.value[0].to_object()) diff --git a/api/dify_graph/model_runtime/entities/message_entities.py b/api/dify_graph/model_runtime/entities/message_entities.py index 402bfdc606..f0948e6cf7 100644 --- a/api/dify_graph/model_runtime/entities/message_entities.py +++ b/api/dify_graph/model_runtime/entities/message_entities.py @@ -76,7 +76,7 @@ class TextPromptMessageContent(PromptMessageContent): Model class for text prompt message content. """ - type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT # type: ignore + type: Literal[PromptMessageContentType.TEXT] = PromptMessageContentType.TEXT # type: ignore[misc] data: str @@ -97,11 +97,11 @@ class MultiModalPromptMessageContent(PromptMessageContent): class VideoPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO # type: ignore + type: Literal[PromptMessageContentType.VIDEO] = PromptMessageContentType.VIDEO # type: ignore[misc] class AudioPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO # type: ignore + type: Literal[PromptMessageContentType.AUDIO] = PromptMessageContentType.AUDIO # type: ignore[misc] class ImagePromptMessageContent(MultiModalPromptMessageContent): @@ -113,12 +113,12 @@ class ImagePromptMessageContent(MultiModalPromptMessageContent): LOW = auto() HIGH = auto() - type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE # type: ignore + type: Literal[PromptMessageContentType.IMAGE] = PromptMessageContentType.IMAGE # type: ignore[misc] detail: DETAIL = DETAIL.LOW class DocumentPromptMessageContent(MultiModalPromptMessageContent): - type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT # type: ignore + type: Literal[PromptMessageContentType.DOCUMENT] = PromptMessageContentType.DOCUMENT # type: ignore[misc] PromptMessageContentUnionTypes = Annotated[ diff --git a/api/dify_graph/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py b/api/dify_graph/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py index 3967acf07b..e3fb5f7483 100644 --- a/api/dify_graph/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py +++ b/api/dify_graph/model_runtime/model_providers/__base/tokenizers/gpt2_tokenizer.py @@ -15,7 +15,7 @@ class GPT2Tokenizer: use gpt2 tokenizer to get num tokens """ _tokenizer = GPT2Tokenizer.get_encoder() - tokens = _tokenizer.encode(text) # type: ignore + tokens = _tokenizer.encode(text) # type: ignore[operator] return len(tokens) @staticmethod diff --git a/api/dify_graph/model_runtime/utils/encoders.py b/api/dify_graph/model_runtime/utils/encoders.py index c85152463e..c36303bee7 100644 --- a/api/dify_graph/model_runtime/utils/encoders.py +++ b/api/dify_graph/model_runtime/utils/encoders.py @@ -196,12 +196,12 @@ def jsonable_encoder( return encoder(obj) try: - data = dict(obj) # type: ignore + data = dict(obj) # type: ignore[operator] except Exception as e: errors: list[Exception] = [] errors.append(e) try: - data = vars(obj) # type: ignore + data = 
vars(obj) # type: ignore[operator] except Exception as e: errors.append(e) raise ValueError(str(errors)) from e diff --git a/api/dify_graph/nodes/iteration/iteration_node.py b/api/dify_graph/nodes/iteration/iteration_node.py index 033ec8672f..a635d3dfdc 100644 --- a/api/dify_graph/nodes/iteration/iteration_node.py +++ b/api/dify_graph/nodes/iteration/iteration_node.py @@ -81,7 +81,7 @@ class IterationNode(LLMUsageTrackingMixin, Node[IterationNodeData]): def version(cls) -> str: return "1" - def _run(self) -> Generator[GraphNodeEventBase | NodeEventBase, None, None]: # type: ignore + def _run(self) -> Generator[GraphNodeEventBase | NodeEventBase, None, None]: # type: ignore[misc, override] variable = self._get_iterator_variable() if self._is_empty_iteration(variable): diff --git a/api/events/event_handlers/update_provider_when_message_created.py b/api/events/event_handlers/update_provider_when_message_created.py index 1ddcc8f792..ad7a69d944 100644 --- a/api/events/event_handlers/update_provider_when_message_created.py +++ b/api/events/event_handlers/update_provider_when_message_created.py @@ -272,7 +272,7 @@ def _execute_provider_updates(updates_to_perform: list[_ProviderUpdateOperation] now = datetime_utils.naive_utc_now() last_update = _get_last_update_timestamp(cache_key) - if last_update is None or (now - last_update).total_seconds() > LAST_USED_UPDATE_WINDOW_SECONDS: # type: ignore + if last_update is None or (now - last_update).total_seconds() > LAST_USED_UPDATE_WINDOW_SECONDS: # type: ignore[misc] update_values["last_used"] = values.last_used _set_last_update_timestamp(cache_key, now) diff --git a/api/extensions/ext_app_metrics.py b/api/extensions/ext_app_metrics.py index 4a6490b9f0..1027059551 100644 --- a/api/extensions/ext_app_metrics.py +++ b/api/extensions/ext_app_metrics.py @@ -58,10 +58,10 @@ def init_app(app: DifyApp): # FIXME maybe its sqlalchemy issue return { "pid": os.getpid(), - "pool_size": engine.pool.size(), # type: ignore - "checked_in_connections": engine.pool.checkedin(), # type: ignore - "checked_out_connections": engine.pool.checkedout(), # type: ignore - "overflow_connections": engine.pool.overflow(), # type: ignore - "connection_timeout": engine.pool.timeout(), # type: ignore - "recycle_time": db.engine.pool._recycle, # type: ignore + "pool_size": engine.pool.size(), # type: ignore[attr-defined] + "checked_in_connections": engine.pool.checkedin(), # type: ignore[attr-defined] + "checked_out_connections": engine.pool.checkedout(), # type: ignore[attr-defined] + "overflow_connections": engine.pool.overflow(), # type: ignore[attr-defined] + "connection_timeout": engine.pool.timeout(), # type: ignore[attr-defined] + "recycle_time": db.engine.pool._recycle, # type: ignore[attr-defined] } diff --git a/api/extensions/ext_otel.py b/api/extensions/ext_otel.py index a5baa21018..911a94f300 100644 --- a/api/extensions/ext_otel.py +++ b/api/extensions/ext_otel.py @@ -26,23 +26,23 @@ def init_app(app: DifyApp): ConsoleSpanExporter, ) from opentelemetry.sdk.trace.sampling import ParentBasedTraceIdRatio - from opentelemetry.semconv._incubating.attributes.deployment_attributes import ( # type: ignore[import-untyped] + from opentelemetry.semconv._incubating.attributes.deployment_attributes import ( # type: ignore[reportMissingTypeStubs] DEPLOYMENT_ENVIRONMENT_NAME, ) - from opentelemetry.semconv._incubating.attributes.host_attributes import ( # type: ignore[import-untyped] + from opentelemetry.semconv._incubating.attributes.host_attributes import ( # type: 
ignore[reportMissingTypeStubs] HOST_ARCH, HOST_ID, HOST_NAME, ) - from opentelemetry.semconv._incubating.attributes.os_attributes import ( # type: ignore[import-untyped] + from opentelemetry.semconv._incubating.attributes.os_attributes import ( # type: ignore[reportMissingTypeStubs] OS_DESCRIPTION, OS_TYPE, OS_VERSION, ) - from opentelemetry.semconv._incubating.attributes.process_attributes import ( # type: ignore[import-untyped] + from opentelemetry.semconv._incubating.attributes.process_attributes import ( # type: ignore[reportMissingTypeStubs] PROCESS_PID, ) - from opentelemetry.semconv.attributes.service_attributes import ( # type: ignore[import-untyped] + from opentelemetry.semconv.attributes.service_attributes import ( # type: ignore[reportMissingTypeStubs] SERVICE_NAME, SERVICE_VERSION, ) diff --git a/api/extensions/ext_redis.py b/api/extensions/ext_redis.py index 26262484f9..dc88532e72 100644 --- a/api/extensions/ext_redis.py +++ b/api/extensions/ext_redis.py @@ -302,7 +302,7 @@ R = TypeVar("R") T = TypeVar("T") -def redis_fallback(default_return: T | None = None): # type: ignore +def redis_fallback(default_return: T | None = None): # type: ignore[misc] """ decorator to handle Redis operation exceptions and return a default value when Redis is unavailable. diff --git a/api/extensions/logstore/aliyun_logstore.py b/api/extensions/logstore/aliyun_logstore.py index f6a4765f14..4abe1726f4 100644 --- a/api/extensions/logstore/aliyun_logstore.py +++ b/api/extensions/logstore/aliyun_logstore.py @@ -9,7 +9,7 @@ from collections.abc import Sequence from typing import Any import sqlalchemy as sa -from aliyun.log import ( # type: ignore[import-untyped] +from aliyun.log import ( # type: ignore[reportMissingTypeStubs] GetLogsRequest, IndexConfig, IndexKeyConfig, @@ -18,8 +18,8 @@ from aliyun.log import ( # type: ignore[import-untyped] LogItem, PutLogsRequest, ) -from aliyun.log.auth import AUTH_VERSION_4 # type: ignore[import-untyped] -from aliyun.log.logexception import LogException # type: ignore[import-untyped] +from aliyun.log.auth import AUTH_VERSION_4 # type: ignore[reportMissingTypeStubs] +from aliyun.log.logexception import LogException # type: ignore[reportMissingTypeStubs] from dotenv import load_dotenv from sqlalchemy.orm import DeclarativeBase diff --git a/api/extensions/otel/instrumentation.py b/api/extensions/otel/instrumentation.py index b73ba8df8c..1e96b57f06 100644 --- a/api/extensions/otel/instrumentation.py +++ b/api/extensions/otel/instrumentation.py @@ -7,7 +7,7 @@ from opentelemetry.instrumentation.httpx import HTTPXClientInstrumentor from opentelemetry.instrumentation.redis import RedisInstrumentor from opentelemetry.instrumentation.sqlalchemy import SQLAlchemyInstrumentor from opentelemetry.metrics import get_meter, get_meter_provider -from opentelemetry.semconv.attributes.http_attributes import ( # type: ignore[import-untyped] +from opentelemetry.semconv.attributes.http_attributes import ( HTTP_REQUEST_METHOD, HTTP_ROUTE, ) diff --git a/api/extensions/storage/azure_blob_storage.py b/api/extensions/storage/azure_blob_storage.py index f270267ce9..b3eca5ecad 100644 --- a/api/extensions/storage/azure_blob_storage.py +++ b/api/extensions/storage/azure_blob_storage.py @@ -86,7 +86,7 @@ class AzureBlobStorage(BaseStorage): def _sync_client(self): if self.account_key == "managedidentity": - return BlobServiceClient(account_url=self.account_url, credential=self.credential) # type: ignore + return BlobServiceClient(account_url=self.account_url, credential=self.credential) # type: 
ignore[arg-type, import-untyped] cache_key = f"azure_blob_sas_token_{self.account_name}_{self.account_key}" cache_result = redis_client.get(cache_key) diff --git a/api/extensions/storage/google_cloud_storage.py b/api/extensions/storage/google_cloud_storage.py index 4ad7e2d159..7fc35b9397 100644 --- a/api/extensions/storage/google_cloud_storage.py +++ b/api/extensions/storage/google_cloud_storage.py @@ -3,7 +3,7 @@ import io import json from collections.abc import Generator -from google.cloud import storage as google_cloud_storage # type: ignore +from google.cloud import storage as google_cloud_storage from configs import dify_config from extensions.storage.base_storage import BaseStorage diff --git a/api/gunicorn.conf.py b/api/gunicorn.conf.py index da75d25ba6..0c7c363409 100644 --- a/api/gunicorn.conf.py +++ b/api/gunicorn.conf.py @@ -1,6 +1,6 @@ -import psycogreen.gevent as pscycogreen_gevent # type: ignore +import psycogreen.gevent as pscycogreen_gevent # type: ignore[reportMissingTypeStubs] from gevent import events as gevent_events -from grpc.experimental import gevent as grpc_gevent # type: ignore +from grpc.experimental import gevent as grpc_gevent # type: ignore[reportMissingTypeStubs] # WARNING: This module is loaded very early in the Gunicorn worker lifecycle, # before gevent's monkey-patching is applied. Importing modules at the top level here can diff --git a/api/libs/login.py b/api/libs/login.py index bd5cb5f30d..221959e20d 100644 --- a/api/libs/login.py +++ b/api/libs/login.py @@ -26,7 +26,7 @@ def current_account_with_tenant(): user_proxy = current_user get_current_object = getattr(user_proxy, "_get_current_object", None) - user = get_current_object() if callable(get_current_object) else user_proxy # type: ignore + user = get_current_object() if callable(get_current_object) else user_proxy # type: ignore[attr-defined] if not isinstance(user, Account): raise ValueError("current_user must be an Account instance") @@ -81,7 +81,7 @@ def login_required(func: Callable[P, R]) -> Callable[P, R | ResponseReturnValue] user = _get_user() if user is None or not user.is_authenticated: - return current_app.login_manager.unauthorized() # type: ignore + return current_app.login_manager.unauthorized() # type: ignore[attr-defined, operator] # we put csrf validation here for less conflicts # TODO: maybe find a better place for it. 
check_csrf_token(request, user.id) @@ -93,7 +93,7 @@ def login_required(func: Callable[P, R]) -> Callable[P, R | ResponseReturnValue] def _get_user() -> EndUser | Account | None: if has_request_context(): if "_login_user" not in g: - current_app.login_manager._load_user() # type: ignore + current_app.login_manager._load_user() # type: ignore[attr-defined] return g._login_user diff --git a/api/libs/sendgrid.py b/api/libs/sendgrid.py index c047c54d06..d190d6885c 100644 --- a/api/libs/sendgrid.py +++ b/api/libs/sendgrid.py @@ -28,7 +28,7 @@ class SendGridClient: content = Content("text/html", mail["html"]) sg_mail = Mail(from_email, to_email, subject, content) mail_json = sg_mail.get() - response = sg.client.mail.send.post(request_body=mail_json) # type: ignore + response = sg.client.mail.send.post(request_body=mail_json) # type: ignore[operator] logger.debug(response.status_code) logger.debug(response.body) logger.debug(response.headers) diff --git a/api/models/model.py b/api/models/model.py index 05233f8711..52437fa801 100644 --- a/api/models/model.py +++ b/api/models/model.py @@ -12,7 +12,7 @@ from uuid import uuid4 import sqlalchemy as sa from flask import request -from flask_login import UserMixin # type: ignore[import-untyped] +from flask_login import UserMixin from sqlalchemy import BigInteger, Float, Index, PrimaryKeyConstraint, String, exists, func, select, text from sqlalchemy.orm import Mapped, Session, mapped_column from typing_extensions import TypedDict diff --git a/api/schedule/clean_workflow_runlogs_precise.py b/api/schedule/clean_workflow_runlogs_precise.py index ebb8d52924..03ece2725b 100644 --- a/api/schedule/clean_workflow_runlogs_precise.py +++ b/api/schedule/clean_workflow_runlogs_precise.py @@ -132,7 +132,7 @@ def _delete_batch( SavedMessage, ] for model in message_related_models: - session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore + session.query(model).where(model.message_id.in_(message_id_list)).delete(synchronize_session=False) # type: ignore[attr-defined, operator] # error: "DeclarativeAttributeIntercept" has no attribute "message_id". But this type is only in lib # and these 6 types all have the message_id field. 
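The `redis_fallback` decorator touched in api/extensions/ext_redis.py above appears in this patch only as a signature and docstring. As a minimal sketch of how such a fallback decorator can be written (the exception class, logger, and generic plumbing below are assumptions for illustration, not the repository's implementation):

    from __future__ import annotations

    import functools
    import logging
    from collections.abc import Callable
    from typing import ParamSpec, TypeVar

    from redis.exceptions import RedisError

    P = ParamSpec("P")
    T = TypeVar("T")

    logger = logging.getLogger(__name__)


    def redis_fallback(default_return: T | None = None) -> Callable[[Callable[P, T]], Callable[P, T | None]]:
        """Return ``default_return`` instead of raising when Redis is unavailable."""

        def decorator(func: Callable[P, T]) -> Callable[P, T | None]:
            @functools.wraps(func)
            def wrapper(*args: P.args, **kwargs: P.kwargs) -> T | None:
                try:
                    return func(*args, **kwargs)
                except RedisError:
                    logger.warning("Redis unavailable, returning default from %s", func.__name__)
                    return default_return

            return wrapper

        return decorator

A function decorated with `@redis_fallback(default_return=None)` then degrades gracefully when the Redis connection drops instead of propagating the error.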
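The gunicorn.conf.py hunk above preserves the warning that the config module is imported before gevent's monkey-patching has been applied. As a generic ordering sketch (not the repository's actual entrypoint), cooperative patching is applied before anything else imports sockets or database drivers:

    # Patch the standard library first so later imports see cooperative sockets.
    from gevent import monkey

    monkey.patch_all()

    # Only after patching, make psycopg2 and gRPC gevent-friendly as well.
    import psycogreen.gevent as psycogreen_gevent
    from grpc.experimental import gevent as grpc_gevent

    psycogreen_gevent.patch_psycopg()
    grpc_gevent.init_gevent()

The warning in the hunk points at this ordering concern; the sketch is only an illustration of the general pattern.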
diff --git a/api/schedule/mail_clean_document_notify_task.py b/api/schedule/mail_clean_document_notify_task.py index 8479cdfb0c..6e30c047a6 100644 --- a/api/schedule/mail_clean_document_notify_task.py +++ b/api/schedule/mail_clean_document_notify_task.py @@ -64,7 +64,7 @@ def mail_clean_document_notify_task(): if not account: continue - dataset_auto_dataset_map = {} # type: ignore + dataset_auto_dataset_map = {} # type: ignore[assignment, var-annotated] for dataset_auto_disable_log in tenant_dataset_auto_disable_logs: if dataset_auto_disable_log.dataset_id not in dataset_auto_dataset_map: dataset_auto_dataset_map[dataset_auto_disable_log.dataset_id] = [] diff --git a/api/schedule/queue_monitor_task.py b/api/schedule/queue_monitor_task.py index 01642e397e..e38f00c2bd 100644 --- a/api/schedule/queue_monitor_task.py +++ b/api/schedule/queue_monitor_task.py @@ -2,7 +2,7 @@ import logging from datetime import datetime import click -from kombu.utils.url import parse_url # type: ignore +from kombu.utils.url import parse_url # type: ignore[reportMissingTypeStubs] from redis import Redis import app diff --git a/api/schedule/workflow_schedule_task.py b/api/schedule/workflow_schedule_task.py index 2fee9e467d..909333dcb1 100644 --- a/api/schedule/workflow_schedule_task.py +++ b/api/schedule/workflow_schedule_task.py @@ -35,7 +35,7 @@ def poll_workflow_schedules() -> None: if not due_schedules: break - with current_app.producer_or_acquire() as producer: # type: ignore + with current_app.producer_or_acquire() as producer: # type: ignore[attr-defined] dispatched_count = _process_schedules(session, due_schedules, producer) total_dispatched += dispatched_count diff --git a/api/services/clear_free_plan_tenant_expired_logs.py b/api/services/clear_free_plan_tenant_expired_logs.py index 0e0eab00ad..3e89102b0b 100644 --- a/api/services/clear_free_plan_tenant_expired_logs.py +++ b/api/services/clear_free_plan_tenant_expired_logs.py @@ -65,7 +65,7 @@ class ClearFreePlanTenantExpiredLogs: records = ( session.query(model) .where( - model.message_id.in_(batch_message_ids), # type: ignore + model.message_id.in_(batch_message_ids), # type: ignore[attr-defined, operator] ) .all() ) @@ -104,7 +104,7 @@ class ClearFreePlanTenantExpiredLogs: logger.exception("Failed to save %s records", table_name) session.query(model).where( - model.id.in_(record_ids), # type: ignore + model.id.in_(record_ids), # type: ignore[attr-defined, operator] ).delete(synchronize_session=False) click.echo( diff --git a/api/services/dataset_service.py b/api/services/dataset_service.py index ba4ab6757f..bc91921372 100644 --- a/api/services/dataset_service.py +++ b/api/services/dataset_service.py @@ -2069,7 +2069,7 @@ class DocumentService: documents.append(document) position += 1 elif knowledge_config.data_source.info_list.data_source_type == "notion_import": - notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore + notion_info_list = knowledge_config.data_source.info_list.notion_info_list # type: ignore[assignment] if not notion_info_list: raise ValueError("No notion info list found.") exist_page_ids = [] @@ -2097,7 +2097,7 @@ class DocumentService: "credential_id": notion_info.credential_id, "notion_workspace_id": workspace_id, "notion_page_id": page.page_id, - "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore + "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore[dict-item] "type": page.type, } # Truncate page name to 255 
characters to prevent DB field length errors @@ -2193,15 +2193,15 @@ class DocumentService: # if knowledge_config.data_source: # if knowledge_config.data_source.info_list.data_source_type == "upload_file": # upload_file_list = knowledge_config.data_source.info_list.file_info_list.file_ids - # # type: ignore + # # type: ignore[attr-defined] # count = len(upload_file_list) # elif knowledge_config.data_source.info_list.data_source_type == "notion_import": # notion_info_list = knowledge_config.data_source.info_list.notion_info_list - # for notion_info in notion_info_list: # type: ignore + # for notion_info in notion_info_list: # type: ignore[attr-defined] # count = count + len(notion_info.pages) # elif knowledge_config.data_source.info_list.data_source_type == "website_crawl": # website_info = knowledge_config.data_source.info_list.website_info_list - # count = len(website_info.urls) # type: ignore + # count = len(website_info.urls) # type: ignore[attr-defined] # batch_upload_limit = int(dify_config.BATCH_UPLOAD_LIMIT) # if features.billing.subscription.plan == CloudPlan.SANDBOX and count > 1: @@ -2250,7 +2250,7 @@ class DocumentService: # knowledge_config.retrieval_model.model_dump() # if knowledge_config.retrieval_model # else default_retrieval_model - # ) # type: ignore + # ) # type: ignore[arg-type] # documents = [] # if knowledge_config.original_document_id: @@ -2330,8 +2330,8 @@ class DocumentService: # continue # document = DocumentService.build_document( # dataset, - # dataset_process_rule.id, # type: ignore - # knowledge_config.data_source.info_list.data_source_type, # type: ignore + # dataset_process_rule.id, # type: ignore[attr-defined] + # knowledge_config.data_source.info_list.data_source_type, # type: ignore[attr-defined] # knowledge_config.doc_form, # knowledge_config.doc_language, # data_source_info, @@ -2387,8 +2387,8 @@ class DocumentService: # truncated_page_name = page.page_name[:255] if page.page_name else "nopagename" # document = DocumentService.build_document( # dataset, - # dataset_process_rule.id, # type: ignore - # knowledge_config.data_source.info_list.data_source_type, # type: ignore + # dataset_process_rule.id, # type: ignore[attr-defined] + # knowledge_config.data_source.info_list.data_source_type, # type: ignore[attr-defined] # knowledge_config.doc_form, # knowledge_config.doc_language, # data_source_info, @@ -2427,8 +2427,8 @@ class DocumentService: # document_name = url # document = DocumentService.build_document( # dataset, - # dataset_process_rule.id, # type: ignore - # knowledge_config.data_source.info_list.data_source_type, # type: ignore + # dataset_process_rule.id, # type: ignore[attr-defined] + # knowledge_config.data_source.info_list.data_source_type, # type: ignore[attr-defined] # knowledge_config.doc_form, # knowledge_config.doc_language, # data_source_info, @@ -2609,7 +2609,7 @@ class DocumentService: "credential_id": notion_info.credential_id, "notion_workspace_id": workspace_id, "notion_page_id": page.page_id, - "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore + "notion_page_icon": page.page_icon.model_dump() if page.page_icon else None, # type: ignore[dict-item] "type": page.type, } elif document_data.data_source.info_list.data_source_type == "website_crawl": @@ -3421,7 +3421,7 @@ class SegmentService: # calc embedding use tokens if document.doc_form == "qa_model": segment.answer = args.answer - tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] # type: ignore + 
tokens = embedding_model.get_text_embedding_num_tokens(texts=[content + segment.answer])[0] # type: ignore[operator] else: tokens = embedding_model.get_text_embedding_num_tokens(texts=[content])[0] segment.content = content diff --git a/api/services/document_indexing_proxy/batch_indexing_base.py b/api/services/document_indexing_proxy/batch_indexing_base.py index dd122f34a8..93f8b4875d 100644 --- a/api/services/document_indexing_proxy/batch_indexing_base.py +++ b/api/services/document_indexing_proxy/batch_indexing_base.py @@ -41,7 +41,7 @@ class BatchDocumentIndexingProxy(DocumentTaskProxyBase): task_func: The Celery task function to call with (tenant_id, dataset_id, document_ids) """ logger.info("tenant %s send documents %s to direct queue", self._tenant_id, self._document_ids) - task_func.delay( # type: ignore + task_func.delay( # type: ignore[attr-defined, operator] tenant_id=self._tenant_id, dataset_id=self._dataset_id, document_ids=self._document_ids ) @@ -70,7 +70,7 @@ class BatchDocumentIndexingProxy(DocumentTaskProxyBase): else: # Set flag and execute task self._tenant_isolated_task_queue.set_task_waiting_time() - task_func.delay( # type: ignore + task_func.delay( # type: ignore[attr-defined, operator] tenant_id=self._tenant_id, dataset_id=self._dataset_id, document_ids=self._document_ids ) logger.info("tenant %s init tasks: %s - %s", self._tenant_id, self._dataset_id, self._document_ids) diff --git a/api/services/metadata_service.py b/api/services/metadata_service.py index 2f47a647a8..fcfcd73e89 100644 --- a/api/services/metadata_service.py +++ b/api/services/metadata_service.py @@ -46,7 +46,7 @@ class MetadataService: return metadata @staticmethod - def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata: # type: ignore + def update_metadata_name(dataset_id: str, metadata_id: str, name: str) -> DatasetMetadata: # type: ignore[misc, return] # check if metadata name is too long if len(name) > 255: raise ValueError("Metadata name cannot exceed 255 characters.") diff --git a/api/services/plugin/plugin_migration.py b/api/services/plugin/plugin_migration.py index df5fa3e233..dec92a6faa 100644 --- a/api/services/plugin/plugin_migration.py +++ b/api/services/plugin/plugin_migration.py @@ -146,7 +146,7 @@ class PluginMigration: futures.append( thread_pool.submit( process_tenant, - current_app._get_current_object(), # type: ignore + current_app._get_current_object(), # type: ignore[attr-defined] tenant_id, ) ) diff --git a/api/services/rag_pipeline/rag_pipeline.py b/api/services/rag_pipeline/rag_pipeline.py index 296b9f0890..007d899a52 100644 --- a/api/services/rag_pipeline/rag_pipeline.py +++ b/api/services/rag_pipeline/rag_pipeline.py @@ -56,7 +56,7 @@ from dify_graph.variables.variables import VariableBase from extensions.ext_database import db from libs.infinite_scroll_pagination import InfiniteScrollPagination from models import Account -from models.dataset import ( # type: ignore +from models.dataset import ( # type: ignore[import] Dataset, Document, DocumentPipelineExecutionLog, @@ -918,10 +918,10 @@ class RagPipelineService: ) error = node_run_result.error if not run_succeeded else None except WorkflowNodeRunFailedError as e: - node_instance = e._node # type: ignore + node_instance = e._node # type: ignore[attr-defined] run_succeeded = False node_run_result = None - error = e._error # type: ignore + error = e._error # type: ignore[attr-defined] workflow_node_execution = WorkflowNodeExecution( id=str(uuid4()), @@ -1317,7 +1317,7 @@ class 
RagPipelineService: repository.save(workflow_node_execution) # Convert node_execution to WorkflowNodeExecution after save - workflow_node_execution_db_model = repository._to_db_model(workflow_node_execution) # type: ignore + workflow_node_execution_db_model = repository._to_db_model(workflow_node_execution) # type: ignore[attr-defined] with Session(bind=db.engine) as session, session.begin(): draft_var_saver = DraftVariableSaver( diff --git a/api/services/rag_pipeline/rag_pipeline_dsl_service.py b/api/services/rag_pipeline/rag_pipeline_dsl_service.py index deb59da8d3..a4eb6a9c71 100644 --- a/api/services/rag_pipeline/rag_pipeline_dsl_service.py +++ b/api/services/rag_pipeline/rag_pipeline_dsl_service.py @@ -10,7 +10,7 @@ from typing import cast from urllib.parse import urlparse from uuid import uuid4 -import yaml # type: ignore +import yaml from Crypto.Cipher import AES from Crypto.Util.Padding import pad, unpad from flask_login import current_user @@ -671,7 +671,7 @@ class RagPipelineDslService: self._append_workflow_export_data(export_data=export_data, pipeline=pipeline, include_secret=include_secret) - return yaml.dump(export_data, allow_unicode=True) # type: ignore + return yaml.dump(export_data, allow_unicode=True) # type: ignore[operator] def _append_workflow_export_data(self, *, export_data: dict, pipeline: Pipeline, include_secret: bool) -> None: """ diff --git a/api/services/rag_pipeline/rag_pipeline_task_proxy.py b/api/services/rag_pipeline/rag_pipeline_task_proxy.py index 1a7b104a70..58aeed35b5 100644 --- a/api/services/rag_pipeline/rag_pipeline_task_proxy.py +++ b/api/services/rag_pipeline/rag_pipeline_task_proxy.py @@ -45,7 +45,7 @@ class RagPipelineTaskProxy: def _send_to_direct_queue(self, upload_file_id: str, task_func: Callable[[str, str], None]): logger.info("tenant %s send file %s to direct queue", self._dataset_tenant_id, upload_file_id) - task_func.delay( # type: ignore + task_func.delay( # type: ignore[attr-defined, operator] rag_pipeline_invoke_entities_file_id=upload_file_id, tenant_id=self._dataset_tenant_id, ) @@ -59,7 +59,7 @@ class RagPipelineTaskProxy: else: # Set flag and execute task self._tenant_isolated_task_queue.set_task_waiting_time() - task_func.delay( # type: ignore + task_func.delay( # type: ignore[attr-defined, operator] rag_pipeline_invoke_entities_file_id=upload_file_id, tenant_id=self._dataset_tenant_id, ) diff --git a/api/services/rag_pipeline/rag_pipeline_transform_service.py b/api/services/rag_pipeline/rag_pipeline_transform_service.py index 1d0aafd5fd..c57b89732a 100644 --- a/api/services/rag_pipeline/rag_pipeline_transform_service.py +++ b/api/services/rag_pipeline/rag_pipeline_transform_service.py @@ -270,7 +270,7 @@ class RagPipelineTransformService: plugin_unique_identifier = dependency.get("value", {}).get("plugin_unique_identifier") plugin_id = plugin_unique_identifier.split(":")[0] if plugin_id not in installed_plugins_ids: - plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(plugin_id) # type: ignore + plugin_unique_identifier = plugin_migration._fetch_plugin_unique_identifier(plugin_id) # type: ignore[attr-defined] if plugin_unique_identifier: need_install_plugin_unique_identifiers.append(plugin_unique_identifier) if need_install_plugin_unique_identifiers: diff --git a/api/services/trigger/trigger_request_service.py b/api/services/trigger/trigger_request_service.py index 91a838c265..2e8ae6baca 100644 --- a/api/services/trigger/trigger_request_service.py +++ b/api/services/trigger/trigger_request_service.py @@ 
-61,5 +61,5 @@ class TriggerHttpRequestCachingService: """ storage.save( f"{cls._TRIGGER_STORAGE_PATH}/{request_id}.payload", - TypeAdapter(Mapping[str, Any]).dump_json(payload), # type: ignore + TypeAdapter(Mapping[str, Any]).dump_json(payload), # type: ignore[arg-type, operator] ) diff --git a/api/services/variable_truncator.py b/api/services/variable_truncator.py index 60dc1dedb8..2855ea1146 100644 --- a/api/services/variable_truncator.py +++ b/api/services/variable_truncator.py @@ -401,7 +401,7 @@ class VariableTruncator(BaseTruncator): def _truncate_json_primitives(self, val: dict[str, object], target_size: int) -> _PartResult[dict[str, object]]: ... @overload - def _truncate_json_primitives(self, val: bool, target_size: int) -> _PartResult[bool]: ... # type: ignore + def _truncate_json_primitives(self, val: bool, target_size: int) -> _PartResult[bool]: ... # type: ignore[misc] @overload def _truncate_json_primitives(self, val: int, target_size: int) -> _PartResult[int]: ... diff --git a/api/services/workflow/queue_dispatcher.py b/api/services/workflow/queue_dispatcher.py index cc366482c8..e0039bfd72 100644 --- a/api/services/workflow/queue_dispatcher.py +++ b/api/services/workflow/queue_dispatcher.py @@ -103,4 +103,4 @@ class QueueDispatcherManager: SandboxQueueDispatcher, # Default to sandbox for unknown plans ) - return dispatcher_class() # type: ignore + return dispatcher_class() # type: ignore[abstract, operator] diff --git a/api/tasks/app_generate/workflow_execute_task.py b/api/tasks/app_generate/workflow_execute_task.py index 174aa50343..58b3c0f313 100644 --- a/api/tasks/app_generate/workflow_execute_task.py +++ b/api/tasks/app_generate/workflow_execute_task.py @@ -137,7 +137,7 @@ class _AppRunner: @contextlib.contextmanager def _setup_flask_context(self, user: Account | EndUser): - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] with flask_app.app_context(): set_login_user(user) yield diff --git a/api/tasks/batch_create_segment_to_index_task.py b/api/tasks/batch_create_segment_to_index_task.py index 49dee00919..3173d6795d 100644 --- a/api/tasks/batch_create_segment_to_index_task.py +++ b/api/tasks/batch_create_segment_to_index_task.py @@ -103,7 +103,7 @@ def batch_create_segment_to_index_task( with tempfile.TemporaryDirectory() as temp_dir: suffix = Path(upload_file_key).suffix - file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore + file_path = f"{temp_dir}/{next(tempfile._get_candidate_names())}{suffix}" # type: ignore[attr-defined, operator] storage.download(upload_file_key, file_path) df = pd.read_csv(file_path) diff --git a/api/tasks/deal_dataset_index_update_task.py b/api/tasks/deal_dataset_index_update_task.py index fa844a8647..5d34399c65 100644 --- a/api/tasks/deal_dataset_index_update_task.py +++ b/api/tasks/deal_dataset_index_update_task.py @@ -2,7 +2,7 @@ import logging import time import click -from celery import shared_task # type: ignore +from celery import shared_task from core.db.session_factory import session_factory from core.rag.index_processor.constant.doc_type import DocType diff --git a/api/tasks/document_indexing_task.py b/api/tasks/document_indexing_task.py index e05d63426c..cf972c7b5c 100644 --- a/api/tasks/document_indexing_task.py +++ b/api/tasks/document_indexing_task.py @@ -209,7 +209,7 @@ def _document_indexing_with_tenant_queue( logger.info("document indexing tenant isolation queue %s next tasks: %s", tenant_id, 
next_tasks) if next_tasks: - with current_app.producer_or_acquire() as producer: # type: ignore + with current_app.producer_or_acquire() as producer: # type: ignore[attr-defined] for next_task in next_tasks: document_task = DocumentTask(**next_task) # Keep the flag set to indicate a task is running diff --git a/api/tasks/duplicate_document_indexing_task.py b/api/tasks/duplicate_document_indexing_task.py index 13c651753f..c974007400 100644 --- a/api/tasks/duplicate_document_indexing_task.py +++ b/api/tasks/duplicate_document_indexing_task.py @@ -66,7 +66,7 @@ def _duplicate_document_indexing_task_with_tenant_queue( # Process the next waiting task # Keep the flag set to indicate a task is running tenant_isolated_task_queue.set_task_waiting_time() - task_func.delay( # type: ignore + task_func.delay( # type: ignore[attr-defined, operator] tenant_id=document_task.tenant_id, dataset_id=document_task.dataset_id, document_ids=document_task.document_ids, diff --git a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py index 3c5e152520..0f48c52c5e 100644 --- a/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/priority_rag_pipeline_run_task.py @@ -8,7 +8,7 @@ from concurrent.futures import ThreadPoolExecutor from typing import Any import click -from celery import shared_task # type: ignore +from celery import shared_task from flask import current_app, g from sqlalchemy.orm import Session, sessionmaker @@ -50,7 +50,7 @@ def priority_rag_pipeline_run_task( logger.info("tenant %s received %d rag pipeline invoke entities", tenant_id, len(rag_pipeline_invoke_entities)) # Get Flask app object for thread context - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] with ThreadPoolExecutor(max_workers=10) as executor: futures = [] @@ -87,7 +87,7 @@ def priority_rag_pipeline_run_task( # Process the next waiting task # Keep the flag set to indicate a task is running tenant_isolated_task_queue.set_task_waiting_time() - priority_rag_pipeline_run_task.delay( # type: ignore + priority_rag_pipeline_run_task.delay( # type: ignore[operator] rag_pipeline_invoke_entities_file_id=next_file_id.decode("utf-8") if isinstance(next_file_id, bytes) else next_file_id, diff --git a/api/tasks/rag_pipeline/rag_pipeline_run_task.py b/api/tasks/rag_pipeline/rag_pipeline_run_task.py index 52f66dddb8..a129bd1e3a 100644 --- a/api/tasks/rag_pipeline/rag_pipeline_run_task.py +++ b/api/tasks/rag_pipeline/rag_pipeline_run_task.py @@ -56,7 +56,7 @@ def rag_pipeline_run_task( logger.info("tenant %s received %d rag pipeline invoke entities", tenant_id, len(rag_pipeline_invoke_entities)) # Get Flask app object for thread context - flask_app = current_app._get_current_object() # type: ignore + flask_app = current_app._get_current_object() # type: ignore[attr-defined] with ThreadPoolExecutor(max_workers=10) as executor: futures = [] diff --git a/api/tasks/workflow_draft_var_tasks.py b/api/tasks/workflow_draft_var_tasks.py index 26f8f7c29e..bd1ecb7f42 100644 --- a/api/tasks/workflow_draft_var_tasks.py +++ b/api/tasks/workflow_draft_var_tasks.py @@ -5,7 +5,7 @@ These tasks provide asynchronous storage capabilities for workflow execution dat improving performance by offloading storage operations to background workers. 
""" -from celery import shared_task # type: ignore[import-untyped] +from celery import shared_task from core.db.session_factory import session_factory from services.workflow_draft_variable_service import DraftVarFileDeletion, WorkflowDraftVariableService diff --git a/api/tests/integration_tests/conftest.py b/api/tests/integration_tests/conftest.py index 44adadeaa5..ceedc639e4 100644 --- a/api/tests/integration_tests/conftest.py +++ b/api/tests/integration_tests/conftest.py @@ -52,7 +52,7 @@ _CACHED_APP = create_app() @pytest.fixture(scope="session") def dify_config() -> DifyConfig: - config = DifyConfig() # type: ignore + config = DifyConfig() # type: ignore[assignment] return config diff --git a/api/tests/unit_tests/core/datasource/entities/test_api_entities.py b/api/tests/unit_tests/core/datasource/entities/test_api_entities.py index 9855b4040a..781dba4798 100644 --- a/api/tests/unit_tests/core/datasource/entities/test_api_entities.py +++ b/api/tests/unit_tests/core/datasource/entities/test_api_entities.py @@ -50,7 +50,7 @@ def test_datasource_provider_api_entity_convert_none_to_empty_list(): icon="icon", label=label, type="type", - datasources=None, # type: ignore + datasources=None, # type: ignore[assignment] ) assert entity.datasources == [] diff --git a/api/tests/unit_tests/core/mcp/test_types.py b/api/tests/unit_tests/core/mcp/test_types.py index d4fe353f0a..c85a22fd70 100644 --- a/api/tests/unit_tests/core/mcp/test_types.py +++ b/api/tests/unit_tests/core/mcp/test_types.py @@ -89,7 +89,7 @@ class TestRequestParams: """Test RequestParams.Meta allows extra fields.""" meta = RequestParams.Meta(progressToken="token", customField="value") assert meta.progressToken == "token" - assert meta.customField == "value" # type: ignore + assert meta.customField == "value" # type: ignore[misc] def test_request_params_serialization(self): """Test RequestParams serialization with _meta alias.""" @@ -179,7 +179,7 @@ class TestCapabilities: assert caps.experimental == {"feature": {"enabled": True}} assert caps.sampling is not None - assert caps.roots.listChanged is True # type: ignore + assert caps.roots.listChanged is True # type: ignore[misc] def test_server_capabilities(self): """Test ServerCapabilities creation.""" @@ -191,9 +191,9 @@ class TestCapabilities: completions={}, ) - assert caps.tools.listChanged is True # type: ignore - assert caps.resources.subscribe is True # type: ignore - assert caps.resources.listChanged is False # type: ignore + assert caps.tools.listChanged is True # type: ignore[misc] + assert caps.resources.subscribe is True # type: ignore[misc] + assert caps.resources.listChanged is False # type: ignore[misc] class TestInitialization: @@ -272,7 +272,7 @@ class TestTools: ) assert len(result.content) == 1 - assert result.content[0].text == "Tool executed successfully" # type: ignore + assert result.content[0].text == "Tool executed successfully" # type: ignore[misc] assert result.structuredContent == {"status": "success", "data": "test"} assert result.isError is False @@ -434,7 +434,7 @@ class TestCompletion: request = CompleteRequest(params=params) assert request.method == "completion/complete" - assert request.params.ref.name == "test_prompt" # type: ignore + assert request.params.ref.name == "test_prompt" # type: ignore[misc] assert request.params.argument.name == "arg1" def test_complete_result(self): @@ -474,9 +474,9 @@ class TestValidation: tool = Tool( name="test", inputSchema={}, - customField="allowed", # type: ignore + customField="allowed", # type: ignore[misc] ) - 
assert tool.customField == "allowed" # type: ignore + assert tool.customField == "allowed" # type: ignore[misc] def test_result_meta_alias(self): """Test Result model with _meta alias.""" diff --git a/api/tests/unit_tests/core/workflow/context/test_execution_context.py b/api/tests/unit_tests/core/workflow/context/test_execution_context.py index d09b8397c3..af53adf582 100644 --- a/api/tests/unit_tests/core/workflow/context/test_execution_context.py +++ b/api/tests/unit_tests/core/workflow/context/test_execution_context.py @@ -26,7 +26,7 @@ class TestAppContext: def test_app_context_is_abstract(self): """Test that AppContext cannot be instantiated directly.""" with pytest.raises(TypeError): - AppContext() # type: ignore + AppContext() # type: ignore[operator] class TestNullAppContext: diff --git a/api/tests/unit_tests/core/workflow/entities/test_pause_reason.py b/api/tests/unit_tests/core/workflow/entities/test_pause_reason.py index 158f7018b5..5a4430ac16 100644 --- a/api/tests/unit_tests/core/workflow/entities/test_pause_reason.py +++ b/api/tests/unit_tests/core/workflow/entities/test_pause_reason.py @@ -80,7 +80,7 @@ class TestPauseReasonDiscriminator: def test_model_construct_with_invalid_type(self): with pytest.raises(ValidationError): - holder = _Holder(reason=object()) # type: ignore + holder = _Holder(reason=object()) # type: ignore[arg-type] def test_unknown_type_fails_validation(self): """Unknown TYPE values should raise a validation error.""" diff --git a/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py b/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py index 81d3f5be9c..5564b4ff9a 100644 --- a/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py +++ b/api/tests/unit_tests/core/workflow/nodes/base/test_base_node.py @@ -28,7 +28,7 @@ def _get_all_subclasses(root: type[Node]) -> list[type[Node]]: def test_ensure_subclasses_of_base_node_has_node_type_and_version_method_defined(): - classes = _get_all_subclasses(Node) # type: ignore + classes = _get_all_subclasses(Node) # type: ignore[attr-defined] type_version_set: set[tuple[NodeType, str]] = set() for cls in classes: diff --git a/api/tests/unit_tests/libs/_human_input/support.py b/api/tests/unit_tests/libs/_human_input/support.py index 3fff54f487..05719ec816 100644 --- a/api/tests/unit_tests/libs/_human_input/support.py +++ b/api/tests/unit_tests/libs/_human_input/support.py @@ -110,7 +110,7 @@ class FormSubmissionData: submitted_at: datetime = field(default_factory=datetime.utcnow) @classmethod - def from_request(cls, form_id: str, request: FormSubmissionRequest) -> FormSubmissionData: # type: ignore + def from_request(cls, form_id: str, request: FormSubmissionRequest) -> FormSubmissionData: # type: ignore[misc] return cls(form_id=form_id, inputs=request.inputs, action=request.action) diff --git a/api/tests/unit_tests/libs/test_external_api.py b/api/tests/unit_tests/libs/test_external_api.py index 5135970bcc..4f8f376a60 100644 --- a/api/tests/unit_tests/libs/test_external_api.py +++ b/api/tests/unit_tests/libs/test_external_api.py @@ -128,8 +128,8 @@ def test_unauthorized_and_force_logout_clears_cookies(): api = ExternalApi(bp) @api.route("/force-logout") - class ForceLogout(Resource): # type: ignore - def get(self): # type: ignore + class ForceLogout(Resource): # type: ignore[misc] + def get(self): # type: ignore[misc] raise UnauthorizedAndForceLogout() app.register_blueprint(bp, url_prefix="/api") diff --git a/dev/reformat b/dev/reformat index 6966267193..c6e3f99744 100755 --- 
a/dev/reformat +++ b/dev/reformat @@ -1,6 +1,7 @@ #!/bin/bash set -x +set -euo pipefail SCRIPT_DIR="$(dirname "$(realpath "$0")")" cd "$SCRIPT_DIR/.." @@ -17,5 +18,8 @@ uv run --directory api --dev ruff format ./ # run dotenv-linter linter uv run --project api --dev dotenv-linter ./api/.env.example ./web/.env.example +# run pyrefly check +dev/pyrefly-check-local + # run basedpyright check dev/basedpyright-check
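Several hunks above (the document indexing proxies, the rag pipeline task proxies, and duplicate_document_indexing_task) narrow the ignore on `task_func.delay(...)` calls to explicit codes. The recurring cause is that a Celery task passed around as a plain callable loses its task attributes as far as the type checker is concerned. A small self-contained sketch of that pattern, with invented task and argument names:

    from collections.abc import Callable

    from celery import shared_task


    @shared_task
    def index_documents(tenant_id: str, document_ids: list[str]) -> None:
        """Background task body; a configured broker is needed to actually run it."""


    def dispatch(task_func: Callable[[str, list[str]], None]) -> None:
        # `task_func` is typed as a plain callable, so the checker does not know
        # about Celery's `.delay`; the ignore is scoped to that specific error code.
        task_func.delay("tenant-1", ["doc-1"])  # type: ignore[attr-defined]

Calling `dispatch(index_documents)` enqueues the task once a broker is configured; scoping the ignore to `attr-defined` keeps other diagnostics on that line visible.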
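The `# type: ignore[misc]` added to the `bool` overload of `_truncate_json_primitives` in api/services/variable_truncator.py reflects a general checker behaviour: `bool` is a subclass of `int`, so a `bool` overload listed before an `int` overload with a different return type is reported as overlapping. A standalone illustration, unrelated to the repository's code:

    from typing import overload


    @overload
    def wrap(value: bool) -> str: ...  # type: ignore[misc]  # overlaps the int overload below
    @overload
    def wrap(value: int) -> int: ...
    def wrap(value: bool | int) -> str | int:
        # bool must be matched first because isinstance(True, int) is also True.
        return f"flag={value}" if isinstance(value, bool) else value * 2

The scoped comment documents why the suppression is needed while leaving other diagnostics on that line active.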