diff --git a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py
index 284429b7417e32..a8a13313db946d 100644
--- a/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py
+++ b/api/core/model_runtime/model_providers/huggingface_tei/text_embedding/text_embedding.py
@@ -157,7 +157,6 @@ def validate_credentials(self, model: str, credentials: dict) -> None:
                 headers["Authorization"] = f"Bearer {api_key}"
 
             extra_args = TeiHelper.get_tei_extra_parameter(server_url, model, headers)
-            print(extra_args)
             if extra_args.model_type != "embedding":
                 raise CredentialsValidateFailedError("Current model is not a embedding model")
 
diff --git a/api/core/workflow/graph_engine/graph_engine.py b/api/core/workflow/graph_engine/graph_engine.py
index d7d33c65fcdb38..854036b2c13212 100644
--- a/api/core/workflow/graph_engine/graph_engine.py
+++ b/api/core/workflow/graph_engine/graph_engine.py
@@ -612,8 +612,8 @@ def _run_node(
         max_retries = node_instance.node_data.retry_config.max_retries
         retry_interval = node_instance.node_data.retry_config.retry_interval_seconds
         retries = 0
-        shoudl_continue_retry = True
-        while shoudl_continue_retry and retries <= max_retries:
+        should_continue_retry = True
+        while should_continue_retry and retries <= max_retries:
             try:
                 # run node
                 retry_start_at = datetime.now(UTC).replace(tzinfo=None)
@@ -692,7 +692,7 @@
                                         parent_parallel_id=parent_parallel_id,
                                         parent_parallel_start_node_id=parent_parallel_start_node_id,
                                     )
-                                    shoudl_continue_retry = False
+                                    should_continue_retry = False
                                 else:
                                     yield NodeRunFailedEvent(
                                         error=route_node_state.failed_reason or "Unknown error.",
@@ -706,7 +706,7 @@
                                         parent_parallel_id=parent_parallel_id,
                                         parent_parallel_start_node_id=parent_parallel_start_node_id,
                                     )
-                                    shoudl_continue_retry = False
+                                    should_continue_retry = False
                             elif run_result.status == WorkflowNodeExecutionStatus.SUCCEEDED:
                                 if node_instance.should_continue_on_error and self.graph.edge_mapping.get(
                                     node_instance.node_id
@@ -758,7 +758,7 @@
                                     parent_parallel_id=parent_parallel_id,
                                     parent_parallel_start_node_id=parent_parallel_start_node_id,
                                 )
-                                shoudl_continue_retry = False
+                                should_continue_retry = False
 
                             break
                         elif isinstance(item, RunStreamChunkEvent):
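For context on the second file's change: the renamed `should_continue_retry` flag gates a bounded retry loop inside `_run_node`. Below is a minimal, self-contained sketch of that flag-plus-counter pattern, separate from the patch itself; `run_with_retries` and `run_once` are hypothetical names used only for illustration and are not part of Dify's code.

```python
import time


def run_with_retries(run_once, max_retries: int = 3, retry_interval: float = 1.0):
    """Illustrative sketch: retry `run_once` until it succeeds or the budget is exhausted."""
    retries = 0
    should_continue_retry = True
    result = None
    while should_continue_retry and retries <= max_retries:
        try:
            result = run_once()
            # A successful attempt clears the flag and ends the loop, the same role
            # `should_continue_retry = False` plays in `_run_node`.
            should_continue_retry = False
        except Exception:
            retries += 1
            if retries > max_retries:
                # Retry budget exhausted: surface the last error to the caller.
                raise
            time.sleep(retry_interval)
    return result


# Example usage with a hypothetical flaky callable:
# value = run_with_retries(lambda: fetch_remote_value(), max_retries=2, retry_interval=0.5)
```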