diff --git a/api/core/model_runtime/model_providers/ibm/__init__.py b/api/core/model_runtime/model_providers/ibm/__init__.py
new file mode 100644
index 00000000000000..e69de29bb2d1d6
diff --git a/api/core/model_runtime/model_providers/ibm/_assets/icon_l_en.svg b/api/core/model_runtime/model_providers/ibm/_assets/icon_l_en.svg
new file mode 100644
index 00000000000000..30c40d7d7a4705
--- /dev/null
+++ b/api/core/model_runtime/model_providers/ibm/_assets/icon_l_en.svg
@@ -0,0 +1,45 @@
+
+
+
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/ibm/_assets/icon_s_en.svg b/api/core/model_runtime/model_providers/ibm/_assets/icon_s_en.svg
new file mode 100644
index 00000000000000..f1fe82bc954e56
--- /dev/null
+++ b/api/core/model_runtime/model_providers/ibm/_assets/icon_s_en.svg
@@ -0,0 +1,16 @@
+
+
+
\ No newline at end of file
diff --git a/api/core/model_runtime/model_providers/ibm/ibm.py b/api/core/model_runtime/model_providers/ibm/ibm.py
new file mode 100644
index 00000000000000..9426aa76e9e8df
--- /dev/null
+++ b/api/core/model_runtime/model_providers/ibm/ibm.py
@@ -0,0 +1,34 @@
+import logging
+
+from ibm_watsonx_ai import APIClient, Credentials
+
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.model_provider import ModelProvider
+
+logger = logging.getLogger(__name__)
+
+
+class IbmProvider(ModelProvider):
+ def validate_provider_credentials(self, credentials: dict) -> None:
+ """
+ Validate provider credentials
+
+        If validation fails, raise an exception.
+
+ :param credentials: provider credentials, credentials form defined in `provider_credential_schema`.
+ """
+        try:
+            ibm_credentials = Credentials(
+                url=credentials.get("base_url"),
+                api_key=credentials.get("api_key"),
+            )
+
+            # Instantiating APIClient authenticates against watsonx.ai and
+            # raises if the API key or base URL is invalid.
+            APIClient(ibm_credentials)
+        except CredentialsValidateFailedError as ex:
+            raise ex
+ except Exception as ex:
+            logger.exception(
+                f"{self.get_provider_schema().provider} credentials validation failed")
+            raise ex
diff --git a/api/core/model_runtime/model_providers/ibm/ibm.yaml b/api/core/model_runtime/model_providers/ibm/ibm.yaml
new file mode 100644
index 00000000000000..7d7f6c4ebbafad
--- /dev/null
+++ b/api/core/model_runtime/model_providers/ibm/ibm.yaml
@@ -0,0 +1,133 @@
+provider: ibm
+label:
+ zh_Hans: IBM
+ en_US: IBM
+icon_small:
+ en_US: icon_s_en.svg
+icon_large:
+ en_US: icon_l_en.svg
+background: "#E3F0FF"
+help:
+ title:
+ en_US: Get your API key from IBM
+ zh_Hans: 从 IBM 获取 API Key
+ url:
+ en_US: https://cloud.ibm.com/iam/apikeys
+supported_model_types:
+ - llm
+configurate_methods:
+ - customizable-model
+provider_credential_schema:
+ credential_form_schemas:
+ - variable: api_key
+ label:
+ zh_Hans: API Key
+ en_US: API Key
+ type: secret-input
+ required: true
+ placeholder:
+ zh_Hans: 在此输入您的 API Key
+ en_US: Enter your API Key
+ show_on: [ ]
+ - variable: base_url
+ label:
+ zh_Hans: API Base URL
+ en_US: API Base URL
+ type: text-input
+ required: false
+ placeholder:
+ zh_Hans: 在此输入您的 API Base URL,如 https://eu-de.ml.cloud.ibm.com
+ en_US: Enter your API Base URL, e.g. https://eu-de.ml.cloud.ibm.com
+model_credential_schema:
+ model:
+ label:
+ en_US: Model Name
+ zh_Hans: 模型名称
+ placeholder:
+ en_US: Enter your model name
+ zh_Hans: 输入模型名称
+ credential_form_schemas:
+ - variable: mode
+ show_on:
+ - variable: __model_type
+ value: llm
+ label:
+ en_US: Completion mode
+ type: select
+ required: false
+ default: chat
+ placeholder:
+ zh_Hans: 选择对话类型
+ en_US: Select completion mode
+ options:
+ - value: completion
+ label:
+ en_US: Completion
+ zh_Hans: 补全
+ - value: chat
+ label:
+ en_US: Chat
+ zh_Hans: 对话
+ - variable: api_key
+ label:
+ en_US: API Key
+ type: secret-input
+ required: true
+ placeholder:
+ zh_Hans: 在此输入您的 API Key
+ en_US: Enter your API Key
+ - variable: base_url
+ label:
+ zh_Hans: API Base URL
+ en_US: API Base URL
+ type: text-input
+ required: true
+ placeholder:
+ zh_Hans: 在此输入您的 API Base URL,如 https://eu-de.ml.cloud.ibm.com
+ en_US: Enter your API Base URL, e.g. https://eu-de.ml.cloud.ibm.com
+ - variable: project_id
+ label:
+ zh_Hans: Project ID
+ en_US: Project ID
+ type: text-input
+ required: true
+ placeholder:
+        zh_Hans: 在此输入您的 Project ID
+ en_US: Enter your Project ID
+ - variable: function_calling_type
+ show_on:
+ - variable: __model_type
+ value: llm
+ label:
+ en_US: Function calling
+ type: select
+ required: false
+ default: no_call
+ options:
+ - value: tool_call
+ label:
+ en_US: Tool Call
+ zh_Hans: Tool Call
+ - value: no_call
+ label:
+            en_US: Not Supported
+ zh_Hans: 不支持
+ - variable: vision_support
+ show_on:
+ - variable: __model_type
+ value: llm
+ label:
+ zh_Hans: Vision 支持
+ en_US: Vision Support
+ type: select
+ required: false
+ default: no_support
+ options:
+ - value: support
+ label:
+            en_US: Supported
+ zh_Hans: 支持
+ - value: no_support
+ label:
+            en_US: Not Supported
+ zh_Hans: 不支持
diff --git a/api/core/model_runtime/model_providers/ibm/llm/__init__.py b/api/core/model_runtime/model_providers/ibm/llm/__init__.py
new file mode 100644
index 00000000000000..e69de29bb2d1d6
diff --git a/api/core/model_runtime/model_providers/ibm/llm/llm.py b/api/core/model_runtime/model_providers/ibm/llm/llm.py
new file mode 100644
index 00000000000000..eb63ec01b53931
--- /dev/null
+++ b/api/core/model_runtime/model_providers/ibm/llm/llm.py
@@ -0,0 +1,985 @@
+from collections.abc import Generator
+from typing import Any, Optional, Union, cast
+
+import requests
+from ibm_watsonx_ai import APIClient, Credentials
+from ibm_watsonx_ai.foundation_models import ModelInference
+from ibm_watsonx_ai.foundation_models.schema import TextChatParameters, TextGenParameters
+
+from core.model_runtime.entities.llm_entities import (
+ LLMMode,
+ LLMResult,
+ LLMResultChunk,
+ LLMResultChunkDelta,
+)
+from core.model_runtime.entities.message_entities import (
+ AssistantPromptMessage,
+ ImagePromptMessageContent,
+ PromptMessage,
+ PromptMessageContent,
+ PromptMessageContentType,
+ PromptMessageFunction,
+ PromptMessageTool,
+ SystemPromptMessage,
+ ToolPromptMessage,
+ UserPromptMessage,
+)
+from core.model_runtime.entities.model_entities import (
+ AIModelEntity,
+ DefaultParameterName,
+ FetchFrom,
+ I18nObject,
+ ModelFeature,
+ ModelPropertyKey,
+ ModelType,
+ ParameterRule,
+ ParameterType,
+)
+from core.model_runtime.errors.invoke import (
+ InvokeAuthorizationError,
+ InvokeBadRequestError,
+ InvokeConnectionError,
+ InvokeError,
+ InvokeRateLimitError,
+ InvokeServerUnavailableError,
+)
+from core.model_runtime.errors.validate import CredentialsValidateFailedError
+from core.model_runtime.model_providers.__base.large_language_model import LargeLanguageModel
+from core.model_runtime.utils import helper
+
+
+class IbmLargeLanguageModel(LargeLanguageModel):
+ """
+ Model class for IBM large language model.
+ """
+
+ def _invoke(
+ self,
+ model: str,
+ credentials: dict,
+ prompt_messages: list[PromptMessage],
+ model_parameters: dict,
+ tools: Optional[list[PromptMessageTool]] = None,
+ stop: Optional[list[str]] = None,
+ stream: bool = True,
+ user: Optional[str] = None,
+ ) -> Union[LLMResult, Generator]:
+ """
+ Invoke large language model
+
+ :param model: model name
+ :param credentials: model credentials
+ :param prompt_messages: prompt messages
+ :param model_parameters: model parameters
+ :param tools: tools for tool calling
+ :param stop: stop words
+ :param stream: is stream response
+ :param user: unique user id
+ :return: full response or stream response chunk generator result
+ """
+
+ model_mode = self.get_model_mode(model, credentials)
+
+ if model_mode == LLMMode.CHAT:
+ return self._chat_generate(
+ model=model,
+ credentials=credentials,
+ prompt_messages=prompt_messages,
+ model_parameters=model_parameters,
+ tools=tools,
+ stop=stop,
+ stream=stream,
+ user=user,
+ )
+ else:
+ return self._generate(
+ model=model,
+ credentials=credentials,
+ prompt_messages=prompt_messages,
+ model_parameters=model_parameters,
+ stop=stop,
+ stream=stream,
+ user=user,
+ )
+
+ def get_num_tokens(
+ self,
+ model: str,
+ credentials: dict,
+ prompt_messages: list[PromptMessage],
+ tools: Optional[list[PromptMessageTool]] = None,
+ ) -> int:
+ """
+ Get number of tokens for given prompt messages
+
+ :param model:
+ :param credentials:
+ :param prompt_messages:
+ :param tools: tools for tool calling
+ :return:
+ """
+ return self._num_tokens_from_messages(model, prompt_messages, tools, credentials)
+
+ def validate_credentials(self, model: str, credentials: dict) -> None:
+ """
+ Validate model credentials
+
+ :param model: model name
+ :param credentials: model credentials
+ :return:
+ """
+
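+        # A minimal one-shot generation ("ping") doubles as a live check of the
+        # API key, base URL and project id.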
+ try:
+            model_mode = self.get_model_mode(model, credentials)
+
+ if model_mode == LLMMode.CHAT:
+ self._chat_generate(
+ model=model,
+ credentials=credentials,
+ prompt_messages=[UserPromptMessage(content="ping")],
+ model_parameters={
+ "max_tokens": 20,
+ "temperature": 0,
+ },
+ stream=False,
+ )
+ else:
+ self._generate(
+ model=model,
+ credentials=credentials,
+ prompt_messages=[UserPromptMessage(content="ping")],
+ model_parameters={
+ "max_new_tokens": 20,
+ "temperature": 0,
+ },
+ stream=False,
+ )
+ except Exception as ex:
+ raise CredentialsValidateFailedError(str(ex))
+
+ def get_customizable_model_schema(self, model: str, credentials: dict) -> AIModelEntity:
+ """
+ generate custom model entities from credentials
+
+ :param model: model name
+ :param credentials: credentials
+
+ :return: AIModelEntity
+ """
+
+ features = []
+
+ # tool_call support
+ function_calling_type = credentials.get("function_calling_type", "no_call")
+ if function_calling_type == "tool_call":
+ features.append(ModelFeature.MULTI_TOOL_CALL)
+
+ # vision_support
+        vision_support = credentials.get("vision_support", "no_support")
+ if vision_support == "support":
+ features.append(ModelFeature.VISION)
+
+ completion_model = None
+ if credentials.get("mode") == "chat":
+ completion_model = LLMMode.CHAT.value
+ elif credentials.get("mode") == "completion":
+ completion_model = LLMMode.COMPLETION.value
+
+ model_properties = (
+ {
+ ModelPropertyKey.MODE: completion_model,
+ }
+ if completion_model
+ else {}
+ )
+
+ model_parameters_rules = [
+ ParameterRule(
+ name=DefaultParameterName.TEMPERATURE.value,
+ label=I18nObject(en_US="Temperature", zh_Hans="温度"),
+ type=ParameterType.FLOAT,
+ default=0.7,
+ min=0,
+ max=2,
+ precision=2,
+ ),
+ ParameterRule(
+ name=DefaultParameterName.TOP_P.value,
+ label=I18nObject(en_US="Top P", zh_Hans="Top P"),
+ type=ParameterType.FLOAT,
+                default=1.0,
+ min=0,
+ max=1,
+ precision=2,
+ ),
+ ]
+
+        if completion_model == LLMMode.CHAT.value:
+            model_parameters_rules.extend(
+                [
+                    ParameterRule(
+                        name=DefaultParameterName.FREQUENCY_PENALTY.value,
+                        label=I18nObject(en_US="Frequency Penalty", zh_Hans="频率惩罚"),
+                        type=ParameterType.FLOAT,
+                        default=0.5,
+                        min=-2,
+                        max=2,
+                    ),
+                    ParameterRule(
+                        name=DefaultParameterName.PRESENCE_PENALTY.value,
+                        label=I18nObject(en_US="Presence Penalty", zh_Hans="存在惩罚"),
+                        type=ParameterType.FLOAT,
+                        default=0.3,
+                        min=-2,
+                        max=2,
+                    ),
+                    ParameterRule(
+                        name=DefaultParameterName.MAX_TOKENS.value,
+                        label=I18nObject(en_US="Max Tokens", zh_Hans="最大标记"),
+                        type=ParameterType.INT,
+                        default=4096,
+                        min=1,
+                        max=128000,
+                    ),
+                ]
+            )
+        else:
+            model_parameters_rules.extend(
+                [
+                    ParameterRule(
+                        name="max_new_tokens",
+                        label=I18nObject(en_US="Max New Tokens", zh_Hans="最大新标记数"),
+                        type=ParameterType.INT,
+                        default=4096,
+                        min=1,
+                        max=128000,
+                    ),
+                    ParameterRule(
+                        name="min_new_tokens",
+                        label=I18nObject(en_US="Min New Tokens", zh_Hans="最小新标记数"),
+                        type=ParameterType.INT,
+                        default=0,
+                        min=0,
+                    ),
+                    ParameterRule(
+                        name="repetition_penalty",
+                        label=I18nObject(en_US="Repetition Penalty", zh_Hans="重复惩罚"),
+                        type=ParameterType.FLOAT,
+                        default=1.0,
+                        min=1,
+                        max=2,
+                        precision=2,
+                    ),
+                    ParameterRule(
+                        name=DefaultParameterName.TOP_K.value,
+                        label=I18nObject(en_US="Top K", zh_Hans="Top K"),
+                        type=ParameterType.INT,
+                        default=50,
+                        min=1,
+                        max=100,
+                    ),
+                ]
+            )
+
+        entity = AIModelEntity(
+            model=model,
+            label=I18nObject(zh_Hans=model, en_US=model),
+            model_type=ModelType.LLM,
+            features=features,
+            fetch_from=FetchFrom.CUSTOMIZABLE_MODEL,
+            model_properties=model_properties,
+            parameter_rules=model_parameters_rules,
+        )
+
+ return entity
+
+ def _generate(
+ self,
+ model: str,
+ credentials: dict,
+ prompt_messages: list[PromptMessage],
+ model_parameters: dict,
+ stop: Optional[list[str]] = None,
+ stream: bool = True,
+ user: Optional[str] = None,
+ ) -> Union[LLMResult, Generator]:
+ """
+ Generate llm model
+
+ :param model: model name
+ :param credentials: credentials
+ :param prompt_messages: prompt messages
+ :param model_parameters: model parameters
+ :param stop: stop words
+ :param stream: is stream response
+ :param user: unique user id
+ :return: full response or stream response chunk generator result
+ """
+
+ # initialize credentials, client and model_inference
+        ibm_credentials = Credentials(
+            url=credentials.get("base_url"),
+            api_key=credentials.get("api_key"),
+        )
+        client = APIClient(ibm_credentials, project_id=credentials.get("project_id"))
+ model_inference = ModelInference(model_id=model, api_client=client)
+
+ params = TextGenParameters(**model_parameters)
+ if stop:
+ params.stop_sequences = stop
+
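+        # Completion mode expects a single rendered prompt string, so only the
+        # first prompt message's content is sent.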
+ if stream:
+ response = model_inference.generate_text_stream(
+ prompt=prompt_messages[0].content, params=params, raw_response=True
+ )
+
+ return self._handle_generate_stream_response(model, credentials, response, prompt_messages)
+ else:
+ response = model_inference.generate_text(
+ prompt=prompt_messages[0].content, params=params, raw_response=True
+ )
+
+ return self._handle_generate_response(model, credentials, response, prompt_messages)
+
+ def _handle_generate_response(
+ self, model: str, credentials: dict, response: Any, prompt_messages: list[PromptMessage]
+ ) -> LLMResult:
+ """
+ Handle llm generate response
+
+ :param model: model name
+ :param credentials: credentials
+ :param response: response
+ :param prompt_messages: prompt messages
+ :return: llm response
+ """
+        results_list = response.get("results")
+        if not results_list:
+            raise InvokeBadRequestError(f"Invalid response structure: missing or empty 'results': {response}")
+        results = results_list[0]
+
+ # get assistant_text and transform it to prompt message
+ assistant_text = results.get("generated_text")
+ assistant_prompt_message = AssistantPromptMessage(content=assistant_text)
+
+ # calculate tokens and usage
+        prompt_tokens = results.get("input_token_count")
+        if prompt_tokens is None:
+            prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, credentials=credentials)
+        completion_tokens = results.get("generated_token_count")
+        if completion_tokens is None:
+            completion_tokens = self._num_tokens_from_messages(model, [assistant_prompt_message], credentials=credentials)
+ usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
+
+ return LLMResult(model=model, prompt_messages=prompt_messages, message=assistant_prompt_message, usage=usage)
+
+ def _handle_generate_stream_response(
+ self,
+ model: str,
+ credentials: dict,
+        response: Generator[Any, None, None],
+ prompt_messages: list[PromptMessage],
+ ) -> Generator:
+ """
+ Handle llm generate stream response
+
+ :param model: model name
+ :param credentials: credentials
+ :param response: response
+ :param prompt_messages: prompt messages
+ :return: llm response chunk generator
+ """
+ chunk_index = 0
+ full_assistant_content = ""
+
+ for chunk in response:
+            results_list = chunk.get("results")
+            if not results_list:
+                raise InvokeBadRequestError(f"Invalid chunk structure: missing or empty 'results': {chunk}")
+            results = results_list[0]
+
+ finish_reason = results.get("stop_reason")
+
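+            # watsonx reports stop_reason "not_finished" on intermediate chunks;
+            # any other value (e.g. "eos_token", "max_tokens") marks the final chunk.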
+ if finish_reason == "not_finished":
+ # Process chunk's generated_text
+ generated_text = results.get("generated_text")
+
+ if not generated_text: # Skip chunks with empty or None generated_text
+ continue
+
+ # Create assistant message and append content
+ assistant_prompt_message = AssistantPromptMessage(content=generated_text)
+ full_assistant_content += generated_text
+
+ yield LLMResultChunk(
+ model=model,
+ prompt_messages=prompt_messages,
+ delta=LLMResultChunkDelta(
+ index=chunk_index,
+ message=assistant_prompt_message,
+ ),
+ )
+
+ chunk_index += 1
+ continue # Skip further processing for already handled chunk
+
+ else:
+ # calculate tokens and usage
+                prompt_tokens = results.get("input_token_count")
+                if prompt_tokens is None:
+                    prompt_tokens = self._num_tokens_from_messages(model, prompt_messages, credentials=credentials)
+                completion_tokens = results.get("generated_token_count")
+                if completion_tokens is None:
+                    completion_tokens = self._num_tokens_from_messages(
+                        model, [AssistantPromptMessage(content=full_assistant_content)], credentials=credentials
+                    )
+ usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
+
+ yield LLMResultChunk(
+ model=model,
+ prompt_messages=prompt_messages,
+ delta=LLMResultChunkDelta(
+ index=chunk_index,
+ message=AssistantPromptMessage(content=""),
+ finish_reason=finish_reason,
+ usage=usage,
+ ),
+ )
+ break
+
+ def _chat_generate(
+ self,
+ model: str,
+ credentials: dict,
+ prompt_messages: list[PromptMessage],
+ model_parameters: dict,
+ tools: Optional[list[PromptMessageTool]] = None,
+ stop: Optional[list[str]] = None,
+ stream: bool = True,
+ user: Optional[str] = None,
+ ) -> Union[LLMResult, Generator]:
+ """
+ Invoke llm chat model
+
+ :param model: model name
+ :param credentials: credentials
+ :param prompt_messages: prompt messages
+ :param model_parameters: model parameters
+ :param tools: tools for tool calling
+ :param stop: stop words
+ :param stream: is stream response
+ :param user: unique user id
+ :return: full response or stream response chunk generator result
+ """
+
+ # initialize credentials, client and model_inference
+        ibm_credentials = Credentials(
+            url=credentials.get("base_url"),
+            api_key=credentials.get("api_key"),
+        )
+        client = APIClient(ibm_credentials, project_id=credentials.get("project_id"))
+ model_inference = ModelInference(model_id=model, api_client=client)
+
+        # TextChatParameters validates the chat parameters; it has no stop-sequence
+        # field, so `stop` is not applied in chat mode.
+        params = TextChatParameters(**model_parameters)
+
+ messages = [self._convert_prompt_message_to_dict(m, credentials) for m in prompt_messages]
+        # Filter messages: IBM LLM invocation allows at most one image per request
+ messages = self.filter_prompt_image_messages(messages)
+
+ function_calling_type = credentials.get("function_calling_type", "no_call")
+ formatted_tools = None
+ if tools and function_calling_type == "tool_call":
+ formatted_tools = []
+ for tool in tools:
+ formatted_tools.append(helper.dump_model(PromptMessageFunction(function=tool)))
+
+ if stream:
+ response = model_inference.chat_stream(
+ messages=messages,
+                params=params,
+ tools=formatted_tools,
+ )
+
+ return self._handle_chat_generate_stream_response(model, credentials, response, prompt_messages)
+ else:
+ response = model_inference.chat(
+ messages=messages,
+                params=params,
+ tools=formatted_tools,
+ )
+
+ return self._handle_chat_generate_response(model, credentials, response, prompt_messages)
+
+ def _handle_chat_generate_response(
+ self, model: str, credentials: dict, response: dict, prompt_messages: list[PromptMessage]
+ ) -> LLMResult:
+ """
+ Handle llm chat response
+
+ :param model: model name
+ :param credentials: credentials
+ :param response: response
+ :param prompt_messages: prompt messages
+ :return: LLMResult - llm response
+ """
+
+ function_calling_type = credentials.get("function_calling_type", "no_call")
+
+ output = response["choices"][0]
+ message_id = response.get("id")
+
+ response_content = output.get("message", {}).get("content", None)
+
+ tool_calls = None
+ if function_calling_type == "tool_call":
+ tool_calls = output.get("message", {}).get("tool_calls")
+
+ assistant_message = AssistantPromptMessage(content=response_content, tool_calls=[])
+
+ if tool_calls and function_calling_type == "tool_call":
+ assistant_message.tool_calls = self._extract_response_tool_calls(tool_calls)
+
+ # calculate tokens and usage
+ if response.get("usage"):
+ prompt_tokens = response["usage"]["prompt_tokens"]
+ completion_tokens = response["usage"]["completion_tokens"]
+ else:
+ prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content)
+ completion_tokens = self._num_tokens_from_string(model, assistant_message.content)
+ usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
+
+ return LLMResult(
+ id=message_id, model=model, prompt_messages=prompt_messages, message=assistant_message, usage=usage
+ )
+
+ def _handle_chat_generate_stream_response(
+ self, model: str, credentials: dict, response: Generator, prompt_messages: list[PromptMessage]
+ ) -> Generator:
+ """
+ Handle llm stream response
+
+ :param model: model name
+ :param credentials: model credentials
+ :param response: streamed response
+ :param prompt_messages: prompt messages
+ :return: llm response chunk generator
+ """
+ chunk_index = 0
+ full_assistant_content = ""
+
+        def create_final_llm_result_chunk(
+            id: Optional[str], index: int, message: AssistantPromptMessage, finish_reason: str, usage: Optional[dict]
+        ) -> LLMResultChunk:
+            # calculate tokens and usage
+            prompt_tokens = usage.get("prompt_tokens") if usage else None
+            if prompt_tokens is None:
+                prompt_tokens = self._num_tokens_from_string(model, prompt_messages[0].content)
+            completion_tokens = usage.get("completion_tokens") if usage else None
+            if completion_tokens is None:
+                completion_tokens = self._num_tokens_from_string(model, full_assistant_content)
+            usage = self._calc_response_usage(model, credentials, prompt_tokens, completion_tokens)
+
+ return LLMResultChunk(
+ id=id,
+ model=model,
+ prompt_messages=prompt_messages,
+ delta=LLMResultChunkDelta(index=index, message=message, finish_reason=finish_reason, usage=usage),
+ )
+
+ tools_calls: list[AssistantPromptMessage.ToolCall] = []
+
+ def increase_tool_call(new_tool_calls: list[AssistantPromptMessage.ToolCall]):
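+            # Merge streamed tool-call deltas: a chunk may carry only part of a
+            # call's id, name, or arguments, so arguments accumulate across chunks.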
+ def get_tool_call(tool_call_id: str):
+ if not tool_call_id:
+ return tools_calls[-1]
+
+ tool_call = next((tool_call for tool_call in tools_calls if tool_call.id == tool_call_id), None)
+ if tool_call is None:
+ tool_call = AssistantPromptMessage.ToolCall(
+ id=tool_call_id,
+ type="function",
+ function=AssistantPromptMessage.ToolCall.ToolCallFunction(name="", arguments=""),
+ )
+ tools_calls.append(tool_call)
+
+ return tool_call
+
+ for new_tool_call in new_tool_calls:
+ # get tool call
+ tool_call = get_tool_call(new_tool_call.function.name)
+ # update tool call
+ if new_tool_call.id:
+ tool_call.id = new_tool_call.id
+ if new_tool_call.type:
+ tool_call.type = new_tool_call.type
+ if new_tool_call.function.name:
+ tool_call.function.name = new_tool_call.function.name
+ if new_tool_call.function.arguments:
+ tool_call.function.arguments += new_tool_call.function.arguments
+
+ finish_reason = None # The default value of finish_reason is None
+ message_id, usage = None, None
+
+        for chunk in response:
+            if chunk:
+                if u := chunk.get("usage"):
+                    usage = u
+            if not chunk or len(chunk["choices"]) == 0:
+                continue
+
+ choice = chunk["choices"][0]
+ finish_reason = chunk["choices"][0].get("finish_reason")
+ message_id = chunk.get("id")
+ chunk_index += 1
+
+ if "delta" in choice:
+ delta = choice["delta"]
+ delta_content = delta.get("content")
+
+ assistant_message_tool_calls = None
+
+ if "tool_calls" in delta and credentials.get("function_calling_type", "no_call") == "tool_call":
+ assistant_message_tool_calls = delta.get("tool_calls", None)
+
+ # extract tool calls from response
+ if assistant_message_tool_calls:
+ tool_calls = self._extract_response_tool_calls(assistant_message_tool_calls)
+ increase_tool_call(tool_calls)
+
+ if delta_content is None or delta_content == "":
+ continue
+
+ # transform assistant message to prompt message
+ assistant_prompt_message = AssistantPromptMessage(
+ content=delta_content,
+ )
+
+ # reset tool calls
+ tool_calls = []
+ full_assistant_content += delta_content
+ elif "text" in choice:
+ choice_text = choice.get("text", "")
+ if choice_text == "":
+ continue
+
+ # transform assistant message to prompt message
+ assistant_prompt_message = AssistantPromptMessage(content=choice_text)
+ full_assistant_content += choice_text
+ else:
+ continue
+
+ yield LLMResultChunk(
+ id=message_id,
+ model=model,
+ prompt_messages=prompt_messages,
+ delta=LLMResultChunkDelta(
+ index=chunk_index,
+ message=assistant_prompt_message,
+ ),
+ )
+
+ chunk_index += 1
+
+ if tools_calls:
+ yield LLMResultChunk(
+ id=message_id,
+ model=model,
+ prompt_messages=prompt_messages,
+ delta=LLMResultChunkDelta(
+ index=chunk_index,
+ message=AssistantPromptMessage(tool_calls=tools_calls, content=""),
+ ),
+ )
+
+ yield create_final_llm_result_chunk(
+ id=message_id,
+ index=chunk_index,
+ message=AssistantPromptMessage(content=""),
+ finish_reason=finish_reason,
+ usage=usage,
+ )
+
+ def _convert_prompt_message_to_dict(self, message: PromptMessage, credentials: Optional[dict] = None) -> dict:
+ """
+ Convert PromptMessage to dict for OpenAI API format
+ """
+ if isinstance(message, UserPromptMessage):
+ message = cast(UserPromptMessage, message)
+ if isinstance(message.content, str):
+ message_dict = {"role": "user", "content": message.content}
+ else:
+ sub_messages = []
+ for message_content in message.content:
+ if message_content.type == PromptMessageContentType.TEXT:
+ message_content = cast(PromptMessageContent, message_content)
+ sub_message_dict = {"type": "text", "text": message_content.data}
+ sub_messages.append(sub_message_dict)
+ elif message_content.type == PromptMessageContentType.IMAGE:
+ message_content = cast(ImagePromptMessageContent, message_content)
+ sub_message_dict = {
+ "type": "image_url",
+ "image_url": {"url": message_content.data, "detail": message_content.detail.value},
+ }
+ sub_messages.append(sub_message_dict)
+
+ message_dict = {"role": "user", "content": sub_messages}
+ elif isinstance(message, AssistantPromptMessage):
+ message = cast(AssistantPromptMessage, message)
+ message_dict = {"role": "assistant", "content": message.content}
+ if message.tool_calls:
+            function_calling_type = (credentials or {}).get("function_calling_type", "no_call")
+ if function_calling_type == "tool_call":
+ message_dict["tool_calls"] = [tool_call.dict() for tool_call in message.tool_calls]
+ elif function_calling_type == "function_call":
+ function_call = message.tool_calls[0]
+ message_dict["function_call"] = {
+ "name": function_call.function.name,
+ "arguments": function_call.function.arguments,
+ }
+ elif isinstance(message, SystemPromptMessage):
+ message = cast(SystemPromptMessage, message)
+ message_dict = {"role": "system", "content": message.content}
+ elif isinstance(message, ToolPromptMessage):
+ message = cast(ToolPromptMessage, message)
+            function_calling_type = (credentials or {}).get("function_calling_type", "no_call")
+ if function_calling_type == "tool_call":
+ message_dict = {"role": "tool", "content": message.content, "tool_call_id": message.tool_call_id}
+ elif function_calling_type == "function_call":
+ message_dict = {"role": "function", "content": message.content, "name": message.tool_call_id}
+ else:
+ raise ValueError(f"Got unknown type {message}")
+
+ if message.name and message_dict.get("role", "") != "tool":
+ message_dict["name"] = message.name
+
+ return message_dict
+
+ def _num_tokens_from_string(
+ self, model: str, text: Union[str, list[PromptMessageContent]], tools: Optional[list[PromptMessageTool]] = None
+ ) -> int:
+ """
+ Approximate num tokens for model with gpt2 tokenizer.
+
+ :param model: model name
+ :param text: prompt text
+ :param tools: tools for tool calling
+ :return: number of tokens
+ """
+ if isinstance(text, str):
+ full_text = text
+ else:
+ full_text = ""
+ for message_content in text:
+ if message_content.type == PromptMessageContentType.TEXT:
+ message_content = cast(PromptMessageContent, message_content)
+ full_text += message_content.data
+
+ num_tokens = self._get_num_tokens_by_gpt2(full_text)
+
+ if tools:
+ num_tokens += self._num_tokens_for_tools(tools)
+
+ return num_tokens
+
+ def _num_tokens_from_messages(
+ self,
+ model: str,
+ messages: list[PromptMessage],
+ tools: Optional[list[PromptMessageTool]] = None,
+ credentials: Optional[dict] = None,
+ ) -> int:
+ """
+ Approximate num tokens with GPT2 tokenizer.
+ """
+
+ tokens_per_message = 3
+ tokens_per_name = 1
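+        # The per-message/per-name overheads above follow OpenAI's chat token-counting
+        # heuristic; counts are approximate since the GPT-2 tokenizer differs from the
+        # actual watsonx model tokenizers.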
+
+ num_tokens = 0
+ messages_dict = [self._convert_prompt_message_to_dict(m, credentials) for m in messages]
+ for message in messages_dict:
+ num_tokens += tokens_per_message
+ for key, value in message.items():
+ # Cast str(value) in case the message value is not a string
+ # This occurs with function messages
+                # TODO: Token counting for image content is not implemented yet;
+                # it would require downloading each image to determine its resolution,
+                # which would add request latency
+ if isinstance(value, list):
+ text = ""
+ for item in value:
+ if isinstance(item, dict) and item["type"] == "text":
+ text += item["text"]
+
+ value = text
+
+ if key == "tool_calls":
+ for tool_call in value:
+ for t_key, t_value in tool_call.items():
+ num_tokens += self._get_num_tokens_by_gpt2(t_key)
+ if t_key == "function":
+ for f_key, f_value in t_value.items():
+ num_tokens += self._get_num_tokens_by_gpt2(f_key)
+ num_tokens += self._get_num_tokens_by_gpt2(f_value)
+ else:
+ num_tokens += self._get_num_tokens_by_gpt2(t_key)
+ num_tokens += self._get_num_tokens_by_gpt2(t_value)
+ else:
+ num_tokens += self._get_num_tokens_by_gpt2(str(value))
+
+ if key == "name":
+ num_tokens += tokens_per_name
+
+ # every reply is primed with assistant
+ num_tokens += 3
+
+ if tools:
+ num_tokens += self._num_tokens_for_tools(tools)
+
+ return num_tokens
+
+ def _num_tokens_for_tools(self, tools: list[PromptMessageTool]) -> int:
+ """
+        Calculate num tokens for tool calling with the GPT-2 tokenizer.
+
+ :param tools: tools for tool calling
+ :return: number of tokens
+ """
+ num_tokens = 0
+ for tool in tools:
+ num_tokens += self._get_num_tokens_by_gpt2("type")
+ num_tokens += self._get_num_tokens_by_gpt2("function")
+ num_tokens += self._get_num_tokens_by_gpt2("function")
+
+ # calculate num tokens for function object
+ num_tokens += self._get_num_tokens_by_gpt2("name")
+ num_tokens += self._get_num_tokens_by_gpt2(tool.name)
+ num_tokens += self._get_num_tokens_by_gpt2("description")
+ num_tokens += self._get_num_tokens_by_gpt2(tool.description)
+ parameters = tool.parameters
+ num_tokens += self._get_num_tokens_by_gpt2("parameters")
+ if "title" in parameters:
+ num_tokens += self._get_num_tokens_by_gpt2("title")
+ num_tokens += self._get_num_tokens_by_gpt2(parameters.get("title"))
+ num_tokens += self._get_num_tokens_by_gpt2("type")
+ num_tokens += self._get_num_tokens_by_gpt2(parameters.get("type"))
+ if "properties" in parameters:
+ num_tokens += self._get_num_tokens_by_gpt2("properties")
+ for key, value in parameters.get("properties").items():
+ num_tokens += self._get_num_tokens_by_gpt2(key)
+ for field_key, field_value in value.items():
+ num_tokens += self._get_num_tokens_by_gpt2(field_key)
+ if field_key == "enum":
+ for enum_field in field_value:
+ num_tokens += 3
+ num_tokens += self._get_num_tokens_by_gpt2(enum_field)
+ else:
+ num_tokens += self._get_num_tokens_by_gpt2(field_key)
+ num_tokens += self._get_num_tokens_by_gpt2(str(field_value))
+ if "required" in parameters:
+ num_tokens += self._get_num_tokens_by_gpt2("required")
+ for required_field in parameters["required"]:
+ num_tokens += 3
+ num_tokens += self._get_num_tokens_by_gpt2(required_field)
+
+ return num_tokens
+
+ def _extract_response_tool_calls(self, response_tool_calls: list[dict]) -> list[AssistantPromptMessage.ToolCall]:
+ """
+ Extract tool calls from response
+
+ :param response_tool_calls: response tool calls
+ :return: list of tool calls
+ """
+ tool_calls = []
+ if response_tool_calls:
+ for response_tool_call in response_tool_calls:
+ function = AssistantPromptMessage.ToolCall.ToolCallFunction(
+ name=response_tool_call.get("function", {}).get("name", ""),
+ arguments=response_tool_call.get("function", {}).get("arguments", ""),
+ )
+
+ tool_call = AssistantPromptMessage.ToolCall(
+ id=response_tool_call.get("id", ""), type=response_tool_call.get("type", ""), function=function
+ )
+ tool_calls.append(tool_call)
+
+ return tool_calls
+
+ def filter_prompt_image_messages(self, messages: list[dict]) -> list[dict]:
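+        """
+        Keep at most one image in the conversation.
+
+        watsonx.ai chat accepts at most one image per request, so all earlier
+        image-bearing user messages are dropped and only the most recent one is
+        kept (re-appended at the end of the message list).
+        """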
+ prompt_user_messages_with_images = [
+ message
+ for message in messages
+ if message["role"] == "user"
+ and not isinstance(message["content"], str)
+ and any(content.get("type") == "image_url" for content in message.get("content", []))
+ ]
+
+ if prompt_user_messages_with_images:
+ last_prompt_user_message_with_image = prompt_user_messages_with_images[-1]
+ messages = [
+ message
+ for message in messages
+ if not (
+ message["role"] == "user"
+ and not isinstance(message["content"], str)
+ and any(content.get("type") == "image_url" for content in message.get("content", []))
+ )
+ ]
+ messages.append(last_prompt_user_message_with_image)
+
+ return messages
+
+ @property
+ def _invoke_error_mapping(self) -> dict[type[InvokeError], list[type[Exception]]]:
+ """
+ Map model invoke error to unified error
+ The key is the error type thrown to the caller
+ The value is the error type thrown by the model,
+ which needs to be converted into a unified error type for the caller.
+
+ :return: Invoke error mapping
+ """
+ return {
+ Exception: [],
+ InvokeAuthorizationError: [
+ requests.exceptions.InvalidHeader, # Missing or Invalid API Key
+ ],
+ InvokeBadRequestError: [
+ requests.exceptions.HTTPError, # Invalid Endpoint URL or model name
+ requests.exceptions.InvalidURL, # Misconfigured request or other API error
+ ],
+ InvokeRateLimitError: [
+ requests.exceptions.RetryError # Too many requests sent in a short period of time
+ ],
+ InvokeServerUnavailableError: [
+ requests.exceptions.ConnectionError, # Engine Overloaded
+ requests.exceptions.HTTPError, # Server Error
+ ],
+ InvokeConnectionError: [
+ requests.exceptions.ConnectTimeout, # Timeout
+ requests.exceptions.ReadTimeout, # Timeout
+ ],
+ }
diff --git a/api/poetry.lock b/api/poetry.lock
index 958673a00bf947..f35d0143ca5ca7 100644
--- a/api/poetry.lock
+++ b/api/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
[[package]]
name = "aiohappyeyeballs"
@@ -945,10 +945,6 @@ files = [
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a37b8f0391212d29b3a91a799c8e4a2855e0576911cdfb2515487e30e322253d"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e84799f09591700a4154154cab9787452925578841a94321d5ee8fb9a9a328f0"},
{file = "Brotli-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f66b5337fa213f1da0d9000bc8dc0cb5b896b726eefd9c6046f699b169c41b9e"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5dab0844f2cf82be357a0eb11a9087f70c5430b2c241493fc122bb6f2bb0917c"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e4fe605b917c70283db7dfe5ada75e04561479075761a0b3866c081d035b01c1"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:1e9a65b5736232e7a7f91ff3d02277f11d339bf34099a56cdab6a8b3410a02b2"},
- {file = "Brotli-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:58d4b711689366d4a03ac7957ab8c28890415e267f9b6589969e74b6e42225ec"},
{file = "Brotli-1.1.0-cp310-cp310-win32.whl", hash = "sha256:be36e3d172dc816333f33520154d708a2657ea63762ec16b62ece02ab5e4daf2"},
{file = "Brotli-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c6244521dda65ea562d5a69b9a26120769b7a9fb3db2fe9545935ed6735b128"},
{file = "Brotli-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a3daabb76a78f829cafc365531c972016e4aa8d5b4bf60660ad8ecee19df7ccc"},
@@ -961,14 +957,8 @@ files = [
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:19c116e796420b0cee3da1ccec3b764ed2952ccfcc298b55a10e5610ad7885f9"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:510b5b1bfbe20e1a7b3baf5fed9e9451873559a976c1a78eebaa3b86c57b4265"},
{file = "Brotli-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a1fd8a29719ccce974d523580987b7f8229aeace506952fa9ce1d53a033873c8"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c247dd99d39e0338a604f8c2b3bc7061d5c2e9e2ac7ba9cc1be5a69cb6cd832f"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1b2c248cd517c222d89e74669a4adfa5577e06ab68771a529060cf5a156e9757"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:2a24c50840d89ded6c9a8fdc7b6ed3692ed4e86f1c4a4a938e1e92def92933e0"},
- {file = "Brotli-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f31859074d57b4639318523d6ffdca586ace54271a73ad23ad021acd807eb14b"},
{file = "Brotli-1.1.0-cp311-cp311-win32.whl", hash = "sha256:39da8adedf6942d76dc3e46653e52df937a3c4d6d18fdc94a7c29d263b1f5b50"},
{file = "Brotli-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:aac0411d20e345dc0920bdec5548e438e999ff68d77564d5e9463a7ca9d3e7b1"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:32d95b80260d79926f5fab3c41701dbb818fde1c9da590e77e571eefd14abe28"},
- {file = "Brotli-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b760c65308ff1e462f65d69c12e4ae085cff3b332d894637f6273a12a482d09f"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:316cc9b17edf613ac76b1f1f305d2a748f1b976b033b049a6ecdfd5612c70409"},
{file = "Brotli-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:caf9ee9a5775f3111642d33b86237b05808dafcd6268faa492250e9b78046eb2"},
{file = "Brotli-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70051525001750221daa10907c77830bc889cb6d865cc0b813d9db7fefc21451"},
@@ -979,24 +969,8 @@ files = [
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:4093c631e96fdd49e0377a9c167bfd75b6d0bad2ace734c6eb20b348bc3ea180"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:7e4c4629ddad63006efa0ef968c8e4751c5868ff0b1c5c40f76524e894c50248"},
{file = "Brotli-1.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:861bf317735688269936f755fa136a99d1ed526883859f86e41a5d43c61d8966"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:87a3044c3a35055527ac75e419dfa9f4f3667a1e887ee80360589eb8c90aabb9"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c5529b34c1c9d937168297f2c1fde7ebe9ebdd5e121297ff9c043bdb2ae3d6fb"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ca63e1890ede90b2e4454f9a65135a4d387a4585ff8282bb72964fab893f2111"},
- {file = "Brotli-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e79e6520141d792237c70bcd7a3b122d00f2613769ae0cb61c52e89fd3443839"},
{file = "Brotli-1.1.0-cp312-cp312-win32.whl", hash = "sha256:5f4d5ea15c9382135076d2fb28dde923352fe02951e66935a9efaac8f10e81b0"},
{file = "Brotli-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:906bc3a79de8c4ae5b86d3d75a8b77e44404b0f4261714306e3ad248d8ab0951"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8bf32b98b75c13ec7cf774164172683d6e7891088f6316e54425fde1efc276d5"},
- {file = "Brotli-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7bc37c4d6b87fb1017ea28c9508b36bbcb0c3d18b4260fcdf08b200c74a6aee8"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c0ef38c7a7014ffac184db9e04debe495d317cc9c6fb10071f7fefd93100a4f"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91d7cc2a76b5567591d12c01f019dd7afce6ba8cba6571187e21e2fc418ae648"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a93dde851926f4f2678e704fadeb39e16c35d8baebd5252c9fd94ce8ce68c4a0"},
- {file = "Brotli-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0db75f47be8b8abc8d9e31bc7aad0547ca26f24a54e6fd10231d623f183d089"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6967ced6730aed543b8673008b5a391c3b1076d834ca438bbd70635c73775368"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7eedaa5d036d9336c95915035fb57422054014ebdeb6f3b42eac809928e40d0c"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d487f5432bf35b60ed625d7e1b448e2dc855422e87469e3f450aa5552b0eb284"},
- {file = "Brotli-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:832436e59afb93e1836081a20f324cb185836c617659b07b129141a8426973c7"},
- {file = "Brotli-1.1.0-cp313-cp313-win32.whl", hash = "sha256:43395e90523f9c23a3d5bdf004733246fba087f2948f87ab28015f12359ca6a0"},
- {file = "Brotli-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9011560a466d2eb3f5a6e4929cf4a09be405c64154e12df0dd72713f6500e32b"},
{file = "Brotli-1.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a090ca607cbb6a34b0391776f0cb48062081f5f60ddcce5d11838e67a01928d1"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2de9d02f5bda03d27ede52e8cfe7b865b066fa49258cbab568720aa5be80a47d"},
{file = "Brotli-1.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2333e30a5e00fe0fe55903c8832e08ee9c3b1382aacf4db26664a16528d51b4b"},
@@ -1006,10 +980,6 @@ files = [
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:fd5f17ff8f14003595ab414e45fce13d073e0762394f957182e69035c9f3d7c2"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:069a121ac97412d1fe506da790b3e69f52254b9df4eb665cd42460c837193354"},
{file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e93dfc1a1165e385cc8239fab7c036fb2cd8093728cbd85097b284d7b99249a2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:aea440a510e14e818e67bfc4027880e2fb500c2ccb20ab21c7a7c8b5b4703d75"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:6974f52a02321b36847cd19d1b8e381bf39939c21efd6ee2fc13a28b0d99348c"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_ppc64le.whl", hash = "sha256:a7e53012d2853a07a4a79c00643832161a910674a893d296c9f1259859a289d2"},
- {file = "Brotli-1.1.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:d7702622a8b40c49bffb46e1e3ba2e81268d5c04a34f460978c6b5517a34dd52"},
{file = "Brotli-1.1.0-cp36-cp36m-win32.whl", hash = "sha256:a599669fd7c47233438a56936988a2478685e74854088ef5293802123b5b2460"},
{file = "Brotli-1.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:d143fd47fad1db3d7c27a1b1d66162e855b5d50a89666af46e1679c496e8e579"},
{file = "Brotli-1.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:11d00ed0a83fa22d29bc6b64ef636c4552ebafcef57154b4ddd132f5638fbd1c"},
@@ -1021,10 +991,6 @@ files = [
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:919e32f147ae93a09fe064d77d5ebf4e35502a8df75c29fb05788528e330fe74"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:23032ae55523cc7bccb4f6a0bf368cd25ad9bcdcc1990b64a647e7bbcce9cb5b"},
{file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:224e57f6eac61cc449f498cc5f0e1725ba2071a3d4f48d5d9dffba42db196438"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:cb1dac1770878ade83f2ccdf7d25e494f05c9165f5246b46a621cc849341dc01"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3ee8a80d67a4334482d9712b8e83ca6b1d9bc7e351931252ebef5d8f7335a547"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:5e55da2c8724191e5b557f8e18943b1b4839b8efc3ef60d65985bcf6f587dd38"},
- {file = "Brotli-1.1.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:d342778ef319e1026af243ed0a07c97acf3bad33b9f29e7ae6a1f68fd083e90c"},
{file = "Brotli-1.1.0-cp37-cp37m-win32.whl", hash = "sha256:587ca6d3cef6e4e868102672d3bd9dc9698c309ba56d41c2b9c85bbb903cdb95"},
{file = "Brotli-1.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2954c1c23f81c2eaf0b0717d9380bd348578a94161a65b3a2afc62c86467dd68"},
{file = "Brotli-1.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:efa8b278894b14d6da122a72fefcebc28445f2d3f880ac59d46c90f4c13be9a3"},
@@ -1037,10 +1003,6 @@ files = [
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1ab4fbee0b2d9098c74f3057b2bc055a8bd92ccf02f65944a241b4349229185a"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:141bd4d93984070e097521ed07e2575b46f817d08f9fa42b16b9b5f27b5ac088"},
{file = "Brotli-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fce1473f3ccc4187f75b4690cfc922628aed4d3dd013d047f95a9b3919a86596"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d2b35ca2c7f81d173d2fadc2f4f31e88cc5f7a39ae5b6db5513cf3383b0e0ec7"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:af6fa6817889314555aede9a919612b23739395ce767fe7fcbea9a80bf140fe5"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2feb1d960f760a575dbc5ab3b1c00504b24caaf6986e2dc2b01c09c87866a943"},
- {file = "Brotli-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4410f84b33374409552ac9b6903507cdb31cd30d2501fc5ca13d18f73548444a"},
{file = "Brotli-1.1.0-cp38-cp38-win32.whl", hash = "sha256:db85ecf4e609a48f4b29055f1e144231b90edc90af7481aa731ba2d059226b1b"},
{file = "Brotli-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:3d7954194c36e304e1523f55d7042c59dc53ec20dd4e9ea9d151f1b62b4415c0"},
{file = "Brotli-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5fb2ce4b8045c78ebbc7b8f3c15062e435d47e7393cc57c25115cfd49883747a"},
@@ -1053,10 +1015,6 @@ files = [
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:949f3b7c29912693cee0afcf09acd6ebc04c57af949d9bf77d6101ebb61e388c"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:89f4988c7203739d48c6f806f1e87a1d96e0806d44f0fba61dba81392c9e474d"},
{file = "Brotli-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:de6551e370ef19f8de1807d0a9aa2cdfdce2e85ce88b122fe9f6b2b076837e59"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:0737ddb3068957cf1b054899b0883830bb1fec522ec76b1098f9b6e0f02d9419"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4f3607b129417e111e30637af1b56f24f7a49e64763253bbc275c75fa887d4b2"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:6c6e0c425f22c1c719c42670d561ad682f7bfeeef918edea971a79ac5252437f"},
- {file = "Brotli-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:494994f807ba0b92092a163a0a283961369a65f6cbe01e8891132b7a320e61eb"},
{file = "Brotli-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f0d8a7a6b5983c2496e364b969f0e526647a06b075d034f3297dc66f3b360c64"},
{file = "Brotli-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cdad5b9014d83ca68c25d2e9444e28e967ef16e80f6b436918c700c117a85467"},
{file = "Brotli-1.1.0.tar.gz", hash = "sha256:81de08ac11bcb85841e440c13611c00b67d3bf82698314928d0b676362546724"},
@@ -4176,6 +4134,80 @@ files = [
{file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
]
+[[package]]
+name = "ibm-cos-sdk"
+version = "2.13.4"
+description = "IBM SDK for Python"
+optional = false
+python-versions = ">= 3.6"
+files = [
+ {file = "ibm-cos-sdk-2.13.4.tar.gz", hash = "sha256:ee06bb89205e2bd031967e7a0d3fc47b39363be03badc49442202394a791d24a"},
+]
+
+[package.dependencies]
+ibm-cos-sdk-core = "2.13.4"
+ibm-cos-sdk-s3transfer = "2.13.4"
+jmespath = ">=0.10.0,<=1.0.1"
+
+[[package]]
+name = "ibm-cos-sdk-core"
+version = "2.13.4"
+description = "Low-level, data-driven core of IBM SDK for Python"
+optional = false
+python-versions = ">= 3.6"
+files = [
+ {file = "ibm-cos-sdk-core-2.13.4.tar.gz", hash = "sha256:c0f3c03b6c21bb69d3dedd2a1bb647621e0d99e0e1c0929d2c36bd45cfd4166c"},
+]
+
+[package.dependencies]
+jmespath = ">=0.10.0,<=1.0.1"
+python-dateutil = ">=2.8.2,<3.0.0"
+requests = ">=2.31.0,<3.0"
+urllib3 = {version = ">=1.26.18,<2.2", markers = "python_version >= \"3.10\""}
+
+[[package]]
+name = "ibm-cos-sdk-s3transfer"
+version = "2.13.4"
+description = "IBM S3 Transfer Manager"
+optional = false
+python-versions = ">= 3.6"
+files = [
+ {file = "ibm-cos-sdk-s3transfer-2.13.4.tar.gz", hash = "sha256:3c93feaf66254803b2b8523720efaf90e7374b544ac0a411099617f3b4689279"},
+]
+
+[package.dependencies]
+ibm-cos-sdk-core = "2.13.4"
+
+[[package]]
+name = "ibm-watsonx-ai"
+version = "1.1.24"
+description = "IBM watsonx.ai API Client"
+optional = false
+python-versions = "<3.13,>=3.10"
+files = [
+ {file = "ibm_watsonx_ai-1.1.24-py3-none-any.whl", hash = "sha256:26028bb7305dab5050be14a5b510dbd4939e2578db1f7664e1f34c895ab074df"},
+ {file = "ibm_watsonx_ai-1.1.24.tar.gz", hash = "sha256:2194b3f9761a1665acc79abf2e0dd16216ccc88e8e4bd2679d18d776282326e2"},
+]
+
+[package.dependencies]
+certifi = "*"
+httpx = "*"
+ibm-cos-sdk = ">=2.12.0,<2.14.0"
+importlib-metadata = "*"
+lomond = "*"
+packaging = "*"
+pandas = ">=0.24.2,<2.2.0"
+requests = "*"
+tabulate = "*"
+urllib3 = "*"
+
+[package.extras]
+fl-crypto = ["pyhelayers (==1.5.0.3)"]
+fl-crypto-rt24-1 = ["pyhelayers (==1.5.3.1)"]
+fl-rt23-1-py3-10 = ["GPUtil", "cryptography (==42.0.5)", "ddsketch (==2.0.4)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.1.1)", "lz4", "msgpack (==1.0.7)", "msgpack-numpy (==0.4.8)", "numcompress (==0.1.2)", "numpy (==1.23.5)", "pandas (==1.5.3)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.32.3)", "scikit-learn (==1.1.1)", "scipy (==1.10.1)", "setproctitle", "skops (==0.9.0)", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.12.0)", "torch (==2.0.1)", "websockets (==10.1)"]
+fl-rt24-1-py3-11 = ["GPUtil", "cryptography (==42.0.5)", "ddsketch (==2.0.4)", "diffprivlib (==0.5.1)", "environs (==9.5.0)", "gym", "image (==1.5.33)", "joblib (==1.3.2)", "lz4", "msgpack (==1.0.7)", "msgpack-numpy (==0.4.8)", "numcompress (==0.1.2)", "numpy (==1.26.4)", "pandas (==2.1.4)", "parse (==1.19.0)", "pathlib2 (==2.3.6)", "protobuf (==4.22.1)", "psutil", "pyYAML (==6.0.1)", "pytest (==6.2.5)", "requests (==2.32.3)", "scikit-learn (==1.3.0)", "scipy (==1.11.4)", "setproctitle", "skops (==0.9.0)", "skorch (==0.12.0)", "tabulate (==0.8.9)", "tensorflow (==2.14.1)", "torch (==2.1.2)", "websockets (==10.1)"]
+rag = ["beautifulsoup4 (==4.12.3)", "grpcio (>=1.54.3)", "langchain (>=0.3,<0.4)", "langchain-chroma (==0.1.4)", "langchain-community (>=0.3,<0.4)", "langchain-core (>=0.3,<0.4)", "langchain-elasticsearch (==0.3.0)", "langchain-ibm (>=0.3,<0.4)", "langchain-milvus (==0.1.6)", "markdown (==3.4.1)", "pypdf (==4.2.0)", "python-docx (==1.1.2)"]
+
[[package]]
name = "idna"
version = "3.10"
@@ -4782,6 +4814,20 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""}
[package.extras]
dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"]
+[[package]]
+name = "lomond"
+version = "0.3.3"
+description = "Websocket Client Library"
+optional = false
+python-versions = "*"
+files = [
+ {file = "lomond-0.3.3-py2.py3-none-any.whl", hash = "sha256:df1dd4dd7b802a12b71907ab1abb08b8ce9950195311207579379eb3b1553de7"},
+ {file = "lomond-0.3.3.tar.gz", hash = "sha256:427936596b144b4ec387ead99aac1560b77c8a78107d3d49415d3abbe79acbd3"},
+]
+
+[package.dependencies]
+six = ">=1.10.0"
+
[[package]]
name = "lxml"
version = "5.3.0"
@@ -6368,97 +6414,78 @@ files = [
[[package]]
name = "pandas"
-version = "2.2.3"
+version = "2.1.4"
description = "Powerful data structures for data analysis, time series, and statistics"
optional = false
python-versions = ">=3.9"
files = [
- {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"},
- {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"},
- {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"},
- {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"},
- {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"},
- {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"},
- {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"},
- {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"},
- {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"},
- {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"},
- {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"},
- {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"},
- {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"},
- {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"},
- {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"},
- {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"},
- {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"},
- {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"},
- {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"},
- {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"},
- {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"},
- {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"},
- {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"},
- {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"},
- {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"},
- {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"},
- {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"},
- {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"},
- {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"},
- {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"},
- {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"},
- {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"},
- {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"},
- {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"},
- {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"},
- {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"},
- {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"},
- {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"},
- {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"},
- {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"},
- {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"},
- {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"},
-]
-
-[package.dependencies]
-bottleneck = {version = ">=1.3.6", optional = true, markers = "extra == \"performance\""}
-numba = {version = ">=0.56.4", optional = true, markers = "extra == \"performance\""}
-numexpr = {version = ">=2.8.4", optional = true, markers = "extra == \"performance\""}
+ {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"},
+ {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"},
+ {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"},
+ {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"},
+ {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"},
+ {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"},
+ {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"},
+ {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"},
+ {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"},
+ {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"},
+ {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"},
+ {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"},
+ {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"},
+ {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"},
+ {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"},
+ {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"},
+ {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"},
+ {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"},
+ {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"},
+ {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"},
+ {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"},
+ {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"},
+ {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"},
+ {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"},
+ {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"},
+]
+
+[package.dependencies]
+bottleneck = {version = ">=1.3.4", optional = true, markers = "extra == \"performance\""}
+numba = {version = ">=0.55.2", optional = true, markers = "extra == \"performance\""}
+numexpr = {version = ">=2.8.0", optional = true, markers = "extra == \"performance\""}
numpy = [
- {version = ">=1.23.2", markers = "python_version == \"3.11\""},
- {version = ">=1.26.0", markers = "python_version >= \"3.12\""},
+ {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""},
+ {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""},
]
odfpy = {version = ">=1.4.1", optional = true, markers = "extra == \"excel\""}
-openpyxl = {version = ">=3.1.0", optional = true, markers = "extra == \"excel\""}
-python-calamine = {version = ">=0.1.7", optional = true, markers = "extra == \"excel\""}
+openpyxl = {version = ">=3.0.10", optional = true, markers = "extra == \"excel\""}
python-dateutil = ">=2.8.2"
pytz = ">=2020.1"
-pyxlsb = {version = ">=1.0.10", optional = true, markers = "extra == \"excel\""}
-tzdata = ">=2022.7"
+pyxlsb = {version = ">=1.0.9", optional = true, markers = "extra == \"excel\""}
+tzdata = ">=2022.1"
xlrd = {version = ">=2.0.1", optional = true, markers = "extra == \"excel\""}
-xlsxwriter = {version = ">=3.0.5", optional = true, markers = "extra == \"excel\""}
+xlsxwriter = {version = ">=3.0.3", optional = true, markers = "extra == \"excel\""}

[package.extras]
-all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"]
-aws = ["s3fs (>=2022.11.0)"]
-clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"]
-compression = ["zstandard (>=0.19.0)"]
-computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"]
+all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"]
+aws = ["s3fs (>=2022.05.0)"]
+clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"]
+compression = ["zstandard (>=0.17.0)"]
+computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"]
consortium-standard = ["dataframe-api-compat (>=0.1.7)"]
-excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"]
-feather = ["pyarrow (>=10.0.1)"]
-fss = ["fsspec (>=2022.11.0)"]
-gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"]
-hdf5 = ["tables (>=3.8.0)"]
-html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"]
-mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"]
-output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"]
-parquet = ["pyarrow (>=10.0.1)"]
-performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"]
-plot = ["matplotlib (>=3.6.3)"]
-postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"]
-pyarrow = ["pyarrow (>=10.0.1)"]
-spss = ["pyreadstat (>=1.2.0)"]
-sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"]
+excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"]
+feather = ["pyarrow (>=7.0.0)"]
+fss = ["fsspec (>=2022.05.0)"]
+gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"]
+hdf5 = ["tables (>=3.7.0)"]
+html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"]
+mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"]
+output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"]
+parquet = ["pyarrow (>=7.0.0)"]
+performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"]
+plot = ["matplotlib (>=3.6.1)"]
+postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"]
+spss = ["pyreadstat (>=1.1.5)"]
+sql-other = ["SQLAlchemy (>=1.4.36)"]
test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"]
-xml = ["lxml (>=4.9.2)"]
+xml = ["lxml (>=4.8.0)"]
[[package]]
name = "pathos"
@@ -7585,118 +7612,6 @@ pytest = ">=6.2.5"
[package.extras]
dev = ["pre-commit", "pytest-asyncio", "tox"]
-[[package]]
-name = "python-calamine"
-version = "0.3.1"
-description = "Python binding for Rust's library for reading excel and odf file - calamine"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "python_calamine-0.3.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2822c39ad52f289732981cee59b4985388624b54e124e41436bb37565ed32f15"},
- {file = "python_calamine-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f2786751cfe4e81f9170b843741b39a325cf9f49db8d51fc3cd16d6139e0ac60"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086fc992232207164277fd0f1e463f59097637c849470890f903037fde4bf02d"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f42795617d23bb87b16761286c07e8407a9044823c972da5dea922f71a98445"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8dc27a41ebca543e5a0181b3edc223b83839c49063589583927de922887898a"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:400fd6e650bfedf1a9d79821e32f13aceb0362bbdaa2f37611177eb09cf77056"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6aec96ea676ec41789a6348137895b3827745d135c3c7f37769f75d417fb867"},
- {file = "python_calamine-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:98808c087bbbfe4e858043fc0b953d326c8c70e73d0cd695c29a9bc7b3b0622b"},
- {file = "python_calamine-0.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:78cd352976ba7324a2e7ab59188b3fac978b5f80d25e753b255dfec2d24076d9"},
- {file = "python_calamine-0.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2e1bfb191b5da6136887ca64deff427cae185d4d59333d1f1a8637db10ce8c3e"},
- {file = "python_calamine-0.3.1-cp310-none-win32.whl", hash = "sha256:bd9616b355f47326ff4ae970f0a91a17976f316877a56ce3ef376ce58505e66c"},
- {file = "python_calamine-0.3.1-cp310-none-win_amd64.whl", hash = "sha256:40354b04fb68e63659bb5f423534fe6f0b3e709be322c25c60158ac332b85ed3"},
- {file = "python_calamine-0.3.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:7fee7306d015e2cb89bd69dc7b928bd947b65415e2cd72deb59a72c5603d0adb"},
- {file = "python_calamine-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c860d5dc649b6be49a94ba07b1673f8dc9be0a89bc33cf13a5ea58998facdb12"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1df7ae7c29f96b6714cfebfd41666462970583b92ceb179b5ddd0d4556ea21ec"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:84bac53ba4872b795f808d1d30b51c74eac4a57dc8e4f96bba8140ccdeb320da"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e93ebe06fee0f10d43ede04691e80ab63366b00edc5eb873742779fdabe626e3"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23d04d73d2028c7171c63179f3b4d5679aa057db46e1e0058341b5af047474c4"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2005a4bd693dbaa74c96fdaa71a868c149ad376d309c4ad32fe80145216ad2"},
- {file = "python_calamine-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b8c7ab2e26f124483308f1c0f580b01e3ad474ce3eb6a3acf0e0273247ea7b8b"},
- {file = "python_calamine-0.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:86466870c1898b75503e752f7ea7b7a045253f1e106db9555071d225af4a1de8"},
- {file = "python_calamine-0.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e6a4ef2435715694eeaea537f9578e33a90f68a5e9e697d98ae14d2aacf302cf"},
- {file = "python_calamine-0.3.1-cp311-none-win32.whl", hash = "sha256:545c0cd8bc72a3341f81f9c46f12cad2ec9f3281360d2893a88a4a4a48f364dc"},
- {file = "python_calamine-0.3.1-cp311-none-win_amd64.whl", hash = "sha256:90e848bb9a062185cdc697b93798e67475956ce466c122b477e34fc4548e2906"},
- {file = "python_calamine-0.3.1-cp311-none-win_arm64.whl", hash = "sha256:455e813450eb03bbe3fc09c1324fbb5c367edf4ec0c7a58f81169e5f2008f27d"},
- {file = "python_calamine-0.3.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:eacea175ba67dd04c0d718bcca0488261bd9eefff3b46ae68249e14d705e14a0"},
- {file = "python_calamine-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c2a9a8021d7256e2a21949886d0fe5c67ae805d4b5f9a4d93b2ef971262e64d4"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d0f0e7a3e554ba5672b9bd5f77b22dd3fc011fd30157c4e377c49b3d95d6d1"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3be559acbcec19d79ba07ae81276bbb8fadd474c790db14119a09fb36427fb"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:af0226e6826000d83a4ac34d81ae5217cc2baa54aecd76aac07091388bf739a1"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02b90d7e2e11c7449331d2cb744075fb47949d4e039983d6e6d9085950ad2642"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:732bb98dd393db80de1cd8a90e7d47dced929c7dea56194394d0fb7baf873fa7"},
- {file = "python_calamine-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56bb60bf663c04e0a4cc801dfd5da3351820a002b4aea72a427603011633d35c"},
- {file = "python_calamine-0.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b8974ee473e6337c9b52d6cab03a202dbe57e1500eb100d96adb6b0dfbff7390"},
- {file = "python_calamine-0.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:90876d9b77429c8168d0e4c3ebe1dcf996130c5b0aecb3c6283f85645d4dd29a"},
- {file = "python_calamine-0.3.1-cp312-none-win32.whl", hash = "sha256:17ab4ba8955206eba4a87c6bc0a805ffa9051f831c9f3d17a463d8a844beb197"},
- {file = "python_calamine-0.3.1-cp312-none-win_amd64.whl", hash = "sha256:33ff20f6603fb3434630a00190022020102dc26b6def519d19a19a58a487a514"},
- {file = "python_calamine-0.3.1-cp312-none-win_arm64.whl", hash = "sha256:808ff13261826b64b8313a53a83873cf46df4522cbca98fb66a85b543de68949"},
- {file = "python_calamine-0.3.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:9401be43100552fb3a0d9a7392e207e4b4dfa0bc99c3f97613c0d703db0b191b"},
- {file = "python_calamine-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b76875209d89227ea0666c346b5e007fa2ac9cc65b95b91551c4b715d9e6c7be"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2079ae2c434e28f1c9d17a2f4ea50d92e27d1373fc5908f1fd0c159f387e5b9"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:65c8986318846728d66ac2ce5dc017e79e6409ef17a48ca284d45f7d68a8ead0"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9504e43f4852265ab55044eb2835c270fda137a1ea35d5e4b7d3581d4ac830f4"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8eab37611b39cc8093e5671e5f8f8fc7f427459eabc21497f71659be61d5723"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e226ff25510e62b57443029e5061dd42b551907a0a983f0e07e6c5e1facb4d"},
- {file = "python_calamine-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:721a7bfe0d17c12dcf82886a17c9d1025983cfe61fade8c0d2a1b04bb4bd9980"},
- {file = "python_calamine-0.3.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:1258cb82689ded64b73816fbcb3f02d139c8fd29676e9d451c0f81bb689a7076"},
- {file = "python_calamine-0.3.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:919fe66fd3d3031585c4373a1dd0b128d3ceb0d79d21c8d0878e9ddee4d6b78a"},
- {file = "python_calamine-0.3.1-cp313-none-win32.whl", hash = "sha256:a9df3902b279cb743baf857f29c1c7ed242caa7143c4fdf3a79f553801a662d9"},
- {file = "python_calamine-0.3.1-cp313-none-win_amd64.whl", hash = "sha256:9f96654bceeb10e9ea9624eda857790e1a601593212fc174cb84d1568f12b5e4"},
- {file = "python_calamine-0.3.1-cp313-none-win_arm64.whl", hash = "sha256:e83bd84617400bbca9907f0a44c6eccaeca7bd011791950c181e402992b8cc26"},
- {file = "python_calamine-0.3.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:b37166dcf7d7706e0ca3cd6e21a138120f69f1697ea5c9e22b29daac36d02f1b"},
- {file = "python_calamine-0.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:885c668ad97c637a76b18d63d242cafe16629ed4912044c508a2a34e12c08892"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50c8462add6488c196925ceee73c11775bf7323c88dbf3be6591f49c5e179d71"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfe3ae2e73de00310835495166d16a8de27e49b846923e04f3462d100b964d2f"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a46c344077da8709163c75441ab61b5833e5f83e2586c4d63ad525987032c314"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8abd68d8d79da7b5316214c9b065c790538a3e0278b7bc278b5395a41330b6a"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11727fd075f0c184ef7655659794fc060a138c9ff4d2c5ac66e0d067aa8526f0"},
- {file = "python_calamine-0.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fd08e245a7c2676887e548d7a86a909bdc167a3c582f10937f2f55e7216a7305"},
- {file = "python_calamine-0.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6ad2ae302ec13c1610170f0953b6c7accf7b26384b0a3813987e16ec78b59982"},
- {file = "python_calamine-0.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:38975ba9b8b8bbe86ab9d384500b0555af6ede8bd328af77cc3d99484bd0e303"},
- {file = "python_calamine-0.3.1-cp38-none-win32.whl", hash = "sha256:68b8cd5f6aceb56e5eb424830493210d478926e36951ccafe2dad15b440da167"},
- {file = "python_calamine-0.3.1-cp38-none-win_amd64.whl", hash = "sha256:f2d270c4eb15971eb5e2e87183470f7eafb1307d6df15253a6cff7c5649ffe04"},
- {file = "python_calamine-0.3.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:dcf5ffd5a63b806de03629c2f25585e455aa245d6e0fd78e7a85dff79d16b6e7"},
- {file = "python_calamine-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:38c5ff0b9696fe4deec98e8135f33eeee49e302bcfa2ffcc4abe15cb1f8e8054"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3331cc70e7496592c9c559fa89a7451db56a200d754e416edb51b9e888a41"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6493fb0fbf766a380d0741c11c6c52b1a06d1917a8e7551f9d560324ca757b82"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:94f9cd55efdda69352de6679d737d41dfcb1fdb5b8e5c512e0b83fe6b5f3796c"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79b2a774944b4ed084d9a677cbf88372f725449a260fd134ab2b3422ef2d4a5d"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c96c5cfd02f4a20a2df51ff6732839d3f4a4a781e9e904a85191aaeb8fce2870"},
- {file = "python_calamine-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:edf8d83c7369b18d4d42e9e2ccc52143bdbf27a326877bf3fc6fc56c78655372"},
- {file = "python_calamine-0.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2252b25e8fd992f10e916fb4eddc504a58839a1e67f32238bba803ecf16ce7c4"},
- {file = "python_calamine-0.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:26d870656fbe1c3767483f3162359765738adab58915157c55afff4dfc32e9e9"},
- {file = "python_calamine-0.3.1-cp39-none-win32.whl", hash = "sha256:28c1394a00bd218ce4223f8f8019fd2c1878f1a01ad47be289964d281fef4dac"},
- {file = "python_calamine-0.3.1-cp39-none-win_amd64.whl", hash = "sha256:529c36520924b16f398e25e78fcd4ea14fdcd95f383db360d107e075628d6941"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:4dbe8d5f27889dfcd03d9ad99a9f392b6c0af41dbc287ac4738804c31c99750a"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9bc553349095b3104708cd1eb345445426400de105df7ede3d5054b0ecfa74e9"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f12fa42ea6c7750f994a1a9674414dfd25adb3e61ad570382c05a84e4e8e949e"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbac293a3c4c98e988e564f13820874c6ac02114cef5698a03b8146bd9566ef7"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a571bab6528504cdb99187f4e6a5a64c7ccb065ee1416b9e10c1f416d331aae5"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:49d2acf3def8ecbabb132b537501bb639ca9d52548fd7058d5da7fa9fdbd1b45"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e71ee834988033d3f8254713423ce5232ffe964f1bb2fdc3383f407b8a52dab9"},
- {file = "python_calamine-0.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:139afbf6a23c33c55ce0144e15e89e03e333a59b4864a2e1e0c764cd33390414"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:ea28ebd4d347c13c6acc787dba1fb0f626188469f861a2fa9cd057fa689161e2"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:39d51754c4375b34b58b6036780ee022db80b54a29fbfc577c785da8dfc358f8"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df72ff860dbd9e659a2a3e77a76a89356eea4ebaaa44b6fc4b84cab76e8e5313"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cef919d074235843c5b27f493a645457c0edd9c4f19de3d3187d5cbfad3cf849"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7aa81f93809e1a0b7ad289168444878ffd0c72ffe500efca7ea7a2778df812d4"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:853aab3a19015c49e24892c145646b59719eeb3c71c0582e0af83379d84977a6"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:79d8f506b917e5c1ec75e3b595181416ebe1cc809addf952a23e170606984709"},
- {file = "python_calamine-0.3.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a4497c11d412e6df3b85a1fde2110f797ff5b2d739ff79fc50ef62476620a27c"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dba960d7668ea7c699f5d68f0a8f7c3f9573fbec26a9db4219cb976c8b751384"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:41a204b59696cae066f399d7a69637e89d1bd34562d411c96108e3675ab57521"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07ef8398036a411896edc6de30eb71a0dcbad61657238525c1c875c089e2a275"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21775f97acbfe40182bb17c256e2f8ce0c787a30b86f09a6786bc4530b17c94b"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d4cdd57ebb563e9bc97501b4eaa7ed3545628d8a0ac482e8903894d80332d506"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:af1e60a711d41e24a24917fe41f98ab36adbcb6f5f85af8a0c895defb5de654f"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1faf371a69da8e364d1391cca3a58e46b3aa181e7202ac6452d09f37d3b99f97"},
- {file = "python_calamine-0.3.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d5ddc96b67f805b3cb27e21d070cee6d269d9fd3a3cb6d6f2a30bc44f848d0f7"},
- {file = "python_calamine-0.3.1.tar.gz", hash = "sha256:4171fadf4a2db1b1ed84536fb2f16ea14bde894d690ff321a85e27df26286b37"},
-]
-
-[package.dependencies]
-packaging = ">=24.1,<25.0"
-
[[package]]
name = "python-dateutil"
version = "2.8.2"
@@ -10010,18 +9925,17 @@ files = [
[[package]]
name = "urllib3"
-version = "2.2.3"
+version = "2.1.0"
description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false
python-versions = ">=3.8"
files = [
- {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"},
- {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"},
+ {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"},
+ {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"},
]

[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"]
-h2 = ["h2 (>=4,<5)"]
socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
zstd = ["zstandard (>=0.18.0)"]
@@ -11026,4 +10940,4 @@ cffi = ["cffi (>=1.11)"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.11,<3.13"
-content-hash = "983ba4f2cb89f0c867fc50cb48677cad9343f7f0828c7082cb0b5cf171d716fb"
+content-hash = "f524fbe5a9dc3fc9bfa24fc6de6f0987858d920d7523e35980a8735dadaead08"
diff --git a/api/pyproject.toml b/api/pyproject.toml
index 79857f81635eb0..8966324cf80f59 100644
--- a/api/pyproject.toml
+++ b/api/pyproject.toml
@@ -144,6 +144,7 @@ googleapis-common-protos = "1.63.0"
gunicorn = "~22.0.0"
httpx = { version = "~0.27.0", extras = ["socks"] }
huggingface-hub = "~0.16.4"
+ibm-watsonx-ai = "~1.1.24"
jieba = "0.42.1"
langfuse = "~2.51.3"
langsmith = "~0.1.77"
@@ -155,7 +156,7 @@ numpy = "~1.26.4"
oci = "~2.135.1"
openai = "~1.52.0"
openpyxl = "~3.1.5"
-pandas = { version = "~2.2.2", extras = ["performance", "excel"] }
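+# NOTE: pandas is pinned below 2.2 here; ibm-watsonx-ai ~1.1.24 appears to constrain
+# pandas to <2.2 (see the poetry.lock downgrade above), so ~2.1.4 keeps resolution consistent.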
+pandas = { version = "~2.1.4", extras = ["performance", "excel"] }
psycopg2-binary = "~2.9.6"
pycryptodome = "3.19.1"
pydantic = "~2.9.2"