fix: fixes Ollama Embeddings component (#4787)
Update ollama.py

Removed the unwanted temperature parameter from the embeddings component.
edwinjosechittilappilly authored Nov 22, 2024
1 parent 5ed32cb commit b3b2b25
Showing 1 changed file with 3 additions and 13 deletions: src/backend/base/langflow/components/embeddings/ollama.py
@@ -2,7 +2,7 @@
 
 from langflow.base.models.model import LCModelComponent
 from langflow.field_typing import Embeddings
-from langflow.io import FloatInput, MessageTextInput, Output
+from langflow.io import MessageTextInput, Output
 
 
 class OllamaEmbeddingsComponent(LCModelComponent):
@@ -16,19 +16,13 @@ class OllamaEmbeddingsComponent(LCModelComponent):
         MessageTextInput(
             name="model",
             display_name="Ollama Model",
-            value="llama3.1",
+            value="nomic-embed-text",
         ),
         MessageTextInput(
             name="base_url",
             display_name="Ollama Base URL",
             value="http://localhost:11434",
         ),
-        FloatInput(
-            name="temperature",
-            display_name="Model Temperature",
-            value=0.1,
-            advanced=True,
-        ),
     ]
 
     outputs = [
@@ -37,11 +31,7 @@ class OllamaEmbeddingsComponent(LCModelComponent):
 
     def build_embeddings(self) -> Embeddings:
         try:
-            output = OllamaEmbeddings(
-                model=self.model,
-                base_url=self.base_url,
-                temperature=self.temperature,
-            )
+            output = OllamaEmbeddings(model=self.model, base_url=self.base_url)
         except Exception as e:
             msg = "Could not connect to Ollama API."
             raise ValueError(msg) from e
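For reference, a minimal sketch of how the fixed component builds its embeddings client, assuming it wraps OllamaEmbeddings from the langchain-ollama package and that an Ollama server is running at the default local port; the model name below is just an example, not a requirement of the fix.

    from langchain_ollama import OllamaEmbeddings

    # Construct the client with only the parameters the embeddings API accepts;
    # temperature is a text-generation setting and is not passed here.
    embeddings = OllamaEmbeddings(
        model="nomic-embed-text",
        base_url="http://localhost:11434",
    )

    # Embed a single query string; returns a list of floats.
    vector = embeddings.embed_query("hello world")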
