
Commit b3bc0a0
update autochat
BenderV committed Oct 6, 2024
1 parent d660d44 commit b3bc0a0
Showing 2 changed files with 57 additions and 51 deletions.
autochat/__init__.py: 56 additions, 50 deletions
@@ -72,6 +72,8 @@ def __init__(
         self.max_interactions = max_interactions
         self.functions_schema = []
         self.functions = {}
+        # Give the ability to pause the conversation after a function call or response
+        self.should_pause_conversation = lambda function_call, function_response: False

         if self.provider == APIProvider.OPENAI:
             from openai import OpenAI
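
The new hook is a plain attribute initialised to an always-False lambda, so callers can swap in their own predicate; run_conversation (second hunk below) calls it with the assistant message that carried the function call and the resulting function message, and stops iterating when it returns True. A minimal usage sketch, assuming the class is exported as Autochat and that run_conversation accepts a question string; names and arguments outside this diff are assumptions, not taken from the commit:

from autochat import Autochat  # class name assumed; adjust to the installed version

agent = Autochat()  # constructor arguments omitted for brevity

# Replace the default predicate added in this commit (which always returns False):
# stop run_conversation() as soon as a function result contains the marker "DONE".
agent.should_pause_conversation = (
    lambda function_call, function_response: function_response.content is not None
    and "DONE" in str(function_response.content)
)

for message in agent.run_conversation("Analyse the sales table"):
    print(message.role, message.content)
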
@@ -210,62 +212,66 @@ def run_conversation(

             yield response

-            if isinstance(content, Message):
-                # We support if the function returns a Message class
-                message = content
-                yield message
-                continue
-
-            if content is None:
-                # If function call returns None, we continue the conversation without adding a message
-                # message = None
-                # continue
-                content = None
-            elif isinstance(content, list):  # If data is list of dicts, dumps to CSV
-                if not content:
-                    content = "[]"
-                elif isinstance(content[0], dict):
+            if not isinstance(content, Message):
+                if content is None:
+                    # If function call returns None, we continue the conversation without adding a message
+                    # message = None
+                    # continue
+                    content = None
+                elif isinstance(
+                    content, list
+                ):  # If data is list of dicts, dumps to CSV
+                    if not content:
+                        content = "[]"
+                    elif isinstance(content[0], dict):
+                        try:
+                            content = csv_dumps(content, OUTPUT_SIZE_LIMIT)
+                        except Exception as e:
+                            print(e)
+                    else:
+                        content = "\n".join(content)
+                elif isinstance(content, dict):
+                    content = json.dumps(content)
+                    if len(content) > OUTPUT_SIZE_LIMIT:
+                        content = (
+                            content[:OUTPUT_SIZE_LIMIT]
+                            + f"\n... ({len(content)} characters)"
+                        )
+                elif isinstance(content, str):
+                    if len(content) > OUTPUT_SIZE_LIMIT:
+                        content = (
+                            content[:OUTPUT_SIZE_LIMIT]
+                            + f"\n... ({len(content)} characters)"
+                        )
+                # Support bytes
+                # If it's an image; resize it
+                elif isinstance(content, bytes):
+                    # Detect if it's an image
                     try:
-                        content = csv_dumps(content, OUTPUT_SIZE_LIMIT)
-                    except Exception as e:
-                        print(e)
+                        image = PILImage.open(io.BytesIO(content))
+                        content = None
+                    except IOError:
+                        # If it's not an image, return the original content
+                        raise ValueError("Not an image")
                 else:
-                    content = "\n".join(content)
-            elif isinstance(content, dict):
-                content = json.dumps(content)
-                if len(content) > OUTPUT_SIZE_LIMIT:
-                    content = (
-                        content[:OUTPUT_SIZE_LIMIT]
-                        + f"\n... ({len(content)} characters)"
-                    )
-            elif isinstance(content, str):
-                if len(content) > OUTPUT_SIZE_LIMIT:
-                    content = (
-                        content[:OUTPUT_SIZE_LIMIT]
-                        + f"\n... ({len(content)} characters)"
-                    )
-            # Support bytes
-            # If it's an image; resize it
-            elif isinstance(content, bytes):
-                # Detect if it's an image
-                try:
-                    image = PILImage.open(io.BytesIO(content))
-                    content = None
-                except IOError:
-                    # If it's not an image, return the original content
-                    raise ValueError("Not an image")
-            else:
-                raise ValueError(f"Invalid content type: {type(content)}")
-
-            message = Message(
-                name=function_name,
-                role="function",
-                content=content,
-                function_call_id=response.function_call_id,
-                image=image,
-            )
+                    raise ValueError(f"Invalid content type: {type(content)}")
+
+                message = Message(
+                    name=function_name,
+                    role="function",
+                    content=content,
+                    function_call_id=response.function_call_id,
+                    image=image,
+                )
             else:
-                raise ValueError(f"Invalid content type: {type(content)}")
+                # We support if the function returns a Message class
+                message = content

-            message = Message(
-                name=function_name,
-                role="function",
-                content=content,
-                function_call_id=response.function_call_id,
-                image=image,
-            )
             yield message
+
+            if self.should_pause_conversation(response, message):
+                return

     @retry(
         stop=stop_after_attempt(4),
         wait=wait_random_exponential(multiplier=2, max=10),
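
Read together, the restructured branch means only non-Message return values go through normalisation and Message wrapping before the new pause check runs; a Message returned by the function is passed through untouched. A summary sketch of the accepted return types, as read from the diff above (the fetch_rows helper is hypothetical, not part of the package):

# Return type of the registered function -> how this version handles it:
#   Message      -> yielded as-is (new "else" branch), no re-wrapping
#   None         -> function Message with content=None
#   [] (empty)   -> "[]"
#   list[dict]   -> csv_dumps(content, OUTPUT_SIZE_LIMIT)
#   other list   -> "\n".join(content)
#   dict         -> json.dumps(content), truncated to OUTPUT_SIZE_LIMIT
#   str          -> truncated to OUTPUT_SIZE_LIMIT
#   bytes        -> opened with PIL as an image and attached to the Message
#   anything else-> ValueError(f"Invalid content type: {type(content)}")

def fetch_rows() -> list[dict]:
    # Hypothetical tool: its rows are CSV-dumped before being handed back
    # to the model as a role="function" Message.
    return [{"id": 1, "amount": 42}, {"id": 2, "amount": 7}]
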
setup.py: 1 addition, 1 deletion

@@ -2,7 +2,7 @@

 setup(
     name="autochat",
-    version="0.3.6",
+    version="0.3.7",
     packages=find_packages(),
     install_requires=["tenacity==8.3.0", "pillow==10.4.0"],
     extras_require={
