Skip to content

Commit

Permalink
Merge branch 'develop'
Browse files Browse the repository at this point in the history
  • Loading branch information
clemlesne committed Dec 11, 2024
2 parents e1a8a43 + 416afd5 commit 36bfc7f
Show file tree
Hide file tree
Showing 11 changed files with 842 additions and 296 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -500,6 +500,7 @@ Conversation options are represented as features. They can be configured from Ap
| `slow_llm_for_chat` | Whether to use the slow LLM for chat. | `bool` | false |
| `vad_cutoff_timeout_ms` | The cutoff timeout for voice activity detection in milliseconds. | `int` | 600 |
| `vad_silence_timeout_ms` | The timeout for phone silence in milliseconds. | `int` | 400 |
| `vad_threshold` | The threshold for voice activity detection. | `float` | 0.5 |

### Use an OpenAI compatible model for the LLM

Expand Down
7 changes: 1 addition & 6 deletions app/helpers/call_events.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,6 @@
MessageModel,
PersonaEnum as MessagePersonaEnum,
extract_message_style,
remove_message_action,
)
from app.models.next import NextModel
from app.models.synthesis import SynthesisModel
Expand Down Expand Up @@ -169,8 +168,6 @@ async def on_call_disconnected(

@tracer.start_as_current_span("on_audio_connected")
async def on_audio_connected( # noqa: PLR0913
audio_bits_per_sample: int,
audio_channels: int,
audio_in: asyncio.Queue[bytes],
audio_out: asyncio.Queue[bytes | bool],
audio_sample_rate: int,
Expand All @@ -186,8 +183,6 @@ async def on_audio_connected( # noqa: PLR0913
Starts the real-time conversation with the LLM.
"""
await load_llm_chat(
audio_bits_per_sample=audio_bits_per_sample,
audio_channels=audio_channels,
audio_in=audio_in,
audio_out=audio_out,
audio_sample_rate=audio_sample_rate,
Expand Down Expand Up @@ -636,7 +631,7 @@ def _validate(req: str | None) -> tuple[bool, str | None, str | None]:
)

# Delete action and style from the message as they are in the history and LLM hallucinates them
_, content = extract_message_style(remove_message_action(content or ""))
_, content = extract_message_style(content or "")

if not content:
logger.warning("Error generating SMS report")
Expand Down
Loading

0 comments on commit 36bfc7f

Please sign in to comment.