23 changes: 23 additions & 0 deletions botspot/components/new/llm_provider.py
@@ -974,6 +974,29 @@ async def astream_llm(
):
yield chunk

async def aquery_llm_structured(
prompt: str,
output_schema: Type[BaseModel],
*,
user: Optional[Union[int, str]] = None,
system_message: Optional[str] = None,
model: Optional[str] = None,
**kwargs,
) -> BaseModel:
"""
Async query the LLM with structured output.

This is a convenience function that uses the global LLM provider.
"""
provider = get_llm_provider()
return await provider.aquery_llm_structured(
prompt=prompt,
output_schema=output_schema,
user=user,
system_message=system_message,
model=model,
**kwargs,
)

# todo: adapt this to user not user_id
async def get_llm_usage_stats(user: Optional[UserLike] = None) -> dict:
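
Below is a minimal usage sketch (not part of the PR) showing how the new module-level aquery_llm_structured convenience function might be called with a Pydantic schema. The import path follows the file shown in the diff; the MovieReview model, the prompts, and the assumption that the provider returns an instance of the passed schema are illustrative only.

from pydantic import BaseModel

from botspot.components.new.llm_provider import aquery_llm_structured


# Hypothetical output schema for illustration.
class MovieReview(BaseModel):
    title: str
    rating: int  # e.g. 1-10
    summary: str


async def review_movie(user_id: int) -> BaseModel:
    # The convenience function resolves the global LLM provider internally,
    # so only the prompt, schema, and optional overrides are passed here.
    return await aquery_llm_structured(
        prompt="Review the movie 'Inception' in one short paragraph.",
        output_schema=MovieReview,
        user=user_id,
        system_message="You are a concise film critic.",
    )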