ai-lawyer-agent/core/init_agent.py
2026-03-15 20:11:01 +01:00

55 lines
2.3 KiB
Python

import os
from agents import Agent, AgentHooks
from agents import OpenAIChatCompletionsModel, AsyncOpenAI, ModelSettings
from agents import set_tracing_disabled
from core.system_prompt import get_system_prompt
from api.tools.judges import JudgesAPI
from api.tools.coutrs import CourtsAPI
from api.tools.contracts import ContractsAPI
from api.tools.decisions import DecisionsAPI
from api.tools.admin_proceedings import AdminProceedingsAPI
from api.tools.civil_proceedings import CivilProceedingsAPI
set_tracing_disabled(True)
class MyAgentHooks(AgentHooks):
    """Lifecycle hooks that log agent start/end events to stdout."""

    async def on_start(self, context, agent):
        """Invoked by the runner when the agent begins a turn."""
        message = f"\n🏃‍♂️‍➡️ [AgentHooks] {agent.name} started."
        print(message)

    async def on_end(self, context, agent, output):
        """Invoked by the runner when the agent produces its final output."""
        message = f"🏁 [AgentHooks] {agent.name} ended."
        print(message)
def assistant_agent(model_name: str = "qwen3.5:cloud") -> Agent:
    """Build the legal-assistant Agent wired to a local Ollama endpoint.

    The Ollama base URL can be overridden with the ``OLLAMA_BASE_URL``
    environment variable; it defaults to ``http://localhost:11434/v1``
    so existing callers keep working unchanged.

    Args:
        model_name: Name of the chat model served by Ollama.

    Returns:
        A configured ``Agent`` with all legal-data tools attached.
    """
    # One API wrapper per legal-data domain; each exposes search,
    # lookup-by-id, and autocomplete tools.
    judge_api = JudgesAPI()
    court_api = CourtsAPI()
    contract_api = ContractsAPI()
    decision_api = DecisionsAPI()
    admin_proceedings_api = AdminProceedingsAPI()
    civil_proceedings_api = CivilProceedingsAPI()

    # Ollama speaks the OpenAI-compatible API; the api_key value is a
    # required placeholder and is not validated by Ollama itself.
    base_url = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434/v1")
    client = AsyncOpenAI(base_url=base_url, api_key="ollama", timeout=120.0, max_retries=0)
    model = OpenAIChatCompletionsModel(model=model_name, openai_client=client)

    agent = Agent(
        name="Assistant",
        instructions=get_system_prompt(model_name),
        model=model,
        # Sequential tool calls only — local models often mishandle
        # parallel tool invocation.
        model_settings=ModelSettings(temperature=0.7, tool_choice="auto", parallel_tool_calls=False),
        tools=[
            judge_api.judge, judge_api.judge_id, judge_api.judge_autocomplete,
            court_api.court, court_api.court_id, court_api.court_autocomplete,
            contract_api.contract, contract_api.contract_id, contract_api.contract_autocomplete,
            decision_api.decision, decision_api.decision_id, decision_api.decision_autocomplete,
            admin_proceedings_api.admin_proceedings, admin_proceedings_api.admin_proceedings_id,
            admin_proceedings_api.admin_proceedings_autocomplete,
            civil_proceedings_api.civil_proceedings, civil_proceedings_api.civil_proceedings_id,
            civil_proceedings_api.civil_proceedings_autocomplete,
        ],
        # After a tool runs, feed the result back to the LLM for a
        # follow-up response rather than returning the raw tool output.
        tool_use_behavior="run_llm_again",
        reset_tool_choice=True,
        hooks=MyAgentHooks(),
    )
    return agent