logging fix, add thinking mode

This commit is contained in:
G0DSEND016 2026-03-16 01:54:12 +01:00
parent 84653fa19b
commit a95a0efbab
5 changed files with 61 additions and 24 deletions

View File

@@ -86,16 +86,13 @@ https://ollama.com/download
### 5. **Sťahovanie modelov** ### 5. **Sťahovanie modelov**
```bash ```bash
ollama pull qwen3-next:80b-cloud ollama pull qwen3.5:cloud
``` ```
```bash ```bash
ollama pull gpt-oss:20b-cloud ollama pull gpt-oss:20b-cloud
``` ```
```bash
ollama pull deepseek-v3.1:671b-cloud
```
### **Inštalácia cez DOCKER** ### **Inštalácia cez DOCKER**

View File

@@ -1,11 +1,32 @@
import httpx import httpx
import logging import logging
import json
from pydantic import BaseModel from pydantic import BaseModel
from cachetools import TTLCache from cachetools import TTLCache
from typing import Callable
from tenacity import retry, stop_after_attempt, wait_exponential from tenacity import retry, stop_after_attempt, wait_exponential
from api.config import HTTP_TIMEOUT, HTTP_MAX_CONNECTIONS, HTTP_MAX_KEEPALIVE, CACHE_TTL, CACHE_MAX_SIZE from api.config import HTTP_TIMEOUT, HTTP_MAX_CONNECTIONS, HTTP_MAX_KEEPALIVE, CACHE_TTL, CACHE_MAX_SIZE
logger = logging.getLogger(__name__)
logger.handlers.clear()
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter(
fmt='%(asctime)s - %(levelname)s - %(message)s',
datefmt='%H:%M:%S'
))
logger.addHandler(handler)
logger.propagate = False
httpx_logger = logging.getLogger("httpx")
httpx_logger.handlers.clear()
httpx_logger.setLevel(logging.INFO)
httpx_logger.addHandler(handler)
httpx_logger.propagate = False
_cache = TTLCache(maxsize=CACHE_MAX_SIZE, ttl=CACHE_TTL) _cache = TTLCache(maxsize=CACHE_MAX_SIZE, ttl=CACHE_TTL)
_client = httpx.AsyncClient( _client = httpx.AsyncClient(
@@ -14,12 +35,6 @@ _client = httpx.AsyncClient(
max_keepalive_connections=HTTP_MAX_KEEPALIVE), max_keepalive_connections=HTTP_MAX_KEEPALIVE),
) )
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
)
logger = logging.getLogger(__name__)
def docstring_from_model(model: type[BaseModel]): def docstring_from_model(model: type[BaseModel]):
def decorator(func): def decorator(func):
if func.__doc__: if func.__doc__:
@@ -32,34 +47,45 @@ def docstring_from_model(model: type[BaseModel]):
return func return func
return decorator return decorator
_log_callback: Callable[[str], None] | None = None
def set_log_callback(cb: Callable[[str], None] | None):
global _log_callback
_log_callback = cb
def _log(msg: str):
logger.info(msg)
if _log_callback is not None:
_log_callback(msg)
@retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=1, max=5)) @retry(stop=stop_after_attempt(3), wait=wait_exponential(multiplier=1, min=1, max=5))
async def fetch_api_data(icon: str, url: str, params: dict, remove_keys: list = None) -> dict: async def fetch_api_data(icon: str, url: str, params: dict, remove_keys: list = None) -> dict:
try: try:
cache_key = f"{url}:{sorted(params.items())}" cache_key = f"{url}:{sorted(params.items())}"
if cache_key in _cache: if cache_key in _cache:
logger.info(f"\n💾 {icon}Cache hit: {cache_key}\n") _log(f"💾 {icon} Cache hit")
return _cache[cache_key] return _cache[cache_key]
logger.info(f"\n🔨 {icon} Input parameters: {params}\n") _log(f"🔨 {icon} Input parameters:\n{json.dumps(params, ensure_ascii=False, indent=2)}")
response = await _client.get(url, params=params) response = await _client.get(url, params=params)
response.raise_for_status() response.raise_for_status()
logger.debug(f"\n🖇️ {icon} Request URL: {response.url}\n") _log(f"🖇️ {icon} Request URL: {response.url}")
data = response.json() data = response.json()
if remove_keys and isinstance(data, dict): if remove_keys and isinstance(data, dict):
for key in remove_keys: for key in remove_keys:
data.pop(key, None) data.pop(key, None)
logger.info(f"\n{icon} Success: {url}")
_cache[cache_key] = data _cache[cache_key] = data
_log(f"{icon} Success: {url}")
return data return data
except httpx.HTTPStatusError as e: except httpx.HTTPStatusError as e:
logger.error(f"\n{icon}HTTP error: {e.response.status_code} - {e.response.text}\n") _log(f"{icon}HTTP error: {e.response.status_code} - {e.response.text}")
return {"error": "http_error", "status_code": e.response.status_code} return {"error": "http_error", "status_code": e.response.status_code}
except httpx.RequestError as e: except httpx.RequestError as e:
logger.error(f"\n{icon}Request error: {str(e)}\n") _log(f"{icon}Request error: {str(e)}")
return {"error": "request_error", "status_code": str(e)} return {"error": "request_error", "status_code": str(e)}
except Exception as e: except Exception as e:
logger.critical(f"\n{icon}Unexpected error: {str(e)}\n", exc_info=True) _log(f"{icon}Unexpected error: {str(e)}")
return {"error": "unexpected_error", "status_code": str(e)} return {"error": "unexpected_error", "status_code": str(e)}

14
app.py
View File

@@ -6,6 +6,8 @@ from core.config import DEFAULT_MODEL, MAX_HISTORY
from core.init_agent import assistant_agent from core.init_agent import assistant_agent
from core.stream_response import stream_response from core.stream_response import stream_response
from api.fetch_api_data import set_log_callback
STARTERS = [ STARTERS = [
("What legal data can the agent find?","magnifying_glass"), ("What legal data can the agent find?","magnifying_glass"),
("What is the agent not allowed to do or use?","ban"), ("What is the agent not allowed to do or use?","ban"),
@@ -60,11 +62,23 @@ async def main(message: cl.Message):
if len(history) > MAX_HISTORY: if len(history) > MAX_HISTORY:
history = history[-MAX_HISTORY:] history = history[-MAX_HISTORY:]
async with cl.Step(name="🔍 Fetching data...") as step:
log_lines = []
def on_log(line: str):
log_lines.append(line)
step.output = "\n".join(log_lines)
set_log_callback(on_log)
msg = cl.Message(content="") msg = cl.Message(content="")
async for token in stream_response(agent, history): async for token in stream_response(agent, history):
await msg.stream_token(token) await msg.stream_token(token)
await msg.update() await msg.update()
set_log_callback(None)
await step.update()
if msg.content: if msg.content:
history.append({"role": "assistant", "content": msg.content}) history.append({"role": "assistant", "content": msg.content})

View File

@@ -10,4 +10,5 @@ async def stream_response(agent: Agent, prompt: list[dict] | str) -> AsyncGenera
if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent): if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent):
yield event.data.delta # <-- sends the next piece of response text yield event.data.delta # <-- sends the next piece of response text
except Exception as e: except Exception as e:
yield f"⚠️🖨️ Error: {e}" yield f"⚠️🖨 Error: {e}"

View File

@@ -30,7 +30,6 @@
text-align: left; text-align: left;
} }
#starters button img { #starters button img {
margin-bottom: 6px; margin-bottom: 6px;
} }