From 94aba6ea1a69e47dabf71303ccccc8aa2437de07 Mon Sep 17 00:00:00 2001
From: y9938
Date: Tue, 30 Dec 2025 07:02:52 +0300
Subject: [PATCH] fix: colors

---
 .env.example |  1 +
 main.py      | 17 +++++++++--------
 2 files changed, 10 insertions(+), 8 deletions(-)

diff --git a/.env.example b/.env.example
index 1159ced..03ebeda 100644
--- a/.env.example
+++ b/.env.example
@@ -3,6 +3,7 @@
 EMBEDDING_MODEL=mxbai-embed-large:latest
 LLM_MODEL=qwen2.5:7b-instruct-q8_0
 OLLAMA_BASE_URL=http://localhost:11434
+ANSWER_COLOR=purple
 SYSTEM_PROMPT="You are a precise technical assistant. Cite sources using [filename]. Be concise."
 
 USER_PROMPT_TEMPLATE="Previous Conversation:
diff --git a/main.py b/main.py
index cb67230..7d38456 100644
--- a/main.py
+++ b/main.py
@@ -36,6 +36,7 @@ load_dotenv()
 style = Style.from_dict({"prompt": "bold #6a0dad"})
 
 OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
+ANSWER_COLOR = os.getenv("ANSWER_COLOR", "blue")
 SYSTEM_PROMPT_SEARCH = os.getenv("SYSTEM_PROMPT", "You are a precise technical assistant. Cite sources using [filename]. Be concise.")
 
 SYSTEM_PROMPT_ANALYSIS = (
@@ -49,7 +50,7 @@ SYSTEM_PROMPT_ANALYSIS = (
 
 USER_PROMPT_TEMPLATE = os.getenv("USER_PROMPT_TEMPLATE", "Previous Conversation:\n{history}\n\nContext from Docs:\n{context}\n\nCurrent Question: {question}")
 
-MD_DIRECTORY = os.getenv("MD_FOLDER", "./my_docs")
+MD_DIRECTORY = os.getenv("MD_FOLDER", "./notes")
 EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "nomic-embed-text")
 LLM_MODEL = os.getenv("LLM_MODEL", "llama3")
 
@@ -91,7 +92,7 @@ def classify_intent(query: str) -> str:
         r"what have i learned", r"summary of (my )?notes", r"my progress",
         r"learning path", r"knowledge gap",
         r"оцени (мой )?прогресс", r"что я выучил", r"итоги", r"анализ знаний",
-        r"сегодня урок", r"что я изучил"
+        r"сегодня(?:\s+\w+)*\s*урок", r"что я изучил"
     ]
 
     query_lower = query.lower()
@@ -286,7 +287,7 @@ async def main():
     processor = ChunkProcessor(vectorstore)
     cache = load_hash_cache()
 
-    console.print("Checking documents...", style="yellow")
+    # Checking documents
     files = [
         os.path.join(root, file)
         for root, _, files in os.walk(MD_DIRECTORY)
@@ -306,17 +307,17 @@ async def main():
     observer = start_watcher(processor, cache)
     memory = ConversationMemory()
 
-    console.print("💬 Ready! Type 'exit' to quit.", style="bold green")
-
     try:
         while True:
             query = await session.prompt_async("> ", style=style)
             query = query.strip()
             if query.lower() in {"exit", "quit", "q"}:
-                console.print("Goodbye!", style="yellow")
+                console.print("\nGoodbye!", style="yellow")
                 break
            if not query:
                continue
+            console.print()
+
             mode = classify_intent(query)
 
             history_str = memory.get_history()
@@ -366,7 +367,7 @@ async def main():
                 "question": query,
                 "history": history_str
             }):
-                print(chunk, end="")
+                console.print(chunk, end="", style=ANSWER_COLOR)
                 response += chunk
 
             console.print("\n")
@@ -385,5 +386,5 @@ if __name__ == "__main__":
         loop = asyncio.get_event_loop()
         loop.run_until_complete(main())
     except KeyboardInterrupt:
-        console.print("Goodbye!", style="yellow")
+        console.print("\nGoodbye!", style="yellow")
         sys.exit(0)
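
Not part of the patch: a minimal, self-contained sketch of the pattern the main.py change relies on, namely reading a color name from an ANSWER_COLOR environment variable and passing it as the style of rich's Console.print while streaming chunks with end="". The stream_answer helper and the sample chunk list are illustrative assumptions, not code from this repository.

# Illustrative sketch only -- not part of the patch. Assumes `rich` is installed.
import os

from rich.console import Console

console = Console()
# Same pattern as the patch: color name comes from the environment, with a fallback.
ANSWER_COLOR = os.getenv("ANSWER_COLOR", "blue")  # e.g. "purple", "bold green", "#6a0dad"


def stream_answer(chunks):
    """Print streamed text chunks in the configured color without adding newlines."""
    for chunk in chunks:
        # markup=False keeps literal [filename] citations from being parsed as rich markup.
        console.print(chunk, end="", style=ANSWER_COLOR, markup=False)
    console.print()  # finish the line


if __name__ == "__main__":
    stream_answer(["Streaming ", "a colored answer ", "that cites [notes.md]."])

Console.print treats square brackets as rich markup by default, so the sketch disables markup to keep literal [filename] citations intact; whether the patched code needs that depends on its chunk content.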