feat: monitor.py, update main.py
This commit is contained in:
553
main.py
553
main.py
@@ -1,25 +1,19 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
"""
|
"""
|
||||||
RAG Learning System - Simplified Educational Assistant
|
Simplified RAG Learning Assistant
|
||||||
Tracks learning progress across subjects and provides AI tutoring guidance.
|
Tracks learning progress across 17 subjects and provides AI tutoring guidance.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import os
|
import os
|
||||||
import sys
|
|
||||||
import json
|
import json
|
||||||
import hashlib
|
import hashlib
|
||||||
import asyncio
|
import asyncio
|
||||||
import re
|
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import Dict, List, Set, Optional
|
from typing import Dict, List, Set, Optional
|
||||||
from dataclasses import dataclass, asdict
|
from dataclasses import dataclass
|
||||||
|
|
||||||
from dotenv import load_dotenv
|
|
||||||
from rich.console import Console
|
from rich.console import Console
|
||||||
from rich.panel import Panel
|
|
||||||
from rich.prompt import Prompt
|
|
||||||
from rich.progress import Progress, SpinnerColumn, TextColumn
|
|
||||||
from prompt_toolkit import PromptSession
|
from prompt_toolkit import PromptSession
|
||||||
from prompt_toolkit.styles import Style
|
from prompt_toolkit.styles import Style
|
||||||
|
|
||||||
@@ -36,94 +30,10 @@ from langchain_core.output_parsers import StrOutputParser
|
|||||||
# =========================
|
# =========================
|
||||||
console = Console(color_system="standard", force_terminal=True)
|
console = Console(color_system="standard", force_terminal=True)
|
||||||
session = PromptSession()
|
session = PromptSession()
|
||||||
load_dotenv()
|
|
||||||
|
|
||||||
style = Style.from_dict({"prompt": "bold #6a0dad"})
|
style = Style.from_dict({"prompt": "bold #6a0dad"})
|
||||||
|
|
||||||
# Core Configuration
|
|
||||||
OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
|
OLLAMA_BASE_URL = os.getenv("OLLAMA_BASE_URL", "http://localhost:11434")
|
||||||
ANSWER_COLOR = os.getenv("ANSWER_COLOR", "blue")
|
|
||||||
|
|
||||||
# Subject-specific keywords for filtering
|
|
||||||
SUBJECT_KEYWORDS = {
|
|
||||||
"english": ["english", "английский", "vocabulary", "grammar", "перевод", "словарь", "грамматика"],
|
|
||||||
"math": ["math", "математика", "алгебра", "геометрия", "calculus", "дискретная", "logic", "логика"],
|
|
||||||
"cs": ["computer science", "алгоритмы", "data structures", "oop", "python", "programming", "код"],
|
|
||||||
"electronics": ["electronics", "электротехника", "circuit", "микроконтроллер", "arduino", "цифровая"],
|
|
||||||
"linux": ["linux", "kali", "bash", "terminal", "command line", "скрипт", "администрирование"],
|
|
||||||
"networking": ["network", "сеть", "tcp", "ip", "osi", "маршрутизация", "vlan", "протокол"],
|
|
||||||
"cybersecurity": ["cybersecurity", "безопасность", "owasp", "уязвимость", "pentest", "hack", "хак"],
|
|
||||||
"sql": ["sql"]
|
|
||||||
}
|
|
||||||
|
|
||||||
# System Prompt for Educational Assistant
|
|
||||||
SYSTEM_PROMPT = """Ты — наставник-преподаватель по кибербезопасности. Твоя цель — довести ученика с уровня "пользователь ПК" до уровня junior в кибербезопасности.
|
|
||||||
|
|
||||||
КУРСОВАЯ СТРУКТУРА
|
|
||||||
Модули (6 независимых курсов):
|
|
||||||
1. Computer Science (фундамент)
|
|
||||||
2. Математика
|
|
||||||
3. Основы электротехники
|
|
||||||
4. Linux + Kali Linux
|
|
||||||
5. Основы сетей
|
|
||||||
6. Введение в кибербезопасность
|
|
||||||
7. Английский язык
|
|
||||||
|
|
||||||
СТРУКТУРА КАЖДОГО МОДУЛЯ
|
|
||||||
• Цель урока
|
|
||||||
• Темы в хронологическом порядке (от простого к сложному)
|
|
||||||
• Практические задания
|
|
||||||
• Прогресс-бар (по нормам Минобрнауки РФ)
|
|
||||||
• Блок вопросов для самопроверки
|
|
||||||
• Названия тем для поиска в YouTube/статьях
|
|
||||||
|
|
||||||
ОТСЛЕЖИВАНИЕ ПРОГРЕССА
|
|
||||||
Методология:
|
|
||||||
• Каждый предмет = числовая прямая от 0 до ∞
|
|
||||||
• Темы = точки на прямой (например: "цифры" = 0.01, "дроби" = 0.04)
|
|
||||||
• Без усвоения базы — не переходить дальше
|
|
||||||
• Адаптация вектора обучения по прогрессу
|
|
||||||
|
|
||||||
Критерии Junior-уровня:
|
|
||||||
• CS: Алгоритмы, структуры данных, ООП
|
|
||||||
• Математика: Дискретная математика, логика, теория чисел
|
|
||||||
• Электротехника: Цифровая логика, микроконтроллеры
|
|
||||||
• Linux: CLI, bash-скрипты, системное администрирование
|
|
||||||
• Сети: OSI, TCP/IP, маршрутизация, VLAN
|
|
||||||
• Кибербезопасность: OWASP Top 10, базовые уязвимости, инструменты
|
|
||||||
• Английский: Технический английский, терминология
|
|
||||||
|
|
||||||
РАБОЧИЙ ПРОЦЕСС
|
|
||||||
Ответ пользователю:
|
|
||||||
1. Определи стартовую точку по заметкам Obsidian
|
|
||||||
2. Построй фундамент текущего урока
|
|
||||||
3. Сверяйся с заметками ученика
|
|
||||||
4. Комбинируй стиль живого наставника и учебника
|
|
||||||
|
|
||||||
Формат ответа:
|
|
||||||
"В [ПРЕДМЕТ] будем проходить [ТЕМА_1] и [ТЕМА_2].
|
|
||||||
[Дополнительные инструкции по структуре изучения]"
|
|
||||||
|
|
||||||
ПРАВИЛА ПРОГРЕССИИ
|
|
||||||
• Проверяй усвоение предыдущих тем
|
|
||||||
• Не суди по одному слову вне контекста
|
|
||||||
• Учитывай межпредметные связи
|
|
||||||
• Корректируй траекторию обучения динамически
|
|
||||||
|
|
||||||
ПОИСКОВЫЕ ЗАПРОСЫ
|
|
||||||
Формируй темы для поиска в формате:
|
|
||||||
"[ПРЕДМЕТ] [УРОВЕНЬ] [ТЕМА] [ЯЗЫК]" Пример: "Computer Science beginner algorithms Russian"
|
|
||||||
"""
|
|
||||||
|
|
||||||
USER_PROMPT_TEMPLATE = """Текущий прогресс обучения:
|
|
||||||
{progress}
|
|
||||||
|
|
||||||
Контекст из заметок:
|
|
||||||
{context}
|
|
||||||
|
|
||||||
Вопрос ученика: {question}"""
|
|
||||||
|
|
||||||
# Paths and Models
|
|
||||||
MD_DIRECTORY = os.getenv("MD_FOLDER", "./notes")
|
MD_DIRECTORY = os.getenv("MD_FOLDER", "./notes")
|
||||||
EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "mxbai-embed-large:latest")
|
EMBEDDING_MODEL = os.getenv("EMBEDDING_MODEL", "mxbai-embed-large:latest")
|
||||||
LLM_MODEL = os.getenv("LLM_MODEL", "qwen2.5:7b-instruct-q8_0")
|
LLM_MODEL = os.getenv("LLM_MODEL", "qwen2.5:7b-instruct-q8_0")
|
||||||
@@ -132,23 +42,110 @@ CHROMA_PATH = "./.cache/chroma_db"
|
|||||||
KNOWLEDGE_STATE_PATH = "./.cache/knowledge_state.json"
|
KNOWLEDGE_STATE_PATH = "./.cache/knowledge_state.json"
|
||||||
FILE_HASHES_PATH = "./.cache/file_hashes.json"
|
FILE_HASHES_PATH = "./.cache/file_hashes.json"
|
||||||
|
|
||||||
# Processing Configuration
|
|
||||||
CHUNK_SIZE = 400
|
CHUNK_SIZE = 400
|
||||||
CHUNK_OVERLAP = 50
|
CHUNK_OVERLAP = 50
|
||||||
TOP_K = 6
|
TOP_K = 6
|
||||||
COLLECTION_NAME = "learning_rag"
|
COLLECTION_NAME = "learning_rag"
|
||||||
MAX_CONTEXT_CHARS = 8000
|
MAX_CONTEXT_CHARS = 8000
|
||||||
|
|
||||||
|
# =========================
|
||||||
|
# SUBJECT CONFIGURATION
|
||||||
|
# =========================
|
||||||
|
SUBJECTS = {
|
||||||
|
"computer_science": "Computer Science",
|
||||||
|
"math": "Математика",
|
||||||
|
"english": "Английский язык",
|
||||||
|
"programming": "Основы программирования",
|
||||||
|
"linux": "Операционные системы Linux",
|
||||||
|
"windows": "Операционные системы Windows",
|
||||||
|
"networking": "Сетевые технологии",
|
||||||
|
"databases": "Базы данных и SQL",
|
||||||
|
"web": "Веб-технологии",
|
||||||
|
"cryptography": "Криптография",
|
||||||
|
"cybersecurity": "Базовые принципы кибербезопасности",
|
||||||
|
"pentest": "Тестирование на проникновение (Red Team)",
|
||||||
|
"soc": "SOC и Blue Team",
|
||||||
|
"devsecops": "DevSecOps",
|
||||||
|
"tools": "Инструменты и практика",
|
||||||
|
"certifications": "Сертификации и карьера",
|
||||||
|
"professional": "Профессиональное развитие"
|
||||||
|
}
|
||||||
|
|
||||||
|
SUBJECT_KEYWORDS = {
|
||||||
|
"computer_science": ["computer science", "алгоритмы", "data structures", "oop", "структуры данных"],
|
||||||
|
"math": ["math", "математика", "алгебра", "геометрия", "дискретная", "logic", "логика", "теория чисел"],
|
||||||
|
"english": ["english", "английский", "vocabulary", "grammar", "перевод", "словарь", "грамматика"],
|
||||||
|
"programming": ["programming", "python", "код", "code", "разработка", "программирование"],
|
||||||
|
"linux": ["linux", "kali", "bash", "terminal", "command line", "скрипт", "администрирование"],
|
||||||
|
"windows": ["windows", "powershell", "администрирование windows"],
|
||||||
|
"networking": ["network", "сеть", "tcp", "ip", "osi", "маршрутизация", "vlan", "протокол"],
|
||||||
|
"databases": ["database", "sql", "база данных", "postgresql", "mysql"],
|
||||||
|
"web": ["web", "html", "css", "javascript", "http", "frontend", "backend"],
|
||||||
|
"cryptography": ["cryptography", "криптография", "шифрование", "rsa", "aes"],
|
||||||
|
"cybersecurity": ["cybersecurity", "безопасность", "owasp", "уязвимость", "pentest"],
|
||||||
|
"pentest": ["pentest", "pentesting", "red team", "тестирование на проникновение"],
|
||||||
|
"soc": ["soc", "blue team", "security operations", "siem"],
|
||||||
|
"devsecops": ["devsecops", "ci/cd", "security automation"],
|
||||||
|
"tools": ["tools", "инструменты", "nmap", "burp", "metasploit", "wireshark"],
|
||||||
|
"certifications": ["certification", "сертификация", "ceh", "oscp", "cissp"],
|
||||||
|
"professional": ["github", "portfolio", "linkedin", "блог", "конференция"]
|
||||||
|
}
|
||||||
|
|
||||||
|
SYSTEM_PROMPT = """Ты — наставник-преподаватель по кибербезопасности. Твоя цель — довести ученика с уровня "пользователь ПК" до уровня junior в кибербезопасности.
|
||||||
|
|
||||||
|
КУРСОВАЯ СТРУКТУРА (17 модулей):
|
||||||
|
1) Computer Science: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
2) Математика: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
3) Английский язык: с полного нуля до уровня B2
|
||||||
|
4) Основы программирования: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
5) Операционные системы Linux: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
6) Операционные системы Windows: с уровня пользователя до уровня стандарта мировых вузов
|
||||||
|
7) Сетевые технологии: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
8) Базы данных и SQL: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
9) Веб-технологии: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
10) Криптография: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
11) Базовые принципы кибербезопасности: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
12) Тестирование на проникновение (Red Team): с полного нуля до уровня стандарта мировых вузов
|
||||||
|
13) SOC и Blue Team: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
14) DevSecOps: с полного нуля до уровня стандарта мировых вузов
|
||||||
|
15) Инструменты и практика: список тем для изучения, без практических
|
||||||
|
16) Сертификации и карьера: список тем для изучения, без практических
|
||||||
|
17) Профессиональное развитие: GitHub портфолио, блог, нетворкинг, конференции
|
||||||
|
|
||||||
|
МЕТОДОЛОГИЯ:
|
||||||
|
- Каждый предмет = числовая прямая от 0 до ∞
|
||||||
|
- Темы = точки на прямой (например: "цифры" = 0.01, "дроби" = 0.04)
|
||||||
|
- Без усвоения базы — не переходить дальше
|
||||||
|
- Адаптация вектора обучения по прогрессу
|
||||||
|
|
||||||
|
ФОРМАТ ОТВЕТА:
|
||||||
|
"В [ПРЕДМЕТ] будем проходить [ТЕМА_1] и [ТЕМА_2].
|
||||||
|
[Дополнительные инструкции по структуре изучения]"
|
||||||
|
|
||||||
|
ПРАВИЛА:
|
||||||
|
- Проверяй усвоение предыдущих тем
|
||||||
|
- Не суди по одному слову вне контекста
|
||||||
|
- Учитывай межпредметные связи
|
||||||
|
- Корректируй траекторию обучения динамически
|
||||||
|
- Отвечай всегда на русском языке"""
|
||||||
|
|
||||||
|
USER_PROMPT_TEMPLATE = """Текущий прогресс обучения:
|
||||||
|
{progress}
|
||||||
|
|
||||||
|
Контекст из заметок по предмету:
|
||||||
|
{context}
|
||||||
|
|
||||||
|
Вопрос ученика: {question}"""
|
||||||
|
|
||||||
# =========================
|
# =========================
|
||||||
# DATA STRUCTURES
|
# DATA STRUCTURES
|
||||||
# =========================
|
# =========================
|
||||||
@dataclass
|
@dataclass
|
||||||
class SubjectProgress:
|
class SubjectProgress:
|
||||||
"""Track progress for a specific subject"""
|
|
||||||
name: str
|
name: str
|
||||||
topics_covered: Set[str]
|
topics_covered: Set[str]
|
||||||
last_studied: Optional[str]
|
last_studied: Optional[str]
|
||||||
confidence_level: float = 0.0 # 0.0 to 1.0
|
confidence_level: float = 0.0
|
||||||
|
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
return {
|
return {
|
||||||
@@ -169,7 +166,6 @@ class SubjectProgress:
|
|||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
class KnowledgeState:
|
class KnowledgeState:
|
||||||
"""Complete learning state across all subjects"""
|
|
||||||
subjects: Dict[str, SubjectProgress]
|
subjects: Dict[str, SubjectProgress]
|
||||||
last_analysis: str
|
last_analysis: str
|
||||||
file_hashes: Dict[str, str]
|
file_hashes: Dict[str, str]
|
||||||
@@ -196,22 +192,18 @@ class KnowledgeState:
|
|||||||
# UTILITY FUNCTIONS
|
# UTILITY FUNCTIONS
|
||||||
# =========================
|
# =========================
|
||||||
def get_file_hash(file_path: str) -> str:
|
def get_file_hash(file_path: str) -> str:
|
||||||
"""Generate MD5 hash for file change detection"""
|
|
||||||
return hashlib.md5(Path(file_path).read_bytes()).hexdigest()
|
return hashlib.md5(Path(file_path).read_bytes()).hexdigest()
|
||||||
|
|
||||||
def load_json_cache(file_path: str) -> dict:
|
def load_json_cache(file_path: str) -> dict:
|
||||||
"""Load JSON cache with error handling"""
|
|
||||||
Path(file_path).parent.mkdir(parents=True, exist_ok=True)
|
Path(file_path).parent.mkdir(parents=True, exist_ok=True)
|
||||||
if Path(file_path).exists():
|
if Path(file_path).exists():
|
||||||
try:
|
try:
|
||||||
return json.loads(Path(file_path).read_text())
|
return json.loads(Path(file_path).read_text())
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
console.print(f"[yellow]⚠️ Corrupted cache: {file_path}. Resetting.[/yellow]")
|
|
||||||
return {}
|
return {}
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
def save_json_cache(data, file_path: str):
|
def save_json_cache(data, file_path: str):
|
||||||
"""Save JSON cache with error handling"""
|
|
||||||
try:
|
try:
|
||||||
Path(file_path).write_text(json.dumps(data, indent=2, ensure_ascii=False))
|
Path(file_path).write_text(json.dumps(data, indent=2, ensure_ascii=False))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@@ -220,20 +212,20 @@ def save_json_cache(data, file_path: str):
|
|||||||
# =========================
|
# =========================
|
||||||
# SUBJECT DETECTION
|
# SUBJECT DETECTION
|
||||||
# =========================
|
# =========================
|
||||||
def detect_subject_from_query(query: str) -> Optional[str]:
|
def detect_subjects_from_query(query: str) -> List[str]:
|
||||||
"""Detect which subject the user wants to study"""
|
|
||||||
query_lower = query.lower()
|
query_lower = query.lower()
|
||||||
|
detected = []
|
||||||
|
|
||||||
# Check for explicit subject mentions
|
|
||||||
for subject, keywords in SUBJECT_KEYWORDS.items():
|
for subject, keywords in SUBJECT_KEYWORDS.items():
|
||||||
for keyword in keywords:
|
for keyword in keywords:
|
||||||
if keyword.lower() in query_lower:
|
if keyword.lower() in query_lower:
|
||||||
return subject
|
if subject not in detected:
|
||||||
|
detected.append(subject)
|
||||||
|
break
|
||||||
|
|
||||||
return None
|
return detected
|
||||||
|
|
||||||
def detect_subject_from_content(text: str) -> Optional[str]:
|
def detect_subject_from_content(text: str) -> Optional[str]:
|
||||||
"""Detect subject from note content"""
|
|
||||||
text_lower = text.lower()
|
text_lower = text.lower()
|
||||||
subject_scores = {subject: 0 for subject in SUBJECT_KEYWORDS.keys()}
|
subject_scores = {subject: 0 for subject in SUBJECT_KEYWORDS.keys()}
|
||||||
|
|
||||||
@@ -242,148 +234,13 @@ def detect_subject_from_content(text: str) -> Optional[str]:
|
|||||||
if keyword.lower() in text_lower:
|
if keyword.lower() in text_lower:
|
||||||
subject_scores[subject] += 1
|
subject_scores[subject] += 1
|
||||||
|
|
||||||
# Return subject with highest score, if any matches
|
|
||||||
best_subject = max(subject_scores.items(), key=lambda x: x[1])
|
best_subject = max(subject_scores.items(), key=lambda x: x[1])
|
||||||
return best_subject[0] if best_subject[1] > 0 else None
|
return best_subject[0] if best_subject[1] > 0 else None
|
||||||
|
|
||||||
# =========================
|
|
||||||
# KNOWLEDGE ANALYSIS
|
|
||||||
# =========================
|
|
||||||
class KnowledgeAnalyzer:
|
|
||||||
"""Analyze learning progress from notes"""
|
|
||||||
|
|
||||||
def __init__(self, vectorstore):
|
|
||||||
self.vectorstore = vectorstore
|
|
||||||
|
|
||||||
async def analyze_all_notes(self, file_hashes: Dict[str, str]) -> KnowledgeState:
|
|
||||||
"""Analyze all notes to build complete knowledge state"""
|
|
||||||
console.print("[cyan]🔍 Analyzing all notes for learning progress...[/cyan]")
|
|
||||||
|
|
||||||
# Initialize subjects
|
|
||||||
subjects = {
|
|
||||||
name: SubjectProgress(name=name, topics_covered=set(), last_studied=None)
|
|
||||||
for name in SUBJECT_KEYWORDS.keys()
|
|
||||||
}
|
|
||||||
|
|
||||||
# Get all documents from vectorstore
|
|
||||||
try:
|
|
||||||
db_data = await asyncio.to_thread(self.vectorstore.get)
|
|
||||||
|
|
||||||
if not db_data or not db_data['documents']:
|
|
||||||
console.print("[yellow]⚠️ No documents found in vectorstore[/yellow]")
|
|
||||||
return KnowledgeState(subjects, datetime.now().isoformat(), file_hashes)
|
|
||||||
|
|
||||||
# Process each document
|
|
||||||
for text, metadata in zip(db_data['documents'], db_data['metadatas']):
|
|
||||||
if not metadata or 'source' not in metadata:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Detect subject
|
|
||||||
subject = detect_subject_from_content(text)
|
|
||||||
if subject:
|
|
||||||
subjects[subject].topics_covered.add(text[:100]) # Use first 100 chars as topic identifier
|
|
||||||
|
|
||||||
# Update last studied timestamp
|
|
||||||
file_path = metadata['source']
|
|
||||||
if file_path in file_hashes:
|
|
||||||
subjects[subject].last_studied = file_hashes[file_path]
|
|
||||||
|
|
||||||
# Calculate confidence levels based on topic coverage
|
|
||||||
for subject in subjects.values():
|
|
||||||
subject.confidence_level = min(len(subject.topics_covered) / 10.0, 1.0)
|
|
||||||
|
|
||||||
console.print(f"[green]✓ Analysis complete. Found progress in {len([s for s in subjects.values() if s.topics_covered])} subjects[/green]")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]✗ Error during analysis: {e}[/red]")
|
|
||||||
|
|
||||||
return KnowledgeState(subjects, datetime.now().isoformat(), file_hashes)
|
|
||||||
|
|
||||||
def get_progress_summary(self, knowledge_state: KnowledgeState, subject: Optional[str] = None) -> str:
|
|
||||||
"""Generate human-readable progress summary"""
|
|
||||||
if subject and subject in knowledge_state.subjects:
|
|
||||||
subj = knowledge_state.subjects[subject]
|
|
||||||
return f"Предмет: {subj.name}\n" \
|
|
||||||
f"Тем изучено: {len(subj.topics_covered)}\n" \
|
|
||||||
f"Уровень уверенности: {subj.confidence_level:.1%}"
|
|
||||||
|
|
||||||
# Return all subjects summary
|
|
||||||
summary = "Текущий прогресс обучения:\n"
|
|
||||||
for subj in knowledge_state.subjects.values():
|
|
||||||
if subj.topics_covered:
|
|
||||||
summary += f"- {subj.name}: {len(subj.topics_covered)} тем, уверенность {subj.confidence_level:.1%}\n"
|
|
||||||
|
|
||||||
return summary
|
|
||||||
|
|
||||||
# =========================
|
|
||||||
# DOCUMENT PROCESSING
|
|
||||||
# =========================
|
|
||||||
class DocumentProcessor:
|
|
||||||
"""Process markdown documents for the learning system"""
|
|
||||||
|
|
||||||
def __init__(self, vectorstore):
|
|
||||||
self.vectorstore = vectorstore
|
|
||||||
self.text_splitter = RecursiveCharacterTextSplitter(
|
|
||||||
chunk_size=CHUNK_SIZE,
|
|
||||||
chunk_overlap=CHUNK_OVERLAP,
|
|
||||||
separators=["\n\n", "\n", ". ", " "]
|
|
||||||
)
|
|
||||||
|
|
||||||
async def process_file(self, file_path: str) -> List[Document]:
|
|
||||||
"""Process a single markdown file"""
|
|
||||||
try:
|
|
||||||
loader = UnstructuredMarkdownLoader(file_path)
|
|
||||||
documents = loader.load()
|
|
||||||
|
|
||||||
if not documents:
|
|
||||||
return []
|
|
||||||
|
|
||||||
# Add source metadata
|
|
||||||
for doc in documents:
|
|
||||||
doc.metadata["source"] = file_path
|
|
||||||
|
|
||||||
# Split into chunks
|
|
||||||
chunks = self.text_splitter.split_documents(documents)
|
|
||||||
return chunks
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]✗ Error processing {Path(file_path).name}: {e}[/red]")
|
|
||||||
return []
|
|
||||||
|
|
||||||
async def index_files(self, file_paths: List[str]) -> bool:
|
|
||||||
"""Index multiple files with batching"""
|
|
||||||
all_chunks = []
|
|
||||||
|
|
||||||
for file_path in file_paths:
|
|
||||||
chunks = await self.process_file(file_path)
|
|
||||||
all_chunks.extend(chunks)
|
|
||||||
|
|
||||||
if not all_chunks:
|
|
||||||
return False
|
|
||||||
|
|
||||||
batch_size = 20
|
|
||||||
total_batches = (len(all_chunks) + batch_size - 1) // batch_size
|
|
||||||
|
|
||||||
try:
|
|
||||||
await asyncio.to_thread(self.vectorstore.reset_collection)
|
|
||||||
|
|
||||||
for i in range(0, len(all_chunks), batch_size):
|
|
||||||
batch = all_chunks[i:i + batch_size]
|
|
||||||
await asyncio.to_thread(self.vectorstore.add_documents, batch)
|
|
||||||
console.print(f" [dim]Пакет {i//batch_size + 1}/{total_batches} проиндексирован[/dim]")
|
|
||||||
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]✗ Error indexing documents: {e}[/red]")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# =========================
|
# =========================
|
||||||
# LEARNING ASSISTANT
|
# LEARNING ASSISTANT
|
||||||
# =========================
|
# =========================
|
||||||
class LearningAssistant:
|
class LearningAssistant:
|
||||||
"""Main learning assistant class"""
|
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.embeddings = OllamaEmbeddings(
|
self.embeddings = OllamaEmbeddings(
|
||||||
model=EMBEDDING_MODEL,
|
model=EMBEDDING_MODEL,
|
||||||
@@ -408,58 +265,44 @@ class LearningAssistant:
|
|||||||
])
|
])
|
||||||
|
|
||||||
self.chain = self.prompt | self.llm | StrOutputParser()
|
self.chain = self.prompt | self.llm | StrOutputParser()
|
||||||
self.processor = DocumentProcessor(self.vectorstore)
|
self.text_splitter = RecursiveCharacterTextSplitter(
|
||||||
self.analyzer = KnowledgeAnalyzer(self.vectorstore)
|
chunk_size=CHUNK_SIZE,
|
||||||
|
chunk_overlap=CHUNK_OVERLAP,
|
||||||
|
separators=["\n\n", "\n", ". ", " "]
|
||||||
|
)
|
||||||
|
|
||||||
async def initialize(self):
|
async def initialize(self):
|
||||||
"""Initialize the learning system"""
|
console.print("[bold cyan]🎓 RAG Learning System - Educational Assistant[/bold cyan]")
|
||||||
console.print(Panel.fit(
|
console.print(f"📂 Notes Directory: {MD_DIRECTORY}")
|
||||||
"[bold cyan]🎓 RAG Learning System - Educational Assistant[/bold cyan]\n"
|
console.print(f"🧠 Model: {LLM_MODEL}\n")
|
||||||
"📂 Notes Directory: {}\n"
|
|
||||||
"🧠 Model: {}\n"
|
|
||||||
"[dim]Analyzing your learning progress...[/dim]".format(
|
|
||||||
MD_DIRECTORY, LLM_MODEL
|
|
||||||
),
|
|
||||||
border_style="cyan"
|
|
||||||
))
|
|
||||||
|
|
||||||
# Load or create knowledge state
|
|
||||||
knowledge_state = await self.load_or_analyze_knowledge()
|
knowledge_state = await self.load_or_analyze_knowledge()
|
||||||
|
|
||||||
console.print("[green]✓ System initialized successfully![/green]")
|
console.print("[green]✓ System initialized successfully![/green]")
|
||||||
console.print("[dim]💡 Tip: /help[/dim]\n")
|
console.print("[dim]💡 Tip: Просто напишите 'изучаем английский' или 'учим математику'[/dim]\n")
|
||||||
|
|
||||||
return knowledge_state
|
return knowledge_state
|
||||||
|
|
||||||
async def load_or_analyze_knowledge(self) -> KnowledgeState:
|
async def load_or_analyze_knowledge(self) -> KnowledgeState:
|
||||||
"""Load existing knowledge state or analyze all notes"""
|
|
||||||
# Load file hashes
|
|
||||||
file_hashes = self.get_file_hashes()
|
file_hashes = self.get_file_hashes()
|
||||||
|
|
||||||
# Load knowledge state
|
|
||||||
state_data = load_json_cache(KNOWLEDGE_STATE_PATH)
|
state_data = load_json_cache(KNOWLEDGE_STATE_PATH)
|
||||||
|
|
||||||
if state_data:
|
if state_data:
|
||||||
knowledge_state = KnowledgeState.from_dict(state_data)
|
knowledge_state = KnowledgeState.from_dict(state_data)
|
||||||
|
|
||||||
# Check if files have changed
|
|
||||||
if self.have_files_changed(file_hashes, knowledge_state.file_hashes):
|
if self.have_files_changed(file_hashes, knowledge_state.file_hashes):
|
||||||
console.print("[yellow]📁 Files changed, re-analyzing knowledge...[/yellow]")
|
console.print("[yellow]📁 Files changed, re-analyzing knowledge...[/yellow]")
|
||||||
knowledge_state = await self.analyzer.analyze_all_notes(file_hashes)
|
knowledge_state = await self.analyze_all_notes(file_hashes)
|
||||||
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
||||||
else:
|
else:
|
||||||
console.print("[green]✓ Knowledge state up to date[/green]")
|
console.print("[green]✓ Knowledge state up to date[/green]")
|
||||||
else:
|
else:
|
||||||
console.print("[yellow]📊 First time setup - analyzing all notes...[/yellow]")
|
console.print("[yellow]📊 First time setup - analyzing all notes...[/yellow]")
|
||||||
knowledge_state = await self.analyzer.analyze_all_notes(file_hashes)
|
knowledge_state = await self.analyze_all_notes(file_hashes)
|
||||||
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
||||||
|
|
||||||
return knowledge_state
|
return knowledge_state
|
||||||
|
|
||||||
def get_file_hashes(self) -> Dict[str, str]:
|
def get_file_hashes(self) -> Dict[str, str]:
|
||||||
"""Get hashes for all markdown files"""
|
|
||||||
file_hashes = {}
|
file_hashes = {}
|
||||||
|
|
||||||
for root, _, files in os.walk(MD_DIRECTORY):
|
for root, _, files in os.walk(MD_DIRECTORY):
|
||||||
for file in files:
|
for file in files:
|
||||||
if file.endswith(".md"):
|
if file.endswith(".md"):
|
||||||
@@ -468,24 +311,53 @@ class LearningAssistant:
|
|||||||
file_hashes[file_path] = get_file_hash(file_path)
|
file_hashes[file_path] = get_file_hash(file_path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.print(f"[red]✗ Error reading {file}: {e}[/red]")
|
console.print(f"[red]✗ Error reading {file}: {e}[/red]")
|
||||||
|
|
||||||
return file_hashes
|
return file_hashes
|
||||||
|
|
||||||
def have_files_changed(self, current_hashes: Dict[str, str], cached_hashes: Dict[str, str]) -> bool:
|
def have_files_changed(self, current: Dict[str, str], cached: Dict[str, str]) -> bool:
|
||||||
"""Check if any files have changed"""
|
if len(current) != len(cached):
|
||||||
if len(current_hashes) != len(cached_hashes):
|
|
||||||
return True
|
return True
|
||||||
|
for path, hash_val in current.items():
|
||||||
for file_path, current_hash in current_hashes.items():
|
if path not in cached or cached[path] != hash_val:
|
||||||
if file_path not in cached_hashes or cached_hashes[file_path] != current_hash:
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
return False
|
return False
|
||||||
|
|
||||||
async def get_relevant_context(self, subject: str, knowledge_state: KnowledgeState) -> str:
|
async def analyze_all_notes(self, file_hashes: Dict[str, str]) -> KnowledgeState:
|
||||||
"""Get context relevant to the specified subject"""
|
console.print("[cyan]🔍 Analyzing all notes for learning progress...[/cyan]")
|
||||||
|
|
||||||
|
subjects = {
|
||||||
|
name: SubjectProgress(name=name, topics_covered=set(), last_studied=None)
|
||||||
|
for name in SUBJECTS.keys()
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
db_data = await asyncio.to_thread(self.vectorstore.get)
|
||||||
|
|
||||||
|
if db_data and db_data['documents']:
|
||||||
|
for text, metadata in zip(db_data['documents'], db_data['metadatas']):
|
||||||
|
if not metadata or 'source' not in metadata:
|
||||||
|
continue
|
||||||
|
|
||||||
|
subject = detect_subject_from_content(text)
|
||||||
|
if subject:
|
||||||
|
subjects[subject].topics_covered.add(text[:100])
|
||||||
|
|
||||||
|
file_path = metadata['source']
|
||||||
|
if file_path in file_hashes:
|
||||||
|
subjects[subject].last_studied = file_hashes[file_path]
|
||||||
|
|
||||||
|
for subject in subjects.values():
|
||||||
|
subject.confidence_level = min(len(subject.topics_covered) / 10.0, 1.0)
|
||||||
|
|
||||||
|
studied_count = len([s for s in subjects.values() if s.topics_covered])
|
||||||
|
console.print(f"[green]✓ Analysis complete. Found progress in {studied_count} subjects[/green]")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]✗ Error during analysis: {e}[/red]")
|
||||||
|
|
||||||
|
return KnowledgeState(subjects, datetime.now().isoformat(), file_hashes)
|
||||||
|
|
||||||
|
async def get_relevant_context(self, subject: str) -> str:
|
||||||
try:
|
try:
|
||||||
# Get all documents and filter by subject
|
|
||||||
db_data = await asyncio.to_thread(self.vectorstore.get)
|
db_data = await asyncio.to_thread(self.vectorstore.get)
|
||||||
|
|
||||||
if not db_data or not db_data['documents']:
|
if not db_data or not db_data['documents']:
|
||||||
@@ -502,11 +374,10 @@ class LearningAssistant:
|
|||||||
"source": Path(metadata.get('source', 'unknown')).name
|
"source": Path(metadata.get('source', 'unknown')).name
|
||||||
})
|
})
|
||||||
|
|
||||||
# Build context string
|
|
||||||
context = f"Найдено {len(relevant_docs)} заметок по предмету:\n"
|
context = f"Найдено {len(relevant_docs)} заметок по предмету:\n"
|
||||||
|
|
||||||
char_count = len(context)
|
char_count = len(context)
|
||||||
for doc in relevant_docs[:TOP_K]: # Limit to top K documents
|
for doc in relevant_docs[:TOP_K]:
|
||||||
doc_text = f"\n---\nИсточник: {doc['source']}\n{doc['text']}\n"
|
doc_text = f"\n---\nИсточник: {doc['source']}\n{doc['text']}\n"
|
||||||
|
|
||||||
if char_count + len(doc_text) > MAX_CONTEXT_CHARS:
|
if char_count + len(doc_text) > MAX_CONTEXT_CHARS:
|
||||||
@@ -517,7 +388,7 @@ class LearningAssistant:
|
|||||||
char_count += len(doc_text)
|
char_count += len(doc_text)
|
||||||
|
|
||||||
if not relevant_docs:
|
if not relevant_docs:
|
||||||
return f"Заметок по предмету '{subject}' не найдено."
|
return f"Заметок по предмету '{SUBJECTS.get(subject, subject)}' не найдено."
|
||||||
|
|
||||||
return context
|
return context
|
||||||
|
|
||||||
@@ -525,74 +396,108 @@ class LearningAssistant:
|
|||||||
console.print(f"[red]✗ Error getting context: {e}[/red]")
|
console.print(f"[red]✗ Error getting context: {e}[/red]")
|
||||||
return "Ошибка при получении контекста."
|
return "Ошибка при получении контекста."
|
||||||
|
|
||||||
|
def get_progress_summary(self, knowledge_state: KnowledgeState, subjects: List[str]) -> str:
|
||||||
|
summary = "Текущий прогресс обучения:\n"
|
||||||
|
for subject in subjects:
|
||||||
|
if subject in knowledge_state.subjects:
|
||||||
|
subj = knowledge_state.subjects[subject]
|
||||||
|
if subj.topics_covered:
|
||||||
|
summary += f"- {SUBJECTS[subject]}: {len(subj.topics_covered)} тем, уверенность {subj.confidence_level:.1%}\n"
|
||||||
|
else:
|
||||||
|
summary += f"- {SUBJECTS[subject]}: изучение с нуля\n"
|
||||||
|
return summary
|
||||||
|
|
||||||
async def process_learning_query(self, query: str, knowledge_state: KnowledgeState) -> str:
|
async def process_learning_query(self, query: str, knowledge_state: KnowledgeState) -> str:
|
||||||
"""Process a learning query"""
|
subjects = detect_subjects_from_query(query)
|
||||||
# Detect subject from query
|
|
||||||
subject = detect_subject_from_query(query)
|
|
||||||
|
|
||||||
if not subject:
|
if not subjects:
|
||||||
# Try to infer from broader context or ask for clarification
|
|
||||||
return "Пожалуйста, уточните предмет для изучения (например: 'изучаем английский', 'учим математику')."
|
return "Пожалуйста, уточните предмет для изучения (например: 'изучаем английский', 'учим математику')."
|
||||||
|
|
||||||
# Get relevant context
|
responses = []
|
||||||
context = await self.get_relevant_context(subject, knowledge_state)
|
|
||||||
|
|
||||||
# Get progress summary
|
for subject in subjects:
|
||||||
progress = self.analyzer.get_progress_summary(knowledge_state, subject)
|
context = await self.get_relevant_context(subject)
|
||||||
|
progress = self.get_progress_summary(knowledge_state, [subject])
|
||||||
|
|
||||||
|
console.print(f"[blue]🔍 Анализирую прогресс по предмету: {SUBJECTS[subject]}[/blue]")
|
||||||
|
|
||||||
|
response = ""
|
||||||
|
console.print("[bold blue]Ассистент:[/bold blue] ", end="")
|
||||||
|
|
||||||
|
async for chunk in self.chain.astream({
|
||||||
|
"context": context,
|
||||||
|
"question": query,
|
||||||
|
"progress": progress
|
||||||
|
}):
|
||||||
|
console.print(chunk, end="", style="blue")
|
||||||
|
response += chunk
|
||||||
|
|
||||||
|
console.print("\n")
|
||||||
|
responses.append(response)
|
||||||
|
|
||||||
# Generate response
|
return "\n\n".join(responses) if len(responses) > 1 else responses[0]
|
||||||
console.print(f"[blue]🔍 Анализирую прогресс по предмету: {subject}[/blue]")
|
|
||||||
console.print(f"[dim]Контекст: {len(context)} символов[/dim]\n")
|
async def index_files(self, file_paths: List[str]) -> bool:
|
||||||
|
all_chunks = []
|
||||||
|
|
||||||
response = ""
|
for file_path in file_paths:
|
||||||
console.print("[bold blue]Ассистент:[/bold blue] ", end="")
|
try:
|
||||||
|
loader = UnstructuredMarkdownLoader(file_path)
|
||||||
|
documents = loader.load()
|
||||||
|
|
||||||
|
if documents:
|
||||||
|
for doc in documents:
|
||||||
|
doc.metadata["source"] = file_path
|
||||||
|
|
||||||
|
chunks = self.text_splitter.split_documents(documents)
|
||||||
|
all_chunks.extend(chunks)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]✗ Error processing {Path(file_path).name}: {e}[/red]")
|
||||||
|
|
||||||
async for chunk in self.chain.astream({
|
if not all_chunks:
|
||||||
"context": context,
|
return False
|
||||||
"question": query,
|
|
||||||
"progress": progress
|
|
||||||
}):
|
|
||||||
console.print(chunk, end="", style=ANSWER_COLOR)
|
|
||||||
response += chunk
|
|
||||||
|
|
||||||
console.print("\n")
|
try:
|
||||||
return response
|
await asyncio.to_thread(self.vectorstore.reset_collection)
|
||||||
|
|
||||||
|
batch_size = 20
|
||||||
|
for i in range(0, len(all_chunks), batch_size):
|
||||||
|
batch = all_chunks[i:i + batch_size]
|
||||||
|
await asyncio.to_thread(self.vectorstore.add_documents, batch)
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
console.print(f"[red]✗ Error indexing documents: {e}[/red]")
|
||||||
|
return False
|
||||||
|
|
||||||
# =========================
|
# =========================
|
||||||
# MAIN APPLICATION
|
# MAIN APPLICATION
|
||||||
# =========================
|
# =========================
|
||||||
async def main():
|
async def main():
|
||||||
"""Main application entry point"""
|
|
||||||
|
|
||||||
# Setup directories
|
|
||||||
Path(MD_DIRECTORY).mkdir(parents=True, exist_ok=True)
|
Path(MD_DIRECTORY).mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
assistant = LearningAssistant()
|
assistant = LearningAssistant()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Initialize system
|
|
||||||
knowledge_state = await assistant.initialize()
|
knowledge_state = await assistant.initialize()
|
||||||
|
|
||||||
# Main interaction loop
|
|
||||||
while True:
|
while True:
|
||||||
# Get user input
|
|
||||||
query = await session.prompt_async("> ", style=style)
|
query = await session.prompt_async("> ", style=style)
|
||||||
query = query.strip()
|
query = query.strip()
|
||||||
|
|
||||||
if not query:
|
if not query:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Handle exit commands
|
|
||||||
if query.lower() in ['/exit', '/quit', 'exit', 'quit', 'выход']:
|
if query.lower() in ['/exit', '/quit', 'exit', 'quit', 'выход']:
|
||||||
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
||||||
break
|
break
|
||||||
|
|
||||||
# Handle help
|
|
||||||
if query.lower() in ['/help', 'help', 'помощь']:
|
if query.lower() in ['/help', 'help', 'помощь']:
|
||||||
await show_help()
|
await show_help()
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Handle reindex command
|
|
||||||
if query.lower() in ['/reindex', 'reindex']:
|
if query.lower() in ['/reindex', 'reindex']:
|
||||||
console.print("[yellow]🔄 Переиндексирую все файлы...[/yellow]")
|
console.print("[yellow]🔄 Переиндексирую все файлы...[/yellow]")
|
||||||
|
|
||||||
@@ -603,12 +508,11 @@ async def main():
|
|||||||
console.print("[yellow]⚠️ Markdown файлы не найдены[/yellow]")
|
console.print("[yellow]⚠️ Markdown файлы не найдены[/yellow]")
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Вызовите index_files напрямую — он сам напечатает прогресс
|
success = await assistant.index_files(files)
|
||||||
success = await assistant.processor.index_files(files)
|
|
||||||
|
|
||||||
if success:
|
if success:
|
||||||
console.print("[cyan]📊 Анализирую знания...[/cyan]")
|
console.print("[cyan]📊 Анализирую знания...[/cyan]")
|
||||||
knowledge_state = await assistant.analyzer.analyze_all_notes(
|
knowledge_state = await assistant.analyze_all_notes(
|
||||||
assistant.get_file_hashes()
|
assistant.get_file_hashes()
|
||||||
)
|
)
|
||||||
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
save_json_cache(knowledge_state.to_dict(), KNOWLEDGE_STATE_PATH)
|
||||||
@@ -618,30 +522,27 @@ async def main():
|
|||||||
|
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Process learning query
|
|
||||||
await assistant.process_learning_query(query, knowledge_state)
|
await assistant.process_learning_query(query, knowledge_state)
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
console.print(f"[red]✗ Unexpected error: {e}[/red]")
|
console.print(f"[red]✗ Unexpected error: {e}[/red]")
|
||||||
console.print_exception()
|
|
||||||
|
|
||||||
async def show_help():
|
async def show_help():
|
||||||
"""Display help information"""
|
|
||||||
console.print("\n[bold cyan]🎓 RAG Learning System - Справка[/bold cyan]")
|
console.print("\n[bold cyan]🎓 RAG Learning System - Справка[/bold cyan]")
|
||||||
console.print("=" * 60, style="dim")
|
console.print("=" * 60, style="dim")
|
||||||
|
|
||||||
console.print("\n[bold green]Использование:[/bold green]")
|
console.print("\n[bold green]Использование:[/bold green]")
|
||||||
console.print("Просто напишите, что хотите изучать:")
|
console.print("Просто напишите, что хотите изучать:")
|
||||||
console.print(" • 'изучаем английский'")
|
console.print(" • 'изучаем английский'")
|
||||||
console.print(" • 'учим математику'")
|
console.print(" • 'учим математику и программирование'")
|
||||||
console.print(" • 'погнали по сетям'")
|
console.print(" • 'давай по сетям'")
|
||||||
console.print(" • 'давай python'\n")
|
console.print(" • 'пора изучать кибербезопасность'\n")
|
||||||
|
|
||||||
console.print("[bold green]Доступные предметы:[/bold green]")
|
console.print("[bold green]Доступные предметы:[/bold green]")
|
||||||
for subject, keywords in SUBJECT_KEYWORDS.items():
|
for key, name in SUBJECTS.items():
|
||||||
console.print(f" • {subject}: {', '.join(keywords[:3])}...")
|
console.print(f" • {name}")
|
||||||
|
|
||||||
console.print("\n[bold green]Команды:[/bold green]")
|
console.print("\n[bold green]Команды:[/bold green]")
|
||||||
console.print(" • /help или помощь - показать эту справку")
|
console.print(" • /help или помощь - показать эту справку")
|
||||||
@@ -663,7 +564,3 @@ if __name__ == "__main__":
|
|||||||
asyncio.run(main())
|
asyncio.run(main())
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
console.print("\n👋 До свидания! Удачи в обучении!", style="yellow")
|
||||||
sys.exit(0)
|
|
||||||
except Exception as e:
|
|
||||||
console.print(f"[red]✗ Unexpected error: {e}[/red]")
|
|
||||||
sys.exit(1)
|
|
||||||
|
|||||||
127
monitor.py
Normal file
127
monitor.py
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Real-time GPU and RAM monitoring utility
|
||||||
|
"""
|
||||||
|
|
||||||
|
import psutil
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.live import Live
|
||||||
|
from rich.table import Table
|
||||||
|
|
||||||
|
try:
|
||||||
|
import pynvml
|
||||||
|
NVIDIA_AVAILABLE = True
|
||||||
|
except ImportError:
|
||||||
|
NVIDIA_AVAILABLE = False
|
||||||
|
|
||||||
|
console = Console()
|
||||||
|
|
||||||
|
class SystemMonitor:
|
||||||
|
def __init__(self):
|
||||||
|
self.running = False
|
||||||
|
self.gpu_available = False
|
||||||
|
|
||||||
|
if NVIDIA_AVAILABLE:
|
||||||
|
try:
|
||||||
|
pynvml.nvmlInit()
|
||||||
|
self.gpu_count = pynvml.nvmlDeviceGetCount()
|
||||||
|
self.gpu_available = self.gpu_count > 0
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_ram_usage(self):
|
||||||
|
memory = psutil.virtual_memory()
|
||||||
|
return {
|
||||||
|
"used_gb": memory.used / 1024**3,
|
||||||
|
"total_gb": memory.total / 1024**3,
|
||||||
|
"percent": memory.percent
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_gpu_usage(self):
|
||||||
|
if not self.gpu_available:
|
||||||
|
return None
|
||||||
|
|
||||||
|
gpu_info = []
|
||||||
|
try:
|
||||||
|
for i in range(self.gpu_count):
|
||||||
|
handle = pynvml.nvmlDeviceGetHandleByIndex(i)
|
||||||
|
info = pynvml.nvmlDeviceGetMemoryInfo(handle)
|
||||||
|
|
||||||
|
gpu_info.append({
|
||||||
|
"id": i,
|
||||||
|
"used_gb": info.used / 1024**3,
|
||||||
|
"total_gb": info.total / 1024**3,
|
||||||
|
"percent": (info.used / info.total) * 100
|
||||||
|
})
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return gpu_info
|
||||||
|
|
||||||
|
def create_table(self):
|
||||||
|
table = Table(title="System Resources")
|
||||||
|
table.add_column("Resource", style="cyan")
|
||||||
|
table.add_column("Used", style="yellow")
|
||||||
|
table.add_column("Total", style="green")
|
||||||
|
table.add_column("Usage %", style="red")
|
||||||
|
|
||||||
|
# RAM
|
||||||
|
ram = self.get_ram_usage()
|
||||||
|
table.add_row(
|
||||||
|
"RAM",
|
||||||
|
f"{ram['used_gb']:.2f} GB",
|
||||||
|
f"{ram['total_gb']:.2f} GB",
|
||||||
|
f"{ram['percent']:.1f}%"
|
||||||
|
)
|
||||||
|
|
||||||
|
# GPU
|
||||||
|
if self.gpu_available:
|
||||||
|
gpus = self.get_gpu_usage()
|
||||||
|
if gpus:
|
||||||
|
for gpu in gpus:
|
||||||
|
table.add_row(
|
||||||
|
f"GPU {gpu['id']}",
|
||||||
|
f"{gpu['used_gb']:.2f} GB",
|
||||||
|
f"{gpu['total_gb']:.2f} GB",
|
||||||
|
f"{gpu['percent']:.1f}%"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
table.add_row("GPU", "N/A", "N/A", "N/A")
|
||||||
|
|
||||||
|
return table
|
||||||
|
|
||||||
|
def start_monitoring(self):
|
||||||
|
self.running = True
|
||||||
|
|
||||||
|
with Live(self.create_table(), refresh_per_second=1, console=console) as live:
|
||||||
|
while self.running:
|
||||||
|
live.update(self.create_table())
|
||||||
|
time.sleep(1)
|
||||||
|
|
||||||
|
def stop_monitoring(self):
|
||||||
|
self.running = False
|
||||||
|
if self.gpu_available:
|
||||||
|
try:
|
||||||
|
pynvml.nvmlShutdown()
|
||||||
|
except:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def monitor_resources():
|
||||||
|
"""Start monitoring in a separate thread"""
|
||||||
|
monitor = SystemMonitor()
|
||||||
|
thread = threading.Thread(target=monitor.start_monitoring, daemon=True)
|
||||||
|
thread.start()
|
||||||
|
return monitor
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
console.print("[bold cyan]System Resource Monitor[/bold cyan]")
|
||||||
|
console.print("Press Ctrl+C to stop\n")
|
||||||
|
|
||||||
|
monitor = SystemMonitor()
|
||||||
|
try:
|
||||||
|
monitor.start_monitoring()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
console.print("\n[yellow]Monitoring stopped.[/yellow]")
|
||||||
|
monitor.stop_monitoring()
|
||||||
@@ -6,17 +6,15 @@ readme = "README.md"
|
|||||||
requires-python = ">=3.13"
|
requires-python = ">=3.13"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"chromadb>=1.4.0",
|
"chromadb>=1.4.0",
|
||||||
"langchain>=1.2.0",
|
|
||||||
"langchain-chroma>=1.1.0",
|
"langchain-chroma>=1.1.0",
|
||||||
"langchain-community>=0.4.1",
|
"langchain-community>=0.4.1",
|
||||||
"langchain-ollama>=1.0.1",
|
"langchain-ollama>=1.0.1",
|
||||||
"langchain-text-splitters>=1.1.0",
|
"langchain-text-splitters>=1.1.0",
|
||||||
"nest-asyncio>=1.6.0",
|
|
||||||
"nvidia-ml-py>=13.590.44",
|
|
||||||
"prompt-toolkit>=3.0.52",
|
"prompt-toolkit>=3.0.52",
|
||||||
"python-dotenv>=1.2.1",
|
"python-dotenv>=1.2.1",
|
||||||
"pyyaml>=6.0.3",
|
|
||||||
"rich>=14.2.0",
|
"rich>=14.2.0",
|
||||||
"unstructured[md]>=0.18.21",
|
"unstructured[md]>=0.18.21",
|
||||||
"watchdog>=6.0.0",
|
"psutil>=7.2.1",
|
||||||
|
"nvidia-ml-py>=13.590.44",
|
||||||
|
"nest-asyncio>=1.6.0",
|
||||||
]
|
]
|
||||||
|
|||||||
196
uv.lock
generated
196
uv.lock
generated
@@ -952,20 +952,6 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/ca/ec/65f7d563aa4a62dd58777e8f6aa882f15db53b14eb29aba0c28a20f7eb26/kubernetes-34.1.0-py2.py3-none-any.whl", hash = "sha256:bffba2272534e224e6a7a74d582deb0b545b7c9879d2cd9e4aae9481d1f2cc2a", size = 2008380, upload-time = "2025-09-29T20:23:47.684Z" },
|
{ url = "https://files.pythonhosted.org/packages/ca/ec/65f7d563aa4a62dd58777e8f6aa882f15db53b14eb29aba0c28a20f7eb26/kubernetes-34.1.0-py2.py3-none-any.whl", hash = "sha256:bffba2272534e224e6a7a74d582deb0b545b7c9879d2cd9e4aae9481d1f2cc2a", size = 2008380, upload-time = "2025-09-29T20:23:47.684Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langchain"
|
|
||||||
version = "1.2.0"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "langchain-core" },
|
|
||||||
{ name = "langgraph" },
|
|
||||||
{ name = "pydantic" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/b1/12/3a74c22abdfddd877dfc2ee666d516f9132877fcd25eb4dd694835c59c79/langchain-1.2.0.tar.gz", hash = "sha256:a087d1e2b2969819e29a91a6d5f98302aafe31bd49ba377ecee3bf5a5dcfe14a", size = 536126, upload-time = "2025-12-15T14:51:42.24Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/00/4e3fa0d90f5a5c376ccb8ca983d0f0f7287783dfac48702e18f01d24673b/langchain-1.2.0-py3-none-any.whl", hash = "sha256:82f0d17aa4fbb11560b30e1e7d4aeb75e3ad71ce09b85c90ab208b181a24ffac", size = 102828, upload-time = "2025-12-15T14:51:40.802Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langchain-chroma"
|
name = "langchain-chroma"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
@@ -1074,62 +1060,6 @@ dependencies = [
|
|||||||
]
|
]
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474, upload-time = "2021-05-07T07:54:13.562Z" }
|
sdist = { url = "https://files.pythonhosted.org/packages/0e/72/a3add0e4eec4eb9e2569554f7c70f4a3c27712f40e3284d483e88094cc0e/langdetect-1.0.9.tar.gz", hash = "sha256:cbc1fef89f8d062739774bd51eda3da3274006b3661d199c2655f6b3f6d605a0", size = 981474, upload-time = "2021-05-07T07:54:13.562Z" }
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langgraph"
|
|
||||||
version = "1.0.5"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "langchain-core" },
|
|
||||||
{ name = "langgraph-checkpoint" },
|
|
||||||
{ name = "langgraph-prebuilt" },
|
|
||||||
{ name = "langgraph-sdk" },
|
|
||||||
{ name = "pydantic" },
|
|
||||||
{ name = "xxhash" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/7d/47/28f4d4d33d88f69de26f7a54065961ac0c662cec2479b36a2db081ef5cb6/langgraph-1.0.5.tar.gz", hash = "sha256:7f6ae59622386b60fe9fa0ad4c53f42016b668455ed604329e7dc7904adbf3f8", size = 493969, upload-time = "2025-12-12T23:05:48.224Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/23/1b/e318ee76e42d28f515d87356ac5bd7a7acc8bad3b8f54ee377bef62e1cbf/langgraph-1.0.5-py3-none-any.whl", hash = "sha256:b4cfd173dca3c389735b47228ad8b295e6f7b3df779aba3a1e0c23871f81281e", size = 157056, upload-time = "2025-12-12T23:05:46.499Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langgraph-checkpoint"
|
|
||||||
version = "3.0.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "langchain-core" },
|
|
||||||
{ name = "ormsgpack" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/0f/07/2b1c042fa87d40cf2db5ca27dc4e8dd86f9a0436a10aa4361a8982718ae7/langgraph_checkpoint-3.0.1.tar.gz", hash = "sha256:59222f875f85186a22c494aedc65c4e985a3df27e696e5016ba0b98a5ed2cee0", size = 137785, upload-time = "2025-11-04T21:55:47.774Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/48/e3/616e3a7ff737d98c1bbb5700dd62278914e2a9ded09a79a1fa93cf24ce12/langgraph_checkpoint-3.0.1-py3-none-any.whl", hash = "sha256:9b04a8d0edc0474ce4eaf30c5d731cee38f11ddff50a6177eead95b5c4e4220b", size = 46249, upload-time = "2025-11-04T21:55:46.472Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langgraph-prebuilt"
|
|
||||||
version = "1.0.5"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "langchain-core" },
|
|
||||||
{ name = "langgraph-checkpoint" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/46/f9/54f8891b32159e4542236817aea2ee83de0de18bce28e9bdba08c7f93001/langgraph_prebuilt-1.0.5.tar.gz", hash = "sha256:85802675ad778cc7240fd02d47db1e0b59c0c86d8369447d77ce47623845db2d", size = 144453, upload-time = "2025-11-20T16:47:39.23Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/87/5e/aeba4a5b39fe6e874e0dd003a82da71c7153e671312671a8dacc5cb7c1af/langgraph_prebuilt-1.0.5-py3-none-any.whl", hash = "sha256:22369563e1848862ace53fbc11b027c28dd04a9ac39314633bb95f2a7e258496", size = 35072, upload-time = "2025-11-20T16:47:38.187Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "langgraph-sdk"
|
|
||||||
version = "0.3.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
dependencies = [
|
|
||||||
{ name = "httpx" },
|
|
||||||
{ name = "orjson" },
|
|
||||||
]
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/a9/d3/b6be0b0aba2a53a8920a2b0b4328a83121ec03eea9952e576d06a4182f6f/langgraph_sdk-0.3.1.tar.gz", hash = "sha256:f6dadfd2444eeff3e01405a9005c95fb3a028d4bd954ebec80ea6150084f92bb", size = 130312, upload-time = "2025-12-18T22:11:47.42Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ab/fe/0c1c9c01a154eba62b20b02fabe811fd94a2b810061ae9e4d8462b8cf85a/langgraph_sdk-0.3.1-py3-none-any.whl", hash = "sha256:0b856923bfd20bf3441ce9d03bef488aa333fb610e972618799a9d584436acad", size = 66517, upload-time = "2025-12-18T22:11:46.625Z" },
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "langsmith"
|
name = "langsmith"
|
||||||
version = "0.5.1"
|
version = "0.5.1"
|
||||||
@@ -1672,35 +1602,6 @@ wheels = [
|
|||||||
{ url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" },
|
{ url = "https://files.pythonhosted.org/packages/8f/dd/f4fff4a6fe601b4f8f3ba3aa6da8ac33d17d124491a3b804c662a70e1636/orjson-3.11.5-cp314-cp314-win_arm64.whl", hash = "sha256:38b22f476c351f9a1c43e5b07d8b5a02eb24a6ab8e75f700f7d479d4568346a5", size = 126713, upload-time = "2025-12-06T15:55:19.738Z" },
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "ormsgpack"
|
|
||||||
version = "1.12.1"
|
|
||||||
source = { registry = "https://pypi.org/simple" }
|
|
||||||
sdist = { url = "https://files.pythonhosted.org/packages/fe/96/34c40d621996c2f377a18decbd3c59f031dde73c3ba47d1e1e8f29a05aaa/ormsgpack-1.12.1.tar.gz", hash = "sha256:a3877fde1e4f27a39f92681a0aab6385af3a41d0c25375d33590ae20410ea2ac", size = 39476, upload-time = "2025-12-14T07:57:43.248Z" }
|
|
||||||
wheels = [
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/11/42/f110dfe7cf23a52a82e23eb23d9a6a76ae495447d474686dfa758f3d71d6/ormsgpack-1.12.1-cp313-cp313-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:9663d6b3ecc917c063d61a99169ce196a80f3852e541ae404206836749459279", size = 376746, upload-time = "2025-12-14T07:57:17.699Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/11/76/b386e508a8ae207daec240201a81adb26467bf99b163560724e86bd9ff33/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32e85cfbaf01a94a92520e7fe7851cfcfe21a5698299c28ab86194895f9b9233", size = 202489, upload-time = "2025-12-14T07:57:18.807Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/ea/0e/5db7a63f387149024572daa3d9512fe8fb14bf4efa0722d6d491bed280e7/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dabfd2c24b59c7c69870a5ecee480dfae914a42a0c2e7c9d971cf531e2ba471a", size = 210757, upload-time = "2025-12-14T07:57:19.893Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/64/79/3a9899e57cb57430bd766fc1b4c9ad410cb2ba6070bc8cf6301e7d385768/ormsgpack-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51bbf2b64afeded34ccd8e25402e4bca038757913931fa0d693078d75563f6f9", size = 211518, upload-time = "2025-12-14T07:57:20.972Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/d7/cd/4f41710ae9fe50d7fcbe476793b3c487746d0e1cc194cc0fee42ff6d989b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9959a71dde1bd0ced84af17facc06a8afada495a34e9cb1bad8e9b20d4c59cef", size = 386251, upload-time = "2025-12-14T07:57:22.099Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/bf/54/ba0c97d6231b1f01daafaa520c8cce1e1b7fceaae6fdc1c763925874a7de/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:e9be0e3b62d758f21f5b20e0e06b3a240ec546c4a327bf771f5825462aa74714", size = 479607, upload-time = "2025-12-14T07:57:23.525Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/18/75/19a9a97a462776d525baf41cfb7072734528775f0a3d5fbfab3aa7756b9b/ormsgpack-1.12.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a29d49ab7fdd77ea787818e60cb4ef491708105b9c4c9b0f919201625eb036b5", size = 388062, upload-time = "2025-12-14T07:57:24.616Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/a8/6a/ec26e3f44e9632ecd2f43638b7b37b500eaea5d79cab984ad0b94be14f82/ormsgpack-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:c418390b47a1d367e803f6c187f77e4d67c7ae07ba962e3a4a019001f4b0291a", size = 116195, upload-time = "2025-12-14T07:57:25.626Z" },
|
|
||||||
{ url = "https://files.pythonhosted.org/packages/7d/64/bfa5f4a34d0f15c6aba1b73e73f7441a66d635bd03249d334a4796b7a924/ormsgpack-1.12.1-cp313-cp313-win_arm64.whl", hash = "sha256:cfa22c91cffc10a7fbd43729baff2de7d9c28cef2509085a704168ae31f02568", size = 109986, upload-time = "2025-12-14T07:57:26.569Z" },
{ url = "https://files.pythonhosted.org/packages/87/0e/78e5697164e3223b9b216c13e99f1acbc1ee9833490d68842b13da8ba883/ormsgpack-1.12.1-cp314-cp314-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:b93c91efb1a70751a1902a5b43b27bd8fd38e0ca0365cf2cde2716423c15c3a6", size = 376758, upload-time = "2025-12-14T07:57:27.641Z" },
{ url = "https://files.pythonhosted.org/packages/2c/0e/3a3cbb64703263d7bbaed7effa3ce78cb9add360a60aa7c544d7df28b641/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf0ea0389167b5fa8d2933dd3f33e887ec4ba68f89c25214d7eec4afd746d22", size = 202487, upload-time = "2025-12-14T07:57:29.051Z" },
{ url = "https://files.pythonhosted.org/packages/d7/2c/807ebe2b77995599bbb1dec8c3f450d5d7dddee14ce3e1e71dc60e2e2a74/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f4c29af837f35af3375070689e781161e7cf019eb2f7cd641734ae45cd001c0d", size = 210853, upload-time = "2025-12-14T07:57:30.508Z" },
{ url = "https://files.pythonhosted.org/packages/25/57/2cdfc354e3ad8e847628f511f4d238799d90e9e090941e50b9d5ba955ae2/ormsgpack-1.12.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336fc65aa0fe65896a3dabaae31e332a0a98b4a00ad7b0afde21a7505fd23ff3", size = 211545, upload-time = "2025-12-14T07:57:31.585Z" },
{ url = "https://files.pythonhosted.org/packages/76/1d/c6fda560e4a8ff865b3aec8a86f7c95ab53f4532193a6ae4ab9db35f85aa/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:940f60aabfefe71dd6b82cb33f4ff10b2e7f5fcfa5f103cdb0a23b6aae4c713c", size = 386333, upload-time = "2025-12-14T07:57:32.957Z" },
{ url = "https://files.pythonhosted.org/packages/fc/3e/715081b36fceb8b497c68b87d384e1cc6d9c9c130ce3b435634d3d785b86/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:596ad9e1b6d4c95595c54aaf49b1392609ca68f562ce06f4f74a5bc4053bcda4", size = 479701, upload-time = "2025-12-14T07:57:34.686Z" },
{ url = "https://files.pythonhosted.org/packages/6d/cf/01ad04def42b3970fc1a302c07f4b46339edf62ef9650247097260471f40/ormsgpack-1.12.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:575210e8fcbc7b0375026ba040a5eef223e9f66a4453d9623fc23282ae09c3c8", size = 388148, upload-time = "2025-12-14T07:57:35.771Z" },
{ url = "https://files.pythonhosted.org/packages/15/91/1fff2fc2b5943c740028f339154e7103c8f2edf1a881d9fbba2ce11c3b1d/ormsgpack-1.12.1-cp314-cp314-win_amd64.whl", hash = "sha256:647daa3718572280893456be44c60aea6690b7f2edc54c55648ee66e8f06550f", size = 116201, upload-time = "2025-12-14T07:57:36.763Z" },
{ url = "https://files.pythonhosted.org/packages/ed/66/142b542aed3f96002c7d1c33507ca6e1e0d0a42b9253ab27ef7ed5793bd9/ormsgpack-1.12.1-cp314-cp314-win_arm64.whl", hash = "sha256:a8b3ab762a6deaf1b6490ab46dda0c51528cf8037e0246c40875c6fe9e37b699", size = 110029, upload-time = "2025-12-14T07:57:37.703Z" },
{ url = "https://files.pythonhosted.org/packages/38/b3/ef4494438c90359e1547eaed3c5ec46e2c431d59a3de2af4e70ebd594c49/ormsgpack-1.12.1-cp314-cp314t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:12087214e436c1f6c28491949571abea759a63111908c4f7266586d78144d7a8", size = 376777, upload-time = "2025-12-14T07:57:38.795Z" },
{ url = "https://files.pythonhosted.org/packages/05/a0/1149a7163f8b0dfbc64bf9099b6f16d102ad3b03bcc11afee198d751da2d/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6d54c14cf86ef13f10ccade94d1e7de146aa9b17d371e18b16e95f329393b7", size = 202490, upload-time = "2025-12-14T07:57:40.168Z" },
{ url = "https://files.pythonhosted.org/packages/68/82/f2ec5e758d6a7106645cca9bb7137d98bce5d363789fa94075be6572057c/ormsgpack-1.12.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f3584d07882b7ea2a1a589f795a3af97fe4c2932b739408e6d1d9d286cad862", size = 211733, upload-time = "2025-12-14T07:57:42.253Z" },
]
[[package]]
name = "overrides"
version = "7.7.0"
@@ -2202,7 +2103,6 @@ version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "chromadb" },
{ name = "langchain" },
{ name = "langchain-chroma" },
{ name = "langchain-community" },
{ name = "langchain-ollama" },
@@ -2210,17 +2110,15 @@ dependencies = [
{ name = "nest-asyncio" },
{ name = "nvidia-ml-py" },
{ name = "prompt-toolkit" },
{ name = "psutil" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
{ name = "rich" },
{ name = "unstructured", extra = ["md"] },
{ name = "watchdog" },
]
[package.metadata]
requires-dist = [
{ name = "chromadb", specifier = ">=1.4.0" },
{ name = "langchain", specifier = ">=1.2.0" },
{ name = "langchain-chroma", specifier = ">=1.1.0" },
{ name = "langchain-community", specifier = ">=0.4.1" },
{ name = "langchain-ollama", specifier = ">=1.0.1" },
@@ -2228,11 +2126,10 @@ requires-dist = [
{ name = "nest-asyncio", specifier = ">=1.6.0" },
{ name = "nvidia-ml-py", specifier = ">=13.590.44" },
{ name = "prompt-toolkit", specifier = ">=3.0.52" },
{ name = "psutil", specifier = ">=7.2.1" },
{ name = "python-dotenv", specifier = ">=1.2.1" },
{ name = "pyyaml", specifier = ">=6.0.3" },
{ name = "rich", specifier = ">=14.2.0" },
{ name = "unstructured", extras = ["md"], specifier = ">=0.18.21" },
{ name = "watchdog", specifier = ">=6.0.0" },
]
[[package]]
@@ -2794,27 +2691,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/16/c1fd27e9549f3c4baf1dc9c20c456cd2f822dbf8de9f463824b0c0357e06/uvloop-0.22.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6cde23eeda1a25c75b2e07d39970f3374105d5eafbaab2a4482be82f272d5a5e", size = 4296730, upload-time = "2025-10-16T22:17:00.744Z" },
]
[[package]]
name = "watchdog"
version = "6.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" },
{ url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" },
{ url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" },
{ url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" },
{ url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" },
{ url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" },
{ url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" },
{ url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" },
{ url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" },
{ url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" },
{ url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" },
{ url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" },
{ url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" },
]
[[package]]
name = "watchfiles"
version = "1.1.1"
@@ -2976,74 +2852,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/15/d1/b51471c11592ff9c012bd3e2f7334a6ff2f42a7aed2caffcf0bdddc9cb89/wrapt-2.0.1-py3-none-any.whl", hash = "sha256:4d2ce1bf1a48c5277d7969259232b57645aae5686dba1eaeade39442277afbca", size = 44046, upload-time = "2025-11-07T00:45:32.116Z" },
]
[[package]]
name = "xxhash"
version = "3.6.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" },
{ url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" },
{ url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" },
{ url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" },
{ url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" },
{ url = "https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" },
{ url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" },
{ url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" },
{ url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" },
{ url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" },
{ url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" },
{ url = "https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" },
{ url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" },
{ url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" },
{ url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" },
{ url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" },
{ url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" },
{ url = "https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" },
{ url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" },
{ url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" },
{ url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" },
{ url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" },
{ url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" },
{ url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" },
{ url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" },
{ url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" },
{ url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" },
{ url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" },
{ url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" },
{ url = "https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" },
{ url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" },
{ url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" },
{ url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" },
{ url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" },
{ url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" },
{ url = "https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" },
{ url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" },
{ url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" },
{ url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" },
{ url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" },
{ url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" },
{ url = "https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" },
{ url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" },
{ url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" },
{ url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" },
{ url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" },
{ url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" },
{ url = "https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" },
{ url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" },
{ url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" },
{ url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" },
{ url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" },
{ url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" },
{ url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" },
{ url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" },
{ url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" },
{ url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" },
{ url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" },
{ url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" },
{ url = "https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" },
]
[[package]]
name = "yarl"
version = "1.22.0"