# --- CORE AI & DEEP LEARNING ---
torch>=2.2.0                    # The engine for DeBERTa and ByT5
transformers>=4.37.0            # Hugging Face library for NLP models
sentence-transformers>=2.3.0    # For the MiniLM embedding layer
accelerate>=0.27.0              # Optimizes model loading (CPU/GPU)

# --- CLASSICAL MACHINE LEARNING ---
xgboost>=2.0.3                  # For the Fast Ensemble (Models A & B)
scikit-learn>=1.4.0             # For the Random Forest & Meta-Learner
numpy>=1.26.0                   # Math operations

# --- SECURITY & DECRYPTION ---
pycryptodome>=3.20.0            # For AES & RSA decryption (Crypto.Cipher)
base58>=2.1.1                   # For decoding Base58 (Bitcoin-style)
text-unidecode>=1.3             # For normalizing leetspeak/Unicode characters to ASCII
wordsegment>=1.3.1              # For re-joining spaced-out text (i g n o r e)

# --- API & SERVER ---
fastapi>=0.109.0                # The web framework
uvicorn[standard]>=0.27.0       # The ASGI server implementation
pydantic>=2.6.0                 # Data validation
python-multipart>=0.0.9         # For handling file uploads (if needed later)
jinja2>=3.1.3                   # For rendering the HTML templates

# --- OPTIONAL / MEMORY & LANGUAGE ---
chromadb>=0.4.22                # Vector database for Long-Term Memory
langdetect                      # Language detection for non-English inputs
deep-translator                 # Translation of non-English inputs