<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
<pkgmetadata>
  <maintainer type="person">
    <email>iohann.s.titov@gmail.com</email>
    <name>Ivan S. Titov</name>
  </maintainer>
  <longdescription lang="en">
    GAIA is AMD's open-source framework for local AI agents on
    Ryzen AI hardware (NPU + iGPU). It orchestrates LLM-driven workflows
    over any OpenAI-compatible inference endpoint, with built-in
    integrations for Docker, Jira, code-search, RAG, MCP servers, and
    Whisper / Kokoro voice pipelines. The reference local backend is
    Lemonade Server (sci-ml/lemonade); GAIA itself is hardware-agnostic
    so long as the upstream LLM API is OpenAI-compatible.
  </longdescription>
  <use>
    <flag name="api">Install FastAPI / uvicorn HTTP-server bindings (gaia.api)</flag>
    <flag name="audio">Pull sci-ml/pytorch for the gaia.audio module (Whisper ASR / Kokoro TTS prereq; upstream's audio extra also lists torchvision/torchaudio but gaia code never imports them)</flag>
    <flag name="eval">Install evaluation harness deps (anthropic, scikit-learn, reportlab) for gaia.eval benchmarks</flag>
    <flag name="image">Pull dev-python/term-image for terminal-rendered image output (gaia.image)</flag>
    <flag name="mcp">Install Model Context Protocol client/server support (gaia.mcp)</flag>
    <flag name="talk">Pull openai-whisper (ASR) + sounddevice (audio I/O) for gaia.talk's voice pipeline; the Kokoro TTS half is unavailable until the Kokoro dependency chain is packaged in the tree, and is not pulled in by this flag</flag>
    <flag name="ui">Build the gaia.ui web frontend (FastAPI + faiss-backed RAG over PDFs via PyMuPDF + sentence-transformers); requires +api</flag>
  </use>
  <upstream>
    <remote-id type="github">amd/gaia</remote-id>
    <remote-id type="pypi">amd-gaia</remote-id>
  </upstream>
</pkgmetadata>
