# paperless-ngx/src/paperless/ai/chat.py
import logging

from llama_index.core import VectorStoreIndex
from llama_index.core.query_engine import RetrieverQueryEngine

from documents.models import Document
from paperless.ai.client import AIClient
from paperless.ai.indexing import load_index

logger = logging.getLogger("paperless.ai.chat")
def chat_with_documents(prompt: str, documents: list[Document]) -> str:
    """Answer *prompt* using only the indexed content of *documents*.

    Loads the global vector index, keeps only the nodes belonging to the
    given documents, builds a temporary index over them, and runs a
    retrieval-augmented query against the configured LLM.

    Args:
        prompt: The user's natural-language question.
        documents: Documents whose indexed content may be used as context.

    Returns:
        The LLM's answer as a string, or an apology message when none of
        the given documents have any indexed content.
    """
    client = AIClient()
    index = load_index()

    # Use a set: membership is tested once per node in the whole docstore,
    # so a list would make the scan O(nodes * documents).
    doc_ids = {doc.pk for doc in documents}

    # Keep only the node(s) that belong to the requested documents.
    nodes = [
        node
        for node in index.docstore.docs.values()
        if node.metadata.get("document_id") in doc_ids
    ]

    if not nodes:
        logger.warning("No nodes found for the given documents.")
        return "Sorry, I couldn't find any content to answer your question."

    # Build the scoped index directly from the already-parsed nodes.
    # NOTE(review): the previous `VectorStoreIndex.from_documents(nodes)`
    # passed docstore nodes where the API expects Document objects; the
    # node-based constructor is the documented way to index existing nodes.
    local_index = VectorStoreIndex(nodes=nodes)
    retriever = local_index.as_retriever(
        # A single document needs less retrieved context than several.
        similarity_top_k=3 if len(documents) == 1 else 5,
    )

    query_engine = RetrieverQueryEngine.from_args(
        retriever=retriever,
        llm=client.llm,
    )

    logger.debug("Document chat prompt: %s", prompt)
    response = query_engine.query(prompt)
    logger.debug("Document chat response: %s", response)

    return str(response)