mirror of https://github.com/paperless-ngx/paperless-ngx.git
synced 2025-11-03 19:17:13 -05:00

Individual doc chat
[ci skip]

parent d1bd2af49c
commit ccfc7d98b1
@@ -174,6 +174,7 @@ from documents.utils import get_boolean
 from paperless import version
 from paperless.ai.ai_classifier import get_ai_document_classification
 from paperless.ai.chat import chat_with_documents
+from paperless.ai.chat import chat_with_single_document
 from paperless.ai.matching import extract_unmatched_names
 from paperless.ai.matching import match_correspondents_by_name
 from paperless.ai.matching import match_document_types_by_name
@@ -1175,7 +1176,15 @@ class DocumentViewSet(
             return HttpResponseBadRequest("AI is required for this feature")

         question = request.data["q"]
-        result = chat_with_documents(question, request.user)
+        doc_id = request.data.get("document_id", None)
+        if doc_id:
+            document = Document.objects.get(id=doc_id)
+            if not has_perms_owner_aware(request.user, "view_document", document):
+                return HttpResponseForbidden("Insufficient permissions")
+
+            result = chat_with_single_document(document, question, request.user)
+        else:
+            result = chat_with_documents(question, request.user)

         return Response({"answer": result})

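With this change the chat view answers against a single document when a "document_id" is supplied and falls back to chatting across all documents otherwise. A minimal client-side sketch of the two request shapes, assuming the action is exposed at /api/documents/chat/ with token authentication (the route and token below are placeholders, not part of this diff):

import requests

# Hypothetical endpoint URL and API token; adjust to your deployment.
url = "http://localhost:8000/api/documents/chat/"
headers = {"Authorization": "Token <your-api-token>"}

# Chat across all accessible documents (previous behaviour).
resp = requests.post(url, json={"q": "Summarize my latest invoices"}, headers=headers)
print(resp.json()["answer"])

# Chat scoped to one document; the view checks view_document permission first.
resp = requests.post(
    url,
    json={"q": "What is the due date?", "document_id": 42},
    headers=headers,
)
print(resp.json()["answer"])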
@@ -1,10 +1,12 @@
 import logging

 from django.contrib.auth.models import User
+from llama_index.core import VectorStoreIndex
+from llama_index.core.query_engine import RetrieverQueryEngine

 from paperless.ai.client import AIClient
 from paperless.ai.indexing import get_document_retriever
 from paperless.ai.indexing import load_index

 logger = logging.getLogger("paperless.ai.chat")

@@ -22,3 +24,29 @@ def chat_with_documents(prompt: str, user: User) -> str:
     response = query_engine.query(prompt)
     logger.debug("Document chat response: %s", response)
     return str(response)
+
+
+def chat_with_single_document(document, question: str, user):
+    index = load_index()
+
+    # Filter only the node(s) belonging to this doc
+    nodes = [
+        node
+        for node in index.docstore.docs.values()
+        if node.metadata.get("document_id") == str(document.id)
+    ]
+
+    if not nodes:
+        raise Exception("This document is not indexed yet.")
+
+    local_index = VectorStoreIndex.from_documents(nodes)
+
+    client = AIClient()
+
+    engine = RetrieverQueryEngine.from_args(
+        retriever=local_index.as_retriever(similarity_top_k=3),
+        llm=client.llm,
+    )
+
+    response = engine.query(question)
+    return str(response)
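The new helper pulls the document's nodes out of the shared index, builds a temporary per-document index, and queries it with the configured LLM. A minimal sketch of exercising it directly (for example from python manage.py shell), assuming the document has already been indexed with a document_id metadata entry; the username, document id, and question are placeholders:

from django.contrib.auth.models import User

from documents.models import Document
from paperless.ai.chat import chat_with_single_document

user = User.objects.get(username="admin")  # placeholder user
document = Document.objects.get(id=42)     # placeholder document id

# Raises if the document has no nodes in the AI index yet.
answer = chat_with_single_document(document, "What is the total amount?", user)
print(answer)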