import logging
import os
import shutil
from typing import Optional

from celery import states
from celery.signals import before_task_publish
from celery.signals import task_failure
from celery.signals import task_postrun
from celery.signals import task_prerun
from django.conf import settings
from django.contrib.admin.models import ADDITION
from django.contrib.admin.models import LogEntry
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.db import DatabaseError
from django.db import close_old_connections
from django.db import models
from django.db.models import Q
from django.dispatch import receiver
from django.utils import termcolors
from django.utils import timezone
from filelock import FileLock

from documents import matching
from documents.classifier import DocumentClassifier
from documents.file_handling import create_source_path_directory
from documents.file_handling import delete_empty_directories
from documents.file_handling import generate_unique_filename
from documents.models import Document
from documents.models import MatchingModel
from documents.models import PaperlessTask
from documents.models import Tag
from documents.permissions import get_objects_for_user_owner_aware

logger = logging.getLogger("paperless.handlers")


def add_inbox_tags(sender, document: Document, logging_group=None, **kwargs):
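    """
    Adds all inbox tags to a newly consumed document. If the document has an
    owner, only inbox tags visible to that owner are considered.
    """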
    if document.owner is not None:
        tags = get_objects_for_user_owner_aware(
            document.owner,
            "documents.view_tag",
            Tag,
        )
    else:
        tags = Tag.objects.all()
    inbox_tags = tags.filter(is_inbox_tag=True)
    document.tags.add(*inbox_tags)


def set_correspondent(
    sender,
    document: Document,
    logging_group=None,
    classifier: Optional[DocumentClassifier] = None,
    replace=False,
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    **kwargs,
):
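    """
    Matches the document against all correspondents and assigns the best
    candidate. With use_first, the first of several matches wins; with
    suggest, the choice is only printed; replace overwrites an existing value.
    """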
    if document.correspondent and not replace:
        return

    potential_correspondents = matching.match_correspondents(document, classifier)

    potential_count = len(potential_correspondents)
    selected = potential_correspondents[0] if potential_correspondents else None
    if potential_count > 1:
        if use_first:
            logger.debug(
                f"Detected {potential_count} potential correspondents, "
                f"so we've opted for {selected}",
                extra={"group": logging_group},
            )
        else:
            logger.debug(
                f"Detected {potential_count} potential correspondents, "
                "not assigning any correspondent",
                extra={"group": logging_group},
            )
            return

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest correspondent {selected}")
        else:
            logger.info(
                f"Assigning correspondent {selected} to {document}",
                extra={"group": logging_group},
            )

            document.correspondent = selected
            document.save(update_fields=("correspondent",))


def set_document_type(
    sender,
    document: Document,
    logging_group=None,
    classifier: Optional[DocumentClassifier] = None,
    replace=False,
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    **kwargs,
):
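    """
    Matches the document against all document types and assigns the best
    candidate, honouring the same replace/use_first/suggest flags as
    set_correspondent.
    """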
    if document.document_type and not replace:
        return

    potential_document_type = matching.match_document_types(document, classifier)

    potential_count = len(potential_document_type)
    selected = potential_document_type[0] if potential_document_type else None

    if potential_count > 1:
        if use_first:
            logger.info(
                f"Detected {potential_count} potential document types, "
                f"so we've opted for {selected}",
                extra={"group": logging_group},
            )
        else:
            logger.info(
                f"Detected {potential_count} potential document types, "
                "not assigning any document type",
                extra={"group": logging_group},
            )
            return

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest document type {selected}")
        else:
            logger.info(
                f"Assigning document type {selected} to {document}",
                extra={"group": logging_group},
            )

            document.document_type = selected
            document.save(update_fields=("document_type",))


def set_tags(
    sender,
    document: Document,
    logging_group=None,
    classifier: Optional[DocumentClassifier] = None,
    replace=False,
    suggest=False,
    base_url=None,
    color=False,
    **kwargs,
):
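    """
    Matches the document against all tags and adds any that apply. With
    replace, previously assigned tags are removed first, except inbox tags
    and manually assigned tags that have no matching rule.
    """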
    if replace:
        Document.tags.through.objects.filter(document=document).exclude(
            Q(tag__is_inbox_tag=True),
        ).exclude(
            Q(tag__match="") & ~Q(tag__matching_algorithm=Tag.MATCH_AUTO),
        ).delete()

    current_tags = set(document.tags.all())

    matched_tags = matching.match_tags(document, classifier)

    relevant_tags = set(matched_tags) - current_tags

    if suggest:
        extra_tags = current_tags - set(matched_tags)
        extra_tags = [
            t for t in extra_tags if t.matching_algorithm == MatchingModel.MATCH_AUTO
        ]
        if not relevant_tags and not extra_tags:
            return
        if base_url:
            print(
                termcolors.colorize(str(document), fg="green")
                if color
                else str(document),
            )
            print(f"{base_url}/documents/{document.pk}")
        else:
            print(
                (
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document)
                )
                + f" [{document.pk}]",
            )
        if relevant_tags:
            print("Suggest tags: " + ", ".join([t.name for t in relevant_tags]))
        if extra_tags:
            print("Extra tags: " + ", ".join([t.name for t in extra_tags]))
    else:
        if not relevant_tags:
            return

        message = 'Tagging "{}" with "{}"'
        logger.info(
            message.format(document, ", ".join([t.name for t in relevant_tags])),
            extra={"group": logging_group},
        )

        document.tags.add(*relevant_tags)


def set_storage_path(
    sender,
    document: Document,
    logging_group=None,
    classifier: Optional[DocumentClassifier] = None,
    replace=False,
    use_first=True,
    suggest=False,
    base_url=None,
    color=False,
    **kwargs,
):
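    """
    Matches the document against all storage paths and assigns the best
    candidate, honouring the same replace/use_first/suggest flags as the
    other matching handlers.
    """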
    if document.storage_path and not replace:
        return

    potential_storage_path = matching.match_storage_paths(
        document,
        classifier,
    )

    potential_count = len(potential_storage_path)
    selected = potential_storage_path[0] if potential_storage_path else None

    if potential_count > 1:
        if use_first:
            logger.info(
                f"Detected {potential_count} potential storage paths, "
                f"so we've opted for {selected}",
                extra={"group": logging_group},
            )
        else:
            logger.info(
                f"Detected {potential_count} potential storage paths, "
                "not assigning any storage directory",
                extra={"group": logging_group},
            )
            return

    if selected or replace:
        if suggest:
            if base_url:
                print(
                    termcolors.colorize(str(document), fg="green")
                    if color
                    else str(document),
                )
                print(f"{base_url}/documents/{document.pk}")
            else:
                print(
                    (
                        termcolors.colorize(str(document), fg="green")
                        if color
                        else str(document)
                    )
                    + f" [{document.pk}]",
                )
            print(f"Suggest storage directory {selected}")
        else:
            logger.info(
                f"Assigning storage path {selected} to {document}",
                extra={"group": logging_group},
            )

            document.storage_path = selected
            document.save(update_fields=("storage_path",))


@receiver(models.signals.post_delete, sender=Document)
def cleanup_document_deletion(sender, instance, using, **kwargs):
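    """
    Deletes a document's files from disk after the database record is gone.
    If TRASH_DIR is configured, the source file is moved there under a
    non-conflicting name instead; empty directories are pruned afterwards.
    """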
    with FileLock(settings.MEDIA_LOCK):
        if settings.TRASH_DIR:
            # Find a non-conflicting filename in case a document with the same
            # name was moved to trash earlier
            counter = 0
            old_filename = os.path.split(instance.source_path)[1]
            (old_filebase, old_fileext) = os.path.splitext(old_filename)

            while True:
                new_file_path = os.path.join(
                    settings.TRASH_DIR,
                    old_filebase + (f"_{counter:02}" if counter else "") + old_fileext,
                )

                if os.path.exists(new_file_path):
                    counter += 1
                else:
                    break

            logger.debug(f"Moving {instance.source_path} to trash at {new_file_path}")
            try:
                shutil.move(instance.source_path, new_file_path)
            except OSError as e:
                logger.error(
                    f"Failed to move {instance.source_path} to trash at "
                    f"{new_file_path}: {e}. Skipping cleanup!",
                )
                return

        for filename in (
            instance.source_path,
            instance.archive_path,
            instance.thumbnail_path,
        ):
            if filename and os.path.isfile(filename):
                try:
                    os.unlink(filename)
                    logger.debug(f"Deleted file {filename}.")
                except OSError as e:
                    logger.warning(
                        f"While deleting document {instance!s}, the file "
                        f"{filename} could not be deleted: {e}",
                    )

        delete_empty_directories(
            os.path.dirname(instance.source_path),
            root=settings.ORIGINALS_DIR,
        )

        if instance.has_archive_version:
            delete_empty_directories(
                os.path.dirname(instance.archive_path),
                root=settings.ARCHIVE_DIR,
            )


class CannotMoveFilesException(Exception):
    pass


def validate_move(instance, old_path, new_path):
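    """
    Verifies that a file move can proceed: the old path must still exist and
    the new path must not already exist. Raises CannotMoveFilesException
    otherwise.
    """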
    if not os.path.isfile(old_path):
        # Can't do anything if the old file does not exist anymore.
        logger.fatal(f"Document {instance!s}: File {old_path} has gone.")
        raise CannotMoveFilesException

    if os.path.isfile(new_path):
        # Can't do anything if the new file already exists. Skip updating file.
        logger.warning(
            f"Document {instance!s}: Cannot rename file "
            f"since target path {new_path} already exists.",
        )
        raise CannotMoveFilesException


@receiver(models.signals.m2m_changed, sender=Document.tags.through)
@receiver(models.signals.post_save, sender=Document)
def update_filename_and_move_files(sender, instance: Document, **kwargs):
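    """
    Regenerates the document's filename (and archive filename, if present)
    and moves the files on disk accordingly, restoring the previous state if
    moving or saving fails.
    """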
    if not instance.filename:
        # Can't update the filename if there is no filename to begin with.
        # This happens when the consumer creates a new document.
        # The document is modified and saved multiple times, and only after
        # everything is done (i.e., the generated filename is final),
        # filename will be set to the location where the consumer has put
        # the file.
        #
        # This will in turn cause this logic to move the file where it belongs.
        return

    with FileLock(settings.MEDIA_LOCK):
        try:
            # If this was waiting for the lock, the filename or archive_filename
            # of this document may have been updated. This happens if multiple
            # updates get queued from the UI for the same document.
            # So freshen up the data before doing anything.
            instance.refresh_from_db()

            old_filename = instance.filename
            old_source_path = instance.source_path

            instance.filename = generate_unique_filename(instance)
            move_original = old_filename != instance.filename

            old_archive_filename = instance.archive_filename
            old_archive_path = instance.archive_path

            if instance.has_archive_version:
                instance.archive_filename = generate_unique_filename(
                    instance,
                    archive_filename=True,
                )

                move_archive = old_archive_filename != instance.archive_filename
            else:
                move_archive = False

            if not move_original and not move_archive:
                # Don't do anything if filenames did not change.
                return

            if move_original:
                validate_move(instance, old_source_path, instance.source_path)
                create_source_path_directory(instance.source_path)
                shutil.move(old_source_path, instance.source_path)

            if move_archive:
                validate_move(instance, old_archive_path, instance.archive_path)
                create_source_path_directory(instance.archive_path)
                shutil.move(old_archive_path, instance.archive_path)

            # Don't save() here to prevent infinite recursion.
            Document.objects.filter(pk=instance.pk).update(
                filename=instance.filename,
                archive_filename=instance.archive_filename,
            )

        except (OSError, DatabaseError, CannotMoveFilesException) as e:
            logger.warning(f"Exception during file handling: {e}")
            # This happens when either:
            #  - moving the files failed due to file system errors
            #  - saving to the database failed due to database errors
            # In both cases, we need to revert to the original state.

            # Try to move files to their original location.
            try:
                if move_original and os.path.isfile(instance.source_path):
                    logger.info("Restoring previous original path")
                    shutil.move(instance.source_path, old_source_path)

                if move_archive and os.path.isfile(instance.archive_path):
                    logger.info("Restoring previous archive path")
                    shutil.move(instance.archive_path, old_archive_path)

            except Exception:
                # This is fine, since:
                # A: if we managed to move source from A to B, we will also
                #    manage to move it from B to A. If not, we have a serious
                #    issue that's going to get caught by the sanity checker.
                #    All files remain in place and will never be overwritten,
                #    so this is not the end of the world.
                # B: if moving the original file failed, nothing has changed
                #    anyway.
                pass

            # Restore old values on the instance.
            instance.filename = old_filename
            instance.archive_filename = old_archive_filename

        # Finally, remove any empty subfolders. This will do nothing if
        # something has failed above.
        if not os.path.isfile(old_source_path):
            delete_empty_directories(
                os.path.dirname(old_source_path),
                root=settings.ORIGINALS_DIR,
            )

        if instance.has_archive_version and not os.path.isfile(
            old_archive_path,
        ):
            delete_empty_directories(
                os.path.dirname(old_archive_path),
                root=settings.ARCHIVE_DIR,
            )


def set_log_entry(sender, document: Document, logging_group=None, **kwargs):
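    """
    Records an ADDITION entry in the admin log for the document, attributed
    to the "consumer" user.
    """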
    ct = ContentType.objects.get(model="document")
    user = User.objects.get(username="consumer")

    LogEntry.objects.create(
        action_flag=ADDITION,
        action_time=timezone.now(),
        content_type=ct,
        object_id=document.pk,
        user=user,
        object_repr=str(document),
    )


def add_to_index(sender, document, **kwargs):
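    """
    Adds the document to the search index, or updates it if it is already
    indexed.
    """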
    from documents import index

    index.add_or_update_document(document)


@before_task_publish.connect
def before_task_publish_handler(sender=None, headers=None, body=None, **kwargs):
    """
    Creates the PaperlessTask object in a pending state. This is sent before
    the task reaches the broker and before it begins executing on a worker.

    https://docs.celeryq.dev/en/stable/userguide/signals.html#before-task-publish

    https://docs.celeryq.dev/en/stable/internals/protocol.html#version-2
    """
    if not headers or headers.get("task") != "documents.tasks.consume_file":
        # Assumption: this is only ever a v2 message
        return

    try:
        close_old_connections()

        task_args = body[0]
        input_doc, _ = task_args

        task_file_name = input_doc.original_file.name

        PaperlessTask.objects.create(
            task_id=headers["id"],
            status=states.PENDING,
            task_file_name=task_file_name,
            task_name=headers["task"],
            result=None,
            date_created=timezone.now(),
            date_started=None,
            date_done=None,
        )
    except Exception:  # pragma: no cover
        # Don't let an exception in the signal handlers prevent
        # a document from being consumed.
        logger.exception("Creating PaperlessTask failed")


@task_prerun.connect
def task_prerun_handler(sender=None, task_id=None, task=None, **kwargs):
    """
    Updates the PaperlessTask to the started state. Sent before the task
    begins execution on a worker.

    https://docs.celeryq.dev/en/stable/userguide/signals.html#task-prerun
    """
    try:
        close_old_connections()
        task_instance = PaperlessTask.objects.filter(task_id=task_id).first()

        if task_instance is not None:
            task_instance.status = states.STARTED
            task_instance.date_started = timezone.now()
            task_instance.save()
    except Exception:  # pragma: no cover
        # Don't let an exception in the signal handlers prevent
        # a document from being consumed.
        logger.exception("Setting PaperlessTask started failed")


@task_postrun.connect
def task_postrun_handler(
    sender=None,
    task_id=None,
    task=None,
    retval=None,
    state=None,
    **kwargs,
):
    """
    Updates the result of the PaperlessTask.

    https://docs.celeryq.dev/en/stable/userguide/signals.html#task-postrun
    """
    try:
        close_old_connections()
        task_instance = PaperlessTask.objects.filter(task_id=task_id).first()

        if task_instance is not None:
            task_instance.status = state
            task_instance.result = retval
            task_instance.date_done = timezone.now()
            task_instance.save()
    except Exception:  # pragma: no cover
        # Don't let an exception in the signal handlers prevent
        # a document from being consumed.
        logger.exception("Updating PaperlessTask failed")


@task_failure.connect
def task_failure_handler(
    sender=None,
    task_id=None,
    exception=None,
    args=None,
    traceback=None,
    **kwargs,
):
    """
    Updates the result of a failed PaperlessTask.

    https://docs.celeryq.dev/en/stable/userguide/signals.html#task-failure
    """
    try:
        close_old_connections()
        task_instance = PaperlessTask.objects.filter(task_id=task_id).first()

        if task_instance is not None and task_instance.result is None:
            task_instance.status = states.FAILURE
            task_instance.result = traceback
            task_instance.date_done = timezone.now()
            task_instance.save()
    except Exception:  # pragma: no cover
        logger.exception("Updating PaperlessTask failed")