# paperless-ngx/src/documents/sanity_checker.py

import hashlib
import logging
import os

from django.conf import settings
from documents.models import Document
from tqdm import tqdm


class SanityCheckMessages:
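    """Collects sanity check findings, each tagged with a logging level."""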
    def __init__(self):
        self._messages = []

    def error(self, message):
        self._messages.append({"level": logging.ERROR, "message": message})

    def warning(self, message):
        self._messages.append({"level": logging.WARNING, "message": message})

    def info(self, message):
        self._messages.append({"level": logging.INFO, "message": message})

    def log_messages(self):
        logger = logging.getLogger("paperless.sanity_checker")

        if len(self._messages) == 0:
            logger.info("Sanity checker detected no issues.")
        else:
            for msg in self._messages:
                logger.log(msg["level"], msg["message"])

    def __len__(self):
        return len(self._messages)

    def __getitem__(self, item):
        return self._messages[item]

    def has_error(self):
        return any(msg["level"] == logging.ERROR for msg in self._messages)

    def has_warning(self):
        return any(msg["level"] == logging.WARNING for msg in self._messages)


class SanityCheckFailedException(Exception):
    pass


def check_sanity(progress=False):
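    """Compare the documents in the database against the files in MEDIA_ROOT.

    Verifies that each document's original, thumbnail and archive files exist
    and are readable, that original and archive checksums match the stored
    values, and that no unreferenced (orphaned) files remain in the media
    directory. Findings are returned as a SanityCheckMessages collection.
    """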
    messages = SanityCheckMessages()

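    # Every file below MEDIA_ROOT is collected here; entries still present
    # after all documents have been checked are reported as orphaned files.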
    present_files = []
    for root, subdirs, files in os.walk(settings.MEDIA_ROOT):
        for f in files:
            present_files.append(os.path.normpath(os.path.join(root, f)))

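    # The media lock file is not an orphan; drop it from the list if present.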
    lockfile = os.path.normpath(settings.MEDIA_LOCK)
    if lockfile in present_files:
        present_files.remove(lockfile)

    for doc in tqdm(Document.objects.all(), disable=not progress):
        # Check sanity of the thumbnail
        if not os.path.isfile(doc.thumbnail_path):
            messages.error(f"Thumbnail of document {doc.pk} does not exist.")
        else:
            if os.path.normpath(doc.thumbnail_path) in present_files:
                present_files.remove(os.path.normpath(doc.thumbnail_path))
            try:
                with doc.thumbnail_file as f:
                    f.read()
            except OSError as e:
                messages.error(
                    f"Cannot read thumbnail file of document {doc.pk}: {e}",
                )

        # Check sanity of the original file
        # TODO: extract method
        if not os.path.isfile(doc.source_path):
            messages.error(f"Original of document {doc.pk} does not exist.")
        else:
            if os.path.normpath(doc.source_path) in present_files:
                present_files.remove(os.path.normpath(doc.source_path))
            try:
                with doc.source_file as f:
                    checksum = hashlib.md5(f.read()).hexdigest()
            except OSError as e:
                messages.error(
                    f"Cannot read original file of document {doc.pk}: {e}",
                )
            else:
                if not checksum == doc.checksum:
                    messages.error(
                        f"Checksum mismatch of document {doc.pk}. "
                        f"Stored: {doc.checksum}, actual: {checksum}.",
                    )

        # Check sanity of the archive file.
        if doc.archive_checksum and not doc.archive_filename:
            messages.error(
                f"Document {doc.pk} has an archive file checksum, but no "
                f"archive filename.",
            )
        elif not doc.archive_checksum and doc.archive_filename:
            messages.error(
                f"Document {doc.pk} has an archive file, but its checksum is "
                f"missing.",
            )
        elif doc.has_archive_version:
            if not os.path.isfile(doc.archive_path):
                messages.error(
                    f"Archived version of document {doc.pk} does not exist.",
                )
            else:
                if os.path.normpath(doc.archive_path) in present_files:
                    present_files.remove(os.path.normpath(doc.archive_path))
                try:
                    with doc.archive_file as f:
                        checksum = hashlib.md5(f.read()).hexdigest()
                except OSError as e:
                    messages.error(
                        f"Cannot read archive file of document {doc.pk}: {e}",
                    )
                else:
                    if not checksum == doc.archive_checksum:
                        messages.error(
                            f"Checksum mismatch of archived document "
                            f"{doc.pk}. "
                            f"Stored: {doc.archive_checksum}, "
                            f"actual: {checksum}.",
                        )

        # other document checks
        if not doc.content:
            messages.info(f"Document {doc.pk} has no content.")

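    # Whatever is left in present_files is not referenced by any document.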
    for extra_file in present_files:
        messages.warning(f"Orphaned file in media dir: {extra_file}")

    return messages
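

# A minimal usage sketch (not part of the module above), assuming a caller
# such as a scheduled task wants to escalate errors; the exact call site is
# up to the consumer:
#
#     messages = check_sanity(progress=False)
#     messages.log_messages()
#     if messages.has_error():
#         raise SanityCheckFailedException("Sanity check failed with errors. See log.")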