Chore: switch from os.path to pathlib.Path (#10397)
This commit is contained in:
parent 54e2b916e6
commit 6dca4daea5
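As a rough, non-authoritative cheat sheet, the os.path-to-pathlib substitutions applied throughout the diff below map as follows (the directory and file names here are made up for illustration):

    import tempfile
    from pathlib import Path

    root = Path(tempfile.mkdtemp())      # throwaway directory for the demo
    p = root / "originals" / "doc.pdf"   # hypothetical document path

    # os.makedirs(os.path.dirname(p), exist_ok=True)
    p.parent.mkdir(parents=True, exist_ok=True)
    p.touch()

    # os.path.isdir / os.path.isfile / os.path.exists
    assert p.parent.is_dir() and p.is_file() and p.exists()

    # os.path.join(root, name) becomes the "/" operator
    sibling = p.parent / "other.pdf"
    assert sibling.name == "other.pdf"

    # os.path.splitext-style handling via stem/suffix
    assert (p.stem, p.suffix) == ("doc", ".pdf")

    # os.stat / os.unlink / os.listdir / os.rmdir
    p.stat()
    p.unlink()
    assert not list(p.parent.iterdir())
    p.parent.rmdir()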
@@ -204,15 +204,9 @@ lint.per-file-ignores."docker/wait-for-redis.py" = [
   "INP001",
   "T201",
 ]
-lint.per-file-ignores."src/documents/file_handling.py" = [
-  "PTH",
-] # TODO Enable & remove
 lint.per-file-ignores."src/documents/management/commands/document_consumer.py" = [
   "PTH",
 ] # TODO Enable & remove
-lint.per-file-ignores."src/documents/management/commands/document_exporter.py" = [
-  "PTH",
-] # TODO Enable & remove
 lint.per-file-ignores."src/documents/migrations/1012_fix_archive_files.py" = [
   "PTH",
 ] # TODO Enable & remove
@@ -222,9 +216,6 @@ lint.per-file-ignores."src/documents/models.py" = [
 lint.per-file-ignores."src/documents/parsers.py" = [
   "PTH",
 ] # TODO Enable & remove
-lint.per-file-ignores."src/documents/signals/handlers.py" = [
-  "PTH",
-] # TODO Enable & remove
 lint.per-file-ignores."src/paperless_tesseract/tests/test_parser.py" = [
   "RUF001",
 ]
@@ -1,4 +1,5 @@
 import os
+from pathlib import Path
 
 from django.conf import settings
 
@@ -7,19 +8,15 @@ from documents.templating.filepath import validate_filepath_template_and_render
 from documents.templating.utils import convert_format_str_to_template_format
 
 
-def create_source_path_directory(source_path):
-    os.makedirs(os.path.dirname(source_path), exist_ok=True)
+def create_source_path_directory(source_path: Path) -> None:
+    source_path.parent.mkdir(parents=True, exist_ok=True)
 
 
-def delete_empty_directories(directory, root):
-    if not os.path.isdir(directory):
+def delete_empty_directories(directory: Path, root: Path) -> None:
+    if not directory.is_dir():
         return
 
-    # Go up in the directory hierarchy and try to delete all directories
-    directory = os.path.normpath(directory)
-    root = os.path.normpath(root)
-
-    if not directory.startswith(root + os.path.sep):
+    if not directory.is_relative_to(root):
         # don't do anything outside our originals folder.
 
         # append os.path.set so that we avoid these cases:
@@ -27,11 +24,12 @@ def delete_empty_directories(directory, root):
         # root = /home/originals ("/" gets appended and startswith fails)
         return
 
+    # Go up in the directory hierarchy and try to delete all directories
     while directory != root:
-        if not os.listdir(directory):
+        if not list(directory.iterdir()):
             # it's empty
             try:
-                os.rmdir(directory)
+                directory.rmdir()
             except OSError:
                 # whatever. empty directories aren't that bad anyway.
                 return
@@ -40,10 +38,10 @@ def delete_empty_directories(directory, root):
                 return
 
         # go one level up
-        directory = os.path.normpath(os.path.dirname(directory))
+        directory = directory.parent
 
 
-def generate_unique_filename(doc, *, archive_filename=False):
+def generate_unique_filename(doc, *, archive_filename=False) -> Path:
     """
     Generates a unique filename for doc in settings.ORIGINALS_DIR.
 
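For reference, the pathlib idioms introduced above (Path.is_relative_to, iterdir, rmdir, parent) behave like this minimal sketch; it is a simplified stand-in for the real delete_empty_directories, not a drop-in replacement, and Path.is_relative_to requires Python 3.9+:

    from pathlib import Path

    def prune_empty_dirs(directory: Path, root: Path) -> None:
        # Refuse to touch anything outside root; this replaces the old
        # normpath + startswith(root + os.path.sep) check.
        if not directory.is_relative_to(root):
            return
        while directory != root:
            if list(directory.iterdir()):   # not empty, stop climbing
                return
            try:
                directory.rmdir()           # rmdir only removes empty directories
            except OSError:
                return
            directory = directory.parent    # go one level up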
@@ -56,21 +54,32 @@ def generate_unique_filename(doc, *, archive_filename=False):
 
     """
     if archive_filename:
-        old_filename = doc.archive_filename
+        old_filename: Path | None = (
+            Path(doc.archive_filename) if doc.archive_filename else None
+        )
         root = settings.ARCHIVE_DIR
     else:
-        old_filename = doc.filename
+        old_filename = Path(doc.filename) if doc.filename else None
        root = settings.ORIGINALS_DIR
 
     # If generating archive filenames, try to make a name that is similar to
     # the original filename first.
 
     if archive_filename and doc.filename:
-        new_filename = os.path.splitext(doc.filename)[0] + ".pdf"
-        if new_filename == old_filename or not os.path.exists(
-            os.path.join(root, new_filename),
-        ):
-            return new_filename
+        # Generate the full path using the same logic as generate_filename
+        base_generated = generate_filename(doc, archive_filename=archive_filename)
+
+        # Try to create a simple PDF version based on the original filename
+        # but preserve any directory structure from the template
+        if str(base_generated.parent) != ".":
+            # Has directory structure, preserve it
+            simple_pdf_name = base_generated.parent / (Path(doc.filename).stem + ".pdf")
+        else:
+            # No directory structure
+            simple_pdf_name = Path(Path(doc.filename).stem + ".pdf")
+
+        if simple_pdf_name == old_filename or not (root / simple_pdf_name).exists():
+            return simple_pdf_name
 
     counter = 0
 
@@ -84,7 +93,7 @@ def generate_unique_filename(doc, *, archive_filename=False):
             # still the same as before.
             return new_filename
 
-        if os.path.exists(os.path.join(root, new_filename)):
+        if (root / new_filename).exists():
             counter += 1
         else:
             return new_filename
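A quick, hypothetical illustration of the parent/stem composition used above for the "simple PDF name" (the template output and original filename are invented values, not real API results):

    from pathlib import Path

    base_generated = Path("invoices/2020/scan_0001.pdf")  # assumed generate_filename() output
    original = Path("scan_0001_ocr.pdf")                  # assumed doc.filename

    if str(base_generated.parent) != ".":
        simple_pdf_name = base_generated.parent / (original.stem + ".pdf")
    else:
        simple_pdf_name = Path(original.stem + ".pdf")

    assert simple_pdf_name == Path("invoices/2020/scan_0001_ocr.pdf")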
@@ -96,8 +105,8 @@ def generate_filename(
     counter=0,
     append_gpg=True,
     archive_filename=False,
-):
-    path = ""
+) -> Path:
+    base_path: Path | None = None
 
     def format_filename(document: Document, template_str: str) -> str | None:
         rendered_filename = validate_filepath_template_and_render(
@@ -134,17 +143,34 @@ def generate_filename(
 
     # If we have one, render it
     if filename_format is not None:
-        path = format_filename(doc, filename_format)
+        rendered_path: str | None = format_filename(doc, filename_format)
+        if rendered_path:
+            base_path = Path(rendered_path)
 
     counter_str = f"_{counter:02}" if counter else ""
     filetype_str = ".pdf" if archive_filename else doc.file_type
 
-    if path:
-        filename = f"{path}{counter_str}{filetype_str}"
+    if base_path:
+        # Split the path into directory and filename parts
+        directory = base_path.parent
+        # Use the full name (not just stem) as the base filename
+        base_filename = base_path.name
+
+        # Build the final filename with counter and filetype
+        final_filename = f"{base_filename}{counter_str}{filetype_str}"
+
+        # If we have a directory component, include it
+        if str(directory) != ".":
+            full_path = directory / final_filename
+        else:
+            full_path = Path(final_filename)
     else:
-        filename = f"{doc.pk:07}{counter_str}{filetype_str}"
+        # No template, use document ID
+        final_filename = f"{doc.pk:07}{counter_str}{filetype_str}"
+        full_path = Path(final_filename)
 
+    # Add GPG extension if needed
     if append_gpg and doc.storage_type == doc.STORAGE_TYPE_GPG:
-        filename += ".gpg"
+        full_path = full_path.with_suffix(full_path.suffix + ".gpg")
 
-    return filename
+    return full_path
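One pathlib subtlety in the GPG branch above: Path.with_suffix() replaces the final suffix rather than appending one, which is why the new code concatenates the existing suffix explicitly. A minimal demonstration:

    from pathlib import Path

    p = Path("0000001.pdf")
    assert p.with_suffix(".gpg") == Path("0000001.gpg")                 # would lose .pdf
    assert p.with_suffix(p.suffix + ".gpg") == Path("0000001.pdf.gpg")  # what the code does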
@@ -236,10 +236,7 @@ class Command(CryptMixin, BaseCommand):
         # now make an archive in the original target, with all files stored
         if self.zip_export and temp_dir is not None:
             shutil.make_archive(
-                os.path.join(
-                    self.original_target,
-                    options["zip_name"],
-                ),
+                self.original_target / options["zip_name"],
                 format="zip",
                 root_dir=temp_dir.name,
             )
@@ -342,7 +339,7 @@ class Command(CryptMixin, BaseCommand):
             )
 
             if self.split_manifest:
-                manifest_name = Path(base_name + "-manifest.json")
+                manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
                 if self.use_folder_prefix:
                     manifest_name = Path("json") / manifest_name
                 manifest_name = (self.target / manifest_name).resolve()
@@ -416,7 +413,7 @@ class Command(CryptMixin, BaseCommand):
                 else:
                     item.unlink()
 
-    def generate_base_name(self, document: Document) -> str:
+    def generate_base_name(self, document: Document) -> Path:
         """
         Generates a unique name for the document, one which hasn't already been exported (or will be)
         """
@@ -436,12 +433,12 @@ class Command(CryptMixin, BaseCommand):
                 break
             else:
                 filename_counter += 1
-        return base_name
+        return Path(base_name)
 
     def generate_document_targets(
         self,
         document: Document,
-        base_name: str,
+        base_name: Path,
         document_dict: dict,
     ) -> tuple[Path, Path | None, Path | None]:
         """
@@ -449,25 +446,25 @@ class Command(CryptMixin, BaseCommand):
         """
         original_name = base_name
         if self.use_folder_prefix:
-            original_name = os.path.join("originals", original_name)
-        original_target = (self.target / Path(original_name)).resolve()
-        document_dict[EXPORTER_FILE_NAME] = original_name
+            original_name = Path("originals") / original_name
+        original_target = (self.target / original_name).resolve()
+        document_dict[EXPORTER_FILE_NAME] = str(original_name)
 
         if not self.no_thumbnail:
-            thumbnail_name = base_name + "-thumbnail.webp"
+            thumbnail_name = base_name.parent / (base_name.stem + "-thumbnail.webp")
             if self.use_folder_prefix:
-                thumbnail_name = os.path.join("thumbnails", thumbnail_name)
-            thumbnail_target = (self.target / Path(thumbnail_name)).resolve()
-            document_dict[EXPORTER_THUMBNAIL_NAME] = thumbnail_name
+                thumbnail_name = Path("thumbnails") / thumbnail_name
+            thumbnail_target = (self.target / thumbnail_name).resolve()
+            document_dict[EXPORTER_THUMBNAIL_NAME] = str(thumbnail_name)
         else:
             thumbnail_target = None
 
         if not self.no_archive and document.has_archive_version:
-            archive_name = base_name + "-archive.pdf"
+            archive_name = base_name.parent / (base_name.stem + "-archive.pdf")
             if self.use_folder_prefix:
-                archive_name = os.path.join("archive", archive_name)
-            archive_target = (self.target / Path(archive_name)).resolve()
-            document_dict[EXPORTER_ARCHIVE_NAME] = archive_name
+                archive_name = Path("archive") / archive_name
+            archive_target = (self.target / archive_name).resolve()
+            document_dict[EXPORTER_ARCHIVE_NAME] = str(archive_name)
         else:
             archive_target = None
 
@@ -572,7 +569,7 @@ class Command(CryptMixin, BaseCommand):
         perform_copy = False
 
         if target.exists():
-            source_stat = os.stat(source)
+            source_stat = source.stat()
             target_stat = target.stat()
             if self.compare_checksums and source_checksum:
                 target_checksum = hashlib.md5(target.read_bytes()).hexdigest()
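The exporter hunks above now derive sibling file names with pathlib instead of string concatenation. A short sketch of the pattern, using a made-up base name:

    from pathlib import Path

    base_name = Path("2024/invoice-0001")

    manifest_name = base_name.with_name(f"{base_name.stem}-manifest.json")
    thumbnail_name = base_name.parent / (base_name.stem + "-thumbnail.webp")
    archive_name = base_name.parent / (base_name.stem + "-archive.pdf")

    assert manifest_name == Path("2024/invoice-0001-manifest.json")
    assert thumbnail_name == Path("2024/invoice-0001-thumbnail.webp")
    assert archive_name == Path("2024/invoice-0001-archive.pdf")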
@@ -63,11 +63,11 @@ class Document:
                 / "documents"
                 / "originals"
                 / f"{self.pk:07}.{self.file_type}.gpg"
-            ).as_posix()
+            )
 
     @property
     def source_file(self):
-        return Path(self.source_path).open("rb")
+        return self.source_path.open("rb")
 
     @property
     def file_name(self):
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
 import logging
-import os
 import shutil
+from pathlib import Path
 from typing import TYPE_CHECKING
 
 import httpx
@@ -51,8 +51,6 @@ from documents.permissions import set_permissions_for_object
 from documents.templating.workflows import parse_w_workflow_placeholders
 
 if TYPE_CHECKING:
-    from pathlib import Path
-
     from documents.classifier import DocumentClassifier
     from documents.data_models import ConsumableDocument
     from documents.data_models import DocumentMetadataOverrides
@@ -329,15 +327,16 @@ def cleanup_document_deletion(sender, instance, **kwargs):
             # Find a non-conflicting filename in case a document with the same
             # name was moved to trash earlier
             counter = 0
-            old_filename = os.path.split(instance.source_path)[1]
-            (old_filebase, old_fileext) = os.path.splitext(old_filename)
+            old_filename = Path(instance.source_path).name
+            old_filebase = Path(old_filename).stem
+            old_fileext = Path(old_filename).suffix
 
             while True:
                 new_file_path = settings.EMPTY_TRASH_DIR / (
                     old_filebase + (f"_{counter:02}" if counter else "") + old_fileext
                 )
 
-                if os.path.exists(new_file_path):
+                if new_file_path.exists():
                     counter += 1
                 else:
                     break
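os.path.split/os.path.splitext are replaced above by Path.name, Path.stem and Path.suffix. For names containing several dots the behaviour matches splitext, splitting off only the last suffix; a small hypothetical example:

    from pathlib import Path

    old_filename = Path("/media/originals/report.2023.pdf").name  # 'report.2023.pdf'
    old_filebase = Path(old_filename).stem                        # 'report.2023'
    old_fileext = Path(old_filename).suffix                       # '.pdf'

    candidate = f"{old_filebase}_{1:02}{old_fileext}"
    assert candidate == "report.2023_01.pdf"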
@@ -361,26 +360,26 @@ def cleanup_document_deletion(sender, instance, **kwargs):
             files += (instance.source_path,)
 
         for filename in files:
-            if filename and os.path.isfile(filename):
+            if filename and filename.is_file():
                 try:
-                    os.unlink(filename)
+                    filename.unlink()
                     logger.debug(f"Deleted file {filename}.")
                 except OSError as e:
                     logger.warning(
                         f"While deleting document {instance!s}, the file "
                         f"{filename} could not be deleted: {e}",
                     )
-            elif filename and not os.path.isfile(filename):
+            elif filename and not filename.is_file():
                 logger.warning(f"Expected {filename} to exist, but it did not")
 
         delete_empty_directories(
-            os.path.dirname(instance.source_path),
+            Path(instance.source_path).parent,
             root=settings.ORIGINALS_DIR,
         )
 
         if instance.has_archive_version:
             delete_empty_directories(
-                os.path.dirname(instance.archive_path),
+                Path(instance.archive_path).parent,
                 root=settings.ARCHIVE_DIR,
             )
 
@@ -401,14 +400,14 @@ def update_filename_and_move_files(
     if isinstance(instance, CustomFieldInstance):
         instance = instance.document
 
-    def validate_move(instance, old_path, new_path):
-        if not os.path.isfile(old_path):
+    def validate_move(instance, old_path: Path, new_path: Path):
+        if not old_path.is_file():
             # Can't do anything if the old file does not exist anymore.
             msg = f"Document {instance!s}: File {old_path} doesn't exist."
             logger.fatal(msg)
             raise CannotMoveFilesException(msg)
 
-        if os.path.isfile(new_path):
+        if new_path.is_file():
             # Can't do anything if the new file already exists. Skip updating file.
             msg = f"Document {instance!s}: Cannot rename file since target path {new_path} already exists."
             logger.warning(msg)
@@ -436,16 +435,20 @@ def update_filename_and_move_files(
         old_filename = instance.filename
         old_source_path = instance.source_path
 
-        instance.filename = generate_unique_filename(instance)
+        # Need to convert to string to be able to save it to the db
+        instance.filename = str(generate_unique_filename(instance))
         move_original = old_filename != instance.filename
 
         old_archive_filename = instance.archive_filename
         old_archive_path = instance.archive_path
 
         if instance.has_archive_version:
-            instance.archive_filename = generate_unique_filename(
-                instance,
-                archive_filename=True,
+            # Need to convert to string to be able to save it to the db
+            instance.archive_filename = str(
+                generate_unique_filename(
+                    instance,
+                    archive_filename=True,
+                ),
             )
 
             move_archive = old_archive_filename != instance.archive_filename
@@ -487,11 +490,11 @@ def update_filename_and_move_files(
 
             # Try to move files to their original location.
             try:
-                if move_original and os.path.isfile(instance.source_path):
+                if move_original and instance.source_path.is_file():
                     logger.info("Restoring previous original path")
                     shutil.move(instance.source_path, old_source_path)
 
-                if move_archive and os.path.isfile(instance.archive_path):
+                if move_archive and instance.archive_path.is_file():
                     logger.info("Restoring previous archive path")
                     shutil.move(instance.archive_path, old_archive_path)
 
@@ -512,17 +515,15 @@ def update_filename_and_move_files(
 
         # finally, remove any empty sub folders. This will do nothing if
        # something has failed above.
-        if not os.path.isfile(old_source_path):
+        if not old_source_path.is_file():
             delete_empty_directories(
-                os.path.dirname(old_source_path),
+                Path(old_source_path).parent,
                 root=settings.ORIGINALS_DIR,
             )
 
-        if instance.has_archive_version and not os.path.isfile(
-            old_archive_path,
-        ):
+        if instance.has_archive_version and not old_archive_path.is_file():
             delete_empty_directories(
-                os.path.dirname(old_archive_path),
+                Path(old_archive_path).parent,
                 root=settings.ARCHIVE_DIR,
             )
 
@@ -1219,10 +1220,7 @@ def run_workflows(
                 )
                 files = None
                 if action.webhook.include_document:
-                    with open(
-                        original_file,
-                        "rb",
-                    ) as f:
+                    with original_file.open("rb") as f:
                         files = {
                             "file": (
                                 filename,
@@ -41,11 +41,9 @@ class TestDocument(TestCase):
         Path(file_path).touch()
         Path(thumb_path).touch()
 
-        with mock.patch("documents.signals.handlers.os.unlink") as mock_unlink:
+        with mock.patch("documents.signals.handlers.Path.unlink") as mock_unlink:
             document.delete()
             empty_trash([document.pk])
-            mock_unlink.assert_any_call(file_path)
-            mock_unlink.assert_any_call(thumb_path)
             self.assertEqual(mock_unlink.call_count, 2)
 
     def test_document_soft_delete(self):
@@ -63,7 +61,7 @@ class TestDocument(TestCase):
         Path(file_path).touch()
         Path(thumb_path).touch()
 
-        with mock.patch("documents.signals.handlers.os.unlink") as mock_unlink:
+        with mock.patch("documents.signals.handlers.Path.unlink") as mock_unlink:
             document.delete()
             self.assertEqual(mock_unlink.call_count, 0)
 
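Note on the mock change above: patching documents.signals.handlers.Path.unlink swaps the method out on the class itself, and since a plain MagicMock is not a descriptor the patched call is not bound to the Path instance. The mock therefore records no path argument, which is presumably why the assert_any_call(...) checks are dropped and only call_count is kept. A hedged, self-contained illustration:

    from pathlib import Path
    from unittest import mock

    with mock.patch("pathlib.Path.unlink") as mock_unlink:
        Path("/tmp/a.txt").unlink()
        Path("/tmp/b.txt").unlink()

    assert mock_unlink.call_count == 2
    # The recorded calls carry no path argument, so an
    # assert_any_call("/tmp/a.txt") style check would fail here.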
@@ -34,12 +34,12 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
         document.save()
 
-        self.assertEqual(generate_filename(document), f"{document.pk:07d}.pdf")
+        self.assertEqual(generate_filename(document), Path(f"{document.pk:07d}.pdf"))
 
         document.storage_type = Document.STORAGE_TYPE_GPG
         self.assertEqual(
             generate_filename(document),
-            f"{document.pk:07d}.pdf.gpg",
+            Path(f"{document.pk:07d}.pdf.gpg"),
         )
 
     @override_settings(FILENAME_FORMAT="{correspondent}/{correspondent}")
@@ -58,12 +58,12 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.filename = generate_filename(document)
 
         # Ensure that filename is properly generated
-        self.assertEqual(document.filename, "none/none.pdf")
+        self.assertEqual(document.filename, Path("none/none.pdf"))
 
         # Enable encryption and check again
         document.storage_type = Document.STORAGE_TYPE_GPG
         document.filename = generate_filename(document)
-        self.assertEqual(document.filename, "none/none.pdf.gpg")
+        self.assertEqual(document.filename, Path("none/none.pdf.gpg"))
 
         document.save()
 
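Because generate_filename() now returns a Path, the expected values in these tests are wrapped in Path(...): a Path never compares equal to the equivalent string. A small check (assuming a POSIX platform for the str() comparison):

    from pathlib import Path

    assert Path("none/none.pdf") != "none/none.pdf"        # Path vs str: never equal
    assert Path("none/none.pdf") == Path("none/none.pdf")  # Path vs Path: equal
    assert str(Path("none/none.pdf")) == "none/none.pdf"   # compare as strings if needed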
@@ -96,7 +96,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         # Ensure that filename is properly generated
         document.filename = generate_filename(document)
-        self.assertEqual(document.filename, "none/none.pdf")
+        self.assertEqual(document.filename, Path("none/none.pdf"))
         create_source_path_directory(document.source_path)
         document.source_path.touch()
 
@@ -137,7 +137,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         # Ensure that filename is properly generated
         document.filename = generate_filename(document)
-        self.assertEqual(document.filename, "none/none.pdf")
+        self.assertEqual(document.filename, Path("none/none.pdf"))
         create_source_path_directory(document.source_path)
         Path(document.source_path).touch()
 
@@ -247,7 +247,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
 
         # Ensure that filename is properly generated
         document.filename = generate_filename(document)
-        self.assertEqual(document.filename, "none/none.pdf")
+        self.assertEqual(document.filename, Path("none/none.pdf"))
 
         create_source_path_directory(document.source_path)
 
@@ -269,11 +269,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         dt = DocumentType.objects.create(name="my_doc_type")
         d = Document.objects.create(title="the_doc", mime_type="application/pdf")
 
-        self.assertEqual(generate_filename(d), "none - the_doc.pdf")
+        self.assertEqual(generate_filename(d), Path("none - the_doc.pdf"))
 
         d.document_type = dt
 
-        self.assertEqual(generate_filename(d), "my_doc_type - the_doc.pdf")
+        self.assertEqual(generate_filename(d), Path("my_doc_type - the_doc.pdf"))
 
     @override_settings(FILENAME_FORMAT="{asn} - {title}")
     def test_asn(self):
@@ -289,8 +289,8 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             archive_serial_number=None,
             checksum="B",
         )
-        self.assertEqual(generate_filename(d1), "652 - the_doc.pdf")
-        self.assertEqual(generate_filename(d2), "none - the_doc.pdf")
+        self.assertEqual(generate_filename(d1), Path("652 - the_doc.pdf"))
+        self.assertEqual(generate_filename(d2), Path("none - the_doc.pdf"))
 
     @override_settings(FILENAME_FORMAT="{title} {tag_list}")
     def test_tag_list(self):
@@ -298,7 +298,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         doc.tags.create(name="tag2")
         doc.tags.create(name="tag1")
 
-        self.assertEqual(generate_filename(doc), "doc1 tag1,tag2.pdf")
+        self.assertEqual(generate_filename(doc), Path("doc1 tag1,tag2.pdf"))
 
         doc = Document.objects.create(
             title="doc2",
@@ -306,7 +306,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             mime_type="application/pdf",
         )
 
-        self.assertEqual(generate_filename(doc), "doc2.pdf")
+        self.assertEqual(generate_filename(doc), Path("doc2.pdf"))
 
     @override_settings(FILENAME_FORMAT="//etc/something/{title}")
     def test_filename_relative(self):
@@ -330,11 +330,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             created=d1,
         )
 
-        self.assertEqual(generate_filename(doc1), "2020-03-06.pdf")
+        self.assertEqual(generate_filename(doc1), Path("2020-03-06.pdf"))
 
         doc1.created = datetime.date(2020, 11, 16)
 
-        self.assertEqual(generate_filename(doc1), "2020-11-16.pdf")
+        self.assertEqual(generate_filename(doc1), Path("2020-11-16.pdf"))
 
     @override_settings(
         FILENAME_FORMAT="{added_year}-{added_month}-{added_day}",
@@ -347,11 +347,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             added=d1,
         )
 
-        self.assertEqual(generate_filename(doc1), "232-01-09.pdf")
+        self.assertEqual(generate_filename(doc1), Path("232-01-09.pdf"))
 
         doc1.added = timezone.make_aware(datetime.datetime(2020, 11, 16, 1, 1, 1))
 
-        self.assertEqual(generate_filename(doc1), "2020-11-16.pdf")
+        self.assertEqual(generate_filename(doc1), Path("2020-11-16.pdf"))
 
     @override_settings(
         FILENAME_FORMAT="{correspondent}/{correspondent}/{correspondent}",
@@ -389,11 +389,11 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.mime_type = "application/pdf"
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
-        self.assertEqual(generate_filename(document), "0000001.pdf")
+        self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
         document.pk = 13579
 
-        self.assertEqual(generate_filename(document), "0013579.pdf")
+        self.assertEqual(generate_filename(document), Path("0013579.pdf"))
 
     @override_settings(FILENAME_FORMAT=None)
     def test_format_none(self):
@@ -402,7 +402,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.mime_type = "application/pdf"
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
-        self.assertEqual(generate_filename(document), "0000001.pdf")
+        self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
     def test_try_delete_empty_directories(self):
         # Create our working directory
@@ -428,7 +428,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.mime_type = "application/pdf"
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
-        self.assertEqual(generate_filename(document), "0000001.pdf")
+        self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
     @override_settings(FILENAME_FORMAT="{created__year}")
     def test_invalid_format_key(self):
@@ -437,7 +437,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
         document.mime_type = "application/pdf"
         document.storage_type = Document.STORAGE_TYPE_UNENCRYPTED
 
-        self.assertEqual(generate_filename(document), "0000001.pdf")
+        self.assertEqual(generate_filename(document), Path("0000001.pdf"))
 
     @override_settings(FILENAME_FORMAT="{title}")
     def test_duplicates(self):
@@ -564,7 +564,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             value_select="abc123",
         )
 
-        self.assertEqual(generate_filename(doc), "document_apple.pdf")
+        self.assertEqual(generate_filename(doc), Path("document_apple.pdf"))
 
         # handler should not have been called
         self.assertEqual(m.call_count, 0)
@@ -576,7 +576,7 @@ class TestFileHandling(DirectoriesMixin, FileSystemAssertsMixin, TestCase):
             ],
         }
         cf.save()
-        self.assertEqual(generate_filename(doc), "document_aubergine.pdf")
+        self.assertEqual(generate_filename(doc), Path("document_aubergine.pdf"))
         # handler should have been called
         self.assertEqual(m.call_count, 1)
 
@@ -897,7 +897,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             pk=1,
             checksum="1",
         )
-        self.assertEqual(generate_filename(doc), "This. is the title.pdf")
+        self.assertEqual(generate_filename(doc), Path("This. is the title.pdf"))
 
         doc = Document.objects.create(
             title="my\\invalid/../title:yay",
@@ -905,7 +905,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             pk=2,
             checksum="2",
         )
-        self.assertEqual(generate_filename(doc), "my-invalid-..-title-yay.pdf")
+        self.assertEqual(generate_filename(doc), Path("my-invalid-..-title-yay.pdf"))
 
     @override_settings(FILENAME_FORMAT="{created}")
     def test_date(self):
@@ -916,7 +916,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             pk=2,
             checksum="2",
         )
-        self.assertEqual(generate_filename(doc), "2020-05-21.pdf")
+        self.assertEqual(generate_filename(doc), Path("2020-05-21.pdf"))
 
     def test_dynamic_path(self):
         """
@@ -935,7 +935,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             checksum="2",
             storage_path=StoragePath.objects.create(path="TestFolder/{{created}}"),
         )
-        self.assertEqual(generate_filename(doc), "TestFolder/2020-06-25.pdf")
+        self.assertEqual(generate_filename(doc), Path("TestFolder/2020-06-25.pdf"))
 
     def test_dynamic_path_with_none(self):
         """
@@ -956,7 +956,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             checksum="2",
             storage_path=StoragePath.objects.create(path="{{asn}} - {{created}}"),
         )
-        self.assertEqual(generate_filename(doc), "none - 2020-06-25.pdf")
+        self.assertEqual(generate_filename(doc), Path("none - 2020-06-25.pdf"))
 
     @override_settings(
         FILENAME_FORMAT_REMOVE_NONE=True,
@@ -984,7 +984,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             checksum="2",
             storage_path=sp,
         )
-        self.assertEqual(generate_filename(doc), "TestFolder/2020-06-25.pdf")
+        self.assertEqual(generate_filename(doc), Path("TestFolder/2020-06-25.pdf"))
 
         # Special case, undefined variable, then defined at the start of the template
         # This could lead to an absolute path after we remove the leading -none-, but leave the leading /
@@ -993,7 +993,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             "{{ owner_username }}/{{ created_year }}/{{ correspondent }}/{{ title }}"
         )
         sp.save()
-        self.assertEqual(generate_filename(doc), "2020/does not matter.pdf")
+        self.assertEqual(generate_filename(doc), Path("2020/does not matter.pdf"))
 
     def test_multiple_doc_paths(self):
         """
@@ -1028,8 +1028,14 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             ),
         )
 
-        self.assertEqual(generate_filename(doc_a), "ThisIsAFolder/4/2020-06-25.pdf")
-        self.assertEqual(generate_filename(doc_b), "SomeImportantNone/2020-07-25.pdf")
+        self.assertEqual(
+            generate_filename(doc_a),
+            Path("ThisIsAFolder/4/2020-06-25.pdf"),
+        )
+        self.assertEqual(
+            generate_filename(doc_b),
+            Path("SomeImportantNone/2020-07-25.pdf"),
+        )
 
     @override_settings(
         FILENAME_FORMAT=None,
@@ -1064,8 +1070,11 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             ),
         )
 
-        self.assertEqual(generate_filename(doc_a), "0000002.pdf")
-        self.assertEqual(generate_filename(doc_b), "SomeImportantNone/2020-07-25.pdf")
+        self.assertEqual(generate_filename(doc_a), Path("0000002.pdf"))
+        self.assertEqual(
+            generate_filename(doc_b),
+            Path("SomeImportantNone/2020-07-25.pdf"),
+        )
 
     @override_settings(
         FILENAME_FORMAT="{created_year_short}/{created_month_name_short}/{created_month_name}/{title}",
@@ -1078,7 +1087,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             pk=2,
             checksum="2",
         )
-        self.assertEqual(generate_filename(doc), "89/Dec/December/The Title.pdf")
+        self.assertEqual(generate_filename(doc), Path("89/Dec/December/The Title.pdf"))
 
     @override_settings(
         FILENAME_FORMAT="{added_year_short}/{added_month_name}/{added_month_name_short}/{title}",
@@ -1091,7 +1100,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             pk=2,
             checksum="2",
         )
-        self.assertEqual(generate_filename(doc), "84/August/Aug/The Title.pdf")
+        self.assertEqual(generate_filename(doc), Path("84/August/Aug/The Title.pdf"))
 
     @override_settings(
         FILENAME_FORMAT="{owner_username}/{title}",
@@ -1124,8 +1133,8 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             checksum="3",
         )
 
-        self.assertEqual(generate_filename(owned_doc), "user1/The Title.pdf")
-        self.assertEqual(generate_filename(no_owner_doc), "none/does matter.pdf")
+        self.assertEqual(generate_filename(owned_doc), Path("user1/The Title.pdf"))
+        self.assertEqual(generate_filename(no_owner_doc), Path("none/does matter.pdf"))
 
     @override_settings(
         FILENAME_FORMAT="{original_name}",
@@ -1171,17 +1180,20 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
             original_filename="logs.txt",
         )
 
-        self.assertEqual(generate_filename(doc_with_original), "someepdf.pdf")
+        self.assertEqual(generate_filename(doc_with_original), Path("someepdf.pdf"))
 
         self.assertEqual(
             generate_filename(tricky_with_original),
-            "some pdf with spaces and stuff.pdf",
+            Path("some pdf with spaces and stuff.pdf"),
         )
 
-        self.assertEqual(generate_filename(no_original), "none.pdf")
+        self.assertEqual(generate_filename(no_original), Path("none.pdf"))
 
-        self.assertEqual(generate_filename(text_doc), "logs.txt")
-        self.assertEqual(generate_filename(text_doc, archive_filename=True), "logs.pdf")
+        self.assertEqual(generate_filename(text_doc), Path("logs.txt"))
+        self.assertEqual(
+            generate_filename(text_doc, archive_filename=True),
+            Path("logs.pdf"),
+        )
 
     @override_settings(
         FILENAME_FORMAT="XX{correspondent}/{title}",
@@ -1206,7 +1218,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
 
         # Ensure that filename is properly generated
         document.filename = generate_filename(document)
-        self.assertEqual(document.filename, "XX/doc1.pdf")
+        self.assertEqual(document.filename, Path("XX/doc1.pdf"))
 
     def test_complex_template_strings(self):
         """
@@ -1244,19 +1256,19 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
 
         self.assertEqual(
             generate_filename(doc_a),
-            "somepath/some where/2020-06-25/Does Matter.pdf",
+            Path("somepath/some where/2020-06-25/Does Matter.pdf"),
         )
         doc_a.checksum = "5"
 
         self.assertEqual(
             generate_filename(doc_a),
-            "somepath/2024-10-01/Does Matter.pdf",
+            Path("somepath/2024-10-01/Does Matter.pdf"),
        )
 
         sp.path = "{{ document.title|lower }}{{ document.archive_serial_number - 2 }}"
         sp.save()
 
-        self.assertEqual(generate_filename(doc_a), "does matter23.pdf")
+        self.assertEqual(generate_filename(doc_a), Path("does matter23.pdf"))
 
         sp.path = """
         somepath/
@@ -1275,13 +1287,13 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         sp.save()
         self.assertEqual(
             generate_filename(doc_a),
-            "somepath/asn-000-200/Does Matter/Does Matter.pdf",
+            Path("somepath/asn-000-200/Does Matter/Does Matter.pdf"),
         )
         doc_a.archive_serial_number = 301
         doc_a.save()
         self.assertEqual(
             generate_filename(doc_a),
-            "somepath/asn-201-400/asn-3xx/Does Matter.pdf",
+            Path("somepath/asn-201-400/asn-3xx/Does Matter.pdf"),
         )
 
     @override_settings(
@@ -1310,7 +1322,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         with self.assertLogs(level=logging.WARNING) as capture:
             self.assertEqual(
                 generate_filename(doc_a),
-                "0000002.pdf",
+                Path("0000002.pdf"),
             )
 
             self.assertEqual(len(capture.output), 1)
@@ -1345,7 +1357,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         with self.assertLogs(level=logging.WARNING) as capture:
             self.assertEqual(
                 generate_filename(doc_a),
-                "0000002.pdf",
+                Path("0000002.pdf"),
             )
 
             self.assertEqual(len(capture.output), 1)
@@ -1413,7 +1425,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "invoices/1234.pdf",
+                Path("invoices/1234.pdf"),
             )
 
         with override_settings(
@@ -1427,7 +1439,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "Some Title_ChoiceOne.pdf",
+                Path("Some Title_ChoiceOne.pdf"),
             )
 
         # Check for handling Nones well
@@ -1436,7 +1448,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
 
         self.assertEqual(
             generate_filename(doc_a),
-            "Some Title_Default Value.pdf",
+            Path("Some Title_Default Value.pdf"),
         )
 
         cf.name = "Invoice Number"
@@ -1449,7 +1461,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
        ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "invoices/4567.pdf",
+                Path("invoices/4567.pdf"),
             )
 
         with override_settings(
@@ -1457,7 +1469,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "invoices/0.pdf",
+                Path("invoices/0.pdf"),
             )
 
     def test_datetime_filter(self):
@@ -1496,7 +1508,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "2020/Some Title.pdf",
+                Path("2020/Some Title.pdf"),
             )
 
         with override_settings(
@@ -1504,7 +1516,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "2020-06-25/Some Title.pdf",
+                Path("2020-06-25/Some Title.pdf"),
             )
 
         with override_settings(
@@ -1512,7 +1524,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc_a),
-                "2024-10-01/Some Title.pdf",
+                Path("2024-10-01/Some Title.pdf"),
             )
 
     def test_slugify_filter(self):
@@ -1539,7 +1551,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc),
-                "some-title-with-special-characters.pdf",
+                Path("some-title-with-special-characters.pdf"),
             )
 
         # Test with correspondent name containing spaces and special chars
@@ -1553,7 +1565,7 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc),
-                "johns-office-workplace/some-title-with-special-characters.pdf",
+                Path("johns-office-workplace/some-title-with-special-characters.pdf"),
             )
 
         # Test with custom fields
@@ -1572,5 +1584,5 @@ class TestFilenameGeneration(DirectoriesMixin, TestCase):
         ):
             self.assertEqual(
                 generate_filename(doc),
-                "brussels-belgium/some-title-with-special-characters.pdf",
+                Path("brussels-belgium/some-title-with-special-characters.pdf"),
             )
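One practical effect of asserting against Path objects rather than raw strings in the tests above is that comparison happens on parsed segments, so spurious slashes and single-dot components cannot cause false mismatches, while ".." is deliberately kept. For example:

    from pathlib import PurePosixPath

    assert PurePosixPath("TestFolder//2020-06-25.pdf") == PurePosixPath("TestFolder/2020-06-25.pdf")
    assert PurePosixPath("./doc2.pdf") == PurePosixPath("doc2.pdf")
    assert PurePosixPath("a/../b") != PurePosixPath("b")   # ".." is not resolved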
@@ -209,7 +209,7 @@ class TestExportImport(
             4,
         )
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         self.assertEqual(
             self._get_document_from_manifest(manifest, self.d1.id)["fields"]["title"],
@@ -235,9 +235,7 @@ class TestExportImport(
             ).as_posix()
             self.assertIsFile(fname)
             self.assertIsFile(
-                (
-                    self.target / element[document_exporter.EXPORTER_THUMBNAIL_NAME]
-                ).as_posix(),
+                self.target / element[document_exporter.EXPORTER_THUMBNAIL_NAME],
             )
 
             with Path(fname).open("rb") as f:
@@ -252,7 +250,7 @@ class TestExportImport(
             if document_exporter.EXPORTER_ARCHIVE_NAME in element:
                 fname = (
                     self.target / element[document_exporter.EXPORTER_ARCHIVE_NAME]
-                ).as_posix()
+                )
                 self.assertIsFile(fname)
 
                 with Path(fname).open("rb") as f:
@@ -312,7 +310,7 @@ class TestExportImport(
         )
 
         self._do_export()
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         st_mtime_1 = (self.target / "manifest.json").stat().st_mtime
 
@@ -322,7 +320,7 @@ class TestExportImport(
             self._do_export()
             m.assert_not_called()
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
         st_mtime_2 = (self.target / "manifest.json").stat().st_mtime
 
         Path(self.d1.source_path).touch()
@@ -334,7 +332,7 @@ class TestExportImport(
             self.assertEqual(m.call_count, 1)
 
         st_mtime_3 = (self.target / "manifest.json").stat().st_mtime
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         self.assertNotEqual(st_mtime_1, st_mtime_2)
         self.assertNotEqual(st_mtime_2, st_mtime_3)
@@ -352,7 +350,7 @@ class TestExportImport(
 
         self._do_export()
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         with mock.patch(
             "documents.management.commands.document_exporter.copy_file_with_basic_stats",
@@ -360,7 +358,7 @@ class TestExportImport(
             self._do_export()
             m.assert_not_called()
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         self.d2.checksum = "asdfasdgf3"
         self.d2.save()
@@ -371,7 +369,7 @@ class TestExportImport(
             self._do_export(compare_checksums=True)
             self.assertEqual(m.call_count, 1)
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
     def test_update_export_deleted_document(self):
         shutil.rmtree(Path(self.dirs.media_dir) / "documents")
@@ -385,7 +383,7 @@ class TestExportImport(
         self.assertTrue(len(manifest), 7)
         doc_from_manifest = self._get_document_from_manifest(manifest, self.d3.id)
         self.assertIsFile(
-            (self.target / doc_from_manifest[EXPORTER_FILE_NAME]).as_posix(),
+            str(self.target / doc_from_manifest[EXPORTER_FILE_NAME]),
        )
        self.d3.delete()
 
@@ -397,12 +395,12 @@ class TestExportImport(
             self.d3.id,
         )
         self.assertIsFile(
-            (self.target / doc_from_manifest[EXPORTER_FILE_NAME]).as_posix(),
+            self.target / doc_from_manifest[EXPORTER_FILE_NAME],
         )
 
         manifest = self._do_export(delete=True)
         self.assertIsNotFile(
-            (self.target / doc_from_manifest[EXPORTER_FILE_NAME]).as_posix(),
+            self.target / doc_from_manifest[EXPORTER_FILE_NAME],
         )
 
         self.assertTrue(len(manifest), 6)
@@ -416,20 +414,20 @@ class TestExportImport(
         )
 
         self._do_export(use_filename_format=True)
-        self.assertIsFile((self.target / "wow1" / "c.pdf").as_posix())
+        self.assertIsFile(self.target / "wow1" / "c.pdf")
 
-        self.assertIsFile((self.target / "manifest.json").as_posix())
+        self.assertIsFile(self.target / "manifest.json")
 
         self.d1.title = "new_title"
         self.d1.save()
         self._do_export(use_filename_format=True, delete=True)
-        self.assertIsNotFile((self.target / "wow1" / "c.pdf").as_posix())
-        self.assertIsNotDir((self.target / "wow1").as_posix())
-        self.assertIsFile((self.target / "new_title" / "c.pdf").as_posix())
-        self.assertIsFile((self.target / "manifest.json").as_posix())
-        self.assertIsFile((self.target / "wow2" / "none.pdf").as_posix())
+        self.assertIsNotFile(self.target / "wow1" / "c.pdf")
+        self.assertIsNotDir(self.target / "wow1")
+        self.assertIsFile(self.target / "new_title" / "c.pdf")
+        self.assertIsFile(self.target / "manifest.json")
+        self.assertIsFile(self.target / "wow2" / "none.pdf")
         self.assertIsFile(
-            (self.target / "wow2" / "none_01.pdf").as_posix(),
+            self.target / "wow2" / "none_01.pdf",
         )
 
     def test_export_missing_files(self):
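The .as_posix() conversions above are dropped because the filesystem assertions can accept Path objects directly. As an assumed sketch only (this is not the actual FileSystemAssertsMixin implementation), such helpers could look like:

    import os
    from pathlib import Path

    class FileSystemAssertsMixin:
        def assertIsFile(self, path: os.PathLike | str):
            if not Path(path).resolve().is_file():
                raise AssertionError(f"File does not exist: {path}")

        def assertIsNotFile(self, path: os.PathLike | str):
            if Path(path).resolve().is_file():
                raise AssertionError(f"File exists but should not: {path}")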
@@ -20,7 +20,7 @@ def source_path_before(self):
     if self.storage_type == STORAGE_TYPE_GPG:
         fname += ".gpg"
 
-    return (Path(settings.ORIGINALS_DIR) / fname).as_posix()
+    return Path(settings.ORIGINALS_DIR) / fname
 
 
 def file_type_after(self):
@@ -35,7 +35,7 @@ def source_path_after(doc):
     if doc.storage_type == STORAGE_TYPE_GPG:
         fname += ".gpg"  # pragma: no cover
 
-    return (Path(settings.ORIGINALS_DIR) / fname).as_posix()
+    return Path(settings.ORIGINALS_DIR) / fname
 
 
 @override_settings(PASSPHRASE="test")