Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 10 additions & 0 deletions changelog.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,15 @@
# Changelog


## [3.5.2] - not yet released

All of the following changes are relevant only when the file server is an
S3 bucket/CDN:

- polls nodes of type document for the status of their preview thumbnails (only for S3 + CDN setups)
- adds the `/api/documents/thumbnail-img-status/` endpoint


## [3.5.1] - 2025-05-11

### Changes
Expand Down
6 changes: 6 additions & 0 deletions papermerge/core/alembic/README
Original file line number Diff line number Diff line change
Expand Up @@ -26,3 +26,9 @@ Navigate back and forth:
$ alembic downgrade -1
$ alembic upgrade +1
```

To view migrations in chronological order:

```
alembic history
```
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@
down_revision: Union[str, None] = "1a5a9bffcad4"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
conn = op.get_bind()


def upgrade() -> None:
Expand Down Expand Up @@ -47,137 +46,127 @@ def upgrade() -> None:
with op.batch_alter_table("document_types") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=True)

if conn.dialect.name != "sqlite":
op.drop_constraint(
"unique document type per user", "document_types", type_="unique"
)
op.create_unique_constraint(
"unique document type per user",
"document_types",
["name", "user_id"],
)
op.create_unique_constraint(
"unique document type per group",
"document_types",
["name", "group_id"],
)
op.create_foreign_key(
"document_types_group_id_fkey",
"document_types",
"groups",
["group_id"],
["id"],
)
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="document_types",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)
op.drop_constraint(
"unique document type per user", "document_types", type_="unique"
)
op.create_unique_constraint(
"unique document type per user",
"document_types",
["name", "user_id"],
)
op.create_unique_constraint(
"unique document type per group",
"document_types",
["name", "group_id"],
)
op.create_foreign_key(
"document_types_group_id_fkey",
"document_types",
"groups",
["group_id"],
["id"],
)
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="document_types",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)
#### groups
op.add_column("groups", sa.Column("home_folder_id", sa.Uuid(), nullable=True))
op.add_column("groups", sa.Column("inbox_folder_id", sa.Uuid(), nullable=True))

if conn.dialect.name != "sqlite":
op.create_foreign_key(
"groups_inbox_folder_id_fkey",
"groups",
"folders",
["inbox_folder_id"],
["node_id"],
ondelete="CASCADE",
deferrable=True,
)
op.create_foreign_key(
"groups_home_folder_id_fkey",
"groups",
"folders",
["home_folder_id"],
["node_id"],
ondelete="CASCADE",
deferrable=True,
)
op.create_foreign_key(
"groups_inbox_folder_id_fkey",
"groups",
"folders",
["inbox_folder_id"],
["node_id"],
ondelete="CASCADE",
deferrable=True,
)
op.create_foreign_key(
"groups_home_folder_id_fkey",
"groups",
"folders",
["home_folder_id"],
["node_id"],
ondelete="CASCADE",
deferrable=True,
)
### nodes
op.add_column("nodes", sa.Column("group_id", sa.Uuid(), nullable=True))

with op.batch_alter_table("nodes") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=True)

if conn.dialect.name != "sqlite":
op.drop_constraint("unique title per parent per user", "nodes", type_="unique")
op.create_unique_constraint(
"unique title per parent per user",
"nodes",
["parent_id", "title", "user_id"],
)
op.create_unique_constraint(
"unique title per parent per group",
"nodes",
["parent_id", "title", "group_id"],
)
op.create_foreign_key(
"nodes_group_id_fkey",
"nodes",
"groups",
["group_id"],
["id"],
ondelete="CASCADE",
use_alter=True,
)
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="nodes",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)
op.drop_constraint("unique title per parent per user", "nodes", type_="unique")
op.create_unique_constraint(
"unique title per parent per user",
"nodes",
["parent_id", "title", "user_id"],
)
op.create_unique_constraint(
"unique title per parent per group",
"nodes",
["parent_id", "title", "group_id"],
)
op.create_foreign_key(
"nodes_group_id_fkey",
"nodes",
"groups",
["group_id"],
["id"],
ondelete="CASCADE",
use_alter=True,
)
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="nodes",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)

# tags
op.add_column("tags", sa.Column("group_id", sa.Uuid(), nullable=True))

with op.batch_alter_table("tags") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=True)

if conn.dialect.name != "sqlite":
op.drop_constraint("unique tag name per user", "tags", type_="unique")
op.create_unique_constraint(
"unique tag name per user", "tags", ["name", "user_id"]
)
op.create_unique_constraint(
"unique tag name per group", "tags", ["name", "group_id"]
)
op.create_foreign_key(None, "tags", "groups", ["group_id"], ["id"])
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="tags",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)
op.drop_constraint("unique tag name per user", "tags", type_="unique")
op.create_unique_constraint("unique tag name per user", "tags", ["name", "user_id"])
op.create_unique_constraint(
"unique tag name per group", "tags", ["name", "group_id"]
)
op.create_foreign_key(None, "tags", "groups", ["group_id"], ["id"])
op.create_check_constraint(
constraint_name="check__user_id_not_null__or__group_id_not_null",
table_name="tags",
condition="user_id IS NOT NULL OR group_id IS NOT NULL",
)
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###

# tags
if conn.dialect.name != "sqlite":
op.drop_constraint("unique tag name per user/group", "tags", type_="unique")
op.create_unique_constraint(
"unique tag name per user", "tags", ["name", "user_id"]
)
op.drop_constraint("unique tag name per user/group", "tags", type_="unique")
op.create_unique_constraint("unique tag name per user", "tags", ["name", "user_id"])

with op.batch_alter_table("tags") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=False)

op.drop_column("tags", "group_id")

# nodes
if conn.dialect.name != "sqlite":
op.drop_constraint("nodes_group_id_fkey", "nodes", type_="foreignkey")
op.drop_constraint(
"unique title per parent per user/group", "nodes", type_="unique"
)
op.create_unique_constraint(
"unique title per parent per user",
"nodes",
["parent_id", "title", "user_id"],
)
op.drop_constraint("nodes_group_id_fkey", "nodes", type_="foreignkey")
op.drop_constraint(
"unique title per parent per user/group", "nodes", type_="unique"
)
op.create_unique_constraint(
"unique title per parent per user",
"nodes",
["parent_id", "title", "user_id"],
)

with op.batch_alter_table("nodes") as batch_op:
batch_op.alter_column(
Expand All @@ -187,35 +176,32 @@ def downgrade() -> None:
op.drop_column("nodes", "group_id")

# groups
if conn.dialect.name != "sqlite":
op.drop_constraint("groups_home_folder_id_fkey", "groups", type_="foreignkey")
op.drop_constraint("groups_inbox_folder_id_fkey", "groups", type_="foreignkey")
op.drop_constraint("groups_home_folder_id_fkey", "groups", type_="foreignkey")
op.drop_constraint("groups_inbox_folder_id_fkey", "groups", type_="foreignkey")

op.drop_column("groups", "inbox_folder_id")
op.drop_column("groups", "home_folder_id")

# document_types
if conn.dialect.name != "sqlite":
op.drop_constraint(
"document_types_group_id_fkey", "document_types", type_="foreignkey"
)
op.drop_constraint(
"unique document type per user/group", "document_types", type_="unique"
)
op.create_unique_constraint(
"unique document type per user", "document_types", ["name", "user_id"]
)
op.drop_constraint(
"document_types_group_id_fkey", "document_types", type_="foreignkey"
)
op.drop_constraint(
"unique document type per user/group", "document_types", type_="unique"
)
op.create_unique_constraint(
"unique document type per user", "document_types", ["name", "user_id"]
)

with op.batch_alter_table("document_types") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=False)

op.drop_column("document_types", "group_id")

# custom_fields
if conn.dialect.name != "sqlite":
op.drop_constraint(
"custom_fields_group_id_fkey", "custom_fields", type_="foreignkey"
)
op.drop_constraint(
"custom_fields_group_id_fkey", "custom_fields", type_="foreignkey"
)

with op.batch_alter_table("custom_fields") as batch_op:
batch_op.alter_column("user_id", existing_type=sa.UUID(), nullable=False)
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
"""add documents.preview_status and documents.preview_error fields

Revision ID: a03014b93c1e
Revises: 2118951c4d90
Create Date: 2025-05-12 07:25:19.171857

"""

from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# Revision identifiers, used by Alembic to place this migration in the chain.
revision: str = "a03014b93c1e"  # unique id of this migration script
down_revision: Union[str, None] = "2118951c4d90"  # migration applied immediately before this one
branch_labels: Union[str, Sequence[str], None] = None  # no branch labels for this revision
depends_on: Union[str, Sequence[str], None] = None  # no cross-branch dependencies


def upgrade() -> None:
    """Add nullable ``preview_status`` and ``preview_error`` string columns
    to the ``documents`` table (thumbnail-generation bookkeeping)."""
    for column_name in ("preview_status", "preview_error"):
        op.add_column(
            "documents", sa.Column(column_name, sa.String(), nullable=True)
        )


def downgrade() -> None:
    """Drop the columns added by :func:`upgrade`, in reverse order."""
    for column_name in ("preview_error", "preview_status"):
        op.drop_column("documents", column_name)
13 changes: 13 additions & 0 deletions papermerge/core/cache/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
from papermerge.core import config
from .empty import Client as EmptyClient
from .redis_client import Client as RedisClient

settings = config.get_settings()

redis_url = settings.papermerge__redis__url
cache_enabled = settings.papermerge__main__cache_enabled

# Use the real redis-backed cache only when a redis URL is configured AND
# caching is explicitly enabled; in every other case fall back to the
# no-op client so callers never need to special-case a disabled cache.
client = (
    RedisClient(redis_url) if redis_url and cache_enabled else EmptyClient()
)
10 changes: 10 additions & 0 deletions papermerge/core/cache/empty.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
class Client:
    """No-op cache client used when caching is disabled.

    Mirrors the interface of the redis-backed client so callers can use
    either implementation interchangeably.
    """

    def get(self, key):
        """Always report a cache miss."""
        return None

    def set(self, key, value, ex: int = 60):
        """Discard *value*; ``ex`` (seconds) is accepted only for interface parity."""
        return None


def get_client():
    """Return a fresh no-op cache client."""
    no_op_client = Client()
    return no_op_client
21 changes: 21 additions & 0 deletions papermerge/core/cache/redis_client.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import redis


class Client:
    """Thin cache wrapper around a redis connection built from *url*."""

    def __init__(self, url):
        # Keep the url for introspection; the connection pool itself lives
        # inside the redis client object.
        self.url = url
        self.client = redis.from_url(url)

    def get(self, key):
        """Return the cached UTF-8 string stored under *key*, or ``None``
        on a cache miss.

        Uses a single GET round trip instead of EXISTS followed by GET:
        one fewer network hop, and it avoids the race where the key
        expires between the two calls (EXISTS succeeds, GET returns None,
        and ``.decode`` would raise AttributeError).
        """
        value = self.client.get(key)
        if value is None:
            return None
        return value.decode("utf-8")

    def set(self, key, value, ex: int = 60):
        """Store *value* under *key*; ``ex`` is the number of SECONDS
        until the key expires."""
        self.client.set(key, value, ex)


def get_client(url):
    """Build and return a redis-backed cache client for *url*."""
    redis_client = Client(url)
    return redis_client
Loading