diff --git a/.gitignore b/.gitignore index 74e5f63a..573cb1d5 100644 --- a/.gitignore +++ b/.gitignore @@ -32,6 +32,7 @@ MANIFEST # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec +.aider* # Installer logs pip-log.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7aadb8cf..0b370c7d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -31,6 +31,6 @@ repos: - id: flake8 - repo: https://github.com/RobertCraigie/pyright-python - rev: v1.1.364 + rev: v1.1.389 hooks: - id: pyright diff --git a/Dockerfile b/Dockerfile index 1acc8b26..b3e3fa4b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,12 +22,13 @@ RUN apt-get update -y \ && poetry --version \ # Configure to use system instead of virtualenvs && poetry config virtualenvs.create false \ - && poetry install --no-root \ + && poetry install --no-root --no-cache --no-interaction \ # Clean-up && pip uninstall -y poetry virtualenv-clone virtualenv \ && apt-get remove -y gcc libc-dev libproj-dev \ && apt-get autoremove -y \ - && rm -rf /var/lib/apt/lists/* + && rm -rf /var/lib/apt/lists/* \ + && rm -rf /root/.cache/ COPY . 
/code/ diff --git a/apps/subscription_manager/__init__.py b/apps/__init__.py similarity index 100% rename from apps/subscription_manager/__init__.py rename to apps/__init__.py diff --git a/apps/cap_feed/admin.py b/apps/cap_feed/admin.py index c2f7babe..af211ebf 100644 --- a/apps/cap_feed/admin.py +++ b/apps/cap_feed/admin.py @@ -153,6 +153,7 @@ class FeedAdmin(admin.ModelAdmin): list_filter = ( 'format', 'country__region', + 'enable_polling', AutocompleteFilterFactory('Country', 'country'), ) search_fields = ['url'] diff --git a/apps/cap_feed/dataloaders.py b/apps/cap_feed/dataloaders.py index a0fdc3c6..a26a2940 100644 --- a/apps/cap_feed/dataloaders.py +++ b/apps/cap_feed/dataloaders.py @@ -63,7 +63,23 @@ def load_feed(keys: list[int]) -> list['FeedType']: return _load_model(Feed, keys) # type: ignore[reportGeneralTypeIssues] -def load_admin1_by_alert(keys: list[int]) -> list[list['Admin1Type']]: +def load_admin1_by_admin1s(keys_array: list[tuple[int]]) -> list[list['Admin1Type']]: + keys = [key for keys in keys_array for key in keys] + qs = Admin1.objects.filter(id__in=keys) + + _map = defaultdict(list) + admin1_map = {admin1.pk: admin1 for admin1 in qs.all()} + + for keys in keys_array: + for key in keys: + if key not in admin1_map: + continue + _map[keys].append(admin1_map[key]) + + return [_map[keys] for keys in keys_array] + + +def load_admin1s_by_alert(keys: list[int]) -> list[list['Admin1Type']]: qs = ( AlertAdmin1.objects.filter(alert__in=keys) .order_by() @@ -228,9 +244,13 @@ def load_continent(self): def load_feed(self): return DataLoader(load_fn=sync_to_async(load_feed)) + @cached_property + def load_admin1_by_admin1s(self): + return DataLoader(load_fn=sync_to_async(load_admin1_by_admin1s)) + @cached_property def load_admin1s_by_alert(self): - return DataLoader(load_fn=sync_to_async(load_admin1_by_alert)) + return DataLoader(load_fn=sync_to_async(load_admin1s_by_alert)) @cached_property def load_admin1s_by_country(self): diff --git 
a/apps/cap_feed/factories.py b/apps/cap_feed/factories.py new file mode 100644 index 00000000..c3a6ec94 --- /dev/null +++ b/apps/cap_feed/factories.py @@ -0,0 +1,71 @@ +import datetime + +import factory +from factory.django import DjangoModelFactory + +from .models import Admin1, Alert, AlertInfo, Country, Feed, Region + + +class RegionFactory(DjangoModelFactory): + ifrc_go_id = factory.Sequence(lambda n: n) + name = factory.Sequence(lambda n: f'Region-{n}') + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = Region + + +class CountryFactory(DjangoModelFactory): + ifrc_go_id = factory.Sequence(lambda n: n) + name = factory.Sequence(lambda n: f'Country-{n}') + iso3 = factory.Sequence(lambda n: f"{n:0>3}") + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = Country + + +class FeedFactory(DjangoModelFactory): + url = factory.Sequence(lambda n: f"https://source-{n}.com/test") + format = Feed.Format.RSS + polling_interval = Feed.PoolingInterval.I_10m + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = Feed + + +class Admin1Factory(DjangoModelFactory): + ifrc_go_id = factory.Sequence(lambda n: n) + name = factory.Sequence(lambda n: f'Admin1-{n}') + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = Admin1 + + +class AlertFactory(DjangoModelFactory): + url = factory.Sequence(lambda n: f"https://alert-{n}.com/test") + identifier = "Identifier-X" + sender = "Sender-X" + sent = datetime.datetime(year=2024, month=1, day=1) + status = Alert.Status.ACTUAL + msg_type = Alert.MsgType.ALERT + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = Alert + + @factory.post_generation + def admin1s(self, create, extracted, **_): + if not create: + return + if extracted: + for author in extracted: + self.admin1s.add(author) # type: ignore[reportAttributeAccessIssue] + + +class AlertInfoFactory(DjangoModelFactory): + event = "Event-X" + category = 
AlertInfo.Category.HEALTH + urgency = AlertInfo.Urgency.IMMEDIATE + severity = AlertInfo.Severity.EXTREME + certainty = AlertInfo.Certainty.OBSERVED + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = AlertInfo diff --git a/apps/cap_feed/filters.py b/apps/cap_feed/filters.py index 81ac31dc..7928b520 100644 --- a/apps/cap_feed/filters.py +++ b/apps/cap_feed/filters.py @@ -12,40 +12,15 @@ from .models import Admin1, Alert, AlertInfo, Country, Feed, Region -@strawberry_django.filters.filter(Alert, lookups=True) -class AlertFilter: +@strawberry_django.filters.filter(AlertInfo, lookups=True) +class AlertInfoFilter: id: strawberry.auto - country: strawberry.auto - sent: strawberry.auto - - @strawberry_django.filter_field - def region( - self, - queryset: models.QuerySet, - value: strawberry.ID, - prefix: str, - ) -> tuple[models.QuerySet, models.Q]: - return queryset, models.Q(**{f"{prefix}country__region": value}) - - @strawberry_django.filter_field - def admin1( - self, - queryset: models.QuerySet, - value: strawberry.ID, - prefix: str, - ) -> tuple[models.QuerySet, models.Q]: - return queryset, models.Q(**{f"{prefix}admin1s": value}) def _info_enum_fields(self, field, queryset, value, prefix) -> tuple[models.QuerySet, models.Q]: if value: - alias_field = f"_infos_{field}_list" - queryset = queryset.alias( - **{ - # NOTE: To avoid duplicate alerts when joining infos - alias_field: ArrayAgg(f"{prefix}infos__{field}"), - } - ) - return queryset, models.Q(**{f"{prefix}{alias_field}__overlap": value}) + # NOTE: With this field, disctinct should be used by the client + print(f"{prefix}{field}__in") + return queryset, models.Q(**{f"{prefix}{field}__in": value}) return queryset, models.Q() @strawberry_django.filter_field @@ -85,9 +60,30 @@ def category( return self._info_enum_fields("category", queryset, value, prefix) -@strawberry_django.filters.filter(AlertInfo, lookups=True) -class AlertInfoFilter: +@strawberry_django.filters.filter(Alert, 
lookups=True) +class AlertFilter: id: strawberry.auto + country: strawberry.auto + sent: strawberry.auto + infos: AlertInfoFilter | None + + @strawberry_django.filter_field + def region( + self, + queryset: models.QuerySet, + value: strawberry.ID, + prefix: str, + ) -> tuple[models.QuerySet, models.Q]: + return queryset, models.Q(**{f"{prefix}country__region": value}) + + @strawberry_django.filter_field + def admin1( + self, + queryset: models.QuerySet, + value: strawberry.ID, + prefix: str, + ) -> tuple[models.QuerySet, models.Q]: + return queryset, models.Q(**{f"{prefix}admin1s": value}) @strawberry_django.filters.filter(Feed, lookups=True) diff --git a/apps/cap_feed/fixtures/test_data.json b/apps/cap_feed/fixtures/test_data.json index 33bdae15..e7579437 100644 --- a/apps/cap_feed/fixtures/test_data.json +++ b/apps/cap_feed/fixtures/test_data.json @@ -18,7 +18,8 @@ "pk": 1, "fields": { "name": "test_country", - "iso3": "ISO" + "iso3": "ISO", + "region": 1 } }, { diff --git a/apps/cap_feed/migrations/0009_alert_is_processed_by_subscription.py b/apps/cap_feed/migrations/0009_alert_is_processed_by_subscription.py new file mode 100644 index 00000000..89e59009 --- /dev/null +++ b/apps/cap_feed/migrations/0009_alert_is_processed_by_subscription.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.13 on 2024-11-24 09:22 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('cap_feed', '0008_alert_cap_feed_alert_not_expired_idx'), + ] + + operations = [ + migrations.AddField( + model_name='alert', + name='is_processed_by_subscription', + field=models.BooleanField(null=True), + ), + ] diff --git a/apps/cap_feed/migrations/0010_alter_alert_is_processed_by_subscription_and_more.py b/apps/cap_feed/migrations/0010_alter_alert_is_processed_by_subscription_and_more.py new file mode 100644 index 00000000..7029a2aa --- /dev/null +++ b/apps/cap_feed/migrations/0010_alter_alert_is_processed_by_subscription_and_more.py @@ -0,0 
+1,22 @@ +# Generated by Django 4.2.13 on 2024-11-24 09:31 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('cap_feed', '0009_alert_is_processed_by_subscription'), + ] + + operations = [ + migrations.AlterField( + model_name='alert', + name='is_processed_by_subscription', + field=models.BooleanField(default=False, null=True), + ), + migrations.AddIndex( + model_name='alert', + index=models.Index(condition=models.Q(('is_processed_by_subscription', False)), fields=['is_processed_by_subscription'], name='cap_feed_alert_subscription_ix'), + ), + ] diff --git a/apps/cap_feed/migrations/0011_alter_alertinfo_category_alter_alertinfo_certainty_and_more.py b/apps/cap_feed/migrations/0011_alter_alertinfo_category_alter_alertinfo_certainty_and_more.py new file mode 100644 index 00000000..11d2e7af --- /dev/null +++ b/apps/cap_feed/migrations/0011_alter_alertinfo_category_alter_alertinfo_certainty_and_more.py @@ -0,0 +1,33 @@ +# Generated by Django 4.2.13 on 2024-11-26 12:10 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('cap_feed', '0010_alter_alert_is_processed_by_subscription_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='alertinfo', + name='category', + field=models.CharField(choices=[('Geo', 'Geo'), ('Met', 'Met'), ('Safety', 'Safety'), ('Security', 'Security'), ('Rescue', 'Rescue'), ('Fire', 'Fire'), ('Health', 'Health'), ('Env', 'Env'), ('Transport', 'Transport'), ('Infra', 'Infra'), ('CBRNE', 'CBRNE'), ('Other', 'Other')], db_index=True), + ), + migrations.AlterField( + model_name='alertinfo', + name='certainty', + field=models.CharField(choices=[('Observed', 'Observed'), ('Likely', 'Likely'), ('Possible', 'Possible'), ('Unlikely', 'Unlikely'), ('Unknown', 'Unknown')], db_index=True), + ), + migrations.AlterField( + model_name='alertinfo', + name='severity', + field=models.CharField(choices=[('Extreme', 'Extreme'), 
('Severe', 'Severe'), ('Moderate', 'Moderate'), ('Minor', 'Minor'), ('Unknown', 'Unknown')], db_index=True), + ), + migrations.AlterField( + model_name='alertinfo', + name='urgency', + field=models.CharField(choices=[('Immediate', 'Immediate'), ('Expected', 'Expected'), ('Future', 'Future'), ('Past', 'Past'), ('Unknown', 'Unknown')], db_index=True), + ), + ] diff --git a/apps/cap_feed/models.py b/apps/cap_feed/models.py index 4acbe7d7..47d4aac6 100644 --- a/apps/cap_feed/models.py +++ b/apps/cap_feed/models.py @@ -191,6 +191,9 @@ class Scope(models.TextChoices): # XXX: Not used, maybe we need to use this in # This is updated by the system to filter out is_expired is_expired = models.BooleanField(default=False) + # TODO: Keep this true for existing alerts and then default=True for future alerts + # NOTE: null=True is to avoid full rewrite of the table: https://docs.djangoproject.com/en/5.1/ref/migration-operations/#addfield # noqa + is_processed_by_subscription = models.BooleanField(default=False, null=True) identifier = models.CharField() sender = models.CharField() @@ -219,7 +222,12 @@ class Meta: # type: ignore [reportIncompatibleVariableOverride] fields=['is_expired'], name='%(app_label)s_%(class)s_not_expired_idx', condition=models.Q(is_expired=False), - ) + ), + models.Index( + fields=['is_processed_by_subscription'], + name='%(app_label)s_%(class)s_subscription_ix', + condition=models.Q(is_processed_by_subscription=False), + ), ] def __init__(self, *args, **kwargs): @@ -300,12 +308,12 @@ class Certainty(models.TextChoices): alert = models.ForeignKey(Alert, on_delete=models.CASCADE, related_name='infos') language = models.CharField(blank=True, default='en-US') - category = models.CharField(choices=Category.choices) + category = models.CharField(choices=Category.choices, db_index=True) event = models.CharField() response_type = models.CharField(choices=ResponseType.choices, blank=True, null=True, default=None) - urgency = 
models.CharField(choices=Urgency.choices) - severity = models.CharField(choices=Severity.choices) - certainty = models.CharField(choices=Certainty.choices) + urgency = models.CharField(choices=Urgency.choices, db_index=True) + severity = models.CharField(choices=Severity.choices, db_index=True) + certainty = models.CharField(choices=Certainty.choices, db_index=True) audience = models.CharField(blank=True, null=True, default=None) event_code = models.CharField(blank=True, null=True, default=None) # effective = models.DateTimeField(default=Alert.objects.get(pk=alert).sent) diff --git a/apps/cap_feed/queries.py b/apps/cap_feed/queries.py index fb1ca2a0..31ac83a4 100644 --- a/apps/cap_feed/queries.py +++ b/apps/cap_feed/queries.py @@ -1,10 +1,17 @@ +import typing + import strawberry import strawberry_django from django.db import models from strawberry_django.filters import apply as apply_filters +from strawberry_django.pagination import OffsetPaginationInput from main.graphql.context import Info -from utils.strawberry.paginations import CountList, pagination_field +from utils.strawberry.paginations import ( + CountList, + count_list_resolver, + pagination_field, +) from .filters import ( Admin1Filter, @@ -138,6 +145,22 @@ async def alert(self, info: Info, pk: strawberry.ID) -> AlertType | None: async def alert_info(self, info: Info, pk: strawberry.ID) -> AlertInfoType | None: return await AlertInfoType.get_queryset(None, None, info).filter(pk=pk).afirst() + @strawberry_django.field + async def historical_alerts( + self, + info: Info, + filters: typing.Optional[AlertFilter] = strawberry.UNSET, + pagination: typing.Optional[OffsetPaginationInput] = strawberry.UNSET, + ) -> CountList[AlertType]: + queryset = get_alert_queryset(None, is_active=False) + return count_list_resolver( + info, + queryset, + AlertType, + filters=filters, # type: ignore[reportArgumentType] + pagination=pagination, # type: ignore[reportArgumentType] + ) + @strawberry.type class PrivateQuery: diff 
--git a/apps/cap_feed/tests.py b/apps/cap_feed/tests.py index 8d1aacc9..65f98cf2 100644 --- a/apps/cap_feed/tests.py +++ b/apps/cap_feed/tests.py @@ -14,7 +14,7 @@ class AlertModelTests(TestCase): - fixtures = ['cap_feed/fixtures/test_data.json'] + fixtures = ['apps/cap_feed/fixtures/test_data.json'] def create_alert(self, url='', days=1): alert = Alert() @@ -75,7 +75,7 @@ def test_django_timezone_is_utc(self): assert timezone.get_default_timezone_name() == 'UTC' assert timezone.get_current_timezone_name() == 'UTC' - def test_expired_alert_is_removed(self): + def test_expired_alert_is_kept(self): """ Is an expired alert identified and removed from the database? """ @@ -83,8 +83,8 @@ def test_expired_alert_is_removed(self): previous_alert_count = Alert.objects.count() previous_alert_info_count = AlertInfo.objects.count() tasks.tag_expired_alerts() - assert Alert.objects.count() == previous_alert_count - 1 - assert AlertInfo.objects.count() == previous_alert_info_count - 1 + assert Alert.objects.count() == previous_alert_count + assert AlertInfo.objects.count() == previous_alert_info_count def test_active_alert_is_kept(self): """ @@ -92,26 +92,26 @@ def test_active_alert_is_kept(self): """ self.create_alert(days=1) previous_alert_count = Alert.objects.filter(is_expired=False).count() - previous_alert_info_count = AlertInfo.objects.filter(is_expired=False).count() + previous_alert_info_count = AlertInfo.objects.filter(alert__is_expired=False).count() total_previous_alert_count = Alert.objects.count() total_previous_alert_info_count = AlertInfo.objects.count() tasks.tag_expired_alerts() assert Alert.objects.filter(is_expired=False).count() == previous_alert_count - assert AlertInfo.objects.filter(is_expired=False).count() == previous_alert_info_count + assert AlertInfo.objects.filter(alert__is_expired=False).count() == previous_alert_info_count assert Alert.objects.count() == total_previous_alert_count assert AlertInfo.objects.count() == total_previous_alert_info_count 
- def test_deleted_alert_is_removed(self): + def test_deleted_alert_is_expired(self): """ Is an existing active alert removed from the database when it is deleted from the feed? """ self.create_alert(url='test_url', days=1) - previous_alert_count = Alert.objects.count() - previous_alert_info_count = AlertInfo.objects.count() + previous_alert_count = Alert.objects.filter(is_expired=False).count() + previous_alert_info_count = AlertInfo.objects.filter(alert__is_expired=False).count() with mock.patch('sys.stdout', new=StringIO()): get_alerts(Feed.objects.get(url="test_feed"), set()) - assert Alert.objects.count() == previous_alert_count - 1 - assert AlertInfo.objects.count() == previous_alert_info_count - 1 + assert Alert.objects.filter(is_expired=False).count() == previous_alert_count - 1 + assert AlertInfo.objects.filter(alert__is_expired=False).count() == previous_alert_info_count - 1 def test_persisting_alert_is_kept(self): """ diff --git a/apps/cap_feed/types.py b/apps/cap_feed/types.py index 8938d5c4..276dcabb 100644 --- a/apps/cap_feed/types.py +++ b/apps/cap_feed/types.py @@ -140,7 +140,7 @@ class Admin1Type: id: strawberry.ID ifrc_go_id: strawberry.ID | None name = string_field(Admin1.name) - bbox: PolygonScalar | None + bbox: PolygonScalar | None # XXX: Use dataloader instead? 
if typing.TYPE_CHECKING: country_id = Admin1.country_id diff --git a/apps/common/templatetags/custom_tags.py b/apps/common/templatetags/custom_tags.py new file mode 100644 index 00000000..356f0162 --- /dev/null +++ b/apps/common/templatetags/custom_tags.py @@ -0,0 +1,17 @@ +from django import template +from django.conf import settings +from django.core.files.storage import FileSystemStorage, get_storage_class +from django.templatetags.static import static + +register = template.Library() + +StorageClass = get_storage_class() + + +@register.filter(is_safe=True) +def static_full_path(path): + static_path = static(path) + if StorageClass == FileSystemStorage: + return f"{settings.APP_HTTP_PROTOCOL}://{settings.APP_DOMAIN}{static_path}" + # With s3 storage + return static_path diff --git a/apps/subscription/admin.py b/apps/subscription/admin.py index ee78e04a..9d8cbf0e 100644 --- a/apps/subscription/admin.py +++ b/apps/subscription/admin.py @@ -1,22 +1,22 @@ +from admin_auto_filters.filters import AutocompleteFilterFactory from django.contrib import admin -from apps.subscription_manager.models import SubscriptionAlerts +from .models import SubscriptionAlert, UserAlertSubscription -from .models import Subscription +@admin.register(UserAlertSubscription) +class UserAlertSubscriptionAdmin(admin.ModelAdmin): + list_display = ["name", "is_active"] + search_fields = ("name",) + autocomplete_fields = ("user",) + list_filter = ( + AutocompleteFilterFactory("User", "user"), + "is_active", + ) -class SubscriptionAlertsInline(admin.StackedInline): - model = SubscriptionAlerts - extra = 0 - -class SubscriptionAdmin(admin.ModelAdmin): - # using = 'AlertDB' - list_display = ["id", "subscription_name"] - search_fields = ["id", "subscription_name"] - - inlines = [SubscriptionAlertsInline] - - -# Register your models here. 
-admin.site.register(Subscription) # , SubscriptionAdmin) +@admin.register(SubscriptionAlert) +class SubscriptionAlertAdmin(admin.ModelAdmin): + list_display = ["subscription", "alert"] + autocomplete_fields = ["subscription", "alert"] + list_filter = (AutocompleteFilterFactory("Subscription", "subscription"),) diff --git a/apps/subscription/emails.py b/apps/subscription/emails.py new file mode 100644 index 00000000..ce71941e --- /dev/null +++ b/apps/subscription/emails.py @@ -0,0 +1,92 @@ +from datetime import timedelta + +from django.db import models +from django.utils import timezone +from django.utils.encoding import force_bytes +from django.utils.http import urlsafe_base64_encode + +from apps.subscription.models import SubscriptionAlert, UserAlertSubscription +from apps.user.models import EmailNotificationType, User +from main.permalinks import Permalink +from main.tokens import TokenManager +from utils.emails import send_email + + +def generate_unsubscribe_user_alert_subscription_url(subscription: UserAlertSubscription) -> str: + uid = urlsafe_base64_encode(force_bytes(subscription.pk)) + # TODO: Fix typing + token = TokenManager.user_subscription_unsubscribe_generator.make_token(subscription) # type: ignore[reportArgumentType] + return Permalink.unsubscribe_user_alert_subscription(uid, token) + + +def generate_user_alert_subscription_email_context( + user: User, + email_frequency: UserAlertSubscription.EmailFrequency, +) -> tuple[dict, models.QuerySet[UserAlertSubscription]]: + # NOTE: Number of subscription is static and less than UserAlertSubscription.LIMIT_PER_USER + subscription_qs = UserAlertSubscription.objects.filter(user=user, email_frequency=email_frequency) + + if email_frequency == UserAlertSubscription.EmailFrequency.DAILY: + from_datetime_threshold = timezone.now() - timedelta(hours=24) + elif email_frequency == UserAlertSubscription.EmailFrequency.WEEKLY: + from_datetime_threshold = timezone.now() - timedelta(days=7) + elif email_frequency == 
UserAlertSubscription.EmailFrequency.MONTHLY: + # TODO: Calculate days instead of using 30 days + from_datetime_threshold = timezone.now() - timedelta(days=30) + + subscription_data = [ + { + 'subscription': subscription, + 'unsubscribe_url': generate_unsubscribe_user_alert_subscription_url(subscription), + 'latest_alerts': [ + subscription_alert.alert + # NOTE: N+1 query, but N < 10 for now + # TODO: Index/partition alert__sent column? + for subscription_alert in ( + SubscriptionAlert.objects.select_related('alert') + .filter( + subscription=subscription, + alert__sent__gte=from_datetime_threshold, + ) + .order_by('-alert__sent')[:5] + ) + ], + } + for subscription in subscription_qs + ] + + context = { + 'subscriptions': subscription_data, + } + + return context, subscription_qs + + +def send_user_alert_subscription_email(user: User, email_frequency: UserAlertSubscription.EmailFrequency): + context, subscription_qs = generate_user_alert_subscription_email_context(user, email_frequency) + sent_at = timezone.now() + + send_email( + user=user, + email_type=EmailNotificationType.ALERT_SUBSCRIPTIONS, + subject="Daily Alerts", # TODO: Is this fine? 
+ email_html_template='emails/subscription/body.html', + email_text_template='emails/subscription/body.txt', + context=context, + ) + + # Post action + subscription_qs.update(email_last_sent_at=sent_at) + + +def send_user_alert_subscriptions_email(email_frequency: UserAlertSubscription.EmailFrequency): + # TODO: Send in parallel if email service supports it + users_qs = User.objects.filter( + id__in=UserAlertSubscription.objects.filter(email_frequency=email_frequency).values('user'), + ) + + # TODO: Handle failure + for user in users_qs.iterator(): + # TODO: Trigger this as cronjob + # TODO: Pass timezone.now for ref time + send_user_alert_subscription_email(user, email_frequency) diff --git a/apps/subscription/enums.py b/apps/subscription/enums.py new file mode 100644 index 00000000..624be73a --- /dev/null +++ b/apps/subscription/enums.py @@ -0,0 +1,28 @@ +import strawberry + +from apps.cap_feed.enums import ( + AlertInfoCategoryEnum, + AlertInfoCertaintyEnum, + AlertInfoSeverityEnum, + AlertInfoUrgencyEnum, +) +from utils.strawberry.enums import get_enum_name_from_django_field + +from .models import UserAlertSubscription + +UserAlertSubscriptionEmailFrequencyEnum = strawberry.enum( + UserAlertSubscription.EmailFrequency, name='UserAlertSubscriptionEmailFrequencyEnum' +) + + +enum_map = { + get_enum_name_from_django_field(field): enum + for field, enum in ( + (UserAlertSubscription.email_frequency, UserAlertSubscriptionEmailFrequencyEnum), + # Filters + (UserAlertSubscription.filter_alert_urgencies, AlertInfoUrgencyEnum), + (UserAlertSubscription.filter_alert_severities, AlertInfoSeverityEnum), + (UserAlertSubscription.filter_alert_certainties, AlertInfoCertaintyEnum), + (UserAlertSubscription.filter_alert_categories, AlertInfoCategoryEnum), + ) +} diff --git a/apps/subscription/factories.py b/apps/subscription/factories.py new file mode 100644 index 00000000..7d614d66 --- /dev/null +++ b/apps/subscription/factories.py @@ -0,0 +1,16 @@ +import factory +from 
factory.django import DjangoModelFactory + +from .models import UserAlertSubscription + + +class UserAlertSubscriptionFactory(DjangoModelFactory): + name = factory.Sequence(lambda n: f'Subscription-{n}') + filter_alert_admin1s = [] + filter_alert_urgencies = [] + filter_alert_severities = [] + filter_alert_certainties = [] + filter_alert_categories = [] + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = UserAlertSubscription diff --git a/apps/subscription/filters.py b/apps/subscription/filters.py new file mode 100644 index 00000000..5d0d2686 --- /dev/null +++ b/apps/subscription/filters.py @@ -0,0 +1,11 @@ +import strawberry +import strawberry_django + +from .models import UserAlertSubscription + + +@strawberry_django.filters.filter(UserAlertSubscription, lookups=True) +class UserAlertSubscriptionFilter: + id: strawberry.auto + is_active: strawberry.auto + notify_by_email: strawberry.auto diff --git a/apps/subscription/migrations/0001_initial.py b/apps/subscription/migrations/0001_initial.py index baa5c5e1..a0f88f7e 100644 --- a/apps/subscription/migrations/0001_initial.py +++ b/apps/subscription/migrations/0001_initial.py @@ -1,7 +1,8 @@ -# Generated by Django 5.0.3 on 2024-03-11 12:14 +# Generated by Django 4.2.13 on 2024-11-17 13:22 -import django.contrib.postgres.fields +from django.conf import settings from django.db import migrations, models +import django.db.models.deletion class Migration(migrations.Migration): @@ -9,22 +10,37 @@ class Migration(migrations.Migration): initial = True dependencies = [ + migrations.swappable_dependency(settings.AUTH_USER_MODEL), + ('cap_feed', '0008_alert_cap_feed_alert_not_expired_idx'), ] operations = [ migrations.CreateModel( - name='Subscription', + name='SubscriptionAlert', fields=[ - ('id', models.AutoField(primary_key=True, serialize=False)), - ('subscription_name', models.CharField(default='', max_length=512, verbose_name='subscription_name')), - ('user_id', models.IntegerField(default=0, 
verbose_name='user_id')), - ('country_ids', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(verbose_name='country_ids'), default=list, size=None)), - ('admin1_ids', django.contrib.postgres.fields.ArrayField(base_field=models.IntegerField(verbose_name='admin1_ids'), default=list, size=None)), - ('urgency_array', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(verbose_name='urgency_array'), default=list, size=None)), - ('severity_array', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(verbose_name='severity_array'), default=list, size=None)), - ('certainty_array', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(verbose_name='certainty_array'), default=list, size=None)), - ('subscribe_by', django.contrib.postgres.fields.ArrayField(base_field=models.CharField(verbose_name='subscribe_by'), default=list, size=None)), - ('sent_flag', models.IntegerField(default=0, verbose_name='sent_flag')), + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('alert', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='cap_feed.alert')), ], ), + migrations.CreateModel( + name='UserAlertSubscription', + fields=[ + ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('created_at', models.DateTimeField(auto_now_add=True)), + ('modified_at', models.DateTimeField(auto_now=True)), + ('name', models.CharField(max_length=255)), + ('is_active', models.BooleanField(default=True)), + ('alert_filters', models.JSONField(default=dict)), + ('notify_by_email', models.BooleanField(default=False)), + ('email_frequency', models.PositiveSmallIntegerField(choices=[(1, 'Daily'), (2, 'Weekly'), (3, 'Monthly')], default=2)), + ('email_last_sent_at', models.DateTimeField(blank=True, null=True)), + ('alerts', models.ManyToManyField(blank=True, related_name='subscriptions', 
through='subscription.SubscriptionAlert', to='cap_feed.alert')), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), + ], + ), + migrations.AddField( + model_name='subscriptionalert', + name='subscription', + field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='subscription.useralertsubscription'), + ), ] diff --git a/apps/subscription/migrations/0002_remove_useralertsubscription_alert_filters_and_more.py b/apps/subscription/migrations/0002_remove_useralertsubscription_alert_filters_and_more.py new file mode 100644 index 00000000..94e8409e --- /dev/null +++ b/apps/subscription/migrations/0002_remove_useralertsubscription_alert_filters_and_more.py @@ -0,0 +1,51 @@ +# Generated by Django 4.2.13 on 2024-11-24 09:22 + +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + dependencies = [ + ('cap_feed', '0009_alert_is_processed_by_subscription'), + ('subscription', '0001_initial'), + ] + + operations = [ + migrations.RemoveField( + model_name='useralertsubscription', + name='alert_filters', + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_admin1s', + field=django.contrib.postgres.fields.ArrayField(base_field=models.BigIntegerField(), blank=True, default=list, size=None), + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_categories', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(choices=[('Geo', 'Geo'), ('Met', 'Met'), ('Safety', 'Safety'), ('Security', 'Security'), ('Rescue', 'Rescue'), ('Fire', 'Fire'), ('Health', 'Health'), ('Env', 'Env'), ('Transport', 'Transport'), ('Infra', 'Infra'), ('CBRNE', 'CBRNE'), ('Other', 'Other')]), blank=True, default=list, size=None), + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_certainties', + 
field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(choices=[('Observed', 'Observed'), ('Likely', 'Likely'), ('Possible', 'Possible'), ('Unlikely', 'Unlikely'), ('Unknown', 'Unknown')]), blank=True, default=list, size=None), + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_country', + field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.PROTECT, to='cap_feed.country'), + preserve_default=False, + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_severities', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(choices=[('Extreme', 'Extreme'), ('Severe', 'Severe'), ('Moderate', 'Moderate'), ('Minor', 'Minor'), ('Unknown', 'Unknown')]), blank=True, default=list, size=None), + ), + migrations.AddField( + model_name='useralertsubscription', + name='filter_alert_urgencies', + field=django.contrib.postgres.fields.ArrayField(base_field=models.CharField(choices=[('Immediate', 'Immediate'), ('Expected', 'Expected'), ('Future', 'Future'), ('Past', 'Past'), ('Unknown', 'Unknown')]), blank=True, default=list, size=None), + ), + ] diff --git a/apps/subscription/migrations/0003_alter_useralertsubscription_is_active.py b/apps/subscription/migrations/0003_alter_useralertsubscription_is_active.py new file mode 100644 index 00000000..594d728d --- /dev/null +++ b/apps/subscription/migrations/0003_alter_useralertsubscription_is_active.py @@ -0,0 +1,18 @@ +# Generated by Django 4.2.13 on 2024-11-25 16:21 + +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('subscription', '0002_remove_useralertsubscription_alert_filters_and_more'), + ] + + operations = [ + migrations.AlterField( + model_name='useralertsubscription', + name='is_active', + field=models.BooleanField(default=False), + ), + ] diff --git a/apps/subscription/models.py b/apps/subscription/models.py index c1e69ed0..53b44d46 100644 
--- a/apps/subscription/models.py +++ b/apps/subscription/models.py @@ -1,52 +1,54 @@ -from typing import TYPE_CHECKING - from django.contrib.postgres.fields import ArrayField from django.db import models +from django.utils.translation import gettext + +from apps.cap_feed.models import Alert, AlertInfo, Country +from apps.user.models import User + + +class UserAlertSubscription(models.Model): + LIMIT_PER_USER = 10 + + class EmailFrequency(models.IntegerChoices): + DAILY = 1, gettext("Daily") + WEEKLY = 2, gettext("Weekly") + MONTHLY = 3, gettext("Monthly") + + created_at = models.DateTimeField(auto_now_add=True) + modified_at = models.DateTimeField(auto_now=True) + + name = models.CharField(max_length=255) + user = models.ForeignKey(User, on_delete=models.CASCADE) + is_active = models.BooleanField(default=False) + + # TODO: Keep change history? + # Filters + filter_alert_country = models.ForeignKey(Country, on_delete=models.PROTECT) + filter_alert_admin1s = ArrayField(models.BigIntegerField(), blank=True, default=list) + filter_alert_urgencies = ArrayField(models.CharField(choices=AlertInfo.Urgency.choices), blank=True, default=list) + filter_alert_severities = ArrayField(models.CharField(choices=AlertInfo.Severity.choices), blank=True, default=list) + filter_alert_certainties = ArrayField(models.CharField(choices=AlertInfo.Certainty.choices), blank=True, default=list) + filter_alert_categories = ArrayField(models.CharField(choices=AlertInfo.Category.choices), blank=True, default=list) + + # Notification config + notify_by_email = models.BooleanField(default=False) + email_frequency = models.PositiveSmallIntegerField(choices=EmailFrequency.choices, default=EmailFrequency.WEEKLY) + email_last_sent_at = models.DateTimeField(null=True, blank=True) + + alerts = models.ManyToManyField( + Alert, + blank=True, + through="SubscriptionAlert", + related_name="subscriptions", + ) + + filter_alert_country_id: int + + def __str__(self): + return self.name + -if TYPE_CHECKING: - 
from django.db.models.fields.related_descriptors import ManyRelatedManager - - from apps.subscription_manager.models import Alert - - -class Subscription(models.Model): - id = models.AutoField(primary_key=True) - subscription_name = models.CharField(default="", verbose_name="subscription_name", max_length=512) - user_id = models.IntegerField(default=0, verbose_name="user_id") - country_ids = ArrayField(models.IntegerField(verbose_name='country_ids'), default=list) - admin1_ids = ArrayField(models.IntegerField(verbose_name='admin1_ids'), default=list) - urgency_array = ArrayField(models.CharField(verbose_name='urgency_array'), default=list) - severity_array = ArrayField(models.CharField(verbose_name='severity_array'), default=list) - certainty_array = ArrayField(models.CharField(verbose_name='certainty_array'), default=list) - subscribe_by = ArrayField(models.CharField(verbose_name="subscribe_by"), default=list) - sent_flag = models.IntegerField(default=0, verbose_name="sent_flag") - - if TYPE_CHECKING: - alert_set: ManyRelatedManager[Alert] - - def get_alert_id_list(self): - alerts_list = [] - alerts = self.alert_set.all() - - for alert in alerts: - alerts_list.append(alert.id) - return alerts_list - - def save(self, *args, force_insert=False, force_update=False, **kwargs): - from django.core.cache import cache - - from apps.subscription_manager.tasks import subscription_mapper - - super().save(force_insert, force_update, *args, **kwargs) - # Add the subscription id as a view lock, so user will not view the subscription during - # mappings. - cache.add("v" + str(self.id), True, timeout=None) - subscription_mapper.apply_async(args=(self.pk,), queue='subscription_manager') - - def delete( # type: ignore[reportIncompatibleMethodOverride] - self, - *args, - force_insert=False, - force_update=False, - ) -> tuple[int, dict[str, int]]: - return super().delete(force_insert, force_update) +# XXX: Change name? 
+class SubscriptionAlert(models.Model): + subscription = models.ForeignKey(UserAlertSubscription, on_delete=models.CASCADE, related_name="+") + alert = models.ForeignKey(Alert, on_delete=models.CASCADE, related_name="+") diff --git a/apps/subscription/mutations.py b/apps/subscription/mutations.py new file mode 100644 index 00000000..2bada102 --- /dev/null +++ b/apps/subscription/mutations.py @@ -0,0 +1,121 @@ +import strawberry +from asgiref.sync import sync_to_async + +from main.graphql.context import Info +from utils.strawberry.mutations import ( + MutationEmptyResponseType, + MutationResponseType, + _CustomErrorType, + mutation_is_not_valid, + process_input_data, +) +from utils.strawberry.transformers import convert_serializer_to_type + +from .queries import UserAlertSubscriptionType +from .serializers import ( + UserAlertSubscriptionSerializer, + UserAlertSubscriptionUnsubscribeSerializer, +) + +UserAlertSubscriptionInput = convert_serializer_to_type(UserAlertSubscriptionSerializer, name="UserAlertSubscriptionInput") +UserAlertSubscriptionUnsubscribeInput = convert_serializer_to_type( + UserAlertSubscriptionUnsubscribeSerializer, + name='UserAlertSubscriptionUnsubscribeInput', +) + + +@strawberry.type +class PublicMutation: + + @strawberry.mutation + @sync_to_async + def unsubscribe_user_alert_subscription( + self, + data: UserAlertSubscriptionUnsubscribeInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserAlertSubscriptionUnsubscribeSerializer( + data=process_input_data(data), context={"request": info.context.request} + ) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + serializer.save() + # Set user activation + return MutationEmptyResponseType() + + +@strawberry.type +class PrivateMutation: + @strawberry.mutation + @sync_to_async + def create_user_alert_subscription( + self, + data: UserAlertSubscriptionInput, # type: 
ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationResponseType[UserAlertSubscriptionType]: + serializer = UserAlertSubscriptionSerializer( + data=process_input_data(data), + context={'request': info.context.request}, + ) + if errors := mutation_is_not_valid(serializer): + return MutationResponseType( + ok=False, + errors=errors, + ) + obj = serializer.save() + return MutationResponseType( + result=obj, # type: ignore[reportReturnType] + ) + + @strawberry.mutation + @sync_to_async + def update_user_alert_subscription( + self, + id: strawberry.ID, + data: UserAlertSubscriptionInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationResponseType[UserAlertSubscriptionType]: + instance = UserAlertSubscriptionType.get_queryset(None, None, info).filter(id=id).first() + if instance is None: + return MutationResponseType( + ok=False, + errors=_CustomErrorType.generate_message(message="Doesn't exist in the database"), + ) + serializer = UserAlertSubscriptionSerializer( + instance=instance, + data=process_input_data(data), + context={'request': info.context.request}, + partial=True, + ) + if errors := mutation_is_not_valid(serializer): + return MutationResponseType( + ok=False, + errors=errors, + ) + obj = serializer.save() + return MutationResponseType( + result=obj, # type: ignore[reportReturnType] + ) + + @strawberry.mutation + @sync_to_async + def delete_user_alert_subscription( + self, + id: strawberry.ID, + info: Info, + ) -> MutationResponseType[UserAlertSubscriptionType]: + instance = UserAlertSubscriptionType.get_queryset(None, None, info).filter(id=id).first() + if instance is None: + return MutationResponseType( + ok=False, + errors=_CustomErrorType.generate_message(message="Doesn't exist in the database"), + ) + instance_id = instance.id + instance.delete() + instance.id = instance_id + return MutationResponseType( + result=instance, # type: ignore[reportReturnType] + ) diff --git a/apps/subscription/orders.py
b/apps/subscription/orders.py new file mode 100644 index 00000000..f761b26c --- /dev/null +++ b/apps/subscription/orders.py @@ -0,0 +1,13 @@ +import strawberry +import strawberry_django + +from .models import UserAlertSubscription + + +@strawberry_django.ordering.order(UserAlertSubscription) +class UserAlertSubscriptionOrder: + id: strawberry.auto + created_at: strawberry.auto + modified_at: strawberry.auto + name: strawberry.auto + is_active: strawberry.auto diff --git a/apps/subscription/queries.py b/apps/subscription/queries.py new file mode 100644 index 00000000..4b9c8aa1 --- /dev/null +++ b/apps/subscription/queries.py @@ -0,0 +1,59 @@ +import typing + +import strawberry +import strawberry_django +from strawberry_django.pagination import OffsetPaginationInput + +from apps.cap_feed.filters import AlertFilter +from apps.cap_feed.orders import AlertOrder +from apps.cap_feed.types import AlertType, get_alert_queryset +from main.graphql.context import Info +from utils.strawberry.paginations import ( + CountList, + count_list_resolver, + pagination_field, +) + +from .filters import UserAlertSubscriptionFilter +from .models import UserAlertSubscription +from .orders import UserAlertSubscriptionOrder +from .types import UserAlertSubscriptionType + + +@strawberry.type +class PrivateQuery: + user_alert_subscriptions: CountList[UserAlertSubscriptionType] = pagination_field( + pagination=True, + filters=UserAlertSubscriptionFilter, + order=UserAlertSubscriptionOrder, + ) + + @strawberry_django.field + async def user_alert_subscription(self, info: Info, pk: strawberry.ID) -> UserAlertSubscriptionType | None: + return await UserAlertSubscriptionType.get_queryset(None, None, info).filter(pk=pk).afirst() + + @strawberry_django.field + async def subscripted_alerts( + self, + info: Info, + filters: typing.Optional[AlertFilter] = strawberry.UNSET, + order: typing.Optional[AlertOrder] = strawberry.UNSET, + pagination: typing.Optional[OffsetPaginationInput] = strawberry.UNSET, + ) 
-> CountList[AlertType]: + # XXX: Add DISTINCT as default to avoid duplicate alerts + if filters is strawberry.UNSET: + filters = AlertFilter(DISTINCT=True) # type: ignore[reportCallIssue] + else: + filters.DISTINCT = True # type: ignore[reportCallIssue] + + queryset = get_alert_queryset(None, is_active=False).filter( + subscriptions__in=UserAlertSubscription.objects.filter(user=info.context.request.user).all(), + ) + return count_list_resolver( + info, + queryset, + AlertType, + filters=filters, # type: ignore[reportArgumentType] + order=order, # type: ignore[reportArgumentType] + pagination=pagination, # type: ignore[reportArgumentType] + ) diff --git a/apps/subscription/schema.py b/apps/subscription/schema.py deleted file mode 100644 index 4b27c5d1..00000000 --- a/apps/subscription/schema.py +++ /dev/null @@ -1,313 +0,0 @@ -import os -import random -import string -from unittest.mock import patch - -import graphene -from graphene_django import DjangoObjectType - -from .models import Subscription - -# from graphql_jwt.decorators import login_required - - -URGENCY_ARRAY = ["immediate", "expected", "future", "past", "unknown"] - -SEVERITY_ARRAY = ["extreme", "severe", "moderate", "minor", "unknown"] - -CERTAINTY_ARRAY = ["observed", "likely", "possible", "unlikely", "unknown"] - - -def mock_save(self: Subscription, *args, **kwargs): - super(Subscription, self).save(*args, **kwargs) - - -def get_random_string(length): - # choose from all lowercase letter - letters = string.ascii_lowercase - result_str = ''.join(random.choice(letters) for _ in range(length)) - return result_str - - -def get_random_integer_array(start, end): - array = [] - current = start - while current < end: - current = random.randint(current + 1, end) - array.append(current) - return array - - -def get_random_string_array(candicates): - index_array = get_random_integer_array(0, len(candicates) - 1) - string_array = [] - for index in index_array: - string_array.append(candicates[index]) - return 
string_array - - -class SubscriptionType(DjangoObjectType): - class Meta: - model = Subscription - fields = [ - "id", - "subscription_name", - "user_id", - "country_ids", - "admin1_ids", - "urgency_array", - "severity_array", - "certainty_array", - "subscribe_by", - "sent_flag", - ] - - -def create_subscription( - user_id, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, -): - subscription = Subscription( - user_id=user_id, - subscription_name=subscription_name, - country_ids=country_ids, - admin1_ids=admin1_ids, - urgency_array=urgency_array, - severity_array=severity_array, - certainty_array=certainty_array, - subscribe_by=subscribe_by, - sent_flag=sent_flag, - ) - subscription.save() - return subscription - - -class CreateSubscription(graphene.Mutation): - class Arguments: - subscription_name = graphene.String(required=True) - country_ids = graphene.List(graphene.Int) - admin1_ids = graphene.List(graphene.Int) - urgency_array = graphene.List(graphene.String) - severity_array = graphene.List(graphene.String) - certainty_array = graphene.List(graphene.String) - subscribe_by = graphene.List(graphene.String) - sent_flag = graphene.Int(required=True) - - subscription = graphene.Field(SubscriptionType) - - # @login_required - @classmethod - def mutate( - cls, - info, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, - ): - subscription = create_subscription( - info.context.user.id, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, - ) - return cls(subscription=subscription) # type: ignore[reportCallIssue] - - -class CreateSubscriptionTest(graphene.Mutation): - class Arguments: - user_id = graphene.Int(required=True) - subscription_name = graphene.String(required=True) - country_ids = graphene.List(graphene.Int) - 
admin1_ids = graphene.List(graphene.Int) - urgency_array = graphene.List(graphene.String) - severity_array = graphene.List(graphene.String) - certainty_array = graphene.List(graphene.String) - subscribe_by = graphene.List(graphene.String) - sent_flag = graphene.Int(required=True) - - subscription = graphene.Field(SubscriptionType) - - @classmethod - def mutate( - cls, - info, - user_id, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, - ): - subscription = create_subscription( - user_id, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, - ) - return cls(subscription=subscription) # type: ignore[reportCallIssue] - - -class DeleteSubscription(graphene.Mutation): - class Arguments: - subscription_id = graphene.Int(required=True) - - success = graphene.Boolean() - error_message = graphene.String() - - # @login_required - @classmethod - def mutate(cls, info, subscription_id): - subscription = Subscription.objects.get(id=subscription_id) - login_user_id = info.context.user.id - if subscription.user_id != login_user_id: - return cls( - success=False, - error_message='Delete operation is not authorized to this user.', - ) # type: ignore[reportCallIssue] - subscription.delete() - return cls(success=True) # type: ignore[reportCallIssue] - - -class UpdateSubscription(graphene.Mutation): - class Arguments: - subscription_id = graphene.Int(required=True) - subscription_name = graphene.String(required=True) - country_ids = graphene.List(graphene.Int) - admin1_ids = graphene.List(graphene.Int) - urgency_array = graphene.List(graphene.String) - severity_array = graphene.List(graphene.String) - certainty_array = graphene.List(graphene.String) - subscribe_by = graphene.List(graphene.String) - sent_flag = graphene.Int(required=True) - - success = graphene.Boolean() - error_message = graphene.String() - - @classmethod - 
def mutate( - cls, - info, - subscription_id, - subscription_name, - country_ids, - admin1_ids, - urgency_array, - severity_array, - certainty_array, - subscribe_by, - sent_flag, - ): - subscription = Subscription.objects.get(id=subscription_id) - login_user_id = info.context.user.id - if subscription.user_id != login_user_id: - return cls( - success=False, - error_message='Update operation is not authorized to this user.', - ) # type: ignore[reportCallIssue] - subscription.subscription_name = subscription_name - subscription.country_ids = country_ids - subscription.admin1_ids = admin1_ids - subscription.urgency_array = urgency_array - subscription.severity_array = severity_array - subscription.certainty_array = certainty_array - subscription.subscribe_by = subscribe_by - subscription.sent_flag = sent_flag - subscription.save() - return cls(success=True) # type: ignore[reportCallIssue] - - -@patch.object(Subscription, 'save', mock_save) -class GenerateTestSubscriptions(graphene.Mutation): - class Arguments: - user_id = graphene.Int(required=True) - case_numbers = graphene.Int(required=True) - - success = graphene.Boolean() - error_message = graphene.String() - - # @login_required - @classmethod - def mutate(cls, info, user_id, case_numbers): - if case_numbers > 10000: - return cls( - success=False, - error_message='You should not be add cases ' 'more than 10000 at one time.', - ) # type: ignore[reportCallIssue] - with patch.object(Subscription, 'save', mock_save): - for _ in range(0, case_numbers): - subscription = create_subscription( - user_id + random.randint(-10, 10), - "test_case_" + get_random_string(10), - get_random_integer_array(100000, 100100), - get_random_integer_array(1000000, 1001000), - get_random_string_array(URGENCY_ARRAY), - get_random_string_array(SEVERITY_ARRAY), - get_random_string_array(CERTAINTY_ARRAY), - ["email"], - 0, - ) - subscription.save() - return cls(success=True) # type: ignore[reportCallIssue] - - -class 
Mutation(graphene.ObjectType): - if os.environ['TEST_MODE'] == "True": - create_subscription_test = CreateSubscriptionTest.Field() - create_subscription = CreateSubscription.Field() - delete_subscription = DeleteSubscription.Field() - update_subscription = UpdateSubscription.Field() - generate_test_subscriptions = GenerateTestSubscriptions.Field() - - -class Query(graphene.ObjectType): - list_all_subscription = graphene.List(SubscriptionType) - list_subscription = graphene.List( - SubscriptionType, - country_ids=graphene.List(graphene.Int), - admin1_ids=graphene.List(graphene.Int), - urgency_array=graphene.List(graphene.String), - severity_array=graphene.List(graphene.String), - certainty_array=graphene.List(graphene.String), - ) - get_subscription = graphene.Field(SubscriptionType, subscription_id=graphene.Int()) - - # @login_required - def resolve_list_all_subscription(self, info): - return Subscription.objects.filter(user_id=info.context.user.id).order_by('-id') - - def resolve_list_subscription(self, info, country_ids, admin1_ids, urgency_array, severity_array, certainty_array): - return Subscription.objects.filter( - country_ids__contains=country_ids, - admin1_ids__contains=admin1_ids, - urgency_array__contains=urgency_array, - severity_array__contains=severity_array, - certainty_array__contains=certainty_array, - ).order_by('-id') - - def resolve_get_subscription(self, info, subscription_id): - return Subscription.objects.get(id=subscription_id) diff --git a/apps/subscription/serializers.py b/apps/subscription/serializers.py new file mode 100644 index 00000000..78a57f2b --- /dev/null +++ b/apps/subscription/serializers.py @@ -0,0 +1,96 @@ +from django.utils.http import urlsafe_base64_decode +from django.utils.translation import gettext +from rest_framework import serializers + +from apps.cap_feed.models import Admin1 +from main.tokens import TokenManager +from utils.strawberry.serializers import IntegerIDField + +from .models import UserAlertSubscription + + 
+class UserAlertSubscriptionSerializer(serializers.ModelSerializer): + # To map Int -> ID + filter_alert_admin1s = serializers.ListField(child=IntegerIDField(required=True), required=True) + + class Meta: + model = UserAlertSubscription + fields = ( + "name", + "is_active", + "notify_by_email", + "email_frequency", + # Filters + "filter_alert_country", + "filter_alert_admin1s", + "filter_alert_urgencies", + "filter_alert_severities", + "filter_alert_certainties", + "filter_alert_categories", + ) + + def validate_is_active(self, is_active): + if is_active: + qs = UserAlertSubscription.objects.filter(user=self.context["request"].user, is_active=True) + if self.instance and self.instance.pk: + qs = qs.exclude(pk=self.instance.pk) + if qs.count() >= UserAlertSubscription.LIMIT_PER_USER: + raise serializers.ValidationError( + gettext( + "Only %(limit)s active subscriptions are allowed" + % { + "limit": UserAlertSubscription.LIMIT_PER_USER, + } + ) + ) + return is_active + + def validate_filter_alert_admin1s(self, filter_alert_admin1s): + available_admin1s_ids = set(Admin1.objects.filter(id__in=filter_alert_admin1s).values_list("id", flat=True)) + if invalid_ids := list(set(filter_alert_admin1s) - available_admin1s_ids): + raise serializers.ValidationError(f"These Admin1 ids are missing in the database: {list(invalid_ids)}") + return filter_alert_admin1s + + def create(self, validated_data): + validated_data["user"] = self.context["request"].user + instance = super().create(validated_data) + return instance + + +class UserAlertSubscriptionUnsubscribeSerializer(serializers.Serializer): + uuid = serializers.CharField(required=True) + token = serializers.CharField(required=True) + + def _validate_token(self, attrs): + token_generator = TokenManager.user_subscription_unsubscribe_generator + + try: + uid = urlsafe_base64_decode(attrs['uuid']).decode('utf-8') + user_subscription = UserAlertSubscription.objects.get(pk=uid) + except ( + TypeError, + ValueError, + OverflowError, +
UserAlertSubscription.DoesNotExist, + ): + user_subscription = None + + # TODO: Fix check_token typing + if user_subscription is not None and token_generator.check_token( + user_subscription, # type: ignore[reportArgumentType] + attrs['token'], + ): + return user_subscription + raise serializers.ValidationError(gettext('Invalid or expired token')) + + def validate(self, attrs): + return { + **attrs, + "user_subscription": self._validate_token(attrs), + } + + def save(self, **_): + assert isinstance(self.validated_data, dict) + user_subscription = self.validated_data["user_subscription"] + user_subscription.notify_by_email = False + user_subscription.save(update_fields=("notify_by_email",)) diff --git a/apps/subscription/tasks.py b/apps/subscription/tasks.py new file mode 100644 index 00000000..5679d891 --- /dev/null +++ b/apps/subscription/tasks.py @@ -0,0 +1,113 @@ +import logging +import time +import typing + +from celery import shared_task +from django.db import connection, models, transaction + +from apps.cap_feed.models import Alert, AlertAdmin1, AlertInfo +from apps.subscription.models import SubscriptionAlert, UserAlertSubscription +from main.cache import CacheKey +from utils.common import redis_lock + +from .emails import send_user_alert_subscriptions_email + +logger = logging.getLogger(__name__) + + +def _tb_name(model: typing.Type[models.Model]): + return model._meta.db_table + + +def _cl_name(field): + return field.field.column + + +TAG_MUTATION_RAW_QUERY = f''' + WITH alert_data AS ( + SELECT + alert.id AS id, + alert.{_cl_name(Alert.country)} AS country_id, + -- XXX: Remove unknown admins? 
+ ARRAY_REMOVE(ARRAY_AGG(distinct alert_admin1.{_cl_name(AlertAdmin1.admin1)}), NULL) AS admin1s, + ARRAY_REMOVE(ARRAY_AGG(distinct alert_info.{_cl_name(AlertInfo.urgency)}), NULL) AS urgencies, + ARRAY_REMOVE(ARRAY_AGG(distinct alert_info.{_cl_name(AlertInfo.severity)}), NULL) AS severities, + ARRAY_REMOVE(ARRAY_AGG(distinct alert_info.{_cl_name(AlertInfo.certainty)}), NULL) AS certainties, + ARRAY_REMOVE(ARRAY_AGG(distinct alert_info.{_cl_name(AlertInfo.category)}), NULL) AS categories + FROM + {_tb_name(Alert)} AS alert + LEFT JOIN {_tb_name(AlertInfo)} AS alert_info ON alert_info.alert_id = alert.id + LEFT JOIN {_tb_name(AlertAdmin1)} AS alert_admin1 ON alert_admin1.alert_id = alert.id + WHERE alert.{_cl_name(Alert.is_processed_by_subscription)} IS FALSE + GROUP BY alert.id, alert.country_id + ), + tagged_alerts AS ( + SELECT + subscriptions.id AS subscription_id, + alert_data.id AS alert_id + FROM + alert_data + CROSS JOIN {_tb_name(UserAlertSubscription)} AS subscriptions + WHERE + subscriptions.{_cl_name(UserAlertSubscription.filter_alert_country)} = alert_data.country_id AND + ( + COALESCE(array_length(subscriptions.{_cl_name(UserAlertSubscription.filter_alert_admin1s)}, 1), 0) = 0 OR + -- subscriptions.{_cl_name(UserAlertSubscription.filter_alert_admin1s)} && alert_data.admin1s::integer[] + subscriptions.{_cl_name(UserAlertSubscription.filter_alert_admin1s)} && alert_data.admin1s + ) AND ( + COALESCE(array_length(subscriptions.{_cl_name(UserAlertSubscription.filter_alert_urgencies)}, 1), 0) = 0 OR + subscriptions.{_cl_name(UserAlertSubscription.filter_alert_urgencies)} && alert_data.urgencies + ) AND ( + COALESCE(array_length(subscriptions.{_cl_name(UserAlertSubscription.filter_alert_severities)}, 1), 0) = 0 OR + subscriptions.{_cl_name(UserAlertSubscription.filter_alert_severities)} && alert_data.severities + ) AND ( + COALESCE(array_length(subscriptions.{_cl_name(UserAlertSubscription.filter_alert_certainties)}, 1), 0) = 0 OR + 
subscriptions.{_cl_name(UserAlertSubscription.filter_alert_certainties)} && alert_data.certainties + ) AND ( + COALESCE(array_length(subscriptions.{_cl_name(UserAlertSubscription.filter_alert_categories)}, 1), 0) = 0 OR + subscriptions.{_cl_name(UserAlertSubscription.filter_alert_categories)} && alert_data.categories + ) + ), + -- Insert tagged alerts to subscriptions + add_tagged_alerts AS ( + INSERT INTO "{_tb_name(SubscriptionAlert)}" ( + subscription_id, + alert_id + ) ( + SELECT * FROM tagged_alerts + ) + ) + -- Flag processed alerts + UPDATE {_tb_name(Alert)} + SET {_cl_name(Alert.is_processed_by_subscription)} = TRUE + WHERE id in ( + SELECT id FROM alert_data + ) +''' + + +@shared_task +def process_pending_subscription_alerts(): + with redis_lock(CacheKey.RedisLockKey.SUBSCRIPTION_TAG_ALERTS) as acquired: + if not acquired: + logger.warning(f'{CacheKey.RedisLockKey.SUBSCRIPTION_TAG_ALERTS} is already running') + return + start_time = time.time() + with transaction.atomic(): + with connection.cursor() as cursor: + cursor.execute(TAG_MUTATION_RAW_QUERY) + logger.info(f'Tagged pending alerts to subscriptions. Runtime: {time.time() - start_time} seconds') + + +@shared_task +def send_daily_user_alert_subscriptions_email(): + with redis_lock(CacheKey.RedisLockKey.SEND_DAILY_USER_ALERT_SUBSCRIPTION_EMAIL) as acquired: + if not acquired: + logger.warning(f'{CacheKey.RedisLockKey.SEND_DAILY_USER_ALERT_SUBSCRIPTION_EMAIL} is already running') + return + start_time = time.time() + send_user_alert_subscriptions_email(UserAlertSubscription.EmailFrequency.DAILY) + logger.info(f'Send daily user alert subscription email. 
Runtime: {time.time() - start_time} seconds') + + +# TODO: Add tasks to clean up SubscriptionAlert table data for old entries diff --git a/apps/subscription/templates/index.html b/apps/subscription/templates/index.html deleted file mode 100644 index d05ad0f8..00000000 --- a/apps/subscription/templates/index.html +++ /dev/null @@ -1,22 +0,0 @@ - - - - - - - IFRC Alert Hub - - - - -
-

Welcome to This Website

-
- {% csrf_token %} - {{ form.as_p }} - -
-
- - - \ No newline at end of file diff --git a/apps/subscription/tests.py b/apps/subscription/tests.py deleted file mode 100644 index 2ed913a4..00000000 --- a/apps/subscription/tests.py +++ /dev/null @@ -1,714 +0,0 @@ -import json -from unittest.mock import patch - -from django.test import Client -from graphene_django.utils.testing import GraphQLTestCase - -from apps.user.models import User - -from .models import Subscription -from .schema import ( - create_subscription, - get_random_integer_array, - get_random_string, - get_random_string_array, -) - - -def get_subscription(subscription_id): - return Subscription.objects.get(id=subscription_id) - - -def mock_save(self: Subscription, *args, **kwargs): - super(Subscription, self).save(*args, **kwargs) - - -def mock_delete(self: Subscription, *args, **kwargs): - super(Subscription, self).delete(*args, **kwargs) - - -@patch.object(Subscription, 'save', mock_save) -class TestCase(GraphQLTestCase): - GRAPHQL_URL = "/subscription/graphql" - client = Client() - - # Setup data for the tests - @classmethod - def setUpTestData(cls): - # Create a test user - cls.user = User.objects.create_user(username='test1', email='test1@example.com', password='testpassword') - # Create another user - cls.user = User.objects.create_user(username='test2', email='test2@example.com', password='testpassword') - with patch.object(Subscription, 'save', mock_save): - # Create subscriptions for user 1 - create_subscription( - user_id=1, - subscription_name="test_group1", - country_ids=[1, 2, 3], - admin1_ids=[1, 2, 3], - urgency_array=["immediate"], - severity_array=["severe"], - certainty_array=["observed"], - subscribe_by=["sms", "email"], - sent_flag=0, - ) - create_subscription( - user_id=1, - subscription_name="test_group1", - country_ids=[1], - admin1_ids=[1], - urgency_array=["expected"], - severity_array=["extreme"], - certainty_array=["likely"], - subscribe_by=["sms", "email"], - sent_flag=0, - ) - create_subscription( - user_id=1, - 
subscription_name="test_group1", - country_ids=[2, 3], - admin1_ids=[2, 3], - urgency_array=["immediate", "expected"], - severity_array=["severe", "extreme"], - certainty_array=["observed", "likely"], - subscribe_by=["sms", "email"], - sent_flag=0, - ) - # Create a subscription for user 2 - create_subscription( - user_id=2, - subscription_name="test_group2", - country_ids=[1, 2, 3], - admin1_ids=[1, 2, 3], - urgency_array=["immediate", "expected"], - severity_array=["severe", "extreme"], - certainty_array=["observed", "likely"], - subscribe_by=["sms", "email"], - sent_flag=0, - ) - - def setUp(self): - # Log in the user - self.client.login(email='test1@example.com', password='testpassword') - - # Test query for list all subscriptions - def test_query_list_all_subscription(self): - response = self.query( - ''' - query { - listAllSubscription { - certaintyArray - countryIds - admin1Ids - id - sentFlag - severityArray - subscribeBy - subscriptionName - urgencyArray - userId - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listAllSubscription']), 3) - self.assertEqual(content['data']['listAllSubscription'][0]['id'], '3') - self.assertEqual(content['data']['listAllSubscription'][0]['subscriptionName'], 'test_group1') - self.assertEqual(content['data']['listAllSubscription'][0]['countryIds'], [2, 3]) - self.assertEqual(content['data']['listAllSubscription'][0]['admin1Ids'], [2, 3]) - self.assertEqual(content['data']['listAllSubscription'][0]['urgencyArray'], ["immediate", "expected"]) - self.assertEqual(content['data']['listAllSubscription'][0]['severityArray'], ["severe", "extreme"]) - self.assertEqual(content['data']['listAllSubscription'][0]['certaintyArray'], ["observed", "likely"]) - self.assertEqual(content['data']['listAllSubscription'][0]['subscribeBy'], ["sms", "email"]) - self.assertEqual(content['data']['listAllSubscription'][0]['sentFlag'], 0) - - # Test query for list 
subscriptions by countryId filters - def test_query_list_subscription_by_country_ids(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [2,3], - admin1Ids: [], - urgencyArray: [], - severityArray: [], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 3) - - # Test query for list subscriptions by admin1Ids filters - def test_query_list_subscription_by_admin1_ids(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [2,3], - urgencyArray: [], - severityArray: [], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 3) - - # Test query for list subscriptions by urgencyArray filters - def test_query_list_subscription_by_urgency_array(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: ["immediate"], - severityArray: [], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 3) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: ["immediate", "expected"], - severityArray: [], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - 
certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 2) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: ["hello_world"], - severityArray: [], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 0) - - # Test query for list subscriptions by severityArray filters - def test_query_list_subscription_by_severity_array(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: ["severe"], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 3) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: ["severe", "extreme"], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 2) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: ["hello_world"], - certaintyArray: [] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - 
severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 0) - - # Test query for list subscriptions by certaintyArray filters - def test_query_list_subscription_by_certainty_array(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: [], - certaintyArray: ["observed"] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 3) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: [], - certaintyArray: ["observed", "likely"] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 2) - - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [], - urgencyArray: [], - severityArray: [], - certaintyArray: ["hello_world"] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 0) - - # Test query for list subscriptions with filter combinations - def test_query_list_subscription_with_filter_combinations(self): - response = self.query( - ''' - query { - listSubscription(countryIds: [], - admin1Ids: [2,3], - 
urgencyArray: ["immediate", "expected"], - severityArray: ["severe"], - certaintyArray: ["observed"] - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(len(content['data']['listSubscription']), 2) - - # Test query for get subscription by id - def test_query_get_subscription(self): - response = self.query( - ''' - query { - getSubscription(subscriptionId: 1 - ) { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(content['data']['getSubscription']['id'], '1') - self.assertEqual(content['data']['getSubscription']['subscriptionName'], 'test_group1') - self.assertEqual(content['data']['getSubscription']['countryIds'], [1, 2, 3]) - self.assertEqual(content['data']['getSubscription']['admin1Ids'], [1, 2, 3]) - self.assertEqual(content['data']['getSubscription']['urgencyArray'], ["immediate"]) - self.assertEqual(content['data']['getSubscription']['severityArray'], ["severe"]) - self.assertEqual(content['data']['getSubscription']['certaintyArray'], ["observed"]) - self.assertEqual(content['data']['getSubscription']['subscribeBy'], ["sms", "email"]) - self.assertEqual(content['data']['getSubscription']['sentFlag'], 0) - - # Test mutation for create subscription - def test_query_create_subscription(self): - response = self.query( - ''' - mutation { - createSubscription ( - subscriptionName: "test_group3", - countryIds: [1,2,3], - admin1Ids: [1,2,3], - urgencyArray: ["immediate","expected"], - severityArray: ["severe", "extreme"], - certaintyArray: ["observed","likely"], - subscribeBy: ["sms", "email"], - sentFlag: 0 - ){ - subscription { - id - subscriptionName - userId - 
countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(content['data']['createSubscription']['subscription']['id'], '5') - self.assertEqual(content['data']['createSubscription']['subscription']['subscriptionName'], 'test_group3') - self.assertEqual(content['data']['createSubscription']['subscription']['countryIds'], [1, 2, 3]) - self.assertEqual(content['data']['createSubscription']['subscription']['admin1Ids'], [1, 2, 3]) - self.assertEqual(content['data']['createSubscription']['subscription']['urgencyArray'], ["immediate", "expected"]) - self.assertEqual(content['data']['createSubscription']['subscription']['severityArray'], ["severe", "extreme"]) - self.assertEqual(content['data']['createSubscription']['subscription']['certaintyArray'], ["observed", "likely"]) - self.assertEqual(content['data']['createSubscription']['subscription']['subscribeBy'], ["sms", "email"]) - self.assertEqual(content['data']['createSubscription']['subscription']['sentFlag'], 0) - - # Test mutation for create subscription test - def test_query_create_subscription_test(self): - response = self.query( - ''' - mutation { - createSubscriptionTest ( - userId: 3, - subscriptionName: "test_group3", - countryIds: [1,2,3], - admin1Ids: [1,2,3], - urgencyArray: ["immediate","expected"], - severityArray: ["severe", "extreme"], - certaintyArray: ["observed","likely"], - subscribeBy: ["sms", "email"], - sentFlag: 0 - ){ - subscription { - id - subscriptionName - userId - countryIds - admin1Ids - urgencyArray - severityArray - certaintyArray - subscribeBy - sentFlag - } - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['id'], '6') - 
self.assertEqual(content['data']['createSubscriptionTest']['subscription']['subscriptionName'], 'test_group3') - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['userId'], 3) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['countryIds'], [1, 2, 3]) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['admin1Ids'], [1, 2, 3]) - self.assertEqual( - content['data']['createSubscriptionTest']['subscription']['urgencyArray'], ["immediate", "expected"] - ) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['severityArray'], ["severe", "extreme"]) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['certaintyArray'], ["observed", "likely"]) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['subscribeBy'], ["sms", "email"]) - self.assertEqual(content['data']['createSubscriptionTest']['subscription']['sentFlag'], 0) - - # Test mutation for update subscription - def test_query_update_subscription(self): - response = self.query( - ''' - mutation { - updateSubscription ( - subscriptionId: 1 - subscriptionName: "updated_test_group1", - countryIds: [1,2,3], - admin1Ids: [1,2,3], - urgencyArray: ["immediate","expected"], - severityArray: ["severe", "extreme"], - certaintyArray: ["observed","likely"], - subscribeBy: ["sms", "email"], - sentFlag: 0 - ){ - success - errorMessage - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertTrue(content['data']['updateSubscription']['success']) - self.assertIsNone(content['data']['updateSubscription']['errorMessage']) - self.assertEqual(get_subscription(1).subscription_name, "updated_test_group1") - - # Test mutation for update subscription without permission - def test_query_update_subscription_without_permission(self): - response = self.query( - ''' - mutation { - updateSubscription ( - subscriptionId: 4 - subscriptionName: 
"updated_test_group1", - countryIds: [1,2,3], - admin1Ids: [1,2,3], - urgencyArray: ["immediate","expected"], - severityArray: ["severe", "extreme"], - certaintyArray: ["observed","likely"], - subscribeBy: ["sms", "email"], - sentFlag: 0 - ){ - success - errorMessage - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertFalse(content['data']['updateSubscription']['success']) - self.assertIsNotNone(content['data']['updateSubscription']['errorMessage']) - - # Test mutation for delete subscription - def test_query_delete_subscription(self): - with patch.object(Subscription, 'delete', mock_delete): - response = self.query( - ''' - mutation { - deleteSubscription ( - subscriptionId: 1 - ){ - success - errorMessage - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertTrue(content['data']['deleteSubscription']['success']) - self.assertIsNone(content['data']['deleteSubscription']['errorMessage']) - - # Test mutation for delete subscription without permission - def test_query_delete_subscription_without_permission(self): - response = self.query( - ''' - mutation { - deleteSubscription ( - subscriptionId: 4 - ){ - success - errorMessage - } - } - ''' - ) - self.assertResponseNoErrors(response) - - content = json.loads(response.content) - self.assertFalse(content['data']['deleteSubscription']['success']) - self.assertIsNotNone(content['data']['deleteSubscription']['errorMessage']) - - def test_get_random_string(self): - self.assertTrue(len(get_random_string(10)), 10) - self.assertTrue(len(get_random_string(12)), 12) - self.assertTrue(len(get_random_string(15)), 15) - - def test_get_random_integer_array(self): - int_array = get_random_integer_array(10, 20) - for target_int in int_array: - self.assertTrue(10 <= target_int <= 20) - - def test_get_random_string_array(self): - candidates = ["a", "bb", "ccc", "dddd", "eeeee", "fgh"] - string_array = 
get_random_string_array(candidates) - for target_string in string_array: - self.assertTrue(target_string in candidates) diff --git a/apps/subscription_manager/migrations/__init__.py b/apps/subscription/tests/__init__.py similarity index 100% rename from apps/subscription_manager/migrations/__init__.py rename to apps/subscription/tests/__init__.py diff --git a/apps/subscription/tests/test_mutations.py b/apps/subscription/tests/test_mutations.py new file mode 100644 index 00000000..155f5d67 --- /dev/null +++ b/apps/subscription/tests/test_mutations.py @@ -0,0 +1,554 @@ +import copy + +from apps.cap_feed.factories import Admin1Factory, CountryFactory, RegionFactory +from apps.cap_feed.models import AlertInfo +from apps.subscription.factories import UserAlertSubscriptionFactory +from apps.subscription.models import UserAlertSubscription +from apps.user.factories import UserFactory +from main.tests import TestCase + + +class TestSubscriptionMutation(TestCase): + class Mutation: + CREATE_USER_ALERT_SUBSCRIPTION = ''' + mutation createUserAlertSubscription($data: UserAlertSubscriptionInput!) { + private { + createUserAlertSubscription(data: $data) { + ok + errors + result { + id + name + createdAt + modifiedAt + isActive + + # Notification config + notifyByEmail + emailFrequency + emailFrequencyDisplay + emailLastSentAt + + # Filters + # -- ForeignKey + filterAlertCountryId + filterAlertCountry { + id + name + } + filterAlertAdmin1s + filterAlertAdmin1sDisplay { + id + name + } + + # -- Enum + filterAlertUrgencies + filterAlertSeverities + filterAlertCertainties + filterAlertCategories + filterAlertUrgenciesDisplay + filterAlertSeveritiesDisplay + filterAlertCertaintiesDisplay + filterAlertCategoriesDisplay + } + } + } + } + ''' + + UPDATE_USER_ALERT_SUBSCRIPTION = ''' + mutation updateUserAlertSubscription($id: ID!, $data: UserAlertSubscriptionInput!) 
{ + private { + updateUserAlertSubscription(id: $id, data: $data) { + ok + errors + result { + id + name + createdAt + modifiedAt + isActive + + # Notification config + notifyByEmail + emailFrequency + emailFrequencyDisplay + emailLastSentAt + + # Filters + # -- ForeignKey + filterAlertCountryId + filterAlertCountry { + id + name + } + filterAlertAdmin1s + filterAlertAdmin1sDisplay { + id + name + } + + # -- Enum + filterAlertUrgencies + filterAlertSeverities + filterAlertCertainties + filterAlertCategories + filterAlertUrgenciesDisplay + filterAlertSeveritiesDisplay + filterAlertCertaintiesDisplay + filterAlertCategoriesDisplay + } + } + } + } + ''' + + DELETE_USER_ALERT_SUBSCRIPTION = ''' + mutation DeleteUserAlertSubscription($id: ID!) { + private { + deleteUserAlertSubscription(id: $id) { + ok + errors + result { + id + name + } + } + } + } + ''' + + def setUp(self): + super().setUp() + self.user = UserFactory.create() + + self.r_asia = RegionFactory.create(name="Asia") + self.c_nepal = CountryFactory.create(region=self.r_asia) + self.ad_bagmati = Admin1Factory.create(country=self.c_nepal) + + self.valid_data = dict( + name="MySub", + isActive=True, + # Email + notifyByEmail=False, + emailFrequency=self.genum(UserAlertSubscription.EmailFrequency.MONTHLY), + # Filter + filterAlertCountry=self.gID(self.c_nepal.id), + filterAlertAdmin1s=[self.gID(self.ad_bagmati.id)], + filterAlertUrgencies=[self.genum(AlertInfo.Urgency.IMMEDIATE)], + filterAlertSeverities=[], + filterAlertCertainties=[], + filterAlertCategories=[], + ) + + def create_subscription(self, **kwargs): + return UserAlertSubscriptionFactory.create( + **{ + "filter_alert_country": self.c_nepal, + **kwargs, + } + ) + + def _query_create(self, data, **kwargs): + return self.query_check( + self.Mutation.CREATE_USER_ALERT_SUBSCRIPTION, + variables={"data": data}, + **kwargs, + ) + + def _query_update(self, id: int, data, **kwargs): + return self.query_check( + self.Mutation.UPDATE_USER_ALERT_SUBSCRIPTION, + 
variables={"id": self.gID(id), "data": data}, + **kwargs, + ) + + def _query_delete(self, id: int, **kwargs): + return self.query_check( + self.Mutation.DELETE_USER_ALERT_SUBSCRIPTION, + variables={"id": self.gID(id)}, + **kwargs, + ) + + def test_create_subscription(self): + data = copy.deepcopy(self.valid_data) + + # Without Login session + content = self._query_create(data, assert_errors=True) + + # Login + self.force_login(self.user) + + # Create subscription + content = self._query_create(data) + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + self.assertEqual(sub_data["result"]["name"], data["name"], content) + self.assertNotEqual(sub_data["result"]["id"], None, content) + + def test_create_subscription_validation_misc(self): + self.force_login(self.user) + data = copy.deepcopy(self.valid_data) + + # Let"s remove some fields from valid data + data.pop("filterAlertUrgencies") + data.pop("filterAlertSeverities") + data.pop("filterAlertCertainties") + data.pop("filterAlertCategories") + + content = self._query_create(data) + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + + # Let"s set invalid data to some of the fields + data["filterAlertAdmin1s"] = ["hi-there", self.gID(self.ad_bagmati.id), "hi-there-again"] + content = self._query_create(data) + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "filterAlertAdmin1s", + "messages": None, + "object_errors": [ + { + "array_errors": None, + "client_id": None, + "field": 0, + "messages": "A valid integer is required.", + "object_errors": 
None, + }, + { + "array_errors": None, + "client_id": None, + "field": 2, + "messages": "A valid integer is required.", + "object_errors": None, + }, + ], + } + ], + ) + + data["filterAlertCountry"] = "hi-there" + data["filterAlertAdmin1s"] = [] + content = self._query_create(data) + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "filterAlertCountry", + "messages": "Incorrect type. Expected pk value, received str.", + "object_errors": None, + } + ], + ) + + def test_create_subscription_validation_admin1s(self): + self.force_login(self.user) + data = copy.deepcopy(self.valid_data) + data["filterAlertAdmin1s"] = [ + self.gID(self.ad_bagmati.id), + "1000000000", + ] + + content = self._query_create(data) + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": [ + { + "client_id": "nonMemberErrors", + "messages": "This Admin1 ids are missing in database: [1000000000]", + "object_errors": None, + } + ], + "client_id": None, + "field": "filterAlertAdmin1s", + "messages": None, + "object_errors": None, + } + ], + content, + ) + + def test_create_subscription_validation_is_active(self): + user = UserFactory.create() + user2 = UserFactory.create() + self.force_login(user) + data = copy.deepcopy(self.valid_data) + + common_subs_kwargs = dict( + filter_alert_country=self.c_nepal, + ) + + # Create dummy subscriptions + UserAlertSubscriptionFactory.create_batch(10, is_active=False, user=user, **common_subs_kwargs) + UserAlertSubscriptionFactory.create_batch(10, is_active=True, user=user2, **common_subs_kwargs) + + def _assert_success(content): + sub_data = 
content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + + def _assert_failure(content): + sub_data = content["data"]["private"]["createUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "isActive", + "messages": "Only 10 active subscriptions are allowed", + "object_errors": None, + } + ], + ) + + content = self._query_create(data) + _assert_success(content) + + # Create dummy subscriptions to main user + UserAlertSubscriptionFactory.create_batch(9, is_active=True, user=user, **common_subs_kwargs) + + content = self._query_create(data) + _assert_failure(content) + + data["isActive"] = False + content = self._query_create(data) + _assert_success(content) + + data.pop("isActive") # Should be same as False + content = self._query_create(data) + _assert_success(content) + + assert UserAlertSubscription.objects.filter(user=user, is_active=True).count() <= 10 + + def test_update_subscription(self): + subscription = self.create_subscription(user=self.user) + data = copy.deepcopy(self.valid_data) + + # Without Login session + content = self._query_update(subscription.id, data, assert_errors=True) + + # Login + self.force_login(self.user) + + # update subscription + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + self.assertEqual(sub_data["result"]["name"], data["name"], content) + self.assertNotEqual(sub_data["result"]["id"], None, content) + + def test_update_subscription_validation_misc(self): + subscription = self.create_subscription(user=self.user) + self.force_login(self.user) + data = 
copy.deepcopy(self.valid_data) + + # Let"s remove some fields from valid data + data.pop("filterAlertUrgencies") + data.pop("filterAlertSeverities") + data.pop("filterAlertCertainties") + data.pop("filterAlertCategories") + + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + + # Let"s set invalid data to some of the fields + data["filterAlertAdmin1s"] = ["hi-there", self.gID(self.ad_bagmati.id), "hi-there-again"] + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "filterAlertAdmin1s", + "messages": None, + "object_errors": [ + { + "array_errors": None, + "client_id": None, + "field": 0, + "messages": "A valid integer is required.", + "object_errors": None, + }, + { + "array_errors": None, + "client_id": None, + "field": 2, + "messages": "A valid integer is required.", + "object_errors": None, + }, + ], + } + ], + ) + + data["filterAlertCountry"] = "hi-there" + data["filterAlertAdmin1s"] = [] + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "filterAlertCountry", + "messages": "Incorrect type. 
Expected pk value, received str.", + "object_errors": None, + } + ], + ) + + def test_update_subscription_validation_admin1s(self): + subscription = self.create_subscription(user=self.user) + self.force_login(self.user) + data = copy.deepcopy(self.valid_data) + data["filterAlertAdmin1s"] = [ + self.gID(self.ad_bagmati.id), + "1000000000", + ] + + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": [ + { + "client_id": "nonMemberErrors", + "messages": "This Admin1 ids are missing in database: [1000000000]", + "object_errors": None, + } + ], + "client_id": None, + "field": "filterAlertAdmin1s", + "messages": None, + "object_errors": None, + } + ], + content, + ) + + def test_update_subscription_validation_is_active(self): + user = UserFactory.create() + user2 = UserFactory.create() + + subscription = self.create_subscription(user=user) + other_subscription = self.create_subscription(user=user2) + + self.force_login(user) + data = copy.deepcopy(self.valid_data) + + common_subs_kwargs = dict( + filter_alert_country=self.c_nepal, + ) + + # update dummy subscriptions + UserAlertSubscriptionFactory.create_batch(10, is_active=False, user=user, **common_subs_kwargs) + UserAlertSubscriptionFactory.create_batch(10, is_active=True, user=user2, **common_subs_kwargs) + + content = self._query_update(subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + + # update dummy subscriptions to main user + UserAlertSubscriptionFactory.create_batch(10, is_active=True, user=user, **common_subs_kwargs) + content = self._query_update(subscription.id, data) + sub_data = 
content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "client_id": None, + "field": "isActive", + "messages": "Only 10 active subscriptions are allowed", + "object_errors": None, + } + ], + ) + + # Others subscription + content = self._query_update(other_subscription.id, data) + sub_data = content["data"]["private"]["updateUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "field": "nonFieldErrors", + "messages": "Doesn't exists in the database", + "object_errors": None, + } + ], + ) + + def test_delete_subscription(self): + user = UserFactory.create() + user2 = UserFactory.create() + + subscription = self.create_subscription(user=user) + other_subscription = self.create_subscription(user=user2) + + self.force_login(user) + + content = self._query_delete(subscription.id) + sub_data = content["data"]["private"]["deleteUserAlertSubscription"] + self.assertEqual(sub_data["ok"], True, content) + self.assertEqual(sub_data["errors"], None, content) + + # Others subscription + content = self._query_delete(other_subscription.id) + sub_data = content["data"]["private"]["deleteUserAlertSubscription"] + self.assertEqual(sub_data["ok"], False, content) + self.assertNotEqual(sub_data["errors"], None, content) + self.assertEqual( + sub_data["errors"], + [ + { + "array_errors": None, + "field": "nonFieldErrors", + "messages": "Doesn't exists in the database", + "object_errors": None, + } + ], + ) diff --git a/apps/subscription/tests/test_queries.py b/apps/subscription/tests/test_queries.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/subscription/tests/test_subscription_alert_tagging.py 
b/apps/subscription/tests/test_subscription_alert_tagging.py new file mode 100644 index 00000000..ecdbf38a --- /dev/null +++ b/apps/subscription/tests/test_subscription_alert_tagging.py @@ -0,0 +1,56 @@ +from apps.cap_feed.factories import ( + Admin1Factory, + AlertFactory, + AlertInfoFactory, + CountryFactory, + FeedFactory, + RegionFactory, +) +from apps.cap_feed.models import Alert, AlertInfo +from apps.subscription.factories import UserAlertSubscriptionFactory +from apps.subscription.models import SubscriptionAlert +from apps.subscription.tasks import process_pending_subscription_alerts +from apps.user.factories import UserFactory +from main.tests import TestCase + + +class TestSubscriptionMutation(TestCase): + def setUp(self): + super().setUp() + self.user = UserFactory.create() + + self.r_asia = RegionFactory.create(name="Asia") + self.c_nepal = CountryFactory.create(region=self.r_asia) + self.ad_bagmati = Admin1Factory.create(country=self.c_nepal) + + def test_subscription_alert_tagging(self): + feed1 = FeedFactory.create(country=self.c_nepal) + alert1 = AlertFactory.create( + feed=feed1, + country=self.c_nepal, + admin1s=[self.ad_bagmati], + # is_processed_by_subscription=True, + ) + AlertInfoFactory.create( + alert=alert1, + category=AlertInfo.Category.HEALTH, + urgency=AlertInfo.Urgency.IMMEDIATE, + severity=AlertInfo.Severity.EXTREME, + certainty=AlertInfo.Certainty.OBSERVED, + ) + + UserAlertSubscriptionFactory.create( + user=self.user, + filter_alert_country=self.c_nepal, + filter_alert_admin1s=[self.ad_bagmati.id], + filter_alert_urgencies=[AlertInfo.Urgency.IMMEDIATE], + filter_alert_severities=[], + filter_alert_certainties=[], + filter_alert_categories=[], + ) + + assert Alert.objects.filter(is_processed_by_subscription=False).count() == 1 + assert SubscriptionAlert.objects.count() == 0 + process_pending_subscription_alerts() + assert Alert.objects.filter(is_processed_by_subscription=False).count() == 0 + assert SubscriptionAlert.objects.count() != 
0 diff --git a/apps/subscription/types.py b/apps/subscription/types.py new file mode 100644 index 00000000..a8d99957 --- /dev/null +++ b/apps/subscription/types.py @@ -0,0 +1,93 @@ +import typing + +import strawberry +import strawberry_django +from django.db import models +from strawberry_django.pagination import OffsetPaginationInput + +from apps.cap_feed.filters import AlertFilter +from apps.cap_feed.orders import AlertOrder +from apps.cap_feed.types import Admin1Type, AlertType, CountryType +from main.graphql.context import Info +from utils.common import get_queryset_for_model +from utils.strawberry.enums import enum_display_field, enum_field +from utils.strawberry.paginations import CountList, count_list_resolver +from utils.strawberry.types import string_field + +from .models import UserAlertSubscription + + +@strawberry_django.type(UserAlertSubscription) +class UserAlertSubscriptionType: + id: strawberry.ID + created_at: strawberry.auto + modified_at: strawberry.auto + + name = string_field(UserAlertSubscription.name) + is_active: strawberry.auto + + # Filters + filter_alert_country_id: strawberry.ID + filter_alert_admin1s: list[strawberry.ID] + # Enum - Value + filter_alert_urgencies = enum_field(UserAlertSubscription.filter_alert_urgencies) + filter_alert_severities = enum_field(UserAlertSubscription.filter_alert_severities) + filter_alert_certainties = enum_field(UserAlertSubscription.filter_alert_certainties) + filter_alert_categories = enum_field(UserAlertSubscription.filter_alert_categories) + # Enum - Display + filter_alert_urgencies_display = enum_display_field(UserAlertSubscription.filter_alert_urgencies) + filter_alert_severities_display = enum_display_field(UserAlertSubscription.filter_alert_severities) + filter_alert_certainties_display = enum_display_field(UserAlertSubscription.filter_alert_certainties) + filter_alert_categories_display = enum_display_field(UserAlertSubscription.filter_alert_categories) + + notify_by_email: strawberry.auto + 
email_frequency = enum_field(UserAlertSubscription.email_frequency) + email_frequency_display = enum_display_field(UserAlertSubscription.email_frequency) + email_last_sent_at: strawberry.auto + + @staticmethod + def get_queryset(_, queryset: models.QuerySet | None, info: Info): + return get_queryset_for_model(UserAlertSubscription, queryset).filter( + user=info.context.request.user, + ) + + @strawberry_django.field + async def alerts( + self, + info: Info, + root: strawberry.Parent[UserAlertSubscription], + filters: typing.Optional[AlertFilter] = strawberry.UNSET, + order: typing.Optional[AlertOrder] = strawberry.UNSET, + pagination: typing.Optional[OffsetPaginationInput] = strawberry.UNSET, + ) -> CountList[AlertType]: + queryset = AlertType.get_queryset(None, None, info).filter( + subscriptions=root.pk, + ) + return count_list_resolver( + info, + queryset, + AlertType, + filters=filters, # type: ignore[reportArgumentType] + order=order, # type: ignore[reportArgumentType] + pagination=pagination, # type: ignore[reportArgumentType] + ) + + @strawberry.field + async def filter_alert_country( + self, + info: Info, + root: strawberry.Parent[UserAlertSubscription], + ) -> CountryType: + return await info.context.dl.cap_feed.load_country.load(root.filter_alert_country_id) + + @strawberry.field + async def filter_alert_admin1s_display( + self, + info: Info, + root: strawberry.Parent[UserAlertSubscription], + ) -> list[Admin1Type]: + if root.filter_alert_admin1s: + return await info.context.dl.cap_feed.load_admin1_by_admin1s.load( + tuple(root.filter_alert_admin1s), + ) + return [] diff --git a/apps/subscription/views.py b/apps/subscription/views.py index fd0e0449..163873c9 100644 --- a/apps/subscription/views.py +++ b/apps/subscription/views.py @@ -1,3 +1,36 @@ -# from django.shortcuts import render +from django.contrib.auth.decorators import login_required +from django.http import HttpResponse +from django.template import loader -# Create your views here. 
+from .emails import generate_user_alert_subscription_email_context +from .models import UserAlertSubscription + +USER_ALERT_SUBSCRIPTION_EMAIL_PREVIEW_MESSAGE = """ + To use email_frequency in GET params, Please specify integer values. Default is Daily
+ Use this for reference

+ """ + '
'.join( + [f"{frequency.label}: {frequency.value}" for frequency in UserAlertSubscription.EmailFrequency] +) + + +@login_required +def user_alert_subscription_email_preview(request): + try: + email_frequency = int( + request.GET.get( + "email_frequency", + UserAlertSubscription.EmailFrequency.DAILY, + ) + ) + if email_frequency not in UserAlertSubscription.EmailFrequency: + return HttpResponse(USER_ALERT_SUBSCRIPTION_EMAIL_PREVIEW_MESSAGE) + email_frequency = UserAlertSubscription.EmailFrequency(email_frequency) + except ValueError: + return HttpResponse(USER_ALERT_SUBSCRIPTION_EMAIL_PREVIEW_MESSAGE) + + context, _ = generate_user_alert_subscription_email_context( + request.user, + email_frequency, + ) + template = loader.get_template("emails/subscription/body.html") + return HttpResponse(template.render(context, request)) diff --git a/apps/subscription_manager/admin.py b/apps/subscription_manager/admin.py deleted file mode 100644 index bc8bc536..00000000 --- a/apps/subscription_manager/admin.py +++ /dev/null @@ -1,27 +0,0 @@ -from django.contrib import admin - -from apps.cap_feed.models import AlertAdmin1, AlertInfo - -from .models import Alert, SubscriptionAlerts - - -class AlertAdmin1Inline(admin.StackedInline): - model = AlertAdmin1 - extra = 0 - - -class AlertInfoInline(admin.StackedInline): - model = AlertInfo - extra = 0 - - -class AlertAdmin(admin.ModelAdmin): - # using = 'AlertDB' - list_display = ["id", "sent"] - search_fields = ["id"] - - inlines = [AlertInfoInline, AlertAdmin1Inline] - - -admin.site.register(SubscriptionAlerts) -admin.site.register(Alert) diff --git a/apps/subscription_manager/apps.py b/apps/subscription_manager/apps.py deleted file mode 100644 index 27076364..00000000 --- a/apps/subscription_manager/apps.py +++ /dev/null @@ -1,22 +0,0 @@ -import os -import sys - -from django.apps import AppConfig - - -class SubscriptionManagerConfig(AppConfig): - default_auto_field = 'django.db.models.BigAutoField' - name = 'apps.subscription_manager' 
- - def ready(self): - if ('WEBSITE_HOSTNAME' in os.environ and 'migrate' not in sys.argv and 'collectstatic' not in sys.argv) or ( - 'WEBSITE_HOSTNAME' not in os.environ and 'runserver' in sys.argv - ): - pass - # Used for testing locks - # from .subscription_alert_mapping import map_subscription_to_alert, \ - # map_alert_to_subscription, delete_alert_to_subscription - # from django.core.cache import cache - # cache.clear() - # map_subscription_to_alert(3) - # delete_alert_to_subscription(3408) diff --git a/apps/subscription_manager/management/commands/initcache.py b/apps/subscription_manager/management/commands/initcache.py deleted file mode 100644 index ac012d9b..00000000 --- a/apps/subscription_manager/management/commands/initcache.py +++ /dev/null @@ -1,31 +0,0 @@ -from django.core.management.base import BaseCommand - -from main.cache import cache - - -class Command(BaseCommand): - help = "This commands helps to clear cache" - - def handle(self, *args, **options): - # import time - all_keys = cache.keys("*") - print(f"previous keys: {all_keys}") - cache.clear() - print("Clear Cache") - - # dict = {"8": {"id": 8, "event": "Marine Weather Statement", "category": "Met", - # "country_name": "Teyvat_1", "admin1s": ["Meng De"], - # "sent": "2023-08-22 21:40:59.514832+00:00"} - # } - # alert_dict = {} - # start_time = time.time() - # for i in range(10000): - # list = [] - # for j in range(200): - # list.append(dict) - # alert_dict[i] = list - # #cache.set("anything", list, timeout=None) - # #print(f"time taken 1: {time.time() - start_time }") - # cache.set("anything", alert_dict, timeout=None) - # cache.get("anything") - # print(f"time taken 2: {time.time() - start_time}") diff --git a/apps/subscription_manager/management/commands/initdatabase.py b/apps/subscription_manager/management/commands/initdatabase.py deleted file mode 100644 index d77a87f2..00000000 --- a/apps/subscription_manager/management/commands/initdatabase.py +++ /dev/null @@ -1,17 +0,0 @@ -from 
django.core.cache import cache -from django.core.management.base import BaseCommand - -from apps.subscription_manager.subscription_alert_mapping import ( - map_subscriptions_to_alert, -) - - -class Command(BaseCommand): - help = "Starting inputting alerts from alert database into subscription database" - - def handle(self, *args, **options): - cache.clear() - print("Clear Cache") - # Converting all alerts in alert database into subscription database - map_subscriptions_to_alert() - print("All alerts data in alert database has been mapped with each subscription.") diff --git a/apps/subscription_manager/migrations/0001_initial.py b/apps/subscription_manager/migrations/0001_initial.py deleted file mode 100644 index 0931386f..00000000 --- a/apps/subscription_manager/migrations/0001_initial.py +++ /dev/null @@ -1,94 +0,0 @@ -# Generated by Django 5.0.3 on 2024-03-11 12:14 - -import django.db.models.deletion -from django.db import migrations, models - - -class Migration(migrations.Migration): - - initial = True - - dependencies = [ - ('subscription', '0001_initial'), - ] - - operations = [ - migrations.CreateModel( - name='CapFeedAdmin1', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=255)), - ], - options={ - 'db_table': 'cap_feed_admin1', - 'managed': False, - }, - ), - migrations.CreateModel( - name='CapFeedAlert', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('sent', models.DateTimeField()), - ], - options={ - 'db_table': 'cap_feed_alert', - 'managed': False, - }, - ), - migrations.CreateModel( - name='CapFeedAlertadmin1', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ], - options={ - 'db_table': 'cap_feed_alertadmin1', - 'managed': False, - }, - ), - migrations.CreateModel( - name='CapFeedAlertinfo', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('category', models.CharField()), - ('event', 
models.CharField(max_length=255)), - ('urgency', models.CharField()), - ('severity', models.CharField()), - ('certainty', models.CharField()), - ], - options={ - 'db_table': 'cap_feed_alertinfo', - 'managed': False, - }, - ), - migrations.CreateModel( - name='CapFeedCountry', - fields=[ - ('id', models.BigAutoField(primary_key=True, serialize=False)), - ('name', models.CharField(max_length=255)), - ], - options={ - 'db_table': 'cap_feed_country', - 'managed': False, - }, - ), - migrations.CreateModel( - name='Alert', - fields=[ - ('id', models.IntegerField(primary_key=True, serialize=False)), - ], - ), - migrations.CreateModel( - name='SubscriptionAlerts', - fields=[ - ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('sent', models.BooleanField(default=False)), - ('alert', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subscription_manager.alert')), - ('subscription', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='subscription.subscription')), - ], - ), - migrations.AddField( - model_name='alert', - name='subscriptions', - field=models.ManyToManyField(through='subscription_manager.SubscriptionAlerts', to='subscription.subscription'), - ), - ] diff --git a/apps/subscription_manager/migrations/0002_delete_capfeedadmin1_delete_capfeedalert_and_more.py b/apps/subscription_manager/migrations/0002_delete_capfeedadmin1_delete_capfeedalert_and_more.py deleted file mode 100644 index 30d4569e..00000000 --- a/apps/subscription_manager/migrations/0002_delete_capfeedadmin1_delete_capfeedalert_and_more.py +++ /dev/null @@ -1,28 +0,0 @@ -# Generated by Django 4.2.11 on 2024-03-26 12:27 - -from django.db import migrations - - -class Migration(migrations.Migration): - - dependencies = [ - ('subscription_manager', '0001_initial'), - ] - - operations = [ - migrations.DeleteModel( - name='CapFeedAdmin1', - ), - migrations.DeleteModel( - name='CapFeedAlert', - ), - migrations.DeleteModel( - 
name='CapFeedAlertadmin1', - ), - migrations.DeleteModel( - name='CapFeedAlertinfo', - ), - migrations.DeleteModel( - name='CapFeedCountry', - ), - ] diff --git a/apps/subscription_manager/models.py b/apps/subscription_manager/models.py deleted file mode 100644 index d3e2845d..00000000 --- a/apps/subscription_manager/models.py +++ /dev/null @@ -1,15 +0,0 @@ -# These files are used for storing required models of subscriptions and their correlated alerts -from django.db import models - -from apps.subscription.models import Subscription - - -class Alert(models.Model): - id = models.IntegerField(primary_key=True) - subscriptions = models.ManyToManyField(Subscription, through="SubscriptionAlerts") - - -class SubscriptionAlerts(models.Model): - subscription = models.ForeignKey(Subscription, on_delete=models.CASCADE) - alert = models.ForeignKey(Alert, on_delete=models.CASCADE) - sent = models.BooleanField(default=False) diff --git a/apps/subscription_manager/subscription_alert_mapping.py b/apps/subscription_manager/subscription_alert_mapping.py deleted file mode 100644 index 1e3839eb..00000000 --- a/apps/subscription_manager/subscription_alert_mapping.py +++ /dev/null @@ -1,179 +0,0 @@ -import json - -from django.db import transaction - -from apps.cap_feed.models import Admin1 as CapFeedAdmin1 -from apps.cap_feed.models import Alert as CapFeedAlert -from main.cache import cache - -from .models import Alert, Subscription -from .tasks import process_immediate_alerts - - -def map_subscriptions_to_alert(): - subscriptions = Subscription.objects.all() - for subscription in subscriptions: - map_subscription_to_alert(subscription.id) - - -def map_subscription_to_alert(subscription_id): - updated_alerts = [] - # Only if the subscription finished its last mapping, we start to map the new one. 
- update_subscription_locked = cache.lock(subscription_id, timeout=None) - try: - update_subscription_locked.acquire(blocking=True) - # Make sure that in the process of second update, - # the user still cannot view subscription alerts. - cache.set("v" + str(subscription_id), True, timeout=None) - subscription = Subscription.objects.filter(id=subscription_id).first() - - if subscription is None: - return None - subscription.alert_set.clear() - # This stores alerts that are already processed. - potential_alert_ids = [] - # This stores matched alerts. - for admin1_id in subscription.admin1_ids: - admin1 = CapFeedAdmin1.objects.filter(id=admin1_id).first() - if admin1 is None: - continue - potential_alert_set = admin1.alert_set.all() - - for alert in potential_alert_set: - alert_id = alert.pk - if alert_id in potential_alert_ids: - continue - potential_alert_ids.append(alert_id) - # Lock the alert to not be deleted during matching - deleted_alert_lock = cache.lock("a" + str(alert_id), timeout=None) - # If this lock is already locked, meaning it is being deleted, we just skip - # processing it. - if deleted_alert_lock.locked(): - continue - try: - # If the alert is not to be deleted, lock it so the potential deletion of this - # alert can be delayed. 
- deleted_alert_lock.acquire(blocking=True) - for info in alert.alertinfo_set.all(): - if ( - info.severity in subscription.severity_array - and info.certainty in subscription.certainty_array - and info.urgency in subscription.urgency_array - ): - - internal_alert = Alert.objects.filter(id=alert.pk).first() - if internal_alert is None: - internal_alert = Alert.objects.create(id=alert.pk) - internal_alert.save() - - updated_alerts.append(internal_alert) - break - except Exception: - pass - finally: - deleted_alert_lock.release() - - subscription.alert_set.add(*updated_alerts) - # print([alert.id for alert in subscription.alert_set.all()]) - # Subscription Locks For Testing - # time.sleep(20) - - except Exception as exception: - print(f"Creation Exception: {exception}") - - finally: - lock = cache.get("v" + str(subscription_id)) - if lock is not None and lock is True: - cache.delete("v" + str(subscription_id)) - - update_subscription_locked.release() - - return "Mapping Finished!" - - -def map_alert_to_subscription(alert_id): - alert = CapFeedAlert.objects.filter(id=alert_id).prefetch_related('admin1s', 'capfeedalertinfo_set').first() - - if alert is None: - return f"Alert with id {alert_id} is not existed" - - converted_alert = Alert.objects.filter(id=alert_id).first() - - if converted_alert is not None: - return f"Alert with id {alert_id} is already converted and matched subscription" - - internal_alert = None - updated_subscriptions = [] - - alert_admin1_ids = [admin1.pk for admin1 in alert.admin1s.all()] - subscriptions = Subscription.objects.filter(admin1_ids__overlap=alert_admin1_ids) - - with transaction.atomic(): - for subscription in subscriptions: - matching_info = None - for info in alert.alertinfo_set.all(): - if ( - info.severity in subscription.severity_array - and info.certainty in subscription.certainty_array - and info.urgency in subscription.urgency_array - ): - matching_info = info - break - - if matching_info: - if internal_alert is None: - 
internal_alert = Alert.objects.create(id=alert.pk) - - updated_subscriptions.append(subscription) - - if subscription.sent_flag == 0: - process_immediate_alerts(subscription.id) - - if internal_alert: - internal_alert.subscriptions.add(*updated_subscriptions) - internal_alert.save() - - if updated_subscriptions: - subscription_ids = [subscription.id for subscription in updated_subscriptions] - return f"Incoming Alert {alert_id} is successfully converted. " f"Mapped Subscription id are {subscription_ids}." - - return f"Incoming Alert {alert_id} is not mapped with any subscription." - - -def delete_alert_to_subscription(alert_id): - alert_to_be_deleted = Alert.objects.filter(id=alert_id).first() - if alert_to_be_deleted is None: - return f"Alert with id {alert_id} is not found in subscription database." - alert_lock = cache.lock("a" + str(alert_id), timeout=None) - updated_subscription_ids = [] - try: - alert_lock.acquire(blocking=True) - subscriptions = alert_to_be_deleted.subscriptions.all() - updated_subscription_ids = [subscription.id for subscription in subscriptions] - with transaction.atomic(): - alert_to_be_deleted.subscriptions.clear() - - except Exception as exception: - print(f"Delete Exception: {exception}") - - finally: - alert_lock.release() - - if len(updated_subscription_ids) != 0: - return ( - f"Alert {alert_id} is successfully deleted from subscription database. " - f"Updated Subscription id are " - f"{updated_subscription_ids}." - ) - - return f"Alert {alert_id} is successfully deleted from subscription database. 
" - - -def get_subscription_alerts_without_mapping_records(subscription_id): - subscription = Subscription.objects.filter(id=subscription_id).first() - if subscription is None: - return False - - map_subscription_to_alert(subscription) - subscription_alerts_dict = subscription.get_alert_id_list() - return json.dumps(subscription_alerts_dict, indent=None) diff --git a/apps/subscription_manager/tasks.py b/apps/subscription_manager/tasks.py deleted file mode 100644 index 2d2f900e..00000000 --- a/apps/subscription_manager/tasks.py +++ /dev/null @@ -1,130 +0,0 @@ -from celery import shared_task -from django.conf import settings -from django.core.mail import send_mail -from django.template.loader import render_to_string -from django.utils.html import strip_tags - -from apps.user.models import User - - -@shared_task -def send_subscription_email(user_id, subject, template_name, context=None): - try: - user = User.objects.get(id=user_id) - except User.DoesNotExist: - return "Invalid User ID" - - context = context or {} - context.update( - { - 'user': user, - } - ) - - message = render_to_string(template_name, context) - - send_mail( - subject=subject, - message=strip_tags(message), - html_message=message, - from_email=settings.DEFAULT_FROM_EMAIL, - recipient_list=[user.email], - fail_silently=True, - ) - - return "Done" - - -@shared_task -def process_immediate_alerts(subscription_id): - from .models import SubscriptionAlerts # Subscription, - - # subscription = Subscription.objects.get(id=subscription_id) - # subscription_name = subscription.subscription_name - # user_id = subscription.user_id # pylint: disable=W0612 - - related_alerts = SubscriptionAlerts.objects.filter(subscription=subscription_id, sent=False) - - if not related_alerts: - return - - # related_alerts_count = related_alerts.count() - - # viewer_link = "https://alert-hub-frontend.azurewebsites.net/account/subscription" - - # context = { # pylint: disable=W0612 - # 'title': subscription_name, - # 'count': 
related_alerts_count, - # 'viewer_link': viewer_link, - # } - - # send_subscription_email.delay(user_id, 'New Alerts Matching Your Subscription', - # 'subscription_email.html', context) - - related_alerts.update(sent=True) - - -@shared_task -def process_non_immediate_alerts(sent_flag): - from .models import Subscription, SubscriptionAlerts - - subscriptions = Subscription.objects.filter(sent_flag=sent_flag) - - for subscription in subscriptions: - subscription_id = subscription.id - # subscription_name = subscription.subscription_name - # user_id = subscription.user_id # pylint: disable=W0612 - - related_alerts = SubscriptionAlerts.objects.filter(subscription=subscription_id, sent=False) - - if not related_alerts: - continue - - # related_alerts_count = related_alerts.count() - - # viewer_link = "https://alert-hub-frontend.azurewebsites.net/account/subscription" - - # context = { # pylint: disable=W0612 - # 'title': subscription_name, - # 'count': related_alerts_count, - # 'viewer_link': viewer_link, - # } - - # send_subscription_email.delay(user_id, 'New Alerts Matching Your Subscription', - # 'subscription_email.html', context) - - related_alerts.update(sent=True) - - -@shared_task -def get_incoming_alert(alert_id): - from .subscription_alert_mapping import map_alert_to_subscription - - return map_alert_to_subscription(alert_id) - - -@shared_task -def get_removed_alert(alert_id): - from .subscription_alert_mapping import delete_alert_to_subscription - - return delete_alert_to_subscription(alert_id) - - -@shared_task -def initialise_task(): - from .subscription_alert_mapping import map_subscriptions_to_alert - - map_subscriptions_to_alert() - - -@shared_task -def subscription_mapper(subscription_id): - from apps.subscription.models import Subscription - from apps.subscription_manager.subscription_alert_mapping import ( - map_subscription_to_alert, - ) - - try: - map_subscription_to_alert(subscription_id) - except Subscription.DoesNotExist: - print(f"Subscription 
{subscription_id} not exist") diff --git a/apps/subscription_manager/templates/subscription_email.html b/apps/subscription_manager/templates/subscription_email.html deleted file mode 100644 index 7d7d1231..00000000 --- a/apps/subscription_manager/templates/subscription_email.html +++ /dev/null @@ -1,33 +0,0 @@ - - - - [IFRC] New alert update from your subscriptions - - - -

Alert Update: Please visit your alert subscriptions to view details.

- -

There are {{ count }} new alerts matching your subscription - '{{ title }}'.

-

You can view this update by clicking here.

- -{% if alerts %} -

Alert Summary

- -{% endif %} - - - diff --git a/apps/subscription_manager/tests.py b/apps/subscription_manager/tests.py deleted file mode 100644 index 32831450..00000000 --- a/apps/subscription_manager/tests.py +++ /dev/null @@ -1,580 +0,0 @@ -from django.core.cache import cache -from django.test import TestCase -from django.utils import timezone - -from apps.cap_feed.models import Admin1 as CapFeedAdmin1 -from apps.cap_feed.models import Alert as CapFeedAlert -from apps.cap_feed.models import AlertInfo as CapFeedAlertinfo -from apps.cap_feed.models import Country as CapFeedCountry - -from .models import Alert, Subscription -from .subscription_alert_mapping import ( - delete_alert_to_subscription, - map_alert_to_subscription, - map_subscription_to_alert, - map_subscriptions_to_alert, -) - -# Since Subscription System can only have read-access to Alert DB, the tables in external models -# need to be simulated on Subscription DB, otherwise the test data will not be inserted. -# This makes sure that we could mock exact data we want on these models and test the operations -# that manipulate them. 
- - -class SubscriptionManagerTestCase(TestCase): - # Setup data for the tests - @classmethod - def setUpClass(cls): - teyvat_1 = CapFeedCountry.objects.create(name="Teyvat_1") - teyvat_1.save() - teyvat_2 = CapFeedCountry.objects.create(name="Teyvat_2") - teyvat_2.save() - - # create admin data for migrations - admin1_1 = CapFeedAdmin1.objects.create(name="Meng De", country=teyvat_1) - admin1_1.save() - admin1_2 = CapFeedAdmin1.objects.create(name="Li Yue", country=teyvat_1) - admin1_2.save() - admin1_3 = CapFeedAdmin1.objects.create(name="Xu Mi", country=teyvat_2) - admin1_3.save() - admin1_4 = CapFeedAdmin1.objects.create(name="Feng Dan", country=teyvat_2) - admin1_4.save() - - # create alert data - alert_1 = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - alert_1.admin1s.add(admin1_1, admin1_2) - alert_1.save() - alert_info_1 = CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Expected", - severity="Minor", - certainty="Observed", - alert=alert_1, - ) - alert_info_2 = CapFeedAlertinfo.objects.create( - category="Met", - event="Thunderstormwarning", - urgency="Future", - severity="Moderate", - certainty="Likely", - alert=alert_1, - ) - alert_info_1.save() - alert_info_2.save() - - alert_2 = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_2) - alert_2.admin1s.add(admin1_3, admin1_4) - alert_2.save() - alert_info_3 = CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Expected", - severity="Minor", - certainty="Likely", - alert=alert_2, - ) - alert_info_4 = CapFeedAlertinfo.objects.create( - category="Met", - event="Thunderstormwarning", - urgency="Immediate", - severity="Moderate", - certainty="Observed", - alert=alert_2, - ) - alert_info_3.save() - alert_info_4.save() - - alert_3 = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - alert_3.admin1s.add(admin1_1) - alert_3.save() - alert_info_5 = 
CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Expected", - severity="Minor", - certainty="Possible", - alert=alert_3, - ) - alert_info_5.save() - - alert_4 = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_2) - alert_4.admin1s.add(admin1_4) - alert_4.save() - alert_info_6 = CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Expected", - severity="Severe", - certainty="Possible", - alert=alert_4, - ) - alert_info_6.save() - - cache.clear() - - super(SubscriptionManagerTestCase, cls).setUpClass() - - @classmethod - def tearDownClass(cls): - # Clean up any resources if necessary - super().tearDownClass() - - # Test: Creation of subscriptions and check whether subscriptions matched expected list of - # alerts - def test_subscription_creation_1(self): - urgency_list = ["Expected", "Future"] - severity_list = ["Minor", "Moderate"] - certainty_list = ["Likely", "Observed", "Possible"] - subscription = Subscription.objects.create( - subscription_name="Subscription 1", - user_id=1, - country_ids=[1], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [1, 3] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - def test_subscription_creation_2(self): - urgency_list = ["Expected"] - severity_list = ["Severe"] - certainty_list = ["Possible"] - subscription = Subscription.objects.create( - subscription_name="Subscription 2", - user_id=1, - country_ids=[2], - admin1_ids=[3, 4], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - - expected = [4] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - 
subscription.delete() - - def test_subscription_creation_all_alerts_in_country_1(self): - urgency_list = ["Expected", "Immediate", "Future"] - severity_list = ["Minor", "Severe", "Moderate"] - certainty_list = ["Likely", "Possible", "Observed"] - subscription = Subscription.objects.create( - subscription_name="Subscription 3", - user_id=1, - country_ids=[2], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [1, 3] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - subscription.delete() - - def test_subscription_creation_all_alerts_in_country_2(self): - urgency_list = ["Expected", "Immediate", "Future"] - severity_list = ["Minor", "Severe", "Moderate"] - certainty_list = ["Likely", "Possible", "Observed"] - subscription = Subscription.objects.create( - subscription_name="Subscription 4", - user_id=1, - country_ids=[2], - admin1_ids=[3, 4], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [2, 4] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - subscription.delete() - - # Test: update subscription by severity, certainty, and urgency and check corresponding alerts - def test_subscription_update_1(self): - urgency_list = ["Expected", "Immediate", "Future"] - severity_list = ["Minor", "Severe", "Moderate"] - certainty_list = ["Likely", "Possible", "Observed"] - subscription = Subscription.objects.create( - subscription_name="Subscription 5", - user_id=1, - country_ids=[2], - admin1_ids=[3, 4], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [2, 4] - 
actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - # Update urgency, severity, certainty for the subscription - urgency_list = ["Expected"] - severity_list = ["Severe"] - certainty_list = ["Possible"] - subscription.urgency_array = urgency_list - subscription.severity_array = severity_list - subscription.certainty_array = certainty_list - - subscription.save() - - map_subscription_to_alert(subscription.pk) - expected = [4] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - subscription.delete() - - # Test: update subscription by regions and check corresponding alerts - - def test_subscription_update_2(self): - urgency_list = ["Expected", "Immediate", "Future"] - severity_list = ["Minor", "Severe", "Moderate"] - certainty_list = ["Likely", "Possible", "Observed"] - subscription = Subscription.objects.create( - subscription_name="Subscription 6", - user_id=1, - country_ids=[2], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [1, 3] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - # Update admin1 for the subscription - admin1_ids = [3, 4] - subscription.admin1_ids = admin1_ids - subscription.save() - map_subscription_to_alert(subscription.pk) - - expected = [2, 4] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - subscription.delete() - - # Test: delete subscription and check many subscription - to - many field - def test_subscription_delete_1(self): - urgency_list = ["Expected", "Immediate", "Future"] - severity_list = ["Minor", "Severe", "Moderate"] - certainty_list = ["Likely", "Possible", "Observed"] - subscription = Subscription.objects.create( - subscription_name="Subscription 7", - user_id=1, - country_ids=[2], - admin1_ids=[1, 2], - 
urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - expected = [1, 3] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - # Delete the subscription - subscription.delete() - - # Check if there is still many-to-many relationship between deleted subscriptions and - # corresponding alerts - - for alert in Alert.objects.filter(id__in=actual).all(): - alert_subscriptions = alert.subscriptions.all() - self.assertQuerysetEqual(alert_subscriptions, []) - - def test_subscription_delete_2(self): - urgency_list = ["Expected"] - severity_list = ["Severe"] - certainty_list = ["Possible"] - subscription = Subscription.objects.create( - subscription_name="Subscription 8", - user_id=1, - country_ids=[2], - admin1_ids=[3, 4], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - - expected = [4] - actual = subscription.get_alert_id_list() - self.assertListEqual(expected, actual) - - subscription.delete() - - # Check if there is still many-to-many relationship between deleted subscriptions and - # corresponding alerts - for alert in Alert.objects.filter(id__in=actual).all(): - alert_subscriptions = alert.subscriptions.all() - self.assertQuerysetEqual(alert_subscriptions, []) - - # Test incoming alert that is not existed - def test_incoming_alert_that_is_not_existed(self): - result = map_alert_to_subscription(100) - expected = "Alert with id 100 is not existed" - self.assertEqual(expected, result) - - # Test incoming alert is already converted - def test_incoming_alert_with_already_existed_id(self): - # Create New subscription that maps the incoming alert - urgency_list = ["Expected", "Future"] - severity_list = ["Minor", "Moderate"] - certainty_list = ["Likely", "Observed"] - subscription = 
Subscription.objects.create( - subscription_name="Common Subscription", - user_id=1, - country_ids=[2], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(subscription.pk) - # Try to map alert with id 2 to the new subscription, though it is already mapped to the - # above susbcription - result = map_alert_to_subscription(1) - expected = "Alert with id 1 is already converted and matched subscription" - self.assertEqual(expected, result) - - # Test incoming alert and test if it matches the existing subscription - def test_incoming_alert_mapping_subscription(self): - # create the subscription - urgency_list = ["Expected", "Future"] - severity_list = ["Minor", "Moderate"] - certainty_list = ["Likely", "Observed"] - common_subscription = Subscription.objects.create( - subscription_name="Common Subscription", - user_id=1, - country_ids=[2], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - - # simulate the incoming alert - teyvat_1 = CapFeedCountry.objects.get(id=1) - admin1_1 = CapFeedAdmin1.objects.get(id=1) - admin1_2 = CapFeedAdmin1.objects.get(id=2) - mocked_incoming_alert = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - mocked_incoming_alert.admin1s.add(admin1_1, admin1_2) - mocked_incoming_alert.save() - CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Expected", - severity="Minor", - certainty="Observed", - alert=mocked_incoming_alert, - ) - - # Check if the alert maps the susbcriptions - result = map_alert_to_subscription(mocked_incoming_alert.pk) - updated_subscription_ids = [common_subscription.pk] - expected = ( - f"Incoming Alert {mocked_incoming_alert.pk} is successfully converted. " - f"Mapped Subscription id " - f"are {updated_subscription_ids}." 
- ) - self.assertEqual(expected, result) - - # Test incoming alert when it is not mapped with any subscription - def test_incoming_alert_not_mapping_subscription_cache(self): - # simulate the incoming alert - teyvat_1 = CapFeedCountry.objects.get(id=1) - admin1_1 = CapFeedAdmin1.objects.get(id=1) - mocked_incoming_alert = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - mocked_incoming_alert.admin1s.add(admin1_1) - mocked_incoming_alert.save() - CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Very Urgent", - severity="Minor", - certainty="Likely", - alert=mocked_incoming_alert, - ) - result = map_alert_to_subscription(mocked_incoming_alert.pk) - expected = f"Incoming Alert {mocked_incoming_alert.pk} is not mapped with any subscription." - self.assertEqual(expected, result) - - # Test deleted alert with id that is not existed - def test_deleted_alert_that_is_not_existed(self): - result = delete_alert_to_subscription(100) - expected = "Alert with id 100 is not found in subscription database." 
- self.assertEqual(expected, result) - - # Test deleted alert and test whether previously corresponded subscriptions is updated - def test_deleted_alert_that_previously_mapped_subscription(self): - # create the subscription - urgency_list = ["Very Urgent"] - severity_list = ["Minor"] - certainty_list = ["Likely"] - common_subscription = Subscription.objects.create( - subscription_name="Common Subscription", - user_id=1, - country_ids=[2], - admin1_ids=[1], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(common_subscription.pk) - - # simulate the incoming alert - teyvat_1 = CapFeedCountry.objects.get(id=1) - admin1_1 = CapFeedAdmin1.objects.get(id=1) - mocked_incoming_alert = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - mocked_incoming_alert_id = mocked_incoming_alert.pk - mocked_incoming_alert.admin1s.add(admin1_1) - mocked_incoming_alert.save() - CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Very Urgent", - severity="Minor", - certainty="Likely", - alert=mocked_incoming_alert, - ) - - # Map the alert to the susbcriptions - map_alert_to_subscription(mocked_incoming_alert.pk) - # Check if subscription deletes the alert in its corresponding alert list - result = delete_alert_to_subscription(mocked_incoming_alert_id) - updated_subscription_ids = [common_subscription.pk] - expected = ( - f"Alert {mocked_incoming_alert_id} is successfully " - f"deleted from subscription database. " - f"Updated Subscription id are " - f"{updated_subscription_ids}." 
- ) - self.assertEqual(expected, result) - - # Test deleted alert that is not mapped with any subscription(rare case) - def test_unmapped_deleted_alerts(self): - # create the subscription - urgency_list = ["Very Urgent"] - severity_list = ["Minor"] - certainty_list = ["Likely"] - common_subscription = Subscription.objects.create( - subscription_name="Common Subscription", - user_id=1, - country_ids=[2], - admin1_ids=[1], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - map_subscription_to_alert(common_subscription.pk) - # simulate the incoming alert - teyvat_1 = CapFeedCountry.objects.get(id=1) - admin1_1 = CapFeedAdmin1.objects.get(id=1) - mocked_incoming_alert = CapFeedAlert.objects.create(sent=timezone.now(), country=teyvat_1) - mocked_incoming_alert_id = mocked_incoming_alert.pk - mocked_incoming_alert.admin1s.add(admin1_1) - mocked_incoming_alert.save() - CapFeedAlertinfo.objects.create( - category="Met", - event="Marine Weather Statement", - urgency="Very Urgent", - severity="Minor", - certainty="Likely", - alert=mocked_incoming_alert, - ) - # Map and then delete the corresponding subscription. - # This will create a rare case that no subscription mapping the alerts - map_alert_to_subscription(mocked_incoming_alert_id) - common_subscription.delete() - - # Delete alert that is not mapped with any subscription - result = delete_alert_to_subscription(mocked_incoming_alert_id) - # Check results - expected = f"Alert {mocked_incoming_alert_id} is successfully deleted " f"from subscription database. 
" - self.assertEqual(expected, result) - - # Test map all subscriptions to alerts - def test_mapping_all_subscriptions_to_alerts(self): - urgency_list = ["Expected", "Future"] - severity_list = ["Minor", "Moderate"] - certainty_list = ["Likely", "Observed", "Possible"] - subscription_1 = Subscription.objects.create( - subscription_name="Subscriptions1", - user_id=1, - country_ids=[1], - admin1_ids=[1, 2], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - - urgency_list = ["Expected"] - severity_list = ["Severe"] - certainty_list = ["Possible"] - subscription_2 = Subscription.objects.create( - subscription_name="Subscriptions2", - user_id=1, - country_ids=[2], - admin1_ids=[3, 4], - urgency_array=urgency_list, - severity_array=severity_list, - certainty_array=certainty_list, - subscribe_by=[1], - sent_flag=0, - ) - - map_subscriptions_to_alert() - - expected = [1, 3] - actual = [] - for alert in subscription_1.alert_set.all(): - actual.append(alert.pk) - self.assertListEqual(expected, actual) - - expected = [4] - actual = [] - for alert in subscription_2.alert_set.all(): - actual.append(alert.pk) - self.assertListEqual(expected, actual) diff --git a/apps/subscription_manager/urls.py b/apps/subscription_manager/urls.py deleted file mode 100644 index bbf8e584..00000000 --- a/apps/subscription_manager/urls.py +++ /dev/null @@ -1,16 +0,0 @@ -from django.urls import path - -from . 
import views - -urlpatterns = [ - path( - 'get_subscription_alerts//', - views.get_subscirption_alerts, - name='Get All Alerts Matching Subscriptions', - ), - path( - 'get_subscription_alerts_in_real_time//', - views.get_subscription_alerts_in_real_time, - name='Get All Alerts Matching Subscriptions Without Real Time Computation', - ), -] diff --git a/apps/subscription_manager/views.py b/apps/subscription_manager/views.py deleted file mode 100644 index 96ec9567..00000000 --- a/apps/subscription_manager/views.py +++ /dev/null @@ -1,36 +0,0 @@ -import json - -from django.core.cache import cache -from django.http import HttpResponse - -from apps.subscription.models import Subscription - -from .subscription_alert_mapping import get_subscription_alerts_without_mapping_records - - -def get_subscirption_alerts(request, subscription_id): - try: - # Try to acquire the lock without waiting. If there is a lock, it means subscription is - # still mapping the alerts. - lock_acquired = cache.get("v" + str(subscription_id)) - if lock_acquired is not None and lock_acquired is True: - return HttpResponse("Subscription is still matching alerts!", status=202) - - subscription = Subscription.objects.get(id=subscription_id) - except Subscription.DoesNotExist: - return HttpResponse("Subscription is not found!", status=404) - - alert_list = subscription.get_alert_id_list() - - # if len(alert_list) == 0: - # return HttpResponse("[]", status=200) - - result = json.dumps(alert_list) - return HttpResponse(result, status=200) - - -def get_subscription_alerts_in_real_time(request, subscription_id): - result = get_subscription_alerts_without_mapping_records(subscription_id) - if not result: - return HttpResponse("Subscription is not found!", status=404) - return HttpResponse(result) diff --git a/apps/user/emails.py b/apps/user/emails.py new file mode 100644 index 00000000..e01b1f03 --- /dev/null +++ b/apps/user/emails.py @@ -0,0 +1,74 @@ +from django.utils import timezone +from 
django.utils.encoding import force_bytes +from django.utils.http import urlsafe_base64_encode + +from main.permalinks import Permalink +from main.tokens import TokenManager +from utils.emails import send_email + +from .models import EmailNotificationType, User + + +def send_account_activation(user: User): + """ + Generate a one-use only link for account activation and send it to the + user. + """ + uid = urlsafe_base64_encode(force_bytes(user.pk)) + token = TokenManager.account_activation_token_generator.make_token(user) + context = { + 'activation_url': Permalink.user_activation(uid, token), + } + send_email( + user=user, + email_type=EmailNotificationType.ACCOUNT_ACTIVATION, + subject="Account Activation", + email_html_template='emails/user/activation/body.html', + email_text_template='emails/user/activation/body.txt', + context=context, + ) + + +def send_password_reset( + user: User, + client_ip: str | None = None, + device_type: str | None = None, +) -> tuple[str, str]: + """ + Generate a one-use only link for resetting password and send it to the + user. 
+ """ + uid = urlsafe_base64_encode(force_bytes(user.pk)) + token = TokenManager.password_reset_token_generator.make_token(user) + context = { + 'time': timezone.now(), + 'location': client_ip, + 'device': device_type, + 'password_reset_url': Permalink.user_password_reset(uid, token), + } + send_email( + user=user, + email_type=EmailNotificationType.PASSWORD_RESET, + subject="Alert Hub: Password Reset", + email_html_template='emails/user/password_reset/body.html', + email_text_template='emails/user/password_reset/body.txt', + context=context, + ) + return uid, token + + +def send_password_changed_notification(user, client_ip, device_type): + context = { + 'time': timezone.now(), + 'location': client_ip, + 'device': device_type, + 'frontend_forgot_password': Permalink.FORGOT_PASSWORD, + } + send_email( + user=user, + email_type=EmailNotificationType.PASSWORD_CHANGED, + subject='Alert Hub: Password Changed', + email_html_template='emails/user/password_changed/body.html', + email_text_template='emails/user/password_changed/body.txt', + context=context, + ) diff --git a/apps/user/factories.py b/apps/user/factories.py new file mode 100644 index 00000000..2d0e955d --- /dev/null +++ b/apps/user/factories.py @@ -0,0 +1,23 @@ +import factory +from factory import fuzzy +from factory.django import DjangoModelFactory + +from .models import User + + +class UserFactory(DjangoModelFactory): + first_name = factory.Faker('first_name') + last_name = factory.Faker('last_name') + email = factory.Sequence(lambda n: f'{n}@xyz.com') + password_text = fuzzy.FuzzyText(length=15) + password = factory.PostGeneration(lambda user, *args, **kwargs: user.set_password(user.password_text)) + + class Meta: # type: ignore[reportIncompatibleVariableOverride] + model = User + + @classmethod + def _create(cls, model_class, *args, **kwargs): + password_text = kwargs.pop('password_text') + user = super()._create(model_class, *args, **kwargs) + user.password_text = password_text # If raw password is 
needed while testing + return user diff --git a/apps/user/models.py b/apps/user/models.py index b5df9513..67139ac5 100644 --- a/apps/user/models.py +++ b/apps/user/models.py @@ -22,12 +22,12 @@ def _create_user(self, email, password, **extra_fields): user.save(using=self._db) return user - def create_user(self, email=None, password=None, **extra_fields): + def create_user(self, email, password=None, **extra_fields): extra_fields.setdefault('is_staff', False) extra_fields.setdefault('is_superuser', False) return self._create_user(email, password, **extra_fields) - def create_superuser(self, email=None, password=None, **extra_fields): + def create_superuser(self, email, password=None, **extra_fields): extra_fields.setdefault('is_staff', True) extra_fields.setdefault('is_superuser', True) extra_fields.setdefault('is_active', True) @@ -46,16 +46,9 @@ class EmailNotificationType(models.IntegerChoices): PASSWORD_RESET = 2, _('Password Reset') PASSWORD_CHANGED = 3, _('Password Changed') NEWS_AND_OFFERS = 4, _('News And Offers') + ALERT_SUBSCRIPTIONS = 5, _('Alert Subscriptions') # Other emails are configured using subscriptions - @classmethod - def get_opt_emails(cls): - always_send = [ - cls.ACCOUNT_ACTIVATION, - cls.PASSWORD_RESET, - ] - return {enum.name: (enum.value, enum.label) for enum in cls if enum.value not in always_send} - class User(AbstractUser): class OptEmailNotificationType(models.IntegerChoices): @@ -65,7 +58,6 @@ class OptEmailNotificationType(models.IntegerChoices): username = None email = models.EmailField(verbose_name=_('email'), unique=True, blank=False, max_length=255) - # bounced_email = models.BooleanField(verbose_name=_('Email tagged as bounced'), default=False) email_opt_outs = ArrayField( models.IntegerField( @@ -91,7 +83,14 @@ class OptEmailNotificationType(models.IntegerChoices): EMAIL_FIELD = USERNAME_FIELD = "email" REQUIRED_FIELDS = [] - objects = CustomUserManager() # type: ignore [reportAssignmentType,reportGeneralTypeIssues] + objects: 
CustomUserManager = CustomUserManager() # type: ignore [reportAssignmentType,reportGeneralTypeIssues] + + # TODO: Make first_name and last_name not nullable + def get_full_name(self): + """ + Return the first_name plus the last_name, with a space in between. + """ + return (" ".join([val for val in [self.first_name, self.last_name] if val])).strip() def save(self, *args, **kwargs): self.display_name = self.get_full_name() or f'User#{self.pk}' @@ -110,4 +109,4 @@ def unsubscribe_email(self, email_type, save=False) -> None: self.save(update_fields=('email_opt_outs',)) def is_email_subscribed_for(self, email_type) -> bool: - return email_type in self.email_opt_outs and email_type in self.OPT_EMAIL_NOTIFICATION_TYPES + return not (email_type in self.email_opt_outs and email_type in self.OPT_EMAIL_NOTIFICATION_TYPES) diff --git a/apps/user/mutations.py b/apps/user/mutations.py index b2392430..797e0275 100644 --- a/apps/user/mutations.py +++ b/apps/user/mutations.py @@ -1,30 +1,182 @@ import strawberry +from asgiref.sync import sync_to_async +from django.contrib.auth import login, logout, update_session_auth_hash + +from main.graphql.context import Info +from utils.strawberry.mutations import ( + MutationEmptyResponseType, + MutationResponseType, + mutation_is_not_valid, + process_input_data, +) +from utils.strawberry.transformers import convert_serializer_to_type + +from .queries import UserMeType +from .serializers import ( + UserActivationSerializer, + UserLoginSerializer, + UserMeSerializer, + UserPasswordChangeSerializer, + UserPasswordResetConfirmSerializer, + UserPasswordResetTriggerSerializer, + UserRegisterSerializer, +) + +UserLoginInput = convert_serializer_to_type(UserLoginSerializer, name="UserLoginInput") +UserRegisterInput = convert_serializer_to_type(UserRegisterSerializer, name="UserRegisterInput") +UserActivationInput = convert_serializer_to_type(UserActivationSerializer, name="UserActivationInput") + +UserMeInput = 
convert_serializer_to_type(UserMeSerializer, partial=True, name='UserMeInput') + +UserPasswordResetTriggerInput = convert_serializer_to_type( + UserPasswordResetTriggerSerializer, name='UserPasswordResetTriggerInput' +) +UserPasswordResetConfirmInput = convert_serializer_to_type( + UserPasswordResetConfirmSerializer, name='UserPasswordResetConfirmInput' +) + +UserPasswordChangeInput = convert_serializer_to_type(UserPasswordChangeSerializer, name='UserPasswordChangeInput') + + +@strawberry.type +class PublicMutation: + + @strawberry.mutation + @sync_to_async + def login( + self, + data: UserLoginInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationResponseType[UserMeType]: + serializer = UserLoginSerializer(data=process_input_data(data), context={"request": info.context.request}) + if errors := mutation_is_not_valid(serializer): + return MutationResponseType( + ok=False, + errors=errors, + ) + assert isinstance(serializer.validated_data, dict) + user = serializer.validated_data["user"] + login(info.context.request, user) + return MutationResponseType(result=user) + + @strawberry.mutation + @sync_to_async + def register( + self, + data: UserRegisterInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserRegisterSerializer(data=process_input_data(data), context={"request": info.context.request}) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + serializer.save() + return MutationEmptyResponseType() + + @strawberry.mutation + @sync_to_async + def account_activation( + self, + data: UserActivationInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserActivationSerializer(data=process_input_data(data), context={"request": info.context.request}) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + 
serializer.save() + # Set user activation + return MutationEmptyResponseType() + + @strawberry.mutation + @sync_to_async + def password_reset_trigger( + self, + data: UserPasswordResetTriggerInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserPasswordResetTriggerSerializer( + data=process_input_data(data), + context={'request': info.context.request}, + ) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + serializer.save() + return MutationEmptyResponseType() + + @strawberry.mutation + @sync_to_async + def password_reset_confirm( + self, + data: UserPasswordResetConfirmInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserPasswordResetConfirmSerializer( + data=process_input_data(data), + context={'request': info.context.request}, + ) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + serializer.save() + return MutationEmptyResponseType() @strawberry.type -class Mutation: - """ - class ErrorType(graphene.ObjectType): - verifyCode = graphene.String() - email = graphene.String() - session = graphene.String() - userName = graphene.String() - user = graphene.String() - - - Existing Mutation - - register - - captcha - - verify_token using email - - logout - - change_email - - forget_password - - update_profile - - first_name: str - - last_name: str - - country: str - - city: str - - avatar: str - """ - - noop: strawberry.ID = strawberry.ID('noop') +class PrivateMutation: + @strawberry.mutation + @sync_to_async + def logout(self, info: Info) -> MutationEmptyResponseType: + if info.context.request.user.is_authenticated: + logout(info.context.request) + return MutationEmptyResponseType(ok=True) + return MutationEmptyResponseType(ok=False) + + @strawberry.mutation + @sync_to_async + def change_user_password( + self, + 
data: UserPasswordChangeInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationEmptyResponseType: + serializer = UserPasswordChangeSerializer(data=process_input_data(data), context={'request': info.context.request}) + if errors := mutation_is_not_valid(serializer): + return MutationEmptyResponseType( + ok=False, + errors=errors, + ) + serializer.save() + update_session_auth_hash(info.context.request, info.context.request.user) # type: ignore[reportArgumentType] + return MutationEmptyResponseType() + + @strawberry.mutation + @sync_to_async + def update_me( + self, + data: UserMeInput, # type: ignore[reportInvalidTypeForm] + info: Info, + ) -> MutationResponseType[UserMeType]: + serializer = UserMeSerializer( + instance=info.context.request.user, + data=process_input_data(data), + context={'request': info.context.request}, + partial=True, + ) + if errors := mutation_is_not_valid(serializer): + return MutationResponseType( + ok=False, + errors=errors, + ) + user = serializer.save() + return MutationResponseType( + result=user, # type: ignore[reportReturnType] + ) diff --git a/apps/user/serializers.py b/apps/user/serializers.py new file mode 100644 index 00000000..120a8fa1 --- /dev/null +++ b/apps/user/serializers.py @@ -0,0 +1,189 @@ +import logging + +from django.contrib.auth import authenticate +from django.contrib.auth.password_validation import validate_password +from django.db import transaction +from django.utils.http import urlsafe_base64_decode +from django.utils.translation import gettext +from rest_framework import serializers + +from main.tokens import TokenManager +from utils.common import get_client_ip, get_device_type +from utils.hcaptcha import CaptchaSerializerMixin + +from .emails import ( + send_account_activation, + send_password_changed_notification, + send_password_reset, +) +from .models import User + +logger = logging.getLogger(__name__) + + +def validate_token(attrs, token_generator) -> User: + try: + uid = 
urlsafe_base64_decode(attrs['uuid']).decode('utf-8') + user = User.objects.get(pk=uid) + except ( + TypeError, + ValueError, + OverflowError, + User.DoesNotExist, + ): + user = None + + if user is not None and token_generator.check_token(user, attrs['token']): + return user + raise serializers.ValidationError(gettext('Invalid or expired token')) + + +class UserLoginSerializer(serializers.Serializer): + email = serializers.EmailField() + password = serializers.CharField() + + def validate_password(self, password): + validate_password(password=password) + return password + + def validate(self, attrs): + # NOTE: authenticate only works for active users + authenticate_user = authenticate( + email=attrs["email"].lower(), + password=attrs["password"], + ) + # User doesn't exists in the system. + if authenticate_user is None: + raise serializers.ValidationError(gettext("No active account found with the given credentials")) + return {"user": authenticate_user} + + +# TODO: User ModelSerializer +class UserRegisterSerializer(CaptchaSerializerMixin, serializers.Serializer): + first_name = serializers.CharField(required=True) + last_name = serializers.CharField(required=True) + email = serializers.EmailField(required=True) + password = serializers.CharField(required=True) + + def validate_email(self, email) -> str: + if User.objects.filter(email__iexact=email).exists(): + raise serializers.ValidationError(gettext('This email is already registered.')) + return email.lower() + + def validate_password(self, password): + validate_password(password=password) + return password + + def create(self, validated_data): + with transaction.atomic(): + new_user = User.objects.create_user( + first_name=validated_data['first_name'], + last_name=validated_data['last_name'], + email=validated_data['email'], + password=validated_data['password'], + is_active=False, + ) + transaction.on_commit(lambda: send_account_activation(new_user)) + return new_user + + +class 
UserActivationSerializer(serializers.Serializer): + uuid = serializers.CharField(required=True) + token = serializers.CharField(required=True) + + def validate(self, attrs): + return {**attrs, "user": validate_token(attrs, TokenManager.account_activation_token_generator)} + + def save(self, **_): + assert isinstance(self.validated_data, dict) + user = self.validated_data["user"] + user.is_active = True + user.save(update_fields=("is_active",)) + + +class UserPasswordResetTriggerSerializer(CaptchaSerializerMixin, serializers.Serializer): + email = serializers.EmailField(required=True) + + def validate(self, attrs): + email = attrs['email'].lower() + user = User.objects.filter(email=email).first() + if user is None: + raise serializers.ValidationError(gettext("User with that email doesn't exists!!")) + return { + **attrs, + 'user': user, + } + + def save(self, **_): + assert isinstance(self.validated_data, dict) + user = self.validated_data['user'] + client_ip = get_client_ip(self.context['request']) + device_type = get_device_type(self.context['request']) + send_password_reset(user=user, client_ip=client_ip, device_type=device_type) + + +class UserPasswordResetConfirmSerializer(CaptchaSerializerMixin, serializers.Serializer): + uuid = serializers.CharField(required=True) + token = serializers.CharField(required=True) + new_password = serializers.CharField(required=True) + + def validate_new_password(self, password): + validate_password(password) + return password + + def validate(self, attrs): + return {**attrs, "user": validate_token(attrs, TokenManager.password_reset_token_generator)} + + def save(self, **_): + assert isinstance(self.validated_data, dict) + user = self.validated_data['user'] + new_password = self.validated_data['new_password'] + user.set_password(new_password) + user.save(update_fields=('password',)) + client_ip = get_client_ip(self.context['request']) + device_type = get_device_type(self.context['request']) + transaction.on_commit( + lambda: 
send_password_changed_notification(user=user, client_ip=client_ip, device_type=device_type) + ) + + +class UserPasswordChangeSerializer(serializers.Serializer): + old_password = serializers.CharField(required=True, write_only=True) + new_password = serializers.CharField(required=True, write_only=True) + + def validate_old_password(self, password): + user = self.context['request'].user + if not user.check_password(password): + raise serializers.ValidationError(gettext('Invalid Old Password')) + return password + + def validate_new_password(self, password): + validate_password(password) + return password + + def validate(self, attrs): + if attrs["old_password"] == attrs["new_password"]: + raise serializers.ValidationError(gettext("New and old provided passwords are same")) + return attrs + + def save(self, **_): + assert isinstance(self.validated_data, dict) + user = self.context['request'].user + new_password = self.validated_data['new_password'] + user.set_password(new_password) + user.save(update_fields=('password',)) + client_ip = get_client_ip(self.context['request']) + device_type = get_device_type(self.context['request']) + transaction.on_commit( + lambda: send_password_changed_notification(user=user, client_ip=client_ip, device_type=device_type) + ) + + +class UserMeSerializer(serializers.ModelSerializer): + class Meta: + model = User + fields = ( + 'first_name', + 'last_name', + 'email_opt_outs', + ) diff --git a/apps/user/tests/__init__.py b/apps/user/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/apps/user/tests/test_mutations.py b/apps/user/tests/test_mutations.py new file mode 100644 index 00000000..e946377c --- /dev/null +++ b/apps/user/tests/test_mutations.py @@ -0,0 +1,322 @@ +from unittest import mock + +from apps.user.emails import ( + send_account_activation, + send_password_changed_notification, + send_password_reset, +) +from apps.user.factories import UserFactory +from apps.user.models import User +from main.tests 
import TestCase + + +class TestUserMutation(TestCase): + def setUp(self): + # This is used in 2 test + self.login_mutation = ''' + mutation Mutation($data: UserLoginInput!) { + public { + login(data: $data) { + ok + result { + id + firstName + lastName + email + } + } + } + } + ''' + super().setUp() + + def test_login(self): + # Try with random user + variables = dict(data=dict(email='xyz@xyz.com', password='pasword-xyz')) + content = self.query_check(self.login_mutation, variables=variables) + assert content['data']['public']['login']['ok'] is False + + # Try with real user + user = UserFactory.create(email=variables['data']['email']) + variables['data'] = dict(email=user.email, password=user.password_text) + content = self.query_check(self.login_mutation, variables=variables) + assert content['data']['public']['login']['ok'] is True + self.assertEqual(content['data']['public']['login']['result']['id'], self.gID(user.id), content) + self.assertEqual(content['data']['public']['login']['result']['email'], user.email, content) + + @mock.patch('utils.hcaptcha.requests') + @mock.patch('apps.user.serializers.send_account_activation', side_effect=send_account_activation) + def test_register(self, send_account_activation_mock, captcha_requests_mock): + mutation = ''' + mutation Mutation($data: UserRegisterInput!) 
{ + public { + register(data: $data) { + ok + errors + } + } + } + ''' + + # input without email + variables = dict( + data=dict( + email='invalid-email', + firstName='john', + lastName='cena', + password='dummy-password', + captcha='captcha', + ) + ) + + # With invalid captcha + captcha_requests_mock.post.return_value.json.return_value = {'success': False} + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation, variables=variables) + assert content['data']['public']['register']['ok'] is False + + # With valid captcha now + captcha_requests_mock.post.return_value.json.return_value = {'success': True} + # With invalid email + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation, variables=variables) + assert content['data']['public']['register']['ok'] is False + self.assertEqual(len(content['data']['public']['register']['errors']), 1, content) + + # With valid input + variables['data']['email'] = 'john@Cena.com' + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation, variables=variables) + assert content['data']['public']['register']['ok'] is True + # Make sure password reset message is send + user = User.objects.get(email=variables['data']['email'].lower()) + send_account_activation_mock.assert_called_once_with(user) + self.assertEqual(user.email, variables['data']['email'].lower()) + + def test_logout(self): + query = ''' + query Query { + public { + me { + id + email + } + } + } + ''' + logout_mutation = ''' + mutation Mutation { + private { + logout { + ok + } + } + } + ''' + user = UserFactory.create() + # # Without Login session + content = self.query_check(query) + self.assertEqual(content['data']['public']['me'], None, content) + + # # Login + self.force_login(user) + + # Query Me (Success) + content = self.query_check(query) + self.assertEqual(content['data']['public']['me']['id'], self.gID(user.id), content) + 
self.assertEqual(content['data']['public']['me']['email'], user.email, content) + # # Logout + content = self.query_check(logout_mutation) + assert content['data']['private']['logout']['ok'] is True + # Query Me (with empty again) + content = self.query_check(query) + self.assertEqual(content['data']['public']['me'], None, content) + + @mock.patch('utils.hcaptcha.requests') + @mock.patch('apps.user.serializers.send_password_reset', side_effect=send_password_reset) + @mock.patch('apps.user.serializers.send_password_changed_notification', side_effect=send_password_changed_notification) + def test_password_reset( + self, + send_password_changed_notification_mock, + send_password_reset_mock, + captcha_requests_mock, + ): + mutation_reset = ''' + mutation Mutation($data: UserPasswordResetTriggerInput!) { + public { + passwordResetTrigger(data: $data) { + ok + errors + } + } + } + ''' + + mutation_confirm = ''' + mutation Mutation($data: UserPasswordResetConfirmInput!) { + public { + passwordResetConfirm(data: $data) { + ok + errors + } + } + } + ''' + # input without email + variables = dict( + data=dict( + email='invalid-email', + captcha='captcha', + ) + ) + + # With invalid captcha + captcha_requests_mock.post.return_value.json.return_value = {'success': False} + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_reset, variables=variables) + assert content['data']['public']['passwordResetTrigger']['ok'] is False + + # With valid captcha now + captcha_requests_mock.post.return_value.json.return_value = {'success': True} + # With invalid email + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_reset, variables=variables) + assert content['data']['public']['passwordResetTrigger']['ok'] is False + self.assertEqual(len(content['data']['public']['passwordResetTrigger']['errors']), 1, content) + + # With unknown user email + variables['data']['email'] = 'john@cena.com' + with 
self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_reset, variables=variables) + assert content['data']['public']['passwordResetTrigger']['ok'] is False + self.assertEqual(len(content['data']['public']['passwordResetTrigger']['errors']), 1, content) + + # With known user email + UserFactory.create(email=variables['data']['email']) + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_reset, variables=variables) + assert content['data']['public']['passwordResetTrigger']['ok'] is True + # Make sure password reset message is send + user = User.objects.get(email=variables['data']['email']) + send_password_reset_mock.assert_called_once_with( + user=user, + client_ip='127.0.0.1', + device_type=None, + ) + + # Try password reset confirm + uid, token = send_password_reset( + *send_password_reset_mock.call_args.args, + **send_password_reset_mock.call_args.kwargs, + ) + new_password = 'new-password-123' + variables['data'] = dict( + uuid='haha', + token='huhu', + newPassword=new_password, + captcha='captcha', + ) + + def _check_user_password(is_changed): + user.refresh_from_db() + assert user.check_password(new_password) is is_changed + if is_changed: + send_password_changed_notification_mock.assert_called_once_with( + user=user, + client_ip='127.0.0.1', + device_type=None, + ) + else: + send_password_changed_notification_mock.assert_not_called() + + # -- With Invalid captcha + _check_user_password(False) + captcha_requests_mock.post.return_value.json.return_value = {'success': False} + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_confirm, variables=variables) + assert content['data']['public']['passwordResetConfirm']['ok'] is False + _check_user_password(False) + # -- With valid captcha + captcha_requests_mock.post.return_value.json.return_value = {'success': True} + # -- With invalid uid/token + with self.captureOnCommitCallbacks(execute=True): + content = 
self.query_check(mutation_confirm, variables=variables) + assert content['data']['public']['passwordResetConfirm']['ok'] is False + _check_user_password(False) + # -- With valid uid/token + variables['data'].update(dict(uuid=uid, token=token)) + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_confirm, variables=variables) + assert content['data']['public']['passwordResetConfirm']['ok'] is True + _check_user_password(True) + # -- Try again, it should fail + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation_confirm, variables=variables) + assert content['data']['public']['passwordResetConfirm']['ok'] is False + _check_user_password(True) + + @mock.patch( + 'apps.user.serializers.send_password_changed_notification', + side_effect=send_password_changed_notification, + ) + def test_password_change(self, send_password_changed_notification_mock): + mutation = ''' + mutation Mutation($data: UserPasswordChangeInput!) { + private { + changeUserPassword(data: $data) { + ok + errors + } + } + } + ''' + # input without email + variables = dict(data=dict(oldPassword='', newPassword='new-password-123')) + # Without authentication -- + content = self.query_check(mutation, variables=variables, assert_errors=True) + # With authentication + user = UserFactory.create() + self.force_login(user) + # With invalid old password -- + content = self.query_check(mutation, variables=variables) + assert content['data']['private']['changeUserPassword']['ok'] is False + self.assertEqual(len(content['data']['private']['changeUserPassword']['errors']), 1, content) + # With valid password -- + variables['data']['oldPassword'] = user.password_text + with self.captureOnCommitCallbacks(execute=True): + content = self.query_check(mutation, variables=variables) + assert content['data']['private']['changeUserPassword']['ok'] is True + # Make sure password reset message is send + 
send_password_changed_notification_mock.assert_called_once_with( + user=user, + client_ip='127.0.0.1', + device_type=None, + ) + + def test_update_me(self): + mutation = ''' + mutation Mutation($data: UserMeInput!) { + private { + updateMe(data: $data) { + ok + errors + } + } + } + ''' + user = UserFactory.create() + + variables = dict( + data=dict( + firstName="Admin", + lastName="AH", + emailOptOuts=[self.genum(User.OptEmailNotificationType.NEWS_AND_OFFERS)], + ) + ) + # Without authentication ----- + content = self.query_check(mutation, variables=variables, assert_errors=True) + # With authentication ----- + self.force_login(user) + content = self.query_check(mutation, variables=variables) + assert content['data']['private']['updateMe']['ok'] is True, content + assert content['data']['private']['updateMe']['errors'] is None, content diff --git a/apps/user/types.py b/apps/user/types.py new file mode 100644 index 00000000..e69de29b diff --git a/gh-docker-compose.yml b/gh-docker-compose.yml index 29ce942c..cccd4b64 100644 --- a/gh-docker-compose.yml +++ b/gh-docker-compose.yml @@ -45,12 +45,16 @@ services: # Redis config CELERY_BROKER_URL: redis://redis:6379/0 CACHE_REDIS_URL: redis://redis:6379/1 + TEST_CACHE_REDIS_URL: redis://redis:6379/11 # Email config EMAIL_HOST: fake EMAIL_PORT: 1025 EMAIL_HOST_USER: fake EMAIL_HOST_PASSWORD: fake DEFAULT_FROM_EMAIL: alert-hub-dev + # Misc + HCAPTCHA_SECRET: "0x0000000000000000000000000000000000000000" + HCAPTCHA_SITEKEY: "10000000-ffff-ffff-ffff-000000000001" volumes: - ./:/code - ./ci-share/:/ci-share/ diff --git a/helm/templates/argo-hooks/hook-job.yaml b/helm/templates/argo-hooks/hook-job.yaml new file mode 100644 index 00000000..904fe33f --- /dev/null +++ b/helm/templates/argo-hooks/hook-job.yaml @@ -0,0 +1,50 @@ +{{- range $hookName, $hook := .Values.argoHooks }} + +{{- if $hook.enabled }} + +--- +apiVersion: batch/v1 +kind: Job +metadata: + {{- if $hook.preservehistory }} + generateName: {{ template 
"ifrcgo-alert-hub.fullname" $ }}-{{ $hookName }}- + {{- else }} + name: {{ template "ifrcgo-alert-hub.fullname" $ }}-{{ $hookName }} + {{- end }} + annotations: + argocd.argoproj.io/hook: {{ $hook.hook }} +spec: + template: + spec: + restartPolicy: "Never" + containers: + - name: {{ $hookName }} + image: "{{ $.Values.image.name }}:{{ $.Values.image.tag }}" + command: + {{- range $hook.command }} + - "{{ . }}" + {{- end }} + resources: + requests: + cpu: {{ default $.Values.api.resources.requests.cpu $hook.requestsCpu }} + memory: {{ default $.Values.api.resources.requests.memory $hook.requestsMemory }} + limits: + cpu: {{ default $.Values.api.resources.limits.cpu $hook.limitsCpu }} + memory: {{ default $.Values.api.resources.limits.memory $hook.limitsMemory }} + env: + - name: DJANGO_APP_TYPE + value: hook + envFrom: + - secretRef: + name: {{ template "ifrcgo-alert-hub.secretname" $ }} + - configMapRef: + name: {{ template "ifrcgo-alert-hub.fullname" $ }}-api-configmap + + {{- with $.Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . 
| nindent 8 }} + {{- end }} + +{{- end }} + +{{- end }} diff --git a/helm/templates/config/configmap.yaml b/helm/templates/config/configmap.yaml index ef039dc6..557cac4a 100644 --- a/helm/templates/config/configmap.yaml +++ b/helm/templates/config/configmap.yaml @@ -11,6 +11,7 @@ data: DJANGO_APP_ENVIRONMENT: {{ .Values.environment | quote | upper }} DJANGO_DEBUG: {{ required "env.DJANGO_DEBUG" .Values.env.DJANGO_DEBUG | quote }} DJANGO_TIME_ZONE: {{ required "env.DJANGO_TIME_ZONE" .Values.env.DJANGO_TIME_ZONE | quote }} + # App Domain APP_DOMAIN: {{ required "env.APP_DOMAIN" .Values.env.APP_DOMAIN | quote }} APP_HTTP_PROTOCOL: {{ required "env.APP_HTTP_PROTOCOL" .Values.env.APP_HTTP_PROTOCOL | quote }} @@ -18,12 +19,21 @@ data: DJANGO_ALLOWED_HOSTS: {{ required "env.DJANGO_ALLOWED_HOSTS" .Values.env.DJANGO_ALLOWED_HOSTS | quote }} SESSION_COOKIE_DOMAIN: {{ required "env.SESSION_COOKIE_DOMAIN" .Values.env.SESSION_COOKIE_DOMAIN | quote }} CSRF_COOKIE_DOMAIN: {{ required "env.CSRF_COOKIE_DOMAIN" .Values.env.CSRF_COOKIE_DOMAIN | quote }} + # CORS CORS_ALLOWED_ORIGINS: {{ required "env.CORS_ALLOWED_ORIGINS" .Values.env.CORS_ALLOWED_ORIGINS | quote }} CORS_ALLOWED_ORIGIN_REGEXES: {{ default "" .Values.env.CORS_ALLOWED_ORIGIN_REGEXES | quote }} + # Misc UPTIME_WORKER_HEARTBEAT: {{ .Values.env.UPTIME_WORKER_HEARTBEAT | quote }} DEFAULT_FROM_EMAIL: {{ required "env.DEFAULT_FROM_EMAIL" .Values.env.DEFAULT_FROM_EMAIL | quote }} + EMAIL_USE_TLS: {{ required "env.EMAIL_USE_TLS" .Values.env.EMAIL_USE_TLS | quote }} + # Sentry SENTRY_TRACES_SAMPLE_RATE: {{ required "env.SENTRY_TRACES_SAMPLE_RATE" .Values.env.SENTRY_TRACES_SAMPLE_RATE | quote }} SENTRY_PROFILE_SAMPLE_RATE: {{ required "env.SENTRY_PROFILE_SAMPLE_RATE" .Values.env.SENTRY_PROFILE_SAMPLE_RATE | quote }} + + # Additional configs + {{- range $name, $value := .Values.envAdditional }} + {{ $name }}: {{ $value | quote }} + {{- end }} diff --git a/helm/templates/config/secret.yaml b/helm/templates/config/secret.yaml index 
64695c81..e0113487 100644 --- a/helm/templates/config/secret.yaml +++ b/helm/templates/config/secret.yaml @@ -11,19 +11,27 @@ metadata: type: Opaque stringData: DJANGO_SECRET_KEY: {{ required "secrets.DJANGO_SECRET_KEY" .Values.secrets.DJANGO_SECRET_KEY | quote }} + # Database DB_NAME: {{ required "secrets.DB_NAME" .Values.secrets.DB_NAME | quote }} DB_USER: {{ required "secrets.DB_USER" .Values.secrets.DB_USER | quote }} DB_PASSWORD: {{ required "secrets.DB_PASSWORD" .Values.secrets.DB_PASSWORD | quote }} DB_HOST: {{ required "secrets.DB_HOST" .Values.secrets.DB_HOST | quote }} DB_PORT: {{ required "secrets.DB_PORT" .Values.secrets.DB_PORT | quote }} + # Sentry SENTRY_DSN: {{ required "secrets.SENTRY_DSN" .Values.secrets.SENTRY_DSN | quote }} + # Email EMAIL_HOST: {{ required "secrets.EMAIL_HOST" .Values.secrets.EMAIL_HOST | quote }} EMAIL_PORT: {{ required "secrets.EMAIL_PORT" .Values.secrets.EMAIL_PORT | quote }} EMAIL_HOST_USER: {{ required "secrets.EMAIL_HOST_USER" .Values.secrets.EMAIL_HOST_USER | quote }} EMAIL_HOST_PASSWORD: {{ required "secrets.EMAIL_HOST_PASSWORD" .Values.secrets.EMAIL_HOST_PASSWORD | quote }} + + # Hcaptcha + HCAPTCHA_SECRET: {{ required "secrets.HCAPTCHA_SECRET" .Values.secrets.HCAPTCHA_SECRET | quote }} + HCAPTCHA_SITEKEY: {{ required "secrets.HCAPTCHA_SITEKEY" .Values.secrets.HCAPTCHA_SITEKEY | quote }} + # Redis {{- if .Values.redis.enabled }} CELERY_BROKER_URL: "redis://{{ printf "%s-master" (include "common.names.fullname" .Subcharts.redis) }}:6379/0" @@ -33,4 +41,9 @@ stringData: CACHE_REDIS_URL: {{ required "env.CACHE_REDIS_URL" .Values.env.CACHE_REDIS_URL | quote }} {{- end }} + # Additional secrets + {{- range $name, $value := .Values.secretsAdditional }} + {{ $name }}: {{ $value | quote }} + {{- end }} + {{- end }} diff --git a/helm/values-test.yaml b/helm/values-test.yaml index 3a98bfd5..6ed57d39 100644 --- a/helm/values-test.yaml +++ b/helm/values-test.yaml @@ -25,6 +25,9 @@ env: CORS_ALLOWED_ORIGINS: dummy.com # Misc 
DEFAULT_FROM_EMAIL: dummy@dummy.com +envAdditional: + ENABLE_MAGIC: "true" + MAGIC_TYPE: fun secretsName: "" secrets: @@ -42,3 +45,9 @@ secrets: EMAIL_HOST_PASSWORD: dummy # Sentry SENTRY_DSN: dummy.com/dummy-id + # Hcaptcha + HCAPTCHA_SITEKEY: "10000000-ffff-ffff-ffff-000000000001" + HCAPTCHA_SECRET: "0x0000000000000000000000000000000000000000" +secretsAdditional: + ENABLE_MAGIC_SECRET: "true" + MAGIC_KEY: to-much-fun diff --git a/helm/values.yaml b/helm/values.yaml index dbda29ca..d94a2843 100644 --- a/helm/values.yaml +++ b/helm/values.yaml @@ -67,6 +67,17 @@ worker: cpu: "1" memory: 1Gi +argoHooks: + # NOTE: Make sure keys are lowercase + db-migrate: + enabled: true + hook: PostSync + preservehistory: true + command: ["./manage.py", "migrate"] + collect-static: + enabled: true + hook: PostSync + command: ["./manage.py", "collectstatic", "--noinput"] env: # DJANGO_APP_ENVIRONMENT: using .environment @@ -85,10 +96,15 @@ env: CORS_ALLOWED_ORIGIN_REGEXES: # Misc UPTIME_WORKER_HEARTBEAT: + EMAIL_USE_TLS: true DEFAULT_FROM_EMAIL: # Sentry SENTRY_TRACES_SAMPLE_RATE: 0.2 SENTRY_PROFILE_SAMPLE_RATE: 0.2 +# NOTE: Used to pass additional configs to api/worker containers +# NOTE: Not used by azure vault +envAdditional: + # USE_S3_BUCKET: "true" secretsName: "" secrets: @@ -111,6 +127,13 @@ secrets: CELERY_BROKER_URL: # - Cache CACHE_REDIS_URL: + # Hcaptcha + HCAPTCHA_SITEKEY: + HCAPTCHA_SECRET: +# NOTE: Used to pass additional secrets to api/worker containers +# NOTE: Not used by azure vault +secretsAdditional: + # AWS_S3_ACCESS_KEY_ID: "VERY-SENSITIVE-KEY-ID" # Azure configurations azure: diff --git a/main/cache.py b/main/cache.py index bf503beb..4b7f33a4 100644 --- a/main/cache.py +++ b/main/cache.py @@ -10,3 +10,5 @@ class RedisLockKey: POLL_FEED = _BASE + 'poll_feed_{}' TAG_EXPIRE_ALERT = _BASE + 'tag_expire_alert' REMOVE_EXPIRE_PROCESSED_ALERT = _BASE + 'remove_expire_processed_alert' + SUBSCRIPTION_TAG_ALERTS = _BASE + 'subscription_tag_alerts' + 
SEND_DAILY_USER_ALERT_SUBSCRIPTION_EMAIL = _BASE + 'send_daily_user_alert_subscription_email' diff --git a/main/celery.py b/main/celery.py index 7c9c027d..6ba96ff7 100644 --- a/main/celery.py +++ b/main/celery.py @@ -37,6 +37,16 @@ def on_configure(self): # type: ignore[reportIncompatibleVariableOverride] 'schedule': timedelta(days=1), 'options': {'queue': 'default'}, }, + f'{INTERNAL_CELERY_TASK_NAME_PREFIX}process_pending_subscription_alerts': { + 'task': 'apps.subscription.tasks.process_pending_subscription_alerts', + 'schedule': timedelta(minutes=30), # TODO: Lower this? + 'options': {'queue': 'default'}, + }, + f'{INTERNAL_CELERY_TASK_NAME_PREFIX}send_daily_user_alert_subscriptions_email': { + 'task': 'apps.subscription.tasks.send_daily_user_alert_subscriptions_email', + 'schedule': timedelta(days=1), + 'options': {'queue': 'default'}, + }, f'{INTERNAL_CELERY_TASK_NAME_PREFIX}uptime_push': { 'task': 'main.celery.uptime_push', 'schedule': timedelta(minutes=30), diff --git a/main/graphql/enums.py b/main/graphql/enums.py index f762773b..51035418 100644 --- a/main/graphql/enums.py +++ b/main/graphql/enums.py @@ -3,11 +3,13 @@ import strawberry from apps.cap_feed.enums import enum_map as cap_feed_enum_map +from apps.subscription.enums import enum_map as subscription_enum_map from apps.user.enums import enum_map as user_enum_map ENUM_TO_STRAWBERRY_ENUM_MAP: dict[str, type] = { **user_enum_map, **cap_feed_enum_map, + **subscription_enum_map, } @@ -28,20 +30,23 @@ def generate_app_enum_collection_data(name): return type( name, (), - {field_name: [AppEnumData(e) for e in enum] for field_name, enum in ENUM_TO_STRAWBERRY_ENUM_MAP.items()}, + { + field_name: [AppEnumData(e) for e in enum] # type: ignore[reportGeneralTypeIssues] + for field_name, enum in ENUM_TO_STRAWBERRY_ENUM_MAP.items() + }, ) -AppEnumCollectionData = generate_app_enum_collection_data('AppEnumCollectionData') +AppEnumCollectionData = generate_app_enum_collection_data("AppEnumCollectionData") def 
generate_type_for_enum(name, Enum): return strawberry.type( dataclasses.make_dataclass( - f'AppEnumCollection{name}', + f"AppEnumCollection{name}", [ - ('key', Enum), - ('label', str), + ("key", Enum), + ("label", str), ], ) ) @@ -73,7 +78,7 @@ def generate_type_for_enums(): ] return strawberry.type( dataclasses.make_dataclass( - 'AppEnumCollection', + "AppEnumCollection", enum_fields, ) ) diff --git a/main/graphql/schema.py b/main/graphql/schema.py index 51a51c27..f72e77b3 100644 --- a/main/graphql/schema.py +++ b/main/graphql/schema.py @@ -2,8 +2,11 @@ from strawberry.django.views import AsyncGraphQLView # Imported to make sure strawberry custom modules are loadded first -import utils.strawberry.transformers # noqa +import utils.strawberry.transformers # pyright: ignore[reportUnusedImport] # type: ignore # noqa F401 from apps.cap_feed import queries as cap_feed_queries +from apps.subscription import mutations as subscription_mutations +from apps.subscription import queries as subscription_queries +from apps.user import mutations as user_mutations from apps.user import queries as user_queries from .context import GraphQLContext @@ -33,17 +36,23 @@ class PublicQuery( class PrivateQuery( user_queries.PrivateQuery, cap_feed_queries.PrivateQuery, + subscription_queries.PrivateQuery, ): id: strawberry.ID = strawberry.ID('private') @strawberry.type -class PublicMutation: +class PublicMutation( + user_mutations.PublicMutation, +): id: strawberry.ID = strawberry.ID('public') @strawberry.type -class PrivateMutation: +class PrivateMutation( + user_mutations.PrivateMutation, + subscription_mutations.PrivateMutation, +): id: strawberry.ID = strawberry.ID('private') diff --git a/main/permalinks.py b/main/permalinks.py new file mode 100644 index 00000000..36f6251e --- /dev/null +++ b/main/permalinks.py @@ -0,0 +1,19 @@ +from django.conf import settings + + +class Permalink: + BASE_URL = f'{settings.APP_FRONTEND_HOST}/permalink' + + FORGOT_PASSWORD = 
f'{BASE_URL}/forgot-password' + + @classmethod + def user_password_reset(cls, uid: str, token: str): + return f'{cls.BASE_URL}/user-password-reset/{uid}/{token}' + + @classmethod + def user_activation(cls, uid: str, token: str): + return f'{cls.BASE_URL}/user-activation/{uid}/{token}' + + @classmethod + def unsubscribe_user_alert_subscription(cls, uid: str, token: str): + return f'{cls.BASE_URL}/unsubscribe-user-alert-subscription/{uid}/{token}' diff --git a/main/production.py b/main/production.py deleted file mode 100644 index f5ee3e10..00000000 --- a/main/production.py +++ /dev/null @@ -1,55 +0,0 @@ -import os - -from .settings import * # noqa -from .settings import BASE_DIR - -# Configure the domain name using the environment variable -# that Azure automatically creates for us. -ALLOWED_HOSTS = [os.environ['WEBSITE_HOSTNAME']] if 'WEBSITE_HOSTNAME' in os.environ else [] -CSRF_TRUSTED_ORIGINS = ['https://' + os.environ['WEBSITE_HOSTNAME']] if 'WEBSITE_HOSTNAME' in os.environ else [] -DEBUG = False - -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', -] - -DEFAULT_FILE_STORAGE = 'main.azure_storage.AzureMediaStorage' -STATICFILES_STORAGE = 'main.azure_storage.AzureStaticStorage' - -AZURE_ACCOUNT_NAME = os.getenv('AZURE_ACCOUNT_NAME') -AZURE_ACCOUNT_KEY = os.getenv('AZURE_ACCOUNT_KEY') -AZURE_CUSTOM_DOMAIN = f'{AZURE_ACCOUNT_NAME}.blob.core.windows.net' - -STATIC_URL = f'https://{AZURE_CUSTOM_DOMAIN}/static/' -STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') - -MEDIA_URL = f'https://{AZURE_CUSTOM_DOMAIN}/media/' -MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles') - -# Configure Postgres database based on connection string 
of the libpq Keyword/Value form -# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING -conn_str = os.environ['AZURE_POSTGRESQL_CONNECTIONSTRING'] -conn_str_params = {pair.split('=')[0]: pair.split('=')[1] for pair in conn_str.split(' ')} -DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql', - 'NAME': conn_str_params['dbname'], - 'HOST': conn_str_params['host'], - 'USER': conn_str_params['user'], - 'PASSWORD': conn_str_params['password'], - } -} - -CELERY_BROKER_URL = os.environ.get("CELERY_BROKER_URL") -CELERY_ACCEPT_CONTENT = ['application/json'] -CELERY_TASK_SERIALIZER = 'json' -CELERY_RESULT_BACKEND = 'django-db' -CELERY_CACHE_BACKEND = 'django-cache' -CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler' -CELERY_BROKER_CONNECTION_RETRY_ON_STARTUP = True diff --git a/main/settings.py b/main/settings.py index 37f3b9af..0f718d6b 100644 --- a/main/settings.py +++ b/main/settings.py @@ -14,6 +14,7 @@ from pathlib import Path import environ +from azure.identity import DefaultAzureCredential from django.utils.translation import gettext_lazy as _ from main import sentry @@ -25,7 +26,7 @@ DJANGO_DEBUG=(bool, False), DJANGO_SECRET_KEY=str, DJANGO_TIME_ZONE=(str, 'UTC'), - DJANGO_APP_TYPE=str, # web/worker + DJANGO_APP_TYPE=str, # web/worker/hook DJANGO_APP_ENVIRONMENT=str, # dev/prod # App Domain APP_RELEASE=(str, 'develop'), @@ -51,12 +52,32 @@ CELERY_BROKER_URL=str, # redis://redis:6379/0 # Cache CACHE_REDIS_URL=str, # redis://redis:6379/1 + TEST_CACHE_REDIS_URL=(str, None), # redis://redis:6379/11 # Email EMAIL_HOST=str, + EMAIL_USE_TLS=(bool, True), EMAIL_PORT=(int, 587), EMAIL_HOST_USER=str, EMAIL_HOST_PASSWORD=str, DEFAULT_FROM_EMAIL=str, + # Storage + # -- S3 + USE_S3_BUCKET=(bool, False), + AWS_S3_AWS_ENDPOINT_URL=str, + AWS_S3_ACCESS_KEY_ID=str, + AWS_S3_SECRET_ACCESS_KEY=str, + AWS_S3_REGION=str, + S3_STATIC_BUCKET_NAME=str, + S3_MEDIA_BUCKET_NAME=str, + # -- Azure blob storage + 
USE_AZURE_STORAGE=(bool, False), + AZURE_STORAGE_MEDIA_CONTAINER=str, # media + AZURE_STORAGE_STATIC_CONTAINER=str, # static + AZURE_STORAGE_CONNECTION_STRING=(str, None), + AZURE_STORAGE_ACCOUNT_NAME=str, + AZURE_STORAGE_ACCOUNT_KEY=(str, None), + AZURE_STORAGE_TOKEN_CREDENTIAL=(str, None), + AZURE_STORAGE_MANAGED_IDENTITY=(bool, False), # Sentry SENTRY_DSN=(str, None), SENTRY_TRACES_SAMPLE_RATE=(float, 0.2), @@ -66,6 +87,8 @@ CORS_ALLOWED_ORIGIN_REGEXES=(list, []), # Misc UPTIME_WORKER_HEARTBEAT=(str, None), + HCAPTCHA_SITEKEY=str, + HCAPTCHA_SECRET=str, ) # SECURITY WARNING: keep the secret key used in production secret! @@ -94,15 +117,16 @@ 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', + "whitenoise.runserver_nostatic", 'django.contrib.staticfiles', 'django.contrib.gis', # External 'strawberry_django', 'admin_auto_filters', 'django_celery_beat', - 'django_extensions', 'corsheaders', 'storages', + 'django_premailer', # External - Health-check 'health_check', # required 'health_check.db', # stock Django health checkers @@ -117,26 +141,27 @@ 'apps.user', 'apps.cap_feed', 'apps.subscription', - 'apps.subscription_manager', ] MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'corsheaders.middleware.CorsMiddleware', - 'django.middleware.locale.LocaleMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'main.middlewares.SentryTransactionMiddleware', + "django.middleware.security.SecurityMiddleware", + "whitenoise.middleware.WhiteNoiseMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "corsheaders.middleware.CorsMiddleware", + "django.middleware.locale.LocaleMiddleware", + 
"django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "main.middlewares.SentryTransactionMiddleware", ] AUTHENTICATION_BACKENDS = [ 'django.contrib.auth.backends.ModelBackend', ] +LOGIN_URL = "admin:login" ROOT_URLCONF = 'main.urls' @@ -208,6 +233,9 @@ { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, + { + 'NAME': 'main.validators.MaximumLengthValidator', + }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, @@ -231,11 +259,87 @@ # TODO: Use custom config for static files STATICFILES_DIRS = (str(BASE_DIR.joinpath('static')),) -STATIC_URL = env('DJANGO_STATIC_URL') -MEDIA_URL = env('DJANGO_MEDIA_URL') +STATIC_URL = env("DJANGO_STATIC_URL") +MEDIA_URL = env("DJANGO_MEDIA_URL") + +# Django storage +STORAGES = { + "default": { + # XXX: This is not recommended for Production, but we don't need persistant storage until now + "BACKEND": "django.core.files.storage.FileSystemStorage", + }, + "staticfiles": { + "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage", + }, +} + +if env("USE_AZURE_STORAGE"): + + AZURE_STORAGE_CONFIG_OPTIONS = { + "connection_string": env("AZURE_STORAGE_CONNECTION_STRING"), + "overwrite_files": False, + } + + if not env("AZURE_STORAGE_CONNECTION_STRING"): + AZURE_STORAGE_CONFIG_OPTIONS.update( + { + "account_name": env("AZURE_STORAGE_ACCOUNT_NAME"), + "account_key": env("AZURE_STORAGE_ACCOUNT_KEY"), + "token_credential": env("AZURE_STORAGE_TOKEN_CREDENTIAL"), + } + ) + + if env("AZURE_STORAGE_MANAGED_IDENTITY"): + AZURE_STORAGE_CONFIG_OPTIONS["token_credential"] = DefaultAzureCredential() + + STORAGES = { + "default": { + "BACKEND": "storages.backends.azure_storage.AzureStorage", + "OPTIONS": { + **AZURE_STORAGE_CONFIG_OPTIONS, + "azure_container": 
env("AZURE_STORAGE_MEDIA_CONTAINER"), + }, + }, + "staticfiles": { + "BACKEND": "storages.backends.azure_storage.AzureStorage", + "OPTIONS": { + **AZURE_STORAGE_CONFIG_OPTIONS, + "azure_container": env("AZURE_STORAGE_STATIC_CONTAINER"), + "overwrite_files": True, + }, + }, + } + +elif env("USE_S3_BUCKET"): + AWS_S3_ENDPOINT_URL = env("AWS_S3_AWS_ENDPOINT_URL") + + AWS_S3_ACCESS_KEY_ID = env("AWS_S3_ACCESS_KEY_ID") + AWS_S3_SECRET_ACCESS_KEY = env("AWS_S3_SECRET_ACCESS_KEY") + AWS_S3_REGION_NAME = env("AWS_S3_REGION") + + STORAGES = { + "default": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "bucket_name": env("S3_MEDIA_BUCKET_NAME"), + "location": "media/", + "file_overwrite": False, + }, + }, + "staticfiles": { + "BACKEND": "storages.backends.s3.S3Storage", + "OPTIONS": { + "bucket_name": env("S3_STATIC_BUCKET_NAME"), + "location": "static/", + "file_overwrite": True, + }, + }, + } + +else: + STATIC_ROOT = env("DJANGO_STATIC_ROOT") + MEDIA_ROOT = env("DJANGO_MEDIA_ROOT") -STATIC_ROOT = env('DJANGO_STATIC_ROOT') -MEDIA_ROOT = env('DJANGO_MEDIA_ROOT') # Default primary key field type # https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field @@ -244,6 +348,8 @@ # CELERY CELERY_RESULT_BACKEND = CELERY_BROKER_URL = env('CELERY_BROKER_URL') CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler' +CELERY_TASK_SOFT_TIME_LIMIT = 30 * 60 # 30 mins max (To tackle worst cases) +CELERY_TASK_TIME_LIMIT = 35 * 60 # CORS @@ -284,10 +390,13 @@ STRAWBERRY_MAX_PAGINATION_LIMIT = 100 # Cache +CACHE_REDIS_URL = env('CACHE_REDIS_URL') +TEST_CACHE_REDIS_URL = env('TEST_CACHE_REDIS_URL') + CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': env('CACHE_REDIS_URL'), + 'LOCATION': CACHE_REDIS_URL, 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', }, @@ -295,7 +404,8 @@ } # HEALTH-CHECK -REDIS_URL = env('CACHE_REDIS_URL') +REDIS_URL = CACHE_REDIS_URL +TEST_CACHE_REDIS_URL = 
env('TEST_CACHE_REDIS_URL') HEALTHCHECK_CACHE_KEY = "alert_hub_healthcheck_key" HEALTH_CHECK = { 'DISK_USAGE_MAX': 80, # percent @@ -309,7 +419,7 @@ # Email - SMTP Settings EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' -EMAIL_USE_TLS = True +EMAIL_USE_TLS = env('EMAIL_USE_TLS') EMAIL_HOST = env('EMAIL_HOST') EMAIL_PORT = env('EMAIL_PORT') EMAIL_HOST_USER = env('EMAIL_HOST_USER') @@ -411,3 +521,11 @@ def log_render_extra_context(record): # modeltranslation configs # -- NOTE: "en" is used as default languages in the codebase, changing this will break logics MODELTRANSLATION_DEFAULT_LANGUAGE = "en" # Also the fallback + +# CAPTCHA +HCAPTCHA_SITEKEY = env('HCAPTCHA_SITEKEY') +HCAPTCHA_SECRET = env('HCAPTCHA_SECRET') + +PREMAILER_OPTIONS = dict( + disable_validation=not DEBUG, # Disable validation in production +) diff --git a/main/testing.py b/main/testing.py deleted file mode 100644 index 48de1378..00000000 --- a/main/testing.py +++ /dev/null @@ -1,172 +0,0 @@ -""" -Django settings for main project. - -Generated by 'django-admin startproject' using Django 4.0.2. - -For more information on this file, see -https://docs.djangoproject.com/en/4.0/topics/settings/ - -For the full list of settings and their values, see -https://docs.djangoproject.com/en/4.0/ref/settings/ -""" - -import os -from pathlib import Path - -# Build paths inside the project like this: BASE_DIR / 'subdir'. -BASE_DIR = Path(__file__).resolve().parent.parent - -# Quick-start development settings - unsuitable for production -# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/ - -# SECURITY WARNING: keep the secret key used in production secret! -SECRET_KEY = os.getenv('SECRET_KEY') - -# SECURITY WARNING: don't run with debug turned on in production! 
-DEBUG = True - -ALLOWED_HOSTS = ['*'] - -# Application definition - -INSTALLED_APPS = [ - 'daphne', - 'channels', - 'django_celery_results', - 'django_celery_beat', - 'cap_feed.apps.CapFeedConfig', - 'django.contrib.admin', - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.messages', - 'django.contrib.staticfiles', - 'django_extensions', - 'corsheaders', -] - -MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', - 'corsheaders.middleware.CorsMiddleware', -] - -ROOT_URLCONF = 'main.urls' - -TEMPLATES = [ - { - 'BACKEND': 'django.template.backends.django.DjangoTemplates', - 'DIRS': [BASE_DIR / "templates"], - 'APP_DIRS': True, - 'OPTIONS': { - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.contrib.messages.context_processors.messages', - ], - }, - }, -] - -WSGI_APPLICATION = 'main.wsgi.application' - - -# Database -# https://docs.djangoproject.com/en/4.0/ref/settings/#databases - -# To use sqllite as the database engine, -# uncomment the following block and comment out the Postgres section below - -# DATABASES = { -# 'default': { -# 'ENGINE': 'django.db.backends.sqlite3', -# 'NAME': BASE_DIR / 'db.sqlite3', -# } -# } - - -# Configure Postgres database for local development -# Set these environment variables in the .env file for this project. 
-DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': os.environ.get('DBNAME'), - 'HOST': os.environ.get('DBHOST'), - 'USER': os.environ.get('DBUSER'), - 'PASSWORD': os.environ.get('DBPASS'), - } -} - - -# Password validation -# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators - -AUTH_PASSWORD_VALIDATORS = [ - { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', - }, -] - - -# Internationalization -# https://docs.djangoproject.com/en/4.0/topics/i18n/ - -LANGUAGE_CODE = 'en-us' - -TIME_ZONE = 'UTC' - -USE_I18N = True - -USE_TZ = True - - -# Static files (CSS, JavaScript, Images) -# https://docs.djangoproject.com/en/4.0/howto/static-files/ - -STATICFILES_DIRS = (str(BASE_DIR.joinpath('static')),) -STATIC_URL = 'static/' - -# Default primary key field type -# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field - -DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField' -CELERY_broker_url = os.environ.get("CELERY_broker_url") -accept_content = ['application/json'] -task_serializer = 'json' -result_backend = 'django-db' -cache_backend = 'django-cache' -CELERY_BEAT_SCHEDULER = 'django_celery_beat.schedulers:DatabaseScheduler' - -ASGI_APPLICATION = "main.asgi.application" - -CORS_ALLOWED_ORIGINS = ["http://localhost:3000", "http://localhost:8000", "http://127.0.0.1:9000"] - -CHANNEL_LAYERS = { - 'default': { - 'BACKEND': 'channels_redis.core.RedisChannelLayer', - 'CONFIG': { - "hosts": [os.environ.get("REDIS_URL")], - }, - }, -} - -CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', - } -} diff --git a/main/tests/__init__.py b/main/tests/__init__.py new file mode 100644 
index 00000000..f55b1cbb --- /dev/null +++ b/main/tests/__init__.py @@ -0,0 +1,3 @@ +from .base import TestCase + +__all__ = ["TestCase"] diff --git a/main/tests/base.py b/main/tests/base.py new file mode 100644 index 00000000..f736e440 --- /dev/null +++ b/main/tests/base.py @@ -0,0 +1,184 @@ +from enum import Enum +from typing import Dict + +from django.conf import settings +from django.db import models +from django.test import TestCase as BaseTestCase +from django.test import override_settings + +TEST_CACHES = { + 'default': { + 'BACKEND': 'django_redis.cache.RedisCache', + 'LOCATION': settings.TEST_CACHE_REDIS_URL, + 'OPTIONS': { + 'CLIENT_CLASS': 'django_redis.client.DefaultClient', + }, + 'KEY_PREFIX': 'test_dj_cache-', + }, + 'local-memory': { + 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + }, +} + +FILE_SYSTEM_TEST_STORAGES_CONFIGS = dict( + DJANGO_USE_S3=False, + STORAGES={ + 'default': { + 'BACKEND': 'django.core.files.storage.FileSystemStorage', + }, + 'staticfiles': { + 'BACKEND': 'django.contrib.staticfiles.storage.StaticFilesStorage', + }, + }, +) + +S3_TEST_STORAGES_CONFIGS = dict( + DJANGO_USE_S3=True, + AWS_S3_BUCKET_STATIC='ah-static', + AWS_S3_BUCKET_MEDIA='ah-media', + AWS_S3_ACCESS_KEY_ID='FAKE-ACCESS-KEY', + AWS_S3_SECRET_ACCESS_KEY='FAKE-SECRET-KEY', + AWS_S3_ENDPOINT_URL='https://fake.s3.endpoint', + STORAGES={ + # Need to manually override here as this is auto selected on startup + 'default': { + 'BACKEND': 'main.storages.S3MediaStorage', + }, + 'staticfiles': { + 'BACKEND': 'main.storages.S3StaticStorage', + }, + }, +) + + +@override_settings( + DEBUG=True, + EMAIL_BACKEND='django.core.mail.backends.console.EmailBackend', + MEDIA_ROOT='rest-media-temp', + STORAGES=FILE_SYSTEM_TEST_STORAGES_CONFIGS['STORAGES'], + CACHES=TEST_CACHES, + CELERY_TASK_ALWAYS_EAGER=True, +) +class TestCase(BaseTestCase): + + def setUp(self): + from django.core.cache import cache + + # Clear all test cache + cache.clear() + super().setUp() + + def 
force_login(self, user): + self.client.force_login(user) + + def logout(self): + self.client.logout() + + def query_check( + self, + query: str, + assert_errors: bool = False, + variables: dict | None = None, + files: dict | None = None, + **kwargs, + ) -> Dict: + import json + + if files: + # Request type: form data + response = self.client.post( + "/graphql/", + data={ + 'operations': json.dumps( + { + 'query': query, + 'variables': variables, + } + ), + **files, + 'map': json.dumps(kwargs.pop('map')), + }, + **kwargs, + ) + else: + # Request type: json + response = self.client.post( + "/graphql/", + data={ + 'query': query, + 'variables': variables, + }, + content_type="application/json", + **kwargs, + ) + if assert_errors: + self.assertResponseHasErrors(response) + else: + self.assertResponseNoErrors(response) + return response.json() + + def assertResponseNoErrors(self, resp, msg=None): + """ + Assert that the call went through correctly. 200 means the syntax is ok, + if there are no `errors`, + the call was fine. + :resp HttpResponse: Response + """ + content = resp.json() + self.assertEqual(resp.status_code, 200, msg or content) + self.assertNotIn("errors", list(content.keys()), msg or content) + + def assertResponseHasErrors(self, resp, msg=None): + """ + Assert that the call was failing. Take care: Even with errors, + GraphQL returns status 200! + :resp HttpResponse: Response + """ + content = resp.json() + self.assertIn("errors", list(content.keys()), msg or content) + + def genum(self, _enum: models.TextChoices | models.IntegerChoices | Enum): + """ + Return appropriate enum value. 
+ """ + if _enum: + return _enum.name + + def gdatetime(self, _datetime): + if _datetime: + return _datetime.isoformat() + + def gID(self, pk): + if pk: + return str(pk) + + def get_media_url(self, path): + return f'http://testserver/media/{path}' + + def _dict_with_keys( + self, + data: dict, + include_keys=None, + ignore_keys=None, + ): + if all([ignore_keys, include_keys]): + raise Exception('Please use one of the options among include_keys, ignore_keys') + return { + key: value + for key, value in data.items() + if ((ignore_keys is not None and key not in ignore_keys) or (include_keys is not None and key in include_keys)) + } + + # def assertDictEqual(self, left, right, messages, ignore_keys=None, include_keys=None): + # self.assertEqual( + # self._dict_with_keys(left, ignore_keys=ignore_keys, include_keys=include_keys), + # self._dict_with_keys(right, ignore_keys=ignore_keys, include_keys=include_keys), + # messages, + # ) + + def assertListDictEqual(self, left, right, messages, ignore_keys=None, include_keys=None): + self.assertEqual( + [self._dict_with_keys(item, ignore_keys=ignore_keys, include_keys=include_keys) for item in left], + [self._dict_with_keys(item, ignore_keys=ignore_keys, include_keys=include_keys) for item in right], + messages, + ) diff --git a/main/tokens.py b/main/tokens.py new file mode 100644 index 00000000..808c2f50 --- /dev/null +++ b/main/tokens.py @@ -0,0 +1,43 @@ +import typing + +from django.contrib.auth.tokens import PasswordResetTokenGenerator + +from utils.tokens import BaseTokenGenerator + + +def _generate_generator(name: str, _make_hash_value: None | typing.Callable = None, **kwargs): + def _default_make_hash_func(_, user, timestamp): + return str(user.pk) + str(timestamp) + + _name = f'{name}TokenGenerator' + if _make_hash_value is None: + _make_hash_value = _default_make_hash_func + + return type( + _name, + (BaseTokenGenerator,), + dict(key_salt=_name, _make_hash_value=_make_hash_value, **kwargs), + )() + + +def 
account_activation_token_generator_make_hash_value(_, user, timestamp): + return str(user.pk) + str(user.is_active) + str(timestamp) + + +def user_subscription_unsubscribe_generator_make_hash_value(_, user_subscription, timestamp): + return str(user_subscription.pk) + str(user_subscription.notify_by_email) + str(timestamp) + + +class TokenManager: + password_reset_token_generator = PasswordResetTokenGenerator() + account_activation_token_generator = _generate_generator( + 'AccountActivationTokenGenerator', + timeout=7 * 86400, + _make_hash_value=account_activation_token_generator_make_hash_value, + ) + + user_subscription_unsubscribe_generator = _generate_generator( + 'UserSubscriptionUnsubscribeGenerator', + timeout=7 * 86400, + _make_hash_value=user_subscription_unsubscribe_generator_make_hash_value, + ) diff --git a/main/urls.py b/main/urls.py index 3601844f..88cde0e0 100644 --- a/main/urls.py +++ b/main/urls.py @@ -17,8 +17,10 @@ from django.conf import settings from django.conf.urls.static import static from django.contrib import admin -from django.urls import include, path +from django.urls import include, path, re_path +from django.views.decorators.csrf import csrf_exempt +from apps.subscription.views import user_alert_subscription_email_preview from main.graphql.schema import CustomAsyncGraphQLView from main.graphql.schema import schema as graphql_schema @@ -27,9 +29,11 @@ path('health-check/', include('health_check.urls')), path( 'graphql/', - CustomAsyncGraphQLView.as_view( - schema=graphql_schema, - graphiql=False, + csrf_exempt( + CustomAsyncGraphQLView.as_view( + schema=graphql_schema, + graphql_ide=False, + ), ), name='graphql', ), @@ -38,7 +42,15 @@ if settings.DEBUG: - urlpatterns.append(path('graphiql/', CustomAsyncGraphQLView.as_view(schema=graphql_schema))) + urlpatterns.extend( + [ + path( + 'graphiql/', + csrf_exempt(CustomAsyncGraphQLView.as_view(schema=graphql_schema)), + ), + re_path(r'^dev/user-alert-subscription-email/preview/$', 
user_alert_subscription_email_preview), + ] + ) # Static and media file URLs urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) diff --git a/main/validators.py b/main/validators.py new file mode 100644 index 00000000..f2f5c43b --- /dev/null +++ b/main/validators.py @@ -0,0 +1,22 @@ +from django.core.exceptions import ValidationError +from django.utils.translation import gettext + + +class MaximumLengthValidator: + def __init__(self, max_length=128): + self.max_length = max_length + + def validate(self, password, user=None): + if len(password) > self.max_length: + raise ValidationError( + gettext( + "This password is too long. It must contain at most %(max_length)d characters.", + ), + code="password_too_long", + params={"max_length": self.max_length}, + ) + + def get_help_text(self): + return gettext( + "Your password must contain at most %(max_length)d characters.", + ) % {"max_length": self.max_length} diff --git a/poetry.lock b/poetry.lock index a8770a05..647be526 100644 --- a/poetry.lock +++ b/poetry.lock @@ -14,20 +14,6 @@ files = [ [package.dependencies] vine = ">=5.0.0,<6.0.0" -[[package]] -name = "aniso8601" -version = "9.0.1" -description = "A library for parsing ISO 8601 strings." 
-optional = false -python-versions = "*" -files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, -] - -[package.extras] -dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] - [[package]] name = "asgiref" version = "3.8.1" @@ -71,6 +57,63 @@ files = [ {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, ] +[[package]] +name = "azure-core" +version = "1.32.0" +description = "Microsoft Azure Core Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_core-1.32.0-py3-none-any.whl", hash = "sha256:eac191a0efb23bfa83fddf321b27b122b4ec847befa3091fa736a5c32c50d7b4"}, + {file = "azure_core-1.32.0.tar.gz", hash = "sha256:22b3c35d6b2dae14990f6c1be2912bf23ffe50b220e708a28ab1bb92b1c730e5"}, +] + +[package.dependencies] +requests = ">=2.21.0" +six = ">=1.11.0" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["aiohttp (>=3.0)"] + +[[package]] +name = "azure-identity" +version = "1.19.0" +description = "Microsoft Azure Identity Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "azure_identity-1.19.0-py3-none-any.whl", hash = "sha256:e3f6558c181692d7509f09de10cca527c7dce426776454fb97df512a46527e81"}, + {file = "azure_identity-1.19.0.tar.gz", hash = "sha256:500144dc18197d7019b81501165d4fa92225f03778f17d7ca8a2a180129a9c83"}, +] + +[package.dependencies] +azure-core = ">=1.31.0" +cryptography = ">=2.5" +msal = ">=1.30.0" +msal-extensions = ">=1.2.0" +typing-extensions = ">=4.0.0" + +[[package]] +name = "azure-storage-blob" +version = "12.24.0" +description = "Microsoft Azure Blob Storage Client Library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"azure_storage_blob-12.24.0-py3-none-any.whl", hash = "sha256:4f0bb4592ea79a2d986063696514c781c9e62be240f09f6397986e01755bc071"}, + {file = "azure_storage_blob-12.24.0.tar.gz", hash = "sha256:eaaaa1507c8c363d6e1d1342bd549938fdf1adec9b1ada8658c8f5bf3aea844e"}, +] + +[package.dependencies] +azure-core = ">=1.30.0" +cryptography = ">=2.1.4" +isodate = ">=0.6.1" +typing-extensions = ">=4.6.0" + +[package.extras] +aio = ["azure-core[aio] (>=1.30.0)"] + [[package]] name = "billiard" version = "4.2.0" @@ -82,6 +125,44 @@ files = [ {file = "billiard-4.2.0.tar.gz", hash = "sha256:9a3c3184cb275aa17a732f93f65b20c525d3d9f253722d26a82194803ade5a2c"}, ] +[[package]] +name = "boto3" +version = "1.35.70" +description = "The AWS SDK for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "boto3-1.35.70-py3-none-any.whl", hash = "sha256:ca385708f83f01b3f27d9d675880d2458cb3b40ed1e25da688f551454ed0c112"}, + {file = "boto3-1.35.70.tar.gz", hash = "sha256:121dce8c7102eea6a6047d46bcd74e8a24dac793a4a3857de4f4bad9c12566fd"}, +] + +[package.dependencies] +botocore = ">=1.35.70,<1.36.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.10.0,<0.11.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.35.70" +description = "Low-level, data-driven core of boto 3." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "botocore-1.35.70-py3-none-any.whl", hash = "sha256:ba8a4797cf7c5d9c237e67a62692f5146e895613fd3e6a43b00b66f3a8c7fc73"}, + {file = "botocore-1.35.70.tar.gz", hash = "sha256:18d1bb505722d9efd50c50719ed8de7284bfe6d3908a9e08756a7646e549da21"}, +] + +[package.dependencies] +jmespath = ">=0.7.1,<2.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = {version = ">=1.25.4,<2.2.0 || >2.2.0,<3", markers = "python_version >= \"3.10\""} + +[package.extras] +crt = ["awscrt (==0.22.0)"] + [[package]] name = "celery" version = "5.4.0" @@ -163,6 +244,85 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash 
= "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "charset-normalizer" version = "3.3.2" @@ -367,6 +527,84 @@ files = [ [package.extras] dev = ["polib"] +[[package]] +name = "cryptography" +version = "43.0.3" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, + {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, + {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, + {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, + {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, + {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, + {file = 
"cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, + {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, + {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, + {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, + {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, + {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, + {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, + {file = 
"cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, + {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "cssselect" +version = "1.2.0" +description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, + {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, +] + +[[package]] +name = "cssutils" +version = "2.11.1" +description = "A CSS Cascading Style Sheets library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "cssutils-2.11.1-py3-none-any.whl", hash = "sha256:a67bfdfdff4f3867fab43698ec4897c1a828eca5973f4073321b3bccaf1199b1"}, + {file = "cssutils-2.11.1.tar.gz", hash = "sha256:0563a76513b6af6eebbe788c3bf3d01c920e46b3f90c8416738c5cfc773ff8e2"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["cssselect", "importlib-resources", "jaraco.test (>=5.1)", "lxml", "pytest 
(>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + [[package]] name = "decorator" version = "5.1.1" @@ -461,20 +699,6 @@ develop = ["coverage[toml] (>=5.0a4)", "furo (>=2021.8.17b43,<2021.9.dev0)", "py docs = ["furo (>=2021.8.17b43,<2021.9.dev0)", "sphinx (>=3.5.0)", "sphinx-notfound-page"] testing = ["coverage[toml] (>=5.0a4)", "pytest (>=4.6.11)"] -[[package]] -name = "django-extensions" -version = "3.2.3" -description = "Extensions for Django" -optional = false -python-versions = ">=3.6" -files = [ - {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, - {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, -] - -[package.dependencies] -Django = ">=3.2" - [[package]] name = "django-health-check" version = "3.18.2" @@ -507,6 +731,20 @@ files = [ [package.dependencies] Django = ">=4.2" +[[package]] +name = "django-premailer" +version = "0.2.0" +description = "Django template tag that turns CSS blocks into style attributes using premailer." 
+optional = false +python-versions = "*" +files = [ + {file = "django-premailer-0.2.0.tar.gz", hash = "sha256:561b8a10bd1673794d55830c1a6479cfdfdb61d314f24f5ebc5685cd32023df6"}, + {file = "django_premailer-0.2.0-py2.py3-none-any.whl", hash = "sha256:307e386e3c4b3e0d030f172c2af84eb6c64724b36f568046b4515be3f3fed357"}, +] + +[package.dependencies] +premailer = "3.0.0" + [[package]] name = "django-redis" version = "5.4.0" @@ -537,6 +775,9 @@ files = [ ] [package.dependencies] +azure-core = {version = ">=1.13", optional = true, markers = "extra == \"azure\""} +azure-storage-blob = {version = ">=12", optional = true, markers = "extra == \"azure\""} +boto3 = {version = ">=1.4.4", optional = true, markers = "extra == \"s3\""} Django = ">=3.2" [package.extras] @@ -627,59 +868,48 @@ files = [ tests = ["asttokens (>=2.1.0)", "coverage", "coverage-enable-subprocess", "ipython", "littleutils", "pytest", "rich"] [[package]] -name = "gprof2dot" -version = "2022.7.29" -description = "Generate a dot graph from the output of several profilers." -optional = false -python-versions = ">=2.7" -files = [ - {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"}, - {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"}, -] - -[[package]] -name = "graphene" -version = "3.3" -description = "GraphQL Framework for Python" +name = "factory-boy" +version = "3.3.1" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "graphene-3.3-py2.py3-none-any.whl", hash = "sha256:bb3810be33b54cb3e6969506671eb72319e8d7ba0d5ca9c8066472f75bf35a38"}, - {file = "graphene-3.3.tar.gz", hash = "sha256:529bf40c2a698954217d3713c6041d69d3f719ad0080857d7ee31327112446b0"}, + {file = "factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca"}, + {file = "factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0"}, ] [package.dependencies] -aniso8601 = ">=8,<10" -graphql-core = ">=3.1,<3.3" -graphql-relay = ">=3.1,<3.3" +Faker = ">=0.7.0" [package.extras] -dev = ["black (==22.3.0)", "coveralls (>=3.3,<4)", "flake8 (>=4,<5)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] -test = ["coveralls (>=3.3,<4)", "iso8601 (>=1,<2)", "mock (>=4,<5)", "pytest (>=6,<7)", "pytest-asyncio (>=0.16,<2)", "pytest-benchmark (>=3.4,<4)", "pytest-cov (>=3,<4)", "pytest-mock (>=3,<4)", "pytz (==2022.1)", "snapshottest (>=0.6,<1)"] +dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "mongomock", "mypy", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] +doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] [[package]] -name = "graphene-django" -version = "3.2.1" -description = "Graphene Django integration" +name = "faker" +version = "33.0.0" +description = "Faker is a Python package that generates fake data for you." 
optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "graphene-django-3.2.1.tar.gz", hash = "sha256:52145037872d2575974c4bb2be224756ffeafe5a4e20f9c4367519622965812b"}, - {file = "graphene_django-3.2.1-py2.py3-none-any.whl", hash = "sha256:3fbdd8d4990ecec326c59d68edfcaf9a7bc9c4dbdcbf88b11ac46dfc10240e49"}, + {file = "Faker-33.0.0-py3-none-any.whl", hash = "sha256:68e5580cb6b4226710886e595eabc13127149d6e71e9d1db65506a7fbe2c7fce"}, + {file = "faker-33.0.0.tar.gz", hash = "sha256:9b01019c1ddaf2253ca2308c0472116e993f4ad8fc9905f82fa965e0c6f932e9"}, ] [package.dependencies] -Django = ">=3.2" -graphene = ">=3.0,<4" -graphql-core = ">=3.1.0,<4" -graphql-relay = ">=3.1.1,<4" -promise = ">=2.1" -text-unidecode = "*" +python-dateutil = ">=2.4" +typing-extensions = "*" -[package.extras] -dev = ["coveralls", "django-filter (>=22.1)", "djangorestframework (>=3.6.3)", "mock", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-django (>=4.5.2)", "pytest-random-order", "pytz", "ruff (==0.1.2)"] -rest-framework = ["djangorestframework (>=3.6.3)"] -test = ["coveralls", "django-filter (>=22.1)", "djangorestframework (>=3.6.3)", "mock", "pytest (>=7.3.1)", "pytest-cov", "pytest-django (>=4.5.2)", "pytest-random-order", "pytz"] +[[package]] +name = "gprof2dot" +version = "2022.7.29" +description = "Generate a dot graph from the output of several profilers." 
+optional = false +python-versions = ">=2.7" +files = [ + {file = "gprof2dot-2022.7.29-py2.py3-none-any.whl", hash = "sha256:f165b3851d3c52ee4915eb1bd6cca571e5759823c2cd0f71a79bda93c2dc85d6"}, + {file = "gprof2dot-2022.7.29.tar.gz", hash = "sha256:45b4d298bd36608fccf9511c3fd88a773f7a1abc04d6cd39445b11ba43133ec5"}, +] [[package]] name = "graphql-core" @@ -692,20 +922,6 @@ files = [ {file = "graphql_core-3.2.3-py3-none-any.whl", hash = "sha256:5766780452bd5ec8ba133f8bf287dc92713e3868ddd83aee4faab9fc3e303dc3"}, ] -[[package]] -name = "graphql-relay" -version = "3.2.0" -description = "Relay library for graphql-core" -optional = false -python-versions = ">=3.6,<4" -files = [ - {file = "graphql-relay-3.2.0.tar.gz", hash = "sha256:1ff1c51298356e481a0be009ccdff249832ce53f30559c1338f22a0e0d17250c"}, - {file = "graphql_relay-3.2.0-py3-none-any.whl", hash = "sha256:c9b22bd28b170ba1fe674c74384a8ff30a76c8e26f88ac3aa1584dd3179953e5"}, -] - -[package.dependencies] -graphql-core = ">=3.2,<3.3" - [[package]] name = "idna" version = "3.7" @@ -776,6 +992,17 @@ files = [ {file = "iso639_lang-2.2.3-py3-none-any.whl", hash = "sha256:472290a2dfccfddc7eb1bd2d1ca6b174e988093c2ccb8b74743fe190625a9c61"}, ] +[[package]] +name = "isodate" +version = "0.7.2" +description = "An ISO 8601 date/time/duration parser and formatter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "isodate-0.7.2-py3-none-any.whl", hash = "sha256:28009937d8031054830160fce6d409ed342816b543597cece116d966c6d99e15"}, + {file = "isodate-0.7.2.tar.gz", hash = "sha256:4cd1aa0f43ca76f4a6c6c0292a85f40b35ec2e43e315b59f06e6d32171a953e6"}, +] + [[package]] name = "jedi" version = "0.19.1" @@ -795,6 +1022,17 @@ docs = ["Jinja2 (==2.11.3)", "MarkupSafe (==1.1.1)", "Pygments (==2.8.1)", "alab qa = ["flake8 (==5.0.4)", "mypy (==0.971)", "types-setuptools (==67.2.0.1)"] testing = ["Django", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching 
Expressions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "kombu" version = "5.3.7" @@ -827,6 +1065,160 @@ sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] +[[package]] +name = "lxml" +version = "5.3.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +files = [ + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = 
"sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = "lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + 
{file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = 
"sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = "sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = 
"lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = 
"lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = 
"lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, +] + +[package.extras] +cssselect 
= ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11)"] + [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -841,6 +1233,51 @@ files = [ [package.dependencies] traitlets = "*" +[[package]] +name = "more-itertools" +version = "10.5.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.8" +files = [ + {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, + {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, +] + +[[package]] +name = "msal" +version = "1.31.1" +description = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect." +optional = false +python-versions = ">=3.7" +files = [ + {file = "msal-1.31.1-py3-none-any.whl", hash = "sha256:29d9882de247e96db01386496d59f29035e5e841bcac892e6d7bf4390bf6bd17"}, + {file = "msal-1.31.1.tar.gz", hash = "sha256:11b5e6a3f802ffd3a72107203e20c4eac6ef53401961b880af2835b723d80578"}, +] + +[package.dependencies] +cryptography = ">=2.5,<46" +PyJWT = {version = ">=1.0.0,<3", extras = ["crypto"]} +requests = ">=2.0.0,<3" + +[package.extras] +broker = ["pymsalruntime (>=0.14,<0.18)", "pymsalruntime (>=0.17,<0.18)"] + +[[package]] +name = "msal-extensions" +version = "1.2.0" +description = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "msal_extensions-1.2.0-py3-none-any.whl", hash = "sha256:cf5ba83a2113fa6dc011a254a72f1c223c88d7dfad74cc30617c4679a417704d"}, + {file = "msal_extensions-1.2.0.tar.gz", hash = "sha256:6f41b320bfd2933d631a215c91ca0dd3e67d84bd1a2f50ce917d5874ec646bef"}, +] + +[package.dependencies] +msal = ">=1.29,<2" +portalocker = ">=1.4,<3" + [[package]] name = "packaging" version = "24.0" @@ -897,20 +1334,40 @@ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] -name = "promise" -version = "2.3" -description = "Promises/A+ implementation for Python" +name = "portalocker" +version = "2.10.1" +description = "Wraps the portalocker recipe for easy usage" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "promise-2.3.tar.gz", hash = "sha256:dfd18337c523ba4b6a58801c164c1904a9d4d1b1747c7d5dbf45b693a49d93d0"}, + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, ] [package.dependencies] -six = "*" +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} [package.extras] -test = ["coveralls", "futures", "mock", "pytest (>=2.7.3)", "pytest-benchmark", "pytest-cov"] +docs = ["sphinx (>=1.7.1)"] +redis = ["redis"] +tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] + +[[package]] +name = "premailer" +version = "3.0.0" +description = "Turns CSS blocks into style attributes" +optional = false +python-versions = "*" +files = [ + {file = "premailer-3.0.0-py2.py3-none-any.whl", hash = "sha256:bdbcb9258914944a3f5cf817bfe549305f077a7f043b2ce66139bc0e8cb3d2a5"}, + {file = "premailer-3.0.0.tar.gz", hash = 
"sha256:474306d473f6686ebe31255b3a86b5d88190a28ee16fa31fdf4d4ef6d3c42347"}, +] + +[package.dependencies] +cssselect = "*" +cssutils = "*" +lxml = "*" +requests = "*" [[package]] name = "prompt-toolkit" @@ -1060,6 +1517,17 @@ files = [ [package.extras] tests = ["pytest"] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pygments" version = "2.18.0" @@ -1074,6 +1542,26 @@ files = [ [package.extras] windows-terminal = ["colorama (>=0.4.6)"] +[[package]] +name = "pyjwt" +version = "2.10.0" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "PyJWT-2.10.0-py3-none-any.whl", hash = "sha256:543b77207db656de204372350926bed5a86201c4cbff159f623f79c7bb487a15"}, + {file = "pyjwt-2.10.0.tar.gz", hash = "sha256:7628a7eb7938959ac1b26e819a1df0fd3259505627b575e4bad6d08f76db695c"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + [[package]] name = "pytest" version = "8.2.1" @@ -1188,6 +1676,33 @@ files = [ {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] +[[package]] +name = "pywin32" +version = "308" +description = "Python for Window Extensions" +optional = false +python-versions = "*" +files = [ 
+ {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = 
"pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + [[package]] name = "redis" version = "5.0.4" @@ -1227,6 +1742,23 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "s3transfer" +version = "0.10.4" +description = "An Amazon S3 Transfer Manager" +optional = false +python-versions = ">=3.8" +files = [ + {file = "s3transfer-0.10.4-py3-none-any.whl", hash = "sha256:244a76a24355363a68164241438de1b72f8781664920260c48465896b712a41e"}, + {file = "s3transfer-0.10.4.tar.gz", hash = "sha256:29edc09801743c21eb5ecbc617a152df41d3c287f67b615f73e5f750583666a7"}, +] + +[package.dependencies] +botocore = ">=1.33.2,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] + [[package]] name = "sentry-sdk" version = "2.6.0" @@ -1324,17 +1856,17 @@ tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "strawberry-graphql" -version = "0.232.0" +version = "0.247.2" description = "A library for creating GraphQL APIs" optional = false python-versions = "<4.0,>=3.8" files = [ - {file = "strawberry_graphql-0.232.0-py3-none-any.whl", hash = "sha256:57198bfe8db29760259f71c8d0d3f052719ad19ff2cd0ecb7d8ef85d6dfa0977"}, - {file = "strawberry_graphql-0.232.0.tar.gz", hash = "sha256:c297d12554c2e9016f62e461f6cfe222acdca1d61556ceaf99e719ee10ae5209"}, + {file = "strawberry_graphql-0.247.2-py3-none-any.whl", hash = "sha256:47397023facb042e99ab72e839bf187100488f238dd7c9bb6964b941b8dc79d1"}, + {file = "strawberry_graphql-0.247.2.tar.gz", hash = "sha256:480f24ff9eaa25785458fc109b5c142ab84fe3024bc3a9666fefd085ce941561"}, ] [package.dependencies] 
-graphql-core = ">=3.2.0,<3.3.0" +graphql-core = ">=3.2.0,<3.4.0" python-dateutil = ">=2.7.0,<3.0.0" typing-extensions = ">=4.5.0" @@ -1355,39 +1887,27 @@ pydantic = ["pydantic (>1.6.1)"] pyinstrument = ["pyinstrument (>=4.0.0)"] quart = ["quart (>=0.19.3)"] sanic = ["sanic (>=20.12.2)"] -starlite = ["starlite (>=1.48.0)"] [[package]] name = "strawberry-graphql-django" -version = "0.38.0" +version = "0.49.1" description = "Strawberry GraphQL Django extension" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" files = [ - {file = "strawberry_graphql_django-0.38.0-py3-none-any.whl", hash = "sha256:28f8416197d43f83314390eed7c8f2597406bf696b9195371cec37e4f56d70c6"}, - {file = "strawberry_graphql_django-0.38.0.tar.gz", hash = "sha256:5682442734d6e3908bfac5c2d7dcd6d4635865acb57585f2e5a509fa2ba97970"}, + {file = "strawberry_graphql_django-0.49.1-py3-none-any.whl", hash = "sha256:308ed5113bae92f7cc6f05fa7a178124fc1b72a4508f9a68828a28b99a9dacf1"}, + {file = "strawberry_graphql_django-0.49.1.tar.gz", hash = "sha256:05ac677edc57e6e7411080574b607750109c5e879af3b524dfb0307f438c9800"}, ] [package.dependencies] asgiref = ">=3.8" -django = ">=3.2" -strawberry-graphql = ">=0.212.0" +django = ">=4.2" +strawberry-graphql = ">=0.236.0" [package.extras] debug-toolbar = ["django-debug-toolbar (>=3.4)"] enum = ["django-choices-field (>=2.2.2)"] -[[package]] -name = "text-unidecode" -version = "1.3" -description = "The most basic Text::Unidecode port" -optional = false -python-versions = "*" -files = [ - {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, - {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, -] - [[package]] name = "traitlets" version = "5.14.3" @@ -1447,6 +1967,17 @@ files = [ {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] 
+[[package]] +name = "ua-parser" +version = "0.18.0" +description = "Python port of Browserscope's user agent parser" +optional = false +python-versions = "*" +files = [ + {file = "ua-parser-0.18.0.tar.gz", hash = "sha256:db51f1b59bfaa82ed9e2a1d99a54d3e4153dddf99ac1435d51828165422e624e"}, + {file = "ua_parser-0.18.0-py2.py3-none-any.whl", hash = "sha256:9d94ac3a80bcb0166823956a779186c746b50ea4c9fd9bf30fdb758553c38950"}, +] + [[package]] name = "urllib3" version = "2.2.1" @@ -1464,6 +1995,20 @@ h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "user-agents" +version = "2.2.0" +description = "A library to identify devices (phones, tablets) and their capabilities by parsing browser user agent strings." +optional = false +python-versions = "*" +files = [ + {file = "user-agents-2.2.0.tar.gz", hash = "sha256:d36d25178db65308d1458c5fa4ab39c9b2619377010130329f3955e7626ead26"}, + {file = "user_agents-2.2.0-py3-none-any.whl", hash = "sha256:a98c4dc72ecbc64812c4534108806fb0a0b3a11ec3fd1eafe807cee5b0a942e7"}, +] + +[package.dependencies] +ua-parser = ">=0.10.0" + [[package]] name = "uwsgi" version = "2.0.25.1" @@ -1507,7 +2052,21 @@ files = [ {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, ] +[[package]] +name = "whitenoise" +version = "6.8.2" +description = "Radically simplified static file serving for WSGI applications" +optional = false +python-versions = ">=3.9" +files = [ + {file = "whitenoise-6.8.2-py3-none-any.whl", hash = "sha256:df12dce147a043d1956d81d288c6f0044147c6d2ab9726e5772ac50fb45d2280"}, + {file = "whitenoise-6.8.2.tar.gz", hash = "sha256:486bd7267a375fa9650b136daaec156ac572971acc8bf99add90817a530dd1d4"}, +] + +[package.extras] +brotli = ["brotli"] + [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "d8a651e3845a71a84160ce78f73374a3cf3a4aad6694ea534cee95af03448999" +content-hash = 
"4e6b1edb2f6be1ec6d35cac48b0ce7b835ecb5c2e5fd1beccef3702e3b3e3aa6" diff --git a/pyproject.toml b/pyproject.toml index 22201964..e1f824ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,30 +8,32 @@ package-mode = false [tool.poetry.dependencies] python = "^3.11" -django = "^4.2" +Django = ">=4.2,<4.3" celery = "^5.3" django-celery-beat = "*" -django-cors-headers = "^4.2.0" +django-cors-headers = "*" django-environ = "*" -django-extensions = "^3.2.3" django-redis = "^5.3.0" -django-storages = "^1.13.2" +django-storages = {extras = ["s3", "azure", "azure-identity"], version = "*"} django-admin-autocomplete-filter = "*" django-modeltranslation = "==0.19.5" iso639-lang = "^2.1.0" -graphene-django = "*" psycopg2-binary = "^2.9.9" pytz = "*" colorlog = "*" requests = "*" -strawberry-graphql = "^0.232.0" -strawberry-graphql-django = "0.38.0" +strawberry-graphql-django = {extras = ["strawberry-graphql"], version = "0.49.1"} sentry-sdk = "*" djangorestframework = "*" validators = "*" django-health-check = "*" psutil = "*" uwsgi = "*" +django-premailer = "*" +user-agents = "*" +factory-boy = "*" +azure-identity = "*" +whitenoise = "*" # Prod level static server for Admin panel [tool.poetry.dev-dependencies] pytest = "*" diff --git a/pytest.ini b/pytest.ini index 25ed5714..64a4676c 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,4 +1,5 @@ [pytest] DJANGO_SETTINGS_MODULE = main.settings addopts = --ignore=legacy/ +python_files = tests.py test_*.py *_tests.py log_cli = true diff --git a/schema.graphql b/schema.graphql index 073980e8..a40cb832 100644 --- a/schema.graphql +++ b/schema.graphql @@ -34,16 +34,13 @@ input AlertFilter { id: IDBaseFilterLookup country: DjangoModelFilterInput sent: DatetimeDatetimeFilterLookup + infos: AlertInfoFilter AND: AlertFilter OR: AlertFilter NOT: AlertFilter DISTINCT: Boolean region: ID admin1: ID - urgency: [AlertInfoUrgencyEnum!] - severity: [AlertInfoSeverityEnum!] - certainty: [AlertInfoCertaintyEnum!] 
- category: [AlertInfoCategoryEnum!] } type AlertInfoAreaCircleType { @@ -106,6 +103,10 @@ input AlertInfoFilter { OR: AlertInfoFilter NOT: AlertInfoFilter DISTINCT: Boolean + urgency: [AlertInfoUrgencyEnum!] + severity: [AlertInfoSeverityEnum!] + certainty: [AlertInfoCertaintyEnum!] + category: [AlertInfoCategoryEnum!] } input AlertInfoOrder { @@ -252,6 +253,11 @@ type AppEnumCollection { AlertInfoUrgency: [AppEnumCollectionAlertInfoUrgency!]! AlertInfoSeverity: [AppEnumCollectionAlertInfoSeverity!]! AlertInfoCertainty: [AppEnumCollectionAlertInfoCertainty!]! + UserAlertSubscriptionEmailFrequency: [AppEnumCollectionUserAlertSubscriptionEmailFrequency!]! + UserAlertSubscriptionFilterAlertUrgencies: [AppEnumCollectionUserAlertSubscriptionFilterAlertUrgencies!]! + UserAlertSubscriptionFilterAlertSeverities: [AppEnumCollectionUserAlertSubscriptionFilterAlertSeverities!]! + UserAlertSubscriptionFilterAlertCertainties: [AppEnumCollectionUserAlertSubscriptionFilterAlertCertainties!]! + UserAlertSubscriptionFilterAlertCategories: [AppEnumCollectionUserAlertSubscriptionFilterAlertCategories!]! } type AppEnumCollectionAlertInfoCategory { @@ -304,11 +310,49 @@ type AppEnumCollectionFeedStatus { label: String! } +type AppEnumCollectionUserAlertSubscriptionEmailFrequency { + key: UserAlertSubscriptionEmailFrequencyEnum! + label: String! +} + +type AppEnumCollectionUserAlertSubscriptionFilterAlertCategories { + key: AlertInfoCategoryEnum! + label: String! +} + +type AppEnumCollectionUserAlertSubscriptionFilterAlertCertainties { + key: AlertInfoCertaintyEnum! + label: String! +} + +type AppEnumCollectionUserAlertSubscriptionFilterAlertSeverities { + key: AlertInfoSeverityEnum! + label: String! +} + +type AppEnumCollectionUserAlertSubscriptionFilterAlertUrgencies { + key: AlertInfoUrgencyEnum! + label: String! +} + type AppEnumCollectionUserEmailOptOuts { key: OptEmailNotificationTypeEnum! label: String! } +input BoolBaseFilterLookup { + """Exact match. 
Filter will be skipped on `null` value""" + exact: Boolean + + """Assignment test. Filter will be skipped on `null` value""" + isNull: Boolean + + """ + Exact match of items in a given list. Filter will be skipped on `null` value + """ + inList: [Boolean!] +} + type ContinentType { id: ID! name: String! @@ -348,6 +392,9 @@ type CountryTypeCountList { items: [CountryType!]! } +"""A generic type to return error messages""" +scalar CustomErrorType + """Date with time (isoformat)""" scalar DateTime @@ -530,6 +577,11 @@ type Mutation { private: PrivateMutation! } +type MutationEmptyResponseType { + ok: Boolean! + errors: CustomErrorType +} + input OffsetPaginationInput { offset: Int! = 0 limit: Int! = -1 @@ -552,15 +604,29 @@ enum Ordering { scalar PolygonScalar type PrivateMutation { + logout: MutationEmptyResponseType! + changeUserPassword(data: UserPasswordChangeInput!): MutationEmptyResponseType! + updateMe(data: UserMeInput!): UserMeTypeMutationResponseType! + createUserAlertSubscription(data: UserAlertSubscriptionInput!): UserAlertSubscriptionTypeMutationResponseType! + updateUserAlertSubscription(id: ID!, data: UserAlertSubscriptionInput!): UserAlertSubscriptionTypeMutationResponseType! + deleteUserAlertSubscription(id: ID!): UserAlertSubscriptionTypeMutationResponseType! id: ID! } type PrivateQuery { noop: ID! + userAlertSubscriptions(filters: UserAlertSubscriptionFilter, order: UserAlertSubscriptionOrder, pagination: OffsetPaginationInput): UserAlertSubscriptionTypeCountList! + userAlertSubscription(pk: ID!): UserAlertSubscriptionType + subscriptedAlerts(filters: AlertFilter, order: AlertOrder, pagination: OffsetPaginationInput): AlertTypeCountList! id: ID! } type PublicMutation { + login(data: UserLoginInput!): UserMeTypeMutationResponseType! + register(data: UserRegisterInput!): MutationEmptyResponseType! + accountActivation(data: UserActivationInput!): MutationEmptyResponseType! 
+ passwordResetTrigger(data: UserPasswordResetTriggerInput!): MutationEmptyResponseType! + passwordResetConfirm(data: UserPasswordResetConfirmInput!): MutationEmptyResponseType! id: ID! } @@ -579,6 +645,7 @@ type PublicQuery { feed(pk: ID!): FeedType alert(pk: ID!): AlertType alertInfo(pk: ID!): AlertInfoType + historicalAlerts(filters: AlertFilter, pagination: OffsetPaginationInput): AlertTypeCountList! id: ID! } @@ -614,6 +681,97 @@ type RegionTypeCountList { items: [RegionType!]! } +input UserActivationInput { + uuid: String! + token: String! +} + +enum UserAlertSubscriptionEmailFrequencyEnum { + DAILY + WEEKLY + MONTHLY +} + +input UserAlertSubscriptionFilter { + id: IDBaseFilterLookup + isActive: BoolBaseFilterLookup + notifyByEmail: BoolBaseFilterLookup + AND: UserAlertSubscriptionFilter + OR: UserAlertSubscriptionFilter + NOT: UserAlertSubscriptionFilter + DISTINCT: Boolean +} + +input UserAlertSubscriptionInput { + name: String! + filterAlertCountry: ID! + filterAlertAdmin1s: [ID!]! + isActive: Boolean + notifyByEmail: Boolean + emailFrequency: UserAlertSubscriptionEmailFrequencyEnum + filterAlertUrgencies: [AlertInfoUrgencyEnum!] + filterAlertSeverities: [AlertInfoSeverityEnum!] + filterAlertCertainties: [AlertInfoCertaintyEnum!] + filterAlertCategories: [AlertInfoCategoryEnum!] +} + +input UserAlertSubscriptionOrder { + id: Ordering + createdAt: Ordering + modifiedAt: Ordering + name: Ordering + isActive: Ordering +} + +type UserAlertSubscriptionType { + id: ID! + createdAt: DateTime! + modifiedAt: DateTime! + isActive: Boolean! + filterAlertCountryId: ID! + filterAlertAdmin1s: [ID!]! + notifyByEmail: Boolean! + emailLastSentAt: DateTime + name: String! + filterAlertUrgencies: [AlertInfoUrgencyEnum!]! + filterAlertSeverities: [AlertInfoSeverityEnum!]! + filterAlertCertainties: [AlertInfoCertaintyEnum!]! + filterAlertCategories: [AlertInfoCategoryEnum!]! + filterAlertUrgenciesDisplay: [String!]! + filterAlertSeveritiesDisplay: [String!]! 
+ filterAlertCertaintiesDisplay: [String!]! + filterAlertCategoriesDisplay: [String!]! + emailFrequency: UserAlertSubscriptionEmailFrequencyEnum! + emailFrequencyDisplay: String! + alerts(filters: AlertFilter, order: AlertOrder, pagination: OffsetPaginationInput): AlertTypeCountList! + filterAlertCountry: CountryType! + filterAlertAdmin1sDisplay: [Admin1Type!]! +} + +type UserAlertSubscriptionTypeCountList { + limit: Int! + offset: Int! + count: Int! + items: [UserAlertSubscriptionType!]! +} + +type UserAlertSubscriptionTypeMutationResponseType { + ok: Boolean! + errors: CustomErrorType + result: UserAlertSubscriptionType +} + +input UserLoginInput { + email: String! + password: String! +} + +input UserMeInput { + firstName: String + lastName: String + emailOptOuts: [OptEmailNotificationTypeEnum!] +} + type UserMeType { id: ID! firstName: String @@ -623,4 +781,35 @@ type UserMeType { phoneNumber: String country: String city: String +} + +type UserMeTypeMutationResponseType { + ok: Boolean! + errors: CustomErrorType + result: UserMeType +} + +input UserPasswordChangeInput { + oldPassword: String! + newPassword: String! +} + +input UserPasswordResetConfirmInput { + captcha: String! + uuid: String! + token: String! + newPassword: String! +} + +input UserPasswordResetTriggerInput { + captcha: String! + email: String! +} + +input UserRegisterInput { + captcha: String! + firstName: String! + lastName: String! + email: String! + password: String! } \ No newline at end of file diff --git a/static/images/go-logo-long.png b/static/images/go-logo-long.png new file mode 100644 index 00000000..93b96b59 Binary files /dev/null and b/static/images/go-logo-long.png differ diff --git a/templates/emails/base.html b/templates/emails/base.html new file mode 100644 index 00000000..93ee4522 --- /dev/null +++ b/templates/emails/base.html @@ -0,0 +1,47 @@ +{% load premailer %} +{% load custom_tags %} +{% load static %} + +{% premailer %} + + + + + {% block head %}{% endblock %} + + + +
+ +
+ + + {% load i18n %}{% autoescape off %} + + + +
+ {% block title %}{% endblock %} + + +
+
+ +
+ {% block body %}{% endblock %} +
+

Thank you for using Alert Hub

+

The Alert Hub team

+
+
+ + {% endautoescape %} +
+ + + +{% endpremailer %} diff --git a/templates/emails/base.txt b/templates/emails/base.txt new file mode 100644 index 00000000..86447e42 --- /dev/null +++ b/templates/emails/base.txt @@ -0,0 +1,13 @@ +{% block head %}{% endblock %} + +{% block title %}{% endblock %} + +{% block body %}{% endblock %} + +Thank you for using Alert Hub +The Alert Hub team + +{% if email_type in unsubscribe_email_types %} +Would you prefer to not receive these kinds of emails anymore? +Use this link to unsubscribe "{{ protocol }}://{{ domain }}{% url 'unsubscribe_email' uidb64=unsubscribe_email_id token=unsubscribe_email_token email_type=email_type %}" +{% endif %} diff --git a/templates/emails/subscription/body.html b/templates/emails/subscription/body.html new file mode 100644 index 00000000..0f00f6ba --- /dev/null +++ b/templates/emails/subscription/body.html @@ -0,0 +1,26 @@ + + + + +

+{% for subscription in subscriptions %} +Subscription: {{ subscription.name }} +
+Latest alerts: + {% for alert in subscription.latest_alerts %} + - {{ alert.url }} -- {{ alert.sent }} +
+ + {% endfor %} + +
+ +Unsubscribe: {{ subscription.unsubscribe_url }} +{% endfor %} +

+ +

+

+ + + diff --git a/templates/emails/subscription/body.txt b/templates/emails/subscription/body.txt new file mode 100644 index 00000000..e69de29b diff --git a/templates/emails/user/activation/body.html b/templates/emails/user/activation/body.html new file mode 100644 index 00000000..27ce8f0c --- /dev/null +++ b/templates/emails/user/activation/body.html @@ -0,0 +1,15 @@ +{% extends "emails/base.html" %} + +{% block body %} +
+

+ Please click on the link below to activate your account: +

+ + Activate your account + +
+ +{% endblock %} diff --git a/templates/emails/user/activation/body.txt b/templates/emails/user/activation/body.txt new file mode 100644 index 00000000..0b03d557 --- /dev/null +++ b/templates/emails/user/activation/body.txt @@ -0,0 +1,8 @@ +{% extends "emails/base.txt" %} + +{% block body %} + +Please open the link below to activate your account: +"{{ activation_url }}" + +{% endblock %} diff --git a/templates/emails/user/password_changed/body.html b/templates/emails/user/password_changed/body.html new file mode 100644 index 00000000..c3a6b120 --- /dev/null +++ b/templates/emails/user/password_changed/body.html @@ -0,0 +1,64 @@ +{% extends "emails/base.html" %} + +{% block head %} + +{% endblock %} +{% block title %} + Your password has been changed successfully. +{% endblock %} +{% block body %} +

+ Hi there {{ user.first_name }}, +
+ Your password has been changed successfully. +

+ + + + + + + + + + + + + +
Time{{time}}
Location{{location}}
Device{{device}}
+
+

+ If you are aware of this change, you can disregard this email. + If this was not triggered by you, please reset your password. +

+ + Reset Password + +
+{% endblock%} diff --git a/templates/emails/user/password_changed/body.txt b/templates/emails/user/password_changed/body.txt new file mode 100644 index 00000000..af419754 --- /dev/null +++ b/templates/emails/user/password_changed/body.txt @@ -0,0 +1,15 @@ +{% extends "emails/base.txt" %} + +{% block title %} + Your password has been changed successfully. +{% endblock %} +{% block body %} + Hi there {{ user.first_name }}, + Your password has been changed successfully. + Time: {{time}} + Location: {{location}} + Device: {{device}} + If you are aware of this change, you can disregard this email. + If this was not triggered by you, please reset your password. + Reset Password: {{ frontend_forgot_password }} +{% endblock%} diff --git a/templates/emails/user/password_reset/body.html b/templates/emails/user/password_reset/body.html new file mode 100644 index 00000000..96edf752 --- /dev/null +++ b/templates/emails/user/password_reset/body.html @@ -0,0 +1,45 @@ +{% extends "emails/base.html" %} + +{% block head %} + +{% endblock %} +{% block title %} + Reset Password +{% endblock %} +{% block body %} +

+ It seems you've forgotten your password. +

+
+

+ If you didn't request to reset your password, you may simply ignore this email. +

+ + Reset your password + +
+ {% if location or device %} +

More detail on password reset trigger

+ + + + + + + + + + + + + +
Time{{time}}
Location{{location}}
Device{{device}}
+ {% endif %} +{% endblock %} diff --git a/templates/emails/user/password_reset/body.txt b/templates/emails/user/password_reset/body.txt new file mode 100644 index 00000000..d24d1ace --- /dev/null +++ b/templates/emails/user/password_reset/body.txt @@ -0,0 +1,22 @@ +{% block title %} +Reset Password +{% endblock %} + +{% block body %} + {% if welcome %} + Before you can start using your account, you'll first need to create a password. + You can do this by clicking on the link below: + Set your password + {% else %} + It seems you've forgotten your password. + If you didn't request to reset your password, you may simply ignore this email. + Reset your password + {% endif %} + {{ password_reset_url }} + {% if location or device %} + More detail on password reset trigger + Time: {{time}} + Location: {{location}} + Device: {{device}} + {% endif %} +{% endblock %} diff --git a/utils/common.py b/utils/common.py index de166238..09b26d9f 100644 --- a/utils/common.py +++ b/utils/common.py @@ -9,6 +9,7 @@ from django.conf import settings from django.db import models +from user_agents import parse from main.cache import cache @@ -44,6 +45,23 @@ def logger_log_extra(context_data): } +def get_client_ip(request): + x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR') + if x_forwarded_for: + ip = x_forwarded_for.split(',')[-1].strip() + else: + ip = request.META.get('REMOTE_ADDR') + return ip + + +def get_device_type(request): + http_agent = request.META.get('HTTP_USER_AGENT') + if http_agent: + user_agent = parse(http_agent) + return user_agent.browser.family + ',' + user_agent.os.family + return + + @contextmanager def redis_lock(lock_id: str): timeout_at = time.monotonic() + settings.REDIS_LOCK_EXPIRE - 3 diff --git a/utils/emails.py b/utils/emails.py new file mode 100644 index 00000000..2cf72123 --- /dev/null +++ b/utils/emails.py @@ -0,0 +1,104 @@ +import logging + +from django.conf import settings +from django.contrib.auth.tokens import PasswordResetTokenGenerator 
+from django.core.mail import EmailMultiAlternatives +from django.template import loader +from django.utils.encoding import force_bytes +from django.utils.http import urlsafe_base64_encode + +from apps.user.models import EmailNotificationType, User + +logger = logging.getLogger(__name__) + + +def _base_send_email( + subject: str | None, + subject_template: str | None, + email_html_template: str, + email_text_template: str, + context: dict, + from_email: str, + to_email: str, +): + """ + Send a django.core.mail.EmailMultiAlternatives to `to_email`. + Renders provided templates and send it to to_email + Low level, Don't use this directly + """ + # Subject + if subject_template: + subject = loader.render_to_string(subject_template, context) + elif subject: + subject = ''.join( + # Email subject *must not* contain newlines + subject.splitlines() + ) + else: + raise ValueError('Both arguments subject/subject_template cannot be None') + + # Body + html_content = loader.render_to_string(email_html_template, context) + text_content = loader.render_to_string(email_text_template, context) + # Email message + email_message = EmailMultiAlternatives( + subject=subject, + body=text_content, # Plain text + from_email=from_email, + to=[to_email], + ) + # HTML + email_message.attach_alternative(html_content, "text/html") + # Send email + email_message.send() + + +def send_email( + user: User, + email_type: EmailNotificationType, + subject: str | None, + email_html_template: str, + email_text_template: str, + context: None | dict = None, + subject_template: str | None = None, +): + """ + Validates email request + Add common context variable + """ + # NOTE: We don't handle bounced email status + if not user.is_email_subscribed_for(email_type): + logger.warning( + '[{}] Email not sent: User <{}>({}) has not subscribed!!'.format( + email_type, + user.email, + user.pk, + ) + ) + return + + if context is None: + context = {} + context.update( + { + 'client_domain': 
settings.APP_FRONTEND_HOST, + 'protocol': settings.APP_HTTP_PROTOCOL, + 'domain': settings.APP_DOMAIN, + 'user': user, + 'email_type': email_type, + # WIP + 'unsubscribe_email_types': User.OPT_EMAIL_NOTIFICATION_TYPES, + 'unsubscribe_email_token': PasswordResetTokenGenerator().make_token(user), + 'unsubscribe_email_id': urlsafe_base64_encode(force_bytes(user.pk)), + } + ) + + _base_send_email( + subject=subject, + subject_template=subject_template, + email_html_template=email_html_template, + email_text_template=email_text_template, + context=context, + from_email=settings.DEFAULT_FROM_EMAIL, + to_email=user.email, + ) diff --git a/utils/hcaptcha.py b/utils/hcaptcha.py new file mode 100644 index 00000000..dee85c6c --- /dev/null +++ b/utils/hcaptcha.py @@ -0,0 +1,29 @@ +import requests +from django.conf import settings +from django.utils.translation import gettext +from rest_framework import serializers + + +def validate_hcaptcha(captcha): + CAPTCHA_VERIFY_URL = 'https://hcaptcha.com/siteverify' + + data = { + 'sitekey': settings.HCAPTCHA_SITEKEY, + 'secret': settings.SECRET_KEY, + 'response': captcha, + } + + response = requests.post(url=CAPTCHA_VERIFY_URL, data=data) + + response_json = response.json() + return response_json['success'] + + +class CaptchaSerializerMixin(serializers.Serializer): + captcha = serializers.CharField(write_only=True, required=True) + + def validate_captcha(self, captcha): + # TODO: Remove this return + return + if not validate_hcaptcha(captcha): + raise serializers.ValidationError(gettext('Invalid captcha! 
Please, Try Again')) diff --git a/utils/strawberry/enums.py b/utils/strawberry/enums.py index f82db6e5..70745a95 100644 --- a/utils/strawberry/enums.py +++ b/utils/strawberry/enums.py @@ -19,17 +19,17 @@ def get_enum_name_from_django_field( - field: 'GET_ENUM_NAME_FROM_DJANGO_FIELD_FIELD_TYPE', + field: "GET_ENUM_NAME_FROM_DJANGO_FIELD_FIELD_TYPE", field_name=None, model_name=None, serializer_name=None, ): def _have_model(_field): - if hasattr(_field, 'model') or hasattr(getattr(_field, 'Meta', None), 'model'): + if hasattr(_field, "model") or hasattr(getattr(_field, "Meta", None), "model"): return True def _get_serializer_name(_field): - if hasattr(_field, 'parent'): + if hasattr(_field, "parent"): return type(_field.parent).__name__ if field_name is None or model_name is None: @@ -40,19 +40,27 @@ def _get_serializer_name(_field): model_name=model_name, serializer_name=serializer_name, ) - if isinstance(field, serializers.ChoiceField): + if isinstance(field, serializers.ListField): + if isinstance(field.child, serializers.ChoiceField): + if _have_model(field.parent): + if model_name is None: + assert field.parent is not None + model_name = field.parent.Meta.model.__name__ # type: ignore[reportAttributeAccessIssue] + serializer_name = _get_serializer_name(field) + field_name = field_name or field.field_name + elif isinstance(field, serializers.ChoiceField): if isinstance(field.parent, serializers.ListField): if _have_model(field.parent.parent): if model_name is None: assert field.parent.parent is not None - model_name = field.parent.parent.Meta.model.__name__ + model_name = field.parent.parent.Meta.model.__name__ # type: ignore[reportAttributeAccessIssue] serializer_name = _get_serializer_name(field.parent) field_name = field_name or field.parent.field_name else: if _have_model(field.parent): if model_name is None: assert field.parent is not None - model_name = field.parent.Meta.model.__name__ + model_name = field.parent.Meta.model.__name__ # type: 
ignore[reportAttributeAccessIssue] serializer_name = _get_serializer_name(field) field_name = field_name or field.field_name elif isinstance(field, ArrayField): @@ -74,12 +82,12 @@ def _get_serializer_name(_field): serializer_name = _get_serializer_name(field) field_name = field_name or field.name if field_name is None: - raise Exception(f'{field=} should have a name') + raise Exception(f"{field=} should have a name") if model_name: - return f'{model_name}{to_camel_case(field_name.title())}' + return f"{model_name}{to_camel_case(field_name.title())}" if serializer_name: - return f'{serializer_name}{to_camel_case(field_name.title())}' - raise Exception(f'{serializer_name=} should have a value') + return f"{serializer_name}{to_camel_case(field_name.title())}" + raise Exception(f"{serializer_name=} should have a value") def enum_display_field(field) -> typing.Callable[..., str]: # type: ignore[reportGeneralTypeIssues] FIXME diff --git a/utils/strawberry/mutations.py b/utils/strawberry/mutations.py index 6d8e04e9..b12f3836 100644 --- a/utils/strawberry/mutations.py +++ b/utils/strawberry/mutations.py @@ -3,17 +3,21 @@ from dataclasses import is_dataclass import strawberry -from strawberry.types import Info +from asgiref.sync import sync_to_async +from django.db import models, transaction +from rest_framework import serializers from strawberry.utils.str_converters import to_camel_case +from main.graphql.context import Info from utils.common import to_snake_case +from utils.strawberry.transformers import convert_serializer_to_type logger = logging.getLogger(__name__) ResultTypeVar = typing.TypeVar("ResultTypeVar") -ARRAY_NON_MEMBER_ERRORS = 'nonMemberErrors' +ARRAY_NON_MEMBER_ERRORS = "nonMemberErrors" # generalize all the CustomErrorType CustomErrorType = strawberry.scalar( @@ -24,13 +28,13 @@ ) -# TODO: Add tests def process_input_data(data) -> dict | list: """ Return dict from Strawberry Input Object NOTE: strawberry.asdict doesn't handle nested and strawberry.UNSET 
Related issue: https://github.com/strawberry-graphql/strawberry/issues/3265 https://github.com/strawberry-graphql/strawberry/blob/d2c0fb4d2d363929c9ac10161884d004ab9cf555/strawberry/object_type.py#L395 + """ # TODO: Write test if type(data) in [tuple, list]: @@ -62,11 +66,11 @@ class ArrayNestedErrorType: object_errors: typing.Optional[list[typing.Optional[CustomErrorType]]] def keys(self): - return ['client_id', 'messages', 'object_errors'] + return ["client_id", "messages", "object_errors"] def __getitem__(self, key): key = to_snake_case(key) - if key in ('object_errors',) and getattr(self, key): + if key in ("object_errors",) and getattr(self, key): return [dict(each) for each in getattr(self, key)] return getattr(self, key) @@ -79,14 +83,14 @@ class _CustomErrorType: object_errors: typing.Optional[list[typing.Optional[CustomErrorType]]] array_errors: typing.Optional[list[typing.Optional[ArrayNestedErrorType]]] - DEFAULT_ERROR_MESSAGE = 'Something unexpected has occurred. Please contact an admin to fix this issue.' + DEFAULT_ERROR_MESSAGE = "Something unexpected has occurred. Please contact an admin to fix this issue." 
@staticmethod def generate_message(message: str = DEFAULT_ERROR_MESSAGE) -> CustomErrorType: return CustomErrorType( [ dict( - field='nonFieldErrors', + field="nonFieldErrors", messages=message, object_errors=None, array_errors=None, @@ -95,18 +99,18 @@ def generate_message(message: str = DEFAULT_ERROR_MESSAGE) -> CustomErrorType: ) def keys(self): - return ['field', 'client_id', 'messages', 'object_errors', 'array_errors'] + return ["field", "client_id", "messages", "object_errors", "array_errors"] def __getitem__(self, key): key = to_snake_case(key) - if key in ('object_errors', 'array_errors') and getattr(self, key): + if key in ("object_errors", "array_errors") and getattr(self, key): return [dict(each) for each in getattr(self, key)] return getattr(self, key) def serializer_error_to_error_types(errors: dict, initial_data: dict | None = None) -> list: initial_data = initial_data or dict() - node_client_id = initial_data.get('client_id') + node_client_id = initial_data.get("client_id") error_types = list() for field, value in errors.items(): if isinstance(value, dict): @@ -114,7 +118,7 @@ def serializer_error_to_error_types(errors: dict, initial_data: dict | None = No _CustomErrorType( client_id=node_client_id, field=to_camel_case(field), - object_errors=value, # type: ignore[reportGeneralTypeIssues] + object_errors=serializer_error_to_error_types(value), array_errors=None, messages=None, ) @@ -130,7 +134,7 @@ def serializer_error_to_error_types(errors: dict, initial_data: dict | None = No array_errors=[ ArrayNestedErrorType( client_id=ARRAY_NON_MEMBER_ERRORS, - messages=''.join(str(msg) for msg in value), + messages="".join(str(msg) for msg in value), object_errors=None, ) ], @@ -142,8 +146,9 @@ def serializer_error_to_error_types(errors: dict, initial_data: dict | None = No error_types.append( _CustomErrorType( client_id=node_client_id, - field=to_camel_case(field), - messages=', '.join(str(msg) for msg in value), + # TODO: Properly transform field(as number) 
def get_serializer_context(info: Info, extra_context: typing.Optional[dict]):
    """Build the context dict handed to DRF serializers from a GraphQL request."""
    return {
        "graphql_info": info,
        "request": info.context.request,
        "extra_context": extra_context,
    }


def generate_error_message(message: str = _CustomErrorType.DEFAULT_ERROR_MESSAGE) -> CustomErrorType:
    """Wrap a plain message into the standard mutation error payload."""
    return _CustomErrorType.generate_message(message)


class ModelMutation:
    """
    Bundles create/update/delete/bulk mutation handling for one DRF serializer.

    On construction, generates the strawberry input types (`InputType` for
    create, `PartialInputType` for update) from the serializer class.
    """

    InputType: type
    PartialInputType: type

    def __init__(
        self,
        name: str,
        serializer_class: typing.Type[serializers.Serializer],
    ):
        self.serializer_class = serializer_class
        # Generated types
        self.InputType = convert_serializer_to_type(
            self.serializer_class,
            name=name + "CreateInput",
        )
        self.PartialInputType = convert_serializer_to_type(
            self.serializer_class,
            name=name + "UpdateInput",
            partial=True,
        )

    @staticmethod
    def check_permissions(info, permission) -> CustomErrorType | None:
        # NOTE(review): permission enforcement is disabled (WIP) — this
        # always allows the mutation. Original enforcement kept below.
        return None
        # if permission and not info.context.has_perm(permission):
        #     errors = CustomErrorType([
        #         dict(
        #             field="nonFieldErrors",
        #             messages="You don't have enough permission",
        #             object_errors=None,
        #             array_errors=None,
        #         )
        #     ])
        #     return errors

    @staticmethod
    @sync_to_async
    def handle_mutation(
        serializer_class,
        data,
        info,
        extra_context: typing.Optional[dict],
        **kwargs,
    ) -> tuple[CustomErrorType | None, models.Model | None]:
        """
        Validate and save `data` through `serializer_class`.

        Returns (errors, None) on validation or save failure, and
        (None, instance) on success. The save runs inside a transaction.
        """
        serializer = serializer_class(
            data=data,
            context=get_serializer_context(info, extra_context=extra_context),
            **kwargs,
        )
        if errors := mutation_is_not_valid(serializer):
            return errors, None
        try:
            with transaction.atomic():
                instance = serializer.save()
        except Exception:
            logger.error("Failed to handle mutation", exc_info=True)
            return _CustomErrorType.generate_message(), None
        return None, instance

    @staticmethod
    @sync_to_async
    def handle_delete(instance: models.Model) -> tuple[CustomErrorType | None, models.Model | None]:
        """
        Delete `instance` inside a transaction.

        The deleted object's pk is restored on the in-memory instance so
        callers can still report which id was removed.
        """
        try:
            with transaction.atomic():
                old_id = instance.pk
                instance.delete()
                instance.pk = old_id
                return None, instance
        except Exception:
            logger.error("Failed to handle delete mutation", exc_info=True)
            return _CustomErrorType.generate_message(), None

    async def handle_create_mutation(
        self,
        data,
        info: Info,
        permission,
        extra_context: typing.Optional[dict] = None,
    ) -> MutationResponseType:
        """Run a create mutation: permission check, then validate + save."""
        if errors := self.check_permissions(info, permission):
            return MutationResponseType(ok=False, errors=errors)
        errors, saved_instance = await self.handle_mutation(
            self.serializer_class,
            process_input_data(data),
            info,
            extra_context,
        )
        if errors:
            return MutationResponseType(ok=False, errors=errors)
        return MutationResponseType(result=saved_instance)

    async def handle_update_mutation(
        self,
        data,
        info: Info,
        permission,
        instance: models.Model,
        extra_context: typing.Optional[dict] = None,
    ) -> MutationResponseType:
        """Run a partial update mutation against an existing instance."""
        if errors := self.check_permissions(info, permission):
            return MutationResponseType(ok=False, errors=errors)
        errors, saved_instance = await self.handle_mutation(
            self.serializer_class,
            process_input_data(data),
            info,
            extra_context,
            instance=instance,
            partial=True,
        )
        if errors:
            return MutationResponseType(ok=False, errors=errors)
        return MutationResponseType(result=saved_instance)

    async def handle_delete_mutation(self, instance: models.Model | None, info: Info, permission) -> MutationResponseType:
        """Run a delete mutation; `instance=None` yields a not-found error."""
        if errors := self.check_permissions(info, permission):
            return MutationResponseType(ok=False, errors=errors)
        if instance is None:
            return MutationResponseType(
                ok=False,
                errors=_CustomErrorType.generate_message("Doesn't exists"),
            )
        errors, deleted_instance = await self.handle_delete(instance)
        if errors:
            return MutationResponseType(ok=False, errors=errors)
        return MutationResponseType(result=deleted_instance)

    async def handle_bulk_mutation(
        self,
        base_queryset: models.QuerySet,
        items: list | None,
        delete_ids: list[strawberry.ID] | None,
        info: Info,
        permission,
        extra_context: typing.Optional[dict] = None,
    ) -> BulkMutationResponseType:
        """
        Run deletes first, then creates/updates, collecting per-item errors.

        Items carrying an `id` that matches `base_queryset` are updated
        (partial); all others are created.
        """
        if errors := self.check_permissions(info, permission):
            return BulkMutationResponseType(errors=[errors])

        errors = []

        # Delete - First
        deleted_instances = []
        # FIX: `id__in=None` raises TypeError — only run the delete pass
        # when ids were actually provided (the parameter is Optional).
        if delete_ids:
            delete_qs = base_queryset.filter(id__in=delete_ids).order_by("id")
            async for item in delete_qs.all():
                _errors, _saved_instance = await self.handle_delete(item)
                if _errors:
                    errors.append(_errors)
                else:
                    deleted_instances.append(_saved_instance)

        # Create/Update - Then
        results = []
        for data in items or []:
            _data = process_input_data(data)
            assert isinstance(_data, dict)
            _id = _data.pop("id", None)
            instance = None
            if _id:
                instance = await base_queryset.filter(id=_id).afirst()
            partial = False
            if instance:
                partial = True
            _errors, _saved_instance = await self.handle_mutation(
                self.serializer_class,
                _data,
                info,
                extra_context,
                instance=instance,
                partial=partial,
            )
            if _errors:
                errors.append(_errors)
            else:
                results.append(_saved_instance)

        return BulkMutationResponseType(
            errors=errors,
            # Data
            results=results,
            deleted=deleted_instances,
        )
# TODO: Migrate to strawberry_django.offset_paginated
def count_list_resolver(
    info: Info | None,
    queryset,
    base_type,
    pk: int = strawberry.UNSET,
    filters: Type = strawberry.UNSET,
    order: Type = strawberry.UNSET,
    pagination: OffsetPaginationInput = strawberry.UNSET,
) -> Any:
    """
    Resolve a paginated CountList for `queryset`, applying the optional
    pk/filters/order/pagination arguments.
    """
    queryset = apply_filters(filters, queryset, info, pk)
    queryset = apply_orders(order, queryset, info=info)

    # Fall back to a deterministic ordering when none was requested.
    if not queryset.query.order_by:
        queryset = queryset.order_by("-pk")

    # Snapshot the queryset before pagination so the count reflects the
    # full (unpaginated) result set.
    frozen_qs = queryset._chain()  # type: ignore[reportGeneralTypeIssues]

    @sync_to_async
    def get_count():
        return frozen_qs.values("pk").count()

    pagination = process_pagination(pagination)
    paginated_qs = StrawberryDjangoCountList().apply_pagination(queryset, pagination)

    return CountList[base_type](  # type: ignore[reportGeneralTypeIssues]
        get_count=get_count,
        queryset=paginated_qs,
        limit=pagination.limit,
        offset=pagination.offset,
    )
@@ from rest_framework import fields as drf_fields from rest_framework import serializers from strawberry.annotation import StrawberryAnnotation -from strawberry.field import StrawberryField from strawberry.file_uploads import Upload as StrawberryUploadField -from strawberry.type import get_object_definition +from strawberry.types.base import get_object_definition +from strawberry.types.field import StrawberryField from strawberry_django.type import _process_type from . import types @@ -69,7 +69,7 @@ def convert_serializer_field_to_generic_scalar(_): @get_strawberry_type_from_serializer_field.register(serializers.Field) # type: ignore[reportArgumentType] -def convert_serializer_field_to_string(field): +def convert_serializer_field_to_string(_): return str @@ -123,7 +123,7 @@ def convert_serializer_field_to_enum(field): # Try django_enumfield (NOTE: Let's try to avoid this) custom_name = type(list(field.choices.values())[-1]).__name__ if custom_name is None: - raise Exception(f'Enum name generation failed for {field=}') + raise Exception(f"Enum name generation failed for {field=}") return ENUM_TO_STRAWBERRY_ENUM_MAP[custom_name] @@ -138,10 +138,10 @@ def convert_serializer_to_type(serializer_class, name=None, partial=False): ref_name = name if ref_name is None: serializer_name = serializer_class.__name__ - serializer_name = ''.join(''.join(serializer_name.split('ModelSerializer')).split('Serializer')) - ref_name = f'{serializer_name}NestInputType' + serializer_name = "".join("".join(serializer_name.split("ModelSerializer")).split("Serializer")) + ref_name = f"{serializer_name}NestInputType" if partial: - ref_name = f'{serializer_name}NestUpdateInputType' + ref_name = f"{serializer_name}NestUpdateInputType" cached_type = convert_serializer_to_type_cache.get(ref_name, None) if cached_type: @@ -167,20 +167,16 @@ def convert_serializer_field(field, convert_choices_to_enum=True, force_optional is_required = field.required and not force_optional if field.default != 
drf_fields.empty: if field.default.__class__.__hash__ is None: # Mutable - kwargs['default_factory'] = lambda: field.default # type: ignore[reportGeneralTypeIssues] FIXME + kwargs["default_factory"] = lambda: field.default # type: ignore[reportGeneralTypeIssues] FIXME else: - kwargs['default'] = field.default + kwargs["default"] = field.default else: - kwargs['default'] = dataclasses.MISSING + kwargs["default"] = dataclasses.MISSING if isinstance(field, serializers.ChoiceField) and not convert_choices_to_enum: graphql_type = str else: graphql_type = get_strawberry_type_from_serializer_field(field) - # if graphql_type == str: - # is_required = not field.null and not field.blank - # kwargs['parse_value'] -> null -> '' -- when not is_required - # XXX: does UNSET has any issue here? # if it is a tuple or a list it means that we are returning # the graphql type and the child type @@ -189,15 +185,15 @@ def convert_serializer_field(field, convert_choices_to_enum=True, force_optional graphql_type = graphql_type[0] if isinstance(field, serializers.Serializer): - pass + graphql_type = convert_serializer_to_type(field.__class__, partial=force_optional) elif isinstance(field, serializers.ListSerializer): field = field.child of_type = convert_serializer_to_type(field.__class__, partial=force_optional) graphql_type = list[of_type] if not is_required: - if 'default' not in kwargs or 'default_factory' not in kwargs: - kwargs['default'] = strawberry.UNSET + if "default" not in kwargs or "default_factory" not in kwargs: + kwargs["default"] = strawberry.UNSET graphql_type = typing.Optional[graphql_type] return graphql_type, StrawberryField( @@ -293,9 +289,24 @@ def _process_type( obj_definition = get_object_definition(response) assert obj_definition is not None for field in obj_definition.fields: - if field.name.endswith('_id'): + if field.name.endswith("_id"): field.django_name = field.name # type: ignore[reportGeneralTypeIssues] FIXME return response + # XXX: Reverting remote 
change to fix issue + # `TypeError: DjangoModelFilterInput.__init__() got an unexpected keyword argument '_get_id'` + # Which happens when a `field: {pk: 'id'}` filter is used. Try this for alerts query + # `{country: {pk: "1"}}` + # Using this: https://github.com/strawberry-graphql/strawberry-django/blob/v0.38.0/strawberry_django/filters.py # noqa: E501 + # By overwriting this: https://github.com/strawberry-graphql/strawberry-django/blob/v0.49.1/strawberry_django/filters.py#L61 # noqa: E501 + @strawberry.input + class DjangoModelFilterInput: + pk: strawberry.ID -import_module('strawberry_django.type')._process_type = MonkeyPatch._process_type # type: ignore[reportGeneralTypeIssues] + @classmethod + def get_django_model_filter_input_type(cls): + return cls.DjangoModelFilterInput + + +import_module("strawberry_django.type")._process_type = MonkeyPatch._process_type # type: ignore[reportGeneralTypeIssues] +import_module("strawberry_django.filters").get_django_model_filter_input_type = MonkeyPatch.get_django_model_filter_input_type # type: ignore[reportGeneralTypeIssues] # noqa: E501 diff --git a/utils/strawberry/types.py b/utils/strawberry/types.py index 75d6ed96..686b8955 100644 --- a/utils/strawberry/types.py +++ b/utils/strawberry/types.py @@ -31,8 +31,8 @@ def parse_value(cls, node): PolygonScalar = strawberry.scalar( - typing.NewType('PolygonScalar', typing.Any), # type: ignore[reportGeneralTypeIssues] - description='', # TODO: Add description + typing.NewType("PolygonScalar", typing.Any), # type: ignore[reportGeneralTypeIssues] + description="", # TODO: Add description serialize=GIS.serialize, parse_value=GIS.parse_value, ) @@ -42,7 +42,7 @@ def string_field( field: typing.Union[ DjangoBaseField, models.query_utils.DeferredAttribute, - '_FieldDescriptor', + "_FieldDescriptor", ] ): """ @@ -71,7 +71,7 @@ def string_(root) -> str: @strawberry.field def nullable_string_(root) -> typing.Optional[str]: _value = _get_value(root) - if _value == '': + if _value == "": 
import abc

from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.utils.crypto import constant_time_compare
from django.utils.http import base36_to_int


# WIP
class BaseTokenGenerator(abc.ABC, PasswordResetTokenGenerator):
    """
    Reusable token generator built on PasswordResetTokenGenerator.

    Subclasses supply `key_salt` and implement `_make_hash_value`; the
    expiry window is controlled by `timeout` on the class instead of the
    global settings.PASSWORD_RESET_TIMEOUT.
    """

    timeout = 3 * 86400  # (3 days, in seconds)
    key_salt: str

    @staticmethod
    def _parse_timestamp(token):
        """Extract the base36 timestamp from a token, or None if malformed."""
        try:
            ts_b36, _ = token.split("-")
            return base36_to_int(ts_b36)
        except ValueError:
            return None

    def check_token(self, user, token):
        """
        Check that a token is correct (and not expired) for a given user.
        """
        if not (user and token):
            return False

        ts = self._parse_timestamp(token)
        if ts is None:
            return False

        # Check that the timestamp/uid has not been tampered with —
        # constant_time_compare against every accepted secret.
        matched = any(
            constant_time_compare(
                self._make_token_with_timestamp(user, ts, secret),
                token,
            )
            for secret in (self.secret, *self.secret_fallbacks)
        )
        if not matched:
            return False

        # --- Custom code (Here we replace settings.PASSWORD_RESET_TIMEOUT with self.timeout)
        # https://github.com/django/django/blob/main/django/contrib/auth/tokens.py#L79
        # Check the timestamp is within limit.
        if (self._num_seconds(self._now()) - ts) > self.timeout:
            return False
        # --- Custom code

        return True

    @abc.abstractmethod
    def _make_hash_value(self, user, timestamp) -> str: ...