From 452d3bc97bf7cf72d2d9931d922c61779c8622d8 Mon Sep 17 00:00:00 2001 From: Ryan Johnson Date: Thu, 17 Oct 2024 10:19:08 -0700 Subject: [PATCH] add kitsune.l10n app for handling content localization --- bin/run-celery-beat.sh | 3 + docker-compose.yml | 13 + kitsune/l10n/__init__.py | 0 kitsune/l10n/admin.py | 287 +++++ kitsune/l10n/apps.py | 9 + .../l10n/jinja2/l10n/llm_system_message.txt | 135 +++ kitsune/l10n/jinja2/l10n/llm_user_message.txt | 29 + kitsune/l10n/llm.py | 86 ++ kitsune/l10n/migrations/0001_initial.py | 277 +++++ kitsune/l10n/migrations/__init__.py | 0 kitsune/l10n/models.py | 243 +++++ kitsune/l10n/prompt.py | 87 ++ kitsune/l10n/signals.py | 29 + kitsune/l10n/tasks.py | 102 ++ kitsune/l10n/templatetags/__init__.py | 0 kitsune/l10n/templatetags/jinja_helpers.py | 6 + kitsune/l10n/tests/__init__.py | 13 + kitsune/l10n/tests/test_utils.py | 108 ++ kitsune/l10n/tests/test_wiki_create.py | 657 ++++++++++++ kitsune/l10n/tests/test_wiki_manage.py | 741 +++++++++++++ kitsune/l10n/utils.py | 227 ++++ kitsune/l10n/wiki.py | 495 +++++++++ kitsune/settings.py | 3 + kitsune/wiki/badges.py | 6 +- .../jinja2/wiki/includes/document_macros.html | 16 +- kitsune/wiki/signals.py | 4 + kitsune/wiki/views.py | 5 +- poetry.lock | 984 +++++++++++++++++- pyproject.toml | 5 +- 29 files changed, 4545 insertions(+), 25 deletions(-) create mode 100755 bin/run-celery-beat.sh create mode 100644 kitsune/l10n/__init__.py create mode 100644 kitsune/l10n/admin.py create mode 100644 kitsune/l10n/apps.py create mode 100644 kitsune/l10n/jinja2/l10n/llm_system_message.txt create mode 100644 kitsune/l10n/jinja2/l10n/llm_user_message.txt create mode 100644 kitsune/l10n/llm.py create mode 100644 kitsune/l10n/migrations/0001_initial.py create mode 100644 kitsune/l10n/migrations/__init__.py create mode 100644 kitsune/l10n/models.py create mode 100644 kitsune/l10n/prompt.py create mode 100644 kitsune/l10n/signals.py create mode 100644 kitsune/l10n/tasks.py create mode 100644 
kitsune/l10n/templatetags/__init__.py create mode 100644 kitsune/l10n/templatetags/jinja_helpers.py create mode 100644 kitsune/l10n/tests/__init__.py create mode 100644 kitsune/l10n/tests/test_utils.py create mode 100644 kitsune/l10n/tests/test_wiki_create.py create mode 100644 kitsune/l10n/tests/test_wiki_manage.py create mode 100644 kitsune/l10n/utils.py create mode 100644 kitsune/l10n/wiki.py create mode 100644 kitsune/wiki/signals.py diff --git a/bin/run-celery-beat.sh b/bin/run-celery-beat.sh new file mode 100755 index 00000000000..518241269cb --- /dev/null +++ b/bin/run-celery-beat.sh @@ -0,0 +1,3 @@ +#!/bin/bash + +exec celery -A kitsune beat diff --git a/docker-compose.yml b/docker-compose.yml index 31aad7e45a9..993d0c997d9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -67,6 +67,19 @@ services: - postgres - redis + beat: + build: + context: . + target: base + command: celery -A kitsune beat -l info + env_file: .env + volumes: + - ./:/app:delegated + user: ${UID:-kitsune} + depends_on: + - postgres + - redis + mailcatcher: image: schickling/mailcatcher ports: diff --git a/kitsune/l10n/__init__.py b/kitsune/l10n/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/kitsune/l10n/admin.py b/kitsune/l10n/admin.py new file mode 100644 index 00000000000..dc897546757 --- /dev/null +++ b/kitsune/l10n/admin.py @@ -0,0 +1,287 @@ +from datetime import timedelta + +from django import forms +from django.conf import settings +from django.contrib import admin +from django.core.exceptions import ValidationError +from django.core.validators import validate_slug +from django.shortcuts import redirect +from django.urls import reverse +from django.utils.html import format_html + + +from kitsune.l10n.models import ( + RevisionActivityRecord, + LANGUAGE_CHOICES_EXCLUDING_DEFAULT, + MachineTranslationConfiguration, + MachineTranslationServiceRecord, +) +from kitsune.l10n.utils import duration_to_text, text_to_duration + + +class 
SimpleDurationField(forms.DurationField): + def prepare_value(self, value): + if isinstance(value, timedelta): + return duration_to_text(value) + return value + + def to_python(self, value): + if value in self.empty_values: + return None + if isinstance(value, timedelta): + return value + try: + value = text_to_duration(str(value)) + except OverflowError: + raise ValidationError( + self.error_messages["overflow"].format( + min_days=timedelta.min.days, max_days=timedelta.max.days + ), + code="overflow", + ) + if value is None: + raise ValidationError(self.error_messages["invalid"], code="invalid") + return value + + +class MultipleSlugField(forms.Field): + widget = forms.Textarea( + attrs=dict( + rows=3, + placeholder=( + 'Enter each slug on a new line. Slugs that end with "*" will match as a prefix.' + ), + ) + ) + + def prepare_value(self, value): + if isinstance(value, list): + return "\n".join(value) + return value + + def to_python(self, value): + if not value: + return [] + + result, errors = [], [] + for slug in value.splitlines(): + if not slug.strip(): + continue + try: + if slug.count("*") > 1: + raise ValidationError("") + validate_slug(slug.rstrip("*")) + except ValidationError: + if not errors: + errors.append( + "A valid slug consists of letters, numbers, underscores or hyphens, " + 'but may end with "*" to match as a prefix.' 
+ ) + errors.append(f"'{slug}' is not a valid slug.") + else: + result.append(slug) + + if errors: + raise ValidationError(errors) + return result + + +class MachineTranslationConfigurationForm(forms.ModelForm): + + heartbeat_period = SimpleDurationField() + review_grace_period = SimpleDurationField() + post_review_grace_period = SimpleDurationField(label="Post-review grace period") + locale_team_inactivity_grace_period = SimpleDurationField( + label="Locale-team inactivity grace period" + ) + enabled_languages = forms.MultipleChoiceField( + choices=LANGUAGE_CHOICES_EXCLUDING_DEFAULT, + widget=forms.CheckboxSelectMultiple, + label="Languages enabled for machine translation", + required=False, + ) + limit_to_slugs = MultipleSlugField( + label="Limit machine translation to these KB article slugs", + required=False, + ) + disabled_slugs = MultipleSlugField( + label="Disable machine translation for these KB article slugs", + required=False, + ) + + class Meta: + model = MachineTranslationConfiguration + fields = "__all__" + + +@admin.register(MachineTranslationConfiguration) +class MachineTranslationConfigurationAdmin(admin.ModelAdmin): + list_display = ( + "is_enabled", + "llm_name", + "heartbeat_period", + "review_grace_period", + "post_review_grace_period", + "locale_team_inactivity_grace_period", + "enabled_languages", + "limit_to_slugs", + "disabled_slugs", + "limit_to_approved_after", + "limit_to_approver_in_group", + ) + + form = MachineTranslationConfigurationForm + + def has_add_permission(self, request): + return False + + def has_delete_permission(self, request, obj=None): + return False + + def changelist_view(self, request, extra_context=None): + obj = MachineTranslationConfiguration.load() + return redirect( + reverse("admin:l10n_machinetranslationconfiguration_change", args=[obj.id]) + ) + + +class MachineTranslationServiceRecordLocaleFilter(admin.SimpleListFilter): + title = "Target Locale" + parameter_name = "target_locale" + + def lookups(self, 
request, model_admin): + return [ + (locale, f"{settings.LOCALES[locale].english} ({locale})") + for locale in MachineTranslationServiceRecord.objects.values_list( + "target_locale", flat=True + ).distinct() + ] + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(target_locale=self.value()) + return queryset + + +@admin.register(MachineTranslationServiceRecord) +class MachineTranslationServiceRecordAdmin(admin.ModelAdmin): + + exclude = ("source_revision", "details") + list_display = ( + "created", + "target_locale", + "source_revision_link", + "source_attribute", + "service", + "model_name", + ) + list_filter = ( + MachineTranslationServiceRecordLocaleFilter, + "created", + "service", + "model_name", + ) + readonly_fields = ( + "created", + "target_locale", + "source_revision_link", + "source_attribute", + "service", + "model_name", + "llm_input", + "llm_output", + "langchain_model_configuration", + ) + ordering = ( + "-created", + "target_locale", + "source_revision", + "source_attribute", + "service", + "model_name", + ) + + @admin.display(description="LLM input") + def llm_input(self, obj): + return "\n\n".join(obj.details["input"]) + + @admin.display(description="LLM output") + def llm_output(self, obj): + return obj.details["output"] + + @admin.display(description="LangChain model configuration") + def langchain_model_configuration(self, obj): + return "\n".join(f"{k}: {v}" for k, v in obj.details["model_info"].items()) + + @admin.display(description="Source revision") + def source_revision_link(self, obj): + rev = obj.source_revision + doc = rev.document + return format_html( + '[{}] {} (#{})', + rev.get_absolute_url(), + doc.locale, + doc.title, + rev.id, + ) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False + + +class 
RevisionActivityRecordLocaleFilter(admin.SimpleListFilter): + title = "Locale" + parameter_name = "locale" + + def lookups(self, request, model_admin): + return [ + (locale, f"{settings.LOCALES[locale].english} ({locale})") + for locale in RevisionActivityRecord.objects.values_list( + "revision__document__locale", flat=True + ).distinct() + ] + + def queryset(self, request, queryset): + if self.value(): + return queryset.filter(revision__document__locale=self.value()) + return queryset + + +@admin.register(RevisionActivityRecord) +class RevisionActivityRecordAdmin(admin.ModelAdmin): + exclude = ("revision",) + list_select_related = ("revision",) + list_display = ("revision_link", "action", "action_timestamp") + list_filter = (RevisionActivityRecordLocaleFilter, "action") + readonly_fields = ("revision_link", "action", "action_timestamp") + + @admin.display(description="Timestamp of Action") + def action_timestamp(self, obj): + return obj.action_timestamp + + @admin.display(description="Revision") + def revision_link(self, obj): + rev = obj.revision + doc = rev.document + return format_html( + '[{}] {} (#{})', + rev.get_absolute_url(), + doc.locale, + doc.title, + rev.id, + ) + + def has_add_permission(self, request): + return False + + def has_change_permission(self, request, obj=None): + return False + + def has_delete_permission(self, request, obj=None): + return False diff --git a/kitsune/l10n/apps.py b/kitsune/l10n/apps.py new file mode 100644 index 00000000000..96331328bdd --- /dev/null +++ b/kitsune/l10n/apps.py @@ -0,0 +1,9 @@ +from django.apps import AppConfig + + +class L10nConfig(AppConfig): + name = "kitsune.l10n" + default_auto_field = "django.db.models.BigAutoField" + + def ready(self): + import kitsune.l10n.signals # noqa diff --git a/kitsune/l10n/jinja2/l10n/llm_system_message.txt b/kitsune/l10n/jinja2/l10n/llm_system_message.txt new file mode 100644 index 00000000000..4d1f808d849 --- /dev/null +++ b/kitsune/l10n/jinja2/l10n/llm_system_message.txt @@ 
-0,0 +1,135 @@ +Assume the role of an expert at translating software-related technical documents from {{ source_language }} to {{ target_language }}. +{%- if example %} + +Next, I'm going to provide you with what we'll call the "antecedent" example. The "antecedent" example consists of two parts. The first part will be a piece of {{ source_language }} text, and the second part will be the translation of that {{ source_language }} text into {{ target_language }}. Remember this "antecedent" example, because it will be needed later. The "antecedent" example will be delimited by <<>> and <<>>. + +<<>> +Here is the {{ source_language }} text of the "antecedent" example (delimited by <<>> and <<>>): + +<<>>{{ example.source_text|safe }}<<>> + +Here is the corresponding {{ target_language }} translation of the {{ source_language }} text of the "antecedent" example (delimited by <<>> and <<>>): + +<<>>{{ example.target_text|safe }}<<>> +<<>> +{%- endif %} +{%- if include_wiki_instructions %} + +Next, remember the following definitions of new terms (delimited by <<>> and <<>>): + +<<>> +Definition of "wiki-hook": +A "wiki-hook" is a string that case-sensitively matches the entire regular expression that follows (delimited by <<>> and <<>>, and specified using Python’s regular expression syntax in Python's raw string notation): + +<<>> +r"\[\[(Image|Video|V|Button|UI|Include|I|Template|T):.*?\]\]" +<<>> + +Definition of "wiki-article-link": +A "wiki-article-link" is a string that case-sensitively matches the entire regular expression that follows (delimited by <<>> and <<>>, and specified using Python’s regular expression syntax in Python's raw string notation): + +<<>> +r"\[\[(?!Image:|Video:|V:|Button:|UI:|Include:|I:|Template:|T:)[^|]+?(?:\|(?P.+?))?\]\]" +<<>> + +Definition of "wiki-external-link": +A "wiki-external-link" is a string that case-sensitively matches the entire regular expression that follows (delimited by <<>> and <<>>, and specified using Python’s regular 
expression syntax in Python's string and raw string notation): + +<<>> +r"\[((mailto:|git://|irc://|https?://|ftp://|/)[^<>\]\[" + "\x00-\x20\x7f" + r"]*)\s*(?P.*?)\]" +<<>> +{%- if example %} + +Definition of "antecedent-wiki-map": +The "antecedent-wiki-map" is a Python dictionary built from the "antecedent" example provided earlier. A Python dictionary maps keys to their values. Each "wiki-hook", "wiki-article-link", and "wiki-external-link" in the English text of the "antecedent" example becomes a key in the "antecedent-wiki-map", and each key's value is its corresponding translation found in the {{ target_language }} text of the "antecedent" example. +{%- endif %} +<<>> +{%- endif %} + +Next, remember the following "special instructions" (delimited by <<>> and <<>>): + +<<>> +Special Instruction 1: +The following case-sensitive strings of text, each delimited by quotes, must not be changed: +- "Anonym" +- "Bugzilla" +- "Camino" +- "Fakespot" +- "Firebug" +- "Firefox" +- "Firefox for Android" +- "Firefox for iOS" +- "Firefox for Enterprise" +- "Firefox Focus" +- "Firefox Relay" +- "Firefox Developer Edition" +- "Firefox Friends" +- "Firefox Nightly" +- "Firefox OS" +- "Firefox Rocket" +- "Foxkeh" +- "Lightbeam" +- "MDN" +- "MDN Plus" +- "Minimo" +- "Mozilla" +- "Mozillians" +- "Mozilla Communities" +- "Mozilla Reps" +- "Mozilla Webmaker" +- "Mozilla Wordmark" +- "Mozilla Wordmark + Symbol" +- "Mozilla VPN" +- "Mozilla Monitor" +- "Pocket" +- "Pontoon" +- "QMO" +- "SUMO" +- "Sunbird" +- "Sync" +- "Thunderbird" +- "Thunderbird for Android" +- "View Source" +- "VPN" +- "XUL" +- "Android" +- "iOS" +- "Linux" +- "Mac" +- "MacOS" +- "Windows" +{% if include_wiki_instructions -%} +- "{note}" +- "{/note}" +- "{warning}" +- "{/warning}" +- "{/for}" +- "__TOC__" + +Special Instruction 2: +Strings that case-sensitively match the entire regular expression (specified using Python’s regular expression syntax in raw string notation) r"\{(for|key|filepath) .*?\}" must not be 
changed. + +Special Instruction 3: +For strings that case-sensitively match the entire regular expression (specified using Python’s regular expression syntax in raw string notation) r"\{(?:button|menu|pref) (?P.*?)\}", only translate the text within the named group "description", and keep the rest of the string unchanged. + +{% if example -%} +Special Instruction 4: +Each "wiki-hook" must be translated as follows. First, check if the "wiki-hook" is a key within the "antecedent-wiki-map". If it is a key within the "antecedent-wiki-map", use its value from the "antecedent-wiki-map" as its translation. If it is not a key within the "antecedent-wiki-map", it must remain unchanged. + +Special Instruction 5: +Each "wiki-article-link" must be translated as follows. First, check if the "wiki-article-link" is a key within the "antecedent-wiki-map". If it is a key within the "antecedent-wiki-map", use its value from the "antecedent-wiki-map" as its translation. If it is not a key within the "antecedent-wiki-map", only translate the text within its named group "description", and keep the rest of the string unchanged. + +Special Instruction 6: +Each "wiki-external-link" must be translated as follows. First, check if the "wiki-external-link" is a key within the "antecedent-wiki-map". If it is a key within the "antecedent-wiki-map", use its value from the "antecedent-wiki-map" as its translation. If it is not a key within the "antecedent-wiki-map", only translate the text within its named group "description", and keep the rest of the string unchanged. +{%- else -%} +Special Instruction 4: +Each "wiki-hook" must not be changed. + +Special Instruction 5: +For each "wiki-article-link", only translate the text within its named group "description", and keep the rest of the string unchanged. + +Special Instruction 6: +For each "wiki-external-link", only translate the text within its named group "description", and keep the rest of the string unchanged. 
+{%- endif %} +{%- endif %} +<<>> diff --git a/kitsune/l10n/jinja2/l10n/llm_user_message.txt b/kitsune/l10n/jinja2/l10n/llm_user_message.txt new file mode 100644 index 00000000000..da43b4df16d --- /dev/null +++ b/kitsune/l10n/jinja2/l10n/llm_user_message.txt @@ -0,0 +1,29 @@ +Please translate the {{ source_language }} text (delimited by <<>> and <<>>) into {{ target_language }}, by strictly obeying each of the following steps: + +{% if example and include_wiki_instructions -%} +Step 1: Build the "antecedent-wiki-map", and remember it. + +Step 2: Compare the {{ source_language }} text you've been asked to translate with the {{ source_language }} text of the "antecedent" example, and determine which parts are the same and which parts are different. + +Step 3: For each part that is the same, copy its corresponding translation from the {{ target_language }} text of the "antecedent" example. + +Step 4: For each part that is different, freshly translate that part while obeying the "special instructions" defined earlier. + +Step 5: Review your translation, replacing each "wiki-hook", "wiki-article-link", and "wiki-external-link", if it matches a key within the "antecedent-wiki-map", with its value within the "antecedent-wiki-map". + +Step 6: In your response, include the "antecedent-wiki-map", describe what you did for each step, and delimit your resulting translation with {{ result_delimiter_begin|safe }} and {{ result_delimiter_end|safe }}. +{%- elif example -%} +Step 1: Compare the {{ source_language }} text you've been asked to translate with the {{ source_language }} text of the "antecedent" example, and determine which parts are the same and which parts are different. + +Step 2: For each part that is the same, copy its corresponding translation from the {{ target_language }} text of the "antecedent" example. + +Step 3: For each part that is different, freshly translate that part while obeying the "special instructions" defined earlier. 
+ +Step 4: In your response, include the "antecedent-wiki-map", describe what you did for each step, and delimit your resulting translation with {{ result_delimiter_begin|safe }} and {{ result_delimiter_end|safe }}. +{%- else -%} +Step 1: Obey the "special instructions" while translating. + +Step 2: In your response, delimit your resulting translation with {{ result_delimiter_begin|safe }} and {{ result_delimiter_end|safe }}. +{%- endif %} + +<<>>{{ source_text|safe }}<<>> diff --git a/kitsune/l10n/llm.py b/kitsune/l10n/llm.py new file mode 100644 index 00000000000..cb5bb8639e6 --- /dev/null +++ b/kitsune/l10n/llm.py @@ -0,0 +1,86 @@ +from functools import cache + +from kitsune.l10n.models import MachineTranslationServiceRecord +from kitsune.l10n.prompt import get_prompt, get_result + + +def is_openai_model(model_name): + """ + Returns whether or not the given model name is an OpenAI model. + """ + return any( + model_name.startswith(prefix) + for prefix in ( + "gpt-", + "chatgpt-", + "o1-", + ) + ) + + +def record_localization(doc, content_attribute, target_locale, llm, prompt, response): + """ + Records the machine translation API transaction in the database. + """ + if content_attribute == "title": + content_attribute = "document.title" + + service = ( + MachineTranslationServiceRecord.SERVICE_OPENAI_API + if is_openai_model(llm.model_name) + else MachineTranslationServiceRecord.SERVICE_VERTEX_AI_API + ) + + return MachineTranslationServiceRecord.objects.create( + service=service, + model_name=llm.model_name, + target_locale=target_locale, + source_attribute=content_attribute, + source_revision=doc.latest_localizable_revision, + details=dict( + model_info=llm.model_dump(), + input=[msg.pretty_repr() for msg in prompt], + output=response.content, + ), + ) + + +@cache +def get_chat_model(model_name): + """ + Returns a LangChain chat model instance based on the given model name. 
+ """ + # The Vertex AI API has a rate limit of 60 requests-per-minute for their + # flagship model. The OpenAI API is rate-limited per organization, which + # in our case is 10K requests-per-minute. The average latency of Vertex + # AI API requests seems to be about 3-4 seconds, and I expect the average + # latency of OpenAI API requests to be more than a second, so for now we + # can avoid using a rate limiter (also the InMemoryRateLimiter that + # LangChain provides is not really stable yet). + + kwargs = dict( + model=model_name, + temperature=0, + max_tokens=None, + max_retries=2, + timeout=120, + ) + + if is_openai_model(model_name): + from langchain_openai import ChatOpenAI as ChatAI + else: + from langchain_google_vertexai import ChatVertexAI as ChatAI + + return ChatAI(**kwargs) + + +def get_localization(model_name, doc, content_attribute, target_locale): + """ + Invokes the LLM specified by the given model name to localize the value of + the given content attribute of the given document targeting the given locale. 
+ """ + prompt = get_prompt(doc, content_attribute, target_locale) + llm = get_chat_model(model_name) + response = llm.invoke(prompt) + record_localization(doc, content_attribute, target_locale, llm, prompt, response) + return get_result(response.content) diff --git a/kitsune/l10n/migrations/0001_initial.py b/kitsune/l10n/migrations/0001_initial.py new file mode 100644 index 00000000000..9ca882f7949 --- /dev/null +++ b/kitsune/l10n/migrations/0001_initial.py @@ -0,0 +1,277 @@ +# Generated by Django 4.2.16 on 2024-12-09 08:45 + +import datetime +from django.db import migrations, models +import django.db.models.deletion + + +class Migration(migrations.Migration): + + initial = True + + dependencies = [ + ("auth", "0012_alter_user_first_name_max_length"), + ("wiki", "0017_alter_document_tags"), + ] + + operations = [ + migrations.CreateModel( + name="MachineTranslationConfiguration", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "is_enabled", + models.BooleanField(default=False, verbose_name="Enable machine translation"), + ), + ( + "llm_name", + models.CharField(blank=True, max_length=100, verbose_name="LLM model name"), + ), + ( + "heartbeat_period", + models.DurationField( + default=datetime.timedelta(seconds=14400), + help_text="The management of existing machine translations, as well as the generation of any new machine translations, will be performed at this interval.", + verbose_name="Heartbeat period", + ), + ), + ( + "review_grace_period", + models.DurationField( + default=datetime.timedelta(days=3), + help_text="The grace period provided for the machine translation to be reviewed, after which it will be automatically approved.", + verbose_name="Review grace period", + ), + ), + ( + "post_review_grace_period", + models.DurationField( + default=datetime.timedelta(days=3), + help_text="The grace period provided after the machine translation has been reviewed and rejected, 
after which the machine translation will be automatically approved if no other translation has been approved within that period.", + verbose_name="Post-review grace period", + ), + ), + ( + "locale_team_inactivity_grace_period", + models.DurationField( + default=datetime.timedelta(days=30), + help_text="If a leader or reviewer of a locale team has not created or reviewed a KB article within this period of time, the locale team will be considered inactive.", + verbose_name="Locale-team inactivity grace period", + ), + ), + ( + "enabled_languages", + models.JSONField( + blank=True, + default=list, + help_text="The languages enabled for machine translation.", + ), + ), + ( + "limit_to_slugs", + models.JSONField( + blank=True, + default=list, + help_text="Limit machine translation to these KB article slugs.", + ), + ), + ( + "disabled_slugs", + models.JSONField( + blank=True, + default=list, + help_text="Disable machine translation for these KB article slugs.", + ), + ), + ( + "limit_to_approved_after", + models.DateTimeField( + blank=True, + default=datetime.datetime.now, + null=True, + verbose_name="Limit machine translation to KB article revisions approved after this date and time", + ), + ), + ( + "limit_to_approver_in_group", + models.ForeignKey( + blank=True, + default=None, + null=True, + on_delete=django.db.models.deletion.SET_DEFAULT, + related_name="+", + to="auth.group", + verbose_name="Limit machine translation to KB article revisions approved by users within this group", + ), + ), + ], + options={ + "verbose_name": "Machine translation configuration", + "verbose_name_plural": "Machine translation configuration", + }, + ), + migrations.CreateModel( + name="RevisionActivityRecord", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ( + "action", + models.PositiveSmallIntegerField( + choices=[ + (0, "Created as awaiting review"), + (1, "Created as already approved"), + (2, 
"Rejected because no longer relevant"), + (3, "Approved because not reviewed within grace period"), + ( + 4, + "Created as already approved because nothing else approved within grace period after rejection", + ), + ] + ), + ), + ( + "revision", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="l10n_actions", + to="wiki.revision", + ), + ), + ], + options={ + "indexes": [models.Index(fields=["action"], name="action_idx")], + }, + ), + migrations.CreateModel( + name="MachineTranslationServiceRecord", + fields=[ + ( + "id", + models.BigAutoField( + auto_created=True, primary_key=True, serialize=False, verbose_name="ID" + ), + ), + ("created", models.DateTimeField(default=datetime.datetime.now)), + ( + "service", + models.CharField( + choices=[("openai-api", "OpenAI API"), ("vertex-ai-api", "Vertex AI API")], + max_length=30, + ), + ), + ("model_name", models.CharField(max_length=50)), + ( + "target_locale", + models.CharField( + choices=[ + ("af", "Afrikaans (af)"), + ("ar", "Arabic (ar)"), + ("az", "Azerbaijani (az)"), + ("bg", "Bulgarian (bg)"), + ("bm", "Bambara (bm)"), + ("bn", "Bengali (bn)"), + ("bs", "Bosnian (bs)"), + ("ca", "Catalan (ca)"), + ("cs", "Czech (cs)"), + ("da", "Danish (da)"), + ("de", "German (de)"), + ("ee", "Ewe (ee)"), + ("el", "Greek (el)"), + ("es", "Spanish (es)"), + ("et", "Estonian (et)"), + ("eu", "Basque (eu)"), + ("fa", "Persian (fa)"), + ("fi", "Finnish (fi)"), + ("fr", "French (fr)"), + ("fy-NL", "Frisian (fy-NL)"), + ("ga-IE", "Irish (Ireland) (ga-IE)"), + ("gl", "Galician (gl)"), + ("gn", "Guarani (gn)"), + ("gu-IN", "Gujarati (gu-IN)"), + ("ha", "Hausa (ha)"), + ("he", "Hebrew (he)"), + ("hi-IN", "Hindi (India) (hi-IN)"), + ("hr", "Croatian (hr)"), + ("hu", "Hungarian (hu)"), + ("dsb", "Lower Sorbian (dsb)"), + ("hsb", "Upper Sorbian (hsb)"), + ("id", "Indonesian (id)"), + ("ig", "Igbo (ig)"), + ("it", "Italian (it)"), + ("ja", "Japanese (ja)"), + ("ka", "Georgian (ka)"), + ("km", "Khmer (km)"), + 
("kn", "Kannada (kn)"), + ("ko", "Korean (ko)"), + ("ln", "Lingala (ln)"), + ("lt", "Lithuanian (lt)"), + ("mg", "Malagasy (mg)"), + ("mk", "Macedonian (mk)"), + ("ml", "Malayalam (ml)"), + ("ms", "Malay (ms)"), + ("ne-NP", "Nepali (ne-NP)"), + ("nl", "Dutch (nl)"), + ("no", "Norwegian (no)"), + ("pl", "Polish (pl)"), + ("pt-BR", "Portuguese (Brazilian) (pt-BR)"), + ("pt-PT", "Portuguese (Portugal) (pt-PT)"), + ("ro", "Romanian (ro)"), + ("ru", "Russian (ru)"), + ("si", "Sinhala (si)"), + ("sk", "Slovak (sk)"), + ("sl", "Slovenian (sl)"), + ("sq", "Albanian (sq)"), + ("sr", "Serbian (sr)"), + ("sw", "Swahili (sw)"), + ("sv", "Swedish (sv)"), + ("ta", "Tamil (ta)"), + ("ta-LK", "Tamil (Sri Lanka) (ta-LK)"), + ("te", "Telugu (te)"), + ("th", "Thai (th)"), + ("tn", "Tswana (tn)"), + ("tr", "Turkish (tr)"), + ("uk", "Ukrainian (uk)"), + ("ur", "Urdu (ur)"), + ("vi", "Vietnamese (vi)"), + ("wo", "Wolof (wo)"), + ("xh", "Xhosa (xh)"), + ("yo", "Yoruba (yo)"), + ("zh-CN", "Chinese (Simplified) (zh-CN)"), + ("zh-TW", "Chinese (Traditional) (zh-TW)"), + ("zu", "Zulu (zu)"), + ], + max_length=7, + ), + ), + ("source_attribute", models.CharField(max_length=100)), + ("details", models.JSONField(default=dict)), + ( + "source_revision", + models.ForeignKey( + on_delete=django.db.models.deletion.CASCADE, + related_name="mt_service_records", + to="wiki.revision", + ), + ), + ], + options={ + "indexes": [ + models.Index(fields=["created"], name="created_idx"), + models.Index(fields=["service"], name="service_idx"), + models.Index(fields=["model_name"], name="model_name_idx"), + models.Index(fields=["target_locale"], name="target_locale_idx"), + models.Index(fields=["source_attribute"], name="source_attribute_idx"), + ], + }, + ), + ] diff --git a/kitsune/l10n/migrations/__init__.py b/kitsune/l10n/migrations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/kitsune/l10n/models.py b/kitsune/l10n/models.py new file mode 100644 index 00000000000..dce3987b805 
--- /dev/null +++ b/kitsune/l10n/models.py @@ -0,0 +1,243 @@ +from datetime import datetime, timedelta +from functools import cached_property + +from django.conf import settings +from django.contrib.auth.models import Group +from django.core.exceptions import ValidationError +from django.db import models + +from kitsune.wiki.models import Revision + + +LANGUAGE_CHOICES_EXCLUDING_DEFAULT = tuple( + (lang, f"{settings.LOCALES[lang].english} ({lang})") + for lang in settings.SUMO_LANGUAGES + if lang not in ("xx", settings.WIKI_DEFAULT_LANGUAGE) +) + + +class MachineTranslationConfiguration(models.Model): + is_enabled = models.BooleanField(default=False, verbose_name="Enable machine translation") + llm_name = models.CharField( + blank=True, + max_length=100, + verbose_name="LLM model name", + ) + heartbeat_period = models.DurationField( + default=timedelta(hours=4), + verbose_name="Heartbeat period", + help_text=( + "The management of existing machine translations, as well as the " + "generation of any new machine translations, will be performed at " + "this interval." + ), + ) + review_grace_period = models.DurationField( + default=timedelta(days=3), + verbose_name="Review grace period", + help_text=( + "The grace period provided for the machine translation to be " + "reviewed, after which it will be automatically approved." + ), + ) + post_review_grace_period = models.DurationField( + default=timedelta(days=3), + verbose_name="Post-review grace period", + help_text=( + "The grace period provided after the machine translation has " + "been reviewed and rejected, after which the machine translation " + "will be automatically approved if no other translation has been " + "approved within that period." 
+ ), + ) + locale_team_inactivity_grace_period = models.DurationField( + default=timedelta(days=30), + verbose_name="Locale-team inactivity grace period", + help_text=( + "If a leader or reviewer of a locale team has not created or " + "reviewed a KB article within this period of time, the locale " + "team will be considered inactive." + ), + ) + enabled_languages = models.JSONField( + default=list, blank=True, help_text="The languages enabled for machine translation." + ) + limit_to_slugs = models.JSONField( + default=list, + blank=True, + help_text="Limit machine translation to these KB article slugs.", + ) + disabled_slugs = models.JSONField( + default=list, + blank=True, + help_text="Disable machine translation for these KB article slugs.", + ) + limit_to_approved_after = models.DateTimeField( + null=True, + blank=True, + default=datetime.now, + verbose_name=( + "Limit machine translation to KB article revisions approved " + "after this date and time" + ), + ) + limit_to_approver_in_group = models.ForeignKey( + Group, + null=True, + blank=True, + default=None, + related_name="+", + on_delete=models.SET_DEFAULT, + verbose_name=( + "Limit machine translation to KB article revisions approved by " + "users within this group" + ), + ) + + class Meta: + verbose_name = "Machine translation configuration" + verbose_name_plural = "Machine translation configuration" + + @classmethod + def load(cls): + # Returns the singleton, creating it if necessary. 
+ instance, created = cls.objects.get_or_create(id=1) + return instance + + @cached_property + def limit_to_full_slugs(self): + return set(slug for slug in self.limit_to_slugs if not slug.endswith("*")) + + @cached_property + def limit_to_slug_prefixes(self): + return tuple(slug.rstrip("*") for slug in self.limit_to_slugs if slug.endswith("*")) + + @cached_property + def disabled_full_slugs(self): + return set(slug for slug in self.disabled_slugs if not slug.endswith("*")) + + @cached_property + def disabled_slug_prefixes(self): + return tuple(slug.rstrip("*") for slug in self.disabled_slugs if slug.endswith("*")) + + def save(self, *args, **kwargs): + # Check if an instance already exists in the database. + if not self.pk and MachineTranslationConfiguration.objects.exists(): + raise ValidationError("Only one MachineTranslationConfiguration instance allowed.") + return super().save(*args, **kwargs) + + def is_active(self): + return self.is_enabled and self.llm_name and self.enabled_languages + + def is_slug_allowed(self, slug): + """ + Returns True only if the slug is included and not excluded. + """ + return ( + (not self.limit_to_slugs) + or (slug in self.limit_to_full_slugs) + or any(slug.startswith(p) for p in self.limit_to_slug_prefixes) + ) and not ( + (slug in self.disabled_full_slugs) + or any(slug.startswith(p) for p in self.disabled_slug_prefixes) + ) + + def is_approved_date_allowed(self, dtime): + """ + Returns True only if there's no limit on the approval date or if the + provided datetime instance is greater than the limit. + """ + return (not self.limit_to_approved_after) or (dtime > self.limit_to_approved_after) + + def is_approver_allowed(self, user): + """ + Returns True only if there's no limit on the approver or if the provided + user is a member of the group to which approvers are limited to. 
+ """ + return (not self.limit_to_approver_in_group) or ( + user and user.groups.filter(id=self.limit_to_approver_in_group_id).exists() + ) + + def __str__(self): + return "MachineTranslationConfiguration" + + +class MachineTranslationServiceRecord(models.Model): + + SERVICE_OPENAI_API = "openai-api" + SERVICE_VERTEX_AI_API = "vertex-ai-api" + + SERVICES = ( + (SERVICE_OPENAI_API, "OpenAI API"), + (SERVICE_VERTEX_AI_API, "Vertex AI API"), + ) + + created = models.DateTimeField(default=datetime.now) + + service = models.CharField(max_length=30, choices=SERVICES) + + model_name = models.CharField(max_length=50) + + target_locale = models.CharField( + max_length=7, + choices=LANGUAGE_CHOICES_EXCLUDING_DEFAULT, + ) + + source_revision = models.ForeignKey( + Revision, on_delete=models.CASCADE, related_name="mt_service_records" + ) + + source_attribute = models.CharField(max_length=100) + + details = models.JSONField(default=dict) + + class Meta: + indexes = [ + models.Index(fields=["created"], name="created_idx"), + models.Index(fields=["service"], name="service_idx"), + models.Index(fields=["model_name"], name="model_name_idx"), + models.Index(fields=["target_locale"], name="target_locale_idx"), + models.Index(fields=["source_attribute"], name="source_attribute_idx"), + ] + + +class RevisionActivityRecord(models.Model): + + MT_CREATED_AS_AWAITING_REVIEW = 0 + MT_CREATED_AS_APPROVED = 1 + MT_REJECTED = 2 + MT_APPROVED_PRE_REVIEW = 3 + MT_CREATED_AS_APPROVED_POST_REJECTION = 4 + + ACTIONS = ( + (MT_CREATED_AS_AWAITING_REVIEW, "Created as awaiting review"), + (MT_CREATED_AS_APPROVED, "Created as already approved"), + (MT_REJECTED, "Rejected because no longer relevant"), + (MT_APPROVED_PRE_REVIEW, "Approved because not reviewed within grace period"), + ( + MT_CREATED_AS_APPROVED_POST_REJECTION, + ( + "Created as already approved because nothing else approved within " + "grace period after rejection" + ), + ), + ) + + revision = models.ForeignKey( + Revision, + 
on_delete=models.CASCADE, + related_name="l10n_actions", + ) + + action = models.PositiveSmallIntegerField(choices=ACTIONS) + + class Meta: + indexes = [ + models.Index(fields=["action"], name="action_idx"), + ] + + @property + def action_timestamp(self): + if self.action in (self.MT_REJECTED, self.MT_APPROVED_PRE_REVIEW): + return self.revision.reviewed + return self.revision.created diff --git a/kitsune/l10n/prompt.py b/kitsune/l10n/prompt.py new file mode 100644 index 00000000000..525a24ee239 --- /dev/null +++ b/kitsune/l10n/prompt.py @@ -0,0 +1,87 @@ +from django.conf import settings +from django.template.loader import render_to_string +from langchain_core.messages import HumanMessage, SystemMessage + + +BEGIN_RESULT = "<<>>" +END_RESULT = "<<>>" + + +def get_language_in_english(locale): + """ + Returns the name of the locale in English, for example "it" returns "Italian" + and "pt-BR" returns "Portuguese (Brazilian)". + """ + return settings.LOCALES[locale].english + + +def get_example(doc, content_attribute, target_locale): + """ + Returns a dictionary containing the most recent translation of the given + revision's document, or None if no approved translation exists. 
+ """ + trans_doc = doc.translated_to(target_locale) + + if not ( + trans_doc + and (example_target := trans_doc.current_revision) + and (example_source := example_target.based_on) + ): + return None + + return dict( + source_text=getattr(example_source, content_attribute), + target_text=getattr(example_target, content_attribute), + ) + + +def get_messages( + source_text, source_locale, target_locale, example=None, include_wiki_instructions=False +): + """ + A generic function for returning a list of LLM messages ("syste," and "user" + messages) + """ + context = dict( + example=example, + source_text=source_text, + source_language=get_language_in_english(source_locale), + target_language=get_language_in_english(target_locale), + include_wiki_instructions=include_wiki_instructions, + result_delimiter_begin=BEGIN_RESULT, + result_delimiter_end=END_RESULT, + ) + return [ + SystemMessage(content=render_to_string("l10n/llm_system_message.txt", context)), + HumanMessage(content=render_to_string("l10n/llm_user_message.txt", context)), + ] + + +def get_prompt(doc, content_attribute, target_locale): + """ + Returns a list of LLM messages (a "system" message plus a "user" message) for + use as the prompt when invoking a response from an LLM model. The messages + comprise a request to translate the given document's specific content defined + by the given "content_attribute" into the language of the given target locale. + """ + if content_attribute == "title": + example = None + source_text = doc.title + else: + example = get_example(doc, content_attribute, target_locale) + source_text = getattr(doc.latest_localizable_revision, content_attribute) + + return get_messages( + source_text=source_text, + source_locale=doc.locale, + target_locale=target_locale, + example=example, + include_wiki_instructions=content_attribute in ("summary", "content"), + ) + + +def get_result(text): + """ + Given the text of the LLM response, extracts and returns the actual translation. 
+ """ + return text.split(BEGIN_RESULT)[-1].split(END_RESULT)[0] diff --git a/kitsune/l10n/signals.py b/kitsune/l10n/signals.py new file mode 100644 index 00000000000..1bdc1b38041 --- /dev/null +++ b/kitsune/l10n/signals.py @@ -0,0 +1,29 @@ +from django.db.models.signals import post_save +from django.dispatch import receiver + +from kitsune.l10n.models import MachineTranslationConfiguration +from kitsune.l10n.utils import manage_heartbeat +from kitsune.l10n.wiki import is_suitable_for_l10n +from kitsune.l10n.tasks import handle_wiki_localization +from kitsune.wiki.signals import revision_approved + + +@receiver(revision_approved, dispatch_uid="l10.handle_wiki_localization_in_real_time") +def handle_wiki_localization_in_real_time(sender, revision, **kwargs): + """ + A revision has been approved for a document, so we may need to perform + some localization work. + """ + if is_suitable_for_l10n(revision.document.original): + handle_wiki_localization.delay(revision.document.id) + + +@receiver( + post_save, sender=MachineTranslationConfiguration, dispatch_uid="l10.manage_heartbeat_on_save" +) +def manage_heartbeat_on_save(sender, instance, created, **kwargs): + """ + Create, modify, or delete the heartbeat periodic task, if necessary, after the + MachineTranslationConfiguration singleton has been created or modified. 
+ """ + manage_heartbeat(instance.heartbeat_period) diff --git a/kitsune/l10n/tasks.py b/kitsune/l10n/tasks.py new file mode 100644 index 00000000000..824f1a86626 --- /dev/null +++ b/kitsune/l10n/tasks.py @@ -0,0 +1,102 @@ +from celery import shared_task +from django.contrib.postgres.aggregates import ArrayAgg +from django.db.models import Q, Value +import logging + +from kitsune.l10n.models import MachineTranslationConfiguration +from kitsune.l10n.utils import build_message, get_l10n_bot, run_with_pg_lock +from kitsune.l10n.wiki import ( + create_machine_translations, + manage_existing_machine_translations, +) +from kitsune.messages.utils import send_message +from kitsune.wiki.models import Document, Locale + + +log = logging.getLogger("k.l10n.tasks") +log.setLevel(logging.INFO) + + +@shared_task +@run_with_pg_lock +def handle_wiki_localization(document_id=None): + """ + Task that handles all aspects of machine translations for KB articles. + If a document id is provided, the work is limited to that document, + otherwise all documents are considered. + """ + log.info(f"Starting handle_wiki_localization(document_id={document_id})...") + + if document_id is not None: + try: + doc = Document.objects.select_related( + "parent", + "current_revision", + "latest_localizable_revision", + ).get(id=document_id) + except Document.DoesNotExist: + return + else: + doc = None + + # Freshly load the configuration for machine translations. + mt_config = MachineTranslationConfiguration.load() + + # Make any changes as needed to existing machine translations. + modified = manage_existing_machine_translations(mt_config, doc) + # Generate new machine translations as needed. + created = create_machine_translations(mt_config, doc.parent if doc and doc.parent else doc) + + # Notify each of the locale teams as needed. 
+ for locale in set(modified.keys()) | set(created.keys()): + modified_for_locale = modified.get(locale, {}) + created_for_locale = created.get(locale, {}) + text = build_message( + mt_config, + creations_awaiting_review=created_for_locale.get("awaiting_review"), + creations_already_approved=created_for_locale.get("already_approved"), + rejections=modified_for_locale.get("rejections"), + pre_review_approvals=modified_for_locale.get("pre_review_approvals"), + post_rejection_approvals=modified_for_locale.get("post_rejection_approvals"), + ) + send_message_to_locale_team.delay(locale, text) + + log.info(f"Completed handle_wiki_localization(document_id={document_id})") + + +@shared_task +def send_message_to_locale_team(locale, text): + """ + Task that sends the given message to the leaders and reviewers of the given locale. + """ + try: + team = ( + Locale.objects.filter(locale=locale) + .annotate( + usernames_of_leaders=ArrayAgg( + "leaders__username", + filter=Q(leaders__is_active=True), + default=Value([]), + ), + usernames_of_reviewers=ArrayAgg( + "reviewers__username", + filter=Q(reviewers__is_active=True), + default=Value([]), + ), + ) + .get() + ) + except Locale.DoesNotExist: + return + + if not (usernames := team.usernames_of_leaders + team.usernames_of_reviewers): + return + + # Send the message to the locale team's active leaders and reviewers. 
+ to = dict(users=usernames) + log.info( + f'Sending the following message to the "{locale}" locale team:\n' + f'{"-" * 40}\nUsernames: {", ".join(usernames)}\n' + f'{"-" * 40}\n{text}\n{"-" * 40}' + ) + send_message(to, text=text, sender=get_l10n_bot()) diff --git a/kitsune/l10n/templatetags/__init__.py b/kitsune/l10n/templatetags/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/kitsune/l10n/templatetags/jinja_helpers.py b/kitsune/l10n/templatetags/jinja_helpers.py new file mode 100644 index 00000000000..a05ccedb9ed --- /dev/null +++ b/kitsune/l10n/templatetags/jinja_helpers.py @@ -0,0 +1,6 @@ +from django_jinja import library + +from kitsune.l10n.wiki import current_revision_is_unreviewed_machine_translation + + +library.global_function(current_revision_is_unreviewed_machine_translation) diff --git a/kitsune/l10n/tests/__init__.py b/kitsune/l10n/tests/__init__.py new file mode 100644 index 00000000000..c51260b610d --- /dev/null +++ b/kitsune/l10n/tests/__init__.py @@ -0,0 +1,13 @@ +from kitsune.l10n.models import MachineTranslationConfiguration + + +def make_mt_config(**kwargs): + """ + Convenience function for creating a MachineTranslationConfiguration instance for testing. 
+ """ + mt_config = MachineTranslationConfiguration( + llm_name="test-model", is_enabled=True, limit_to_approved_after=None + ) + for key, value in kwargs.items(): + setattr(mt_config, key, value) + return mt_config diff --git a/kitsune/l10n/tests/test_utils.py b/kitsune/l10n/tests/test_utils.py new file mode 100644 index 00000000000..971e8e7643f --- /dev/null +++ b/kitsune/l10n/tests/test_utils.py @@ -0,0 +1,108 @@ +from datetime import timedelta + +from django.core.exceptions import ValidationError +from django_celery_beat.models import IntervalSchedule, PeriodicTask + +from kitsune.l10n.utils import ( + duration_to_text, + get_l10n_bot, + manage_heartbeat, + text_to_duration, +) +from kitsune.sumo.tests import TestCase + + +class UtilsTests(TestCase): + + def test_text_to_duration(self): + self.assertEqual(text_to_duration(" 3 minutes"), timedelta(minutes=3)) + self.assertEqual(text_to_duration(" 5 hours "), timedelta(hours=5)) + self.assertEqual(text_to_duration("7 days"), timedelta(days=7)) + self.assertEqual(text_to_duration("0"), timedelta(0)) + with self.assertRaises(ValidationError): + text_to_duration("") + with self.assertRaises(ValidationError): + text_to_duration("1 minute") + with self.assertRaises(ValidationError): + text_to_duration("-3 minutes") + with self.assertRaises(ValidationError): + text_to_duration("7 seconds") + with self.assertRaises(ValidationError): + text_to_duration("9hours") + with self.assertRaises(ValidationError): + text_to_duration("hours 4") + + def test_duration_to_text(self): + self.assertEqual(duration_to_text(timedelta(minutes=3)), "3 minutes") + self.assertEqual(duration_to_text(timedelta(hours=5)), "5 hours") + self.assertEqual(duration_to_text(timedelta(days=7)), "7 days") + self.assertEqual(duration_to_text(timedelta(days=2, hours=4, minutes=12)), "3132 minutes") + self.assertEqual(duration_to_text(timedelta(hours=4, minutes=12)), "252 minutes") + self.assertEqual(duration_to_text(timedelta(days=1, hours=4)), "28 hours") 
+ self.assertEqual(duration_to_text(timedelta(days=1, hours=24)), "2 days") + self.assertEqual(duration_to_text(timedelta(days=2, hours=23, minutes=60)), "3 days") + + def test_get_l10n_bot(self): + l10n_bot1 = get_l10n_bot() + self.assertEqual(l10n_bot1.username, "sumo-l10n-bot") + self.assertEqual(l10n_bot1.email, "sumodev@mozilla.com") + self.assertTrue(l10n_bot1.profile) + self.assertEqual(l10n_bot1.profile.name, "SUMO Localization Bot") + l10n_bot2 = get_l10n_bot() + self.assertEqual(l10n_bot2.id, l10n_bot1.id) + self.assertEqual(l10n_bot2.profile.pk, l10n_bot1.profile.pk) + + def test_manage_heartbeat(self): + self.assertEqual(IntervalSchedule.objects.count(), 0) + self.assertEqual(PeriodicTask.objects.count(), 0) + + duration1 = timedelta(hours=4) + duration2 = timedelta(hours=2) + + manage_heartbeat(duration1) + + self.assertEqual(IntervalSchedule.objects.count(), 1) + self.assertEqual(IntervalSchedule.objects.filter(every=4, period="hours").count(), 1) + self.assertEqual(PeriodicTask.objects.count(), 1) + self.assertEqual( + PeriodicTask.objects.filter( + name="L10n Heartbeat", + interval=IntervalSchedule.objects.filter(every=4, period="hours").get(), + ).count(), + 1, + ) + + manage_heartbeat(duration2) + + self.assertEqual(IntervalSchedule.objects.count(), 2) + self.assertEqual(IntervalSchedule.objects.filter(every=4, period="hours").count(), 1) + self.assertEqual(IntervalSchedule.objects.filter(every=2, period="hours").count(), 1) + self.assertEqual(PeriodicTask.objects.count(), 1) + self.assertEqual( + PeriodicTask.objects.filter( + name="L10n Heartbeat", + interval=IntervalSchedule.objects.filter(every=2, period="hours").get(), + ).count(), + 1, + ) + + manage_heartbeat(duration1) + + self.assertEqual(IntervalSchedule.objects.count(), 2) + self.assertEqual(IntervalSchedule.objects.filter(every=4, period="hours").count(), 1) + self.assertEqual(IntervalSchedule.objects.filter(every=2, period="hours").count(), 1) + 
self.assertEqual(PeriodicTask.objects.count(), 1) + self.assertEqual( + PeriodicTask.objects.filter( + name="L10n Heartbeat", + interval=IntervalSchedule.objects.filter(every=4, period="hours").get(), + ).count(), + 1, + ) + + manage_heartbeat(timedelta(0)) + + self.assertEqual(IntervalSchedule.objects.count(), 2) + self.assertEqual(IntervalSchedule.objects.filter(every=4, period="hours").count(), 1) + self.assertEqual(IntervalSchedule.objects.filter(every=2, period="hours").count(), 1) + self.assertEqual(PeriodicTask.objects.count(), 0) diff --git a/kitsune/l10n/tests/test_wiki_create.py b/kitsune/l10n/tests/test_wiki_create.py new file mode 100644 index 00000000000..e5dda32c7eb --- /dev/null +++ b/kitsune/l10n/tests/test_wiki_create.py @@ -0,0 +1,657 @@ +from datetime import datetime, timedelta +from unittest import mock + +from kitsune.dashboards import LAST_30_DAYS +from kitsune.dashboards.models import WikiDocumentVisits +from kitsune.l10n.tests import make_mt_config +from kitsune.l10n.utils import get_l10n_bot +from kitsune.l10n.wiki import create_machine_translations +from kitsune.sumo.tests import TestCase +from kitsune.users.tests import GroupFactory, UserFactory +from kitsune.wiki.config import ( + MAJOR_SIGNIFICANCE, + MEDIUM_SIGNIFICANCE, + TYPO_SIGNIFICANCE, +) +from kitsune.wiki.tests import ( + ApprovedRevisionFactory, + DocumentFactory, + RedirectRevisionFactory, + RevisionFactory, +) + + +THIRTY_DAYS = timedelta(days=30) + + +class CreateMachineTranslationsTests(TestCase): + + def setUp(self): + super().setUp() + self.l10n_bot = get_l10n_bot() + now = datetime.now() + two_days_ago = now - timedelta(days=2) + four_days_ago = now - timedelta(days=4) + self.group1 = GroupFactory() + self.group2 = GroupFactory() + + self.doc1_en = DocumentFactory(slug="doc1_slug") + rev1_en = ApprovedRevisionFactory( + document=self.doc1_en, + created=datetime(2024, 5, 1), + reviewed=datetime(2024, 5, 2), + reviewer=UserFactory(), + significance=MAJOR_SIGNIFICANCE, + 
is_ready_for_localization=True, + ) + rev2_en = ApprovedRevisionFactory( + document=self.doc1_en, + created=datetime(2024, 5, 5), + reviewed=datetime(2024, 5, 6), + reviewer=UserFactory(), + significance=MEDIUM_SIGNIFICANCE, + is_ready_for_localization=True, + ) + rev2_en.reviewer.groups.add(GroupFactory()) + rev2_en.reviewer.groups.add(self.group1) + rev3_en = ApprovedRevisionFactory( + document=self.doc1_en, + created=datetime(2024, 6, 7), + reviewed=datetime(2024, 6, 8), + reviewer=UserFactory(), + significance=TYPO_SIGNIFICANCE, + is_ready_for_localization=False, + ) + + doc1_es = DocumentFactory(parent=self.doc1_en, locale="es") + ApprovedRevisionFactory( + document=doc1_es, + based_on=rev1_en, + created=datetime(2024, 5, 3), + reviewed=datetime(2024, 5, 4), + ) + doc1_fr = DocumentFactory(parent=self.doc1_en, locale="fr") + ApprovedRevisionFactory( + document=doc1_fr, + based_on=rev1_en, + created=datetime(2024, 5, 3), + reviewed=datetime(2024, 5, 4), + ) + doc1_el = DocumentFactory(parent=self.doc1_en, locale="el") + RevisionFactory( + document=doc1_el, + based_on=rev3_en, + created=datetime(2024, 7, 1), + reviewed=None, + ) + doc1_ja = DocumentFactory(parent=self.doc1_en, locale="ja") + RevisionFactory( + document=doc1_ja, + based_on=rev3_en, + created=datetime(2024, 7, 1), + reviewed=None, + ) + doc1_ro = DocumentFactory(parent=self.doc1_en, locale="ro") + RevisionFactory( + document=doc1_ro, + based_on=rev3_en, + created=two_days_ago, + reviewed=None, + ) + doc1_it = DocumentFactory(parent=self.doc1_en, locale="it") + ApprovedRevisionFactory( + document=doc1_it, + based_on=rev1_en, + created=datetime(2024, 5, 3), + reviewed=datetime(2024, 5, 4), + ) + RevisionFactory( + document=doc1_it, + based_on=rev2_en, + created=four_days_ago, + creator=self.l10n_bot, + reviewed=None, + ) + + self.doc2_en = DocumentFactory(slug="doc2_slug") + ApprovedRevisionFactory( + document=self.doc2_en, + created=datetime(2024, 5, 2), + reviewed=datetime(2024, 5, 3), + 
reviewer=UserFactory(), + significance=MAJOR_SIGNIFICANCE, + is_ready_for_localization=True, + ) + rev2_en_2 = ApprovedRevisionFactory( + document=self.doc2_en, + created=datetime(2024, 5, 6), + reviewed=datetime(2024, 5, 7), + reviewer=UserFactory(), + significance=MEDIUM_SIGNIFICANCE, + is_ready_for_localization=True, + ) + rev2_en_2.reviewer.groups.add(GroupFactory()) + rev2_en_2.reviewer.groups.add(self.group2) + rev3_en_2 = ApprovedRevisionFactory( + document=self.doc2_en, + created=datetime(2024, 6, 7), + reviewed=datetime(2024, 6, 8), + reviewer=UserFactory(), + significance=TYPO_SIGNIFICANCE, + is_ready_for_localization=False, + ) + + doc2_es = DocumentFactory(parent=self.doc2_en, locale="es") + ApprovedRevisionFactory( + document=doc2_es, + based_on=rev2_en_2, + created=datetime(2024, 5, 7), + reviewed=datetime(2024, 5, 8), + ) + RevisionFactory( + document=doc2_es, + based_on=rev3_en_2, + created=datetime(2024, 7, 1), + reviewed=None, + ) + doc2_el = DocumentFactory(parent=self.doc2_en, locale="el") + RevisionFactory( + document=doc2_el, + based_on=rev3_en_2, + created=four_days_ago, + reviewed=None, + ) + doc2_ro = DocumentFactory(parent=self.doc2_en, locale="ro") + RevisionFactory( + document=doc2_ro, + based_on=rev2_en_2, + created=two_days_ago, + reviewed=None, + ) + + doc3_en = DocumentFactory(is_localizable=False) + ApprovedRevisionFactory( + document=doc3_en, + created=datetime(2024, 5, 1), + reviewed=datetime(2024, 5, 2), + reviewer=UserFactory(), + significance=MAJOR_SIGNIFICANCE, + ) + + doc4_en = DocumentFactory() + rev1_en_4 = ApprovedRevisionFactory( + document=doc4_en, + created=datetime(2024, 5, 1), + reviewed=datetime(2024, 5, 2), + reviewer=UserFactory(), + significance=MAJOR_SIGNIFICANCE, + is_ready_for_localization=True, + ) + ApprovedRevisionFactory( + document=doc4_en, + created=datetime(2024, 5, 5), + reviewed=datetime(2024, 5, 6), + reviewer=UserFactory(), + significance=MEDIUM_SIGNIFICANCE, + is_ready_for_localization=True, + ) + 
ApprovedRevisionFactory( + document=doc4_en, + created=datetime(2024, 6, 7), + reviewed=datetime(2024, 6, 8), + reviewer=UserFactory(), + significance=TYPO_SIGNIFICANCE, + is_ready_for_localization=False, + ) + + doc4_it = DocumentFactory(parent=doc4_en, locale="it") + ApprovedRevisionFactory( + document=doc4_it, + based_on=rev1_en_4, + created=datetime(2024, 5, 3), + reviewed=datetime(2024, 5, 4), + ) + + doc4_en.is_archived = True + doc4_en.save() + + doc5_en = DocumentFactory() + rev1_en_5 = ApprovedRevisionFactory( + document=doc5_en, + created=datetime(2024, 5, 1), + reviewed=datetime(2024, 5, 2), + reviewer=UserFactory(), + significance=MAJOR_SIGNIFICANCE, + is_ready_for_localization=True, + ) + ApprovedRevisionFactory( + document=doc5_en, + created=datetime(2024, 5, 5), + reviewed=datetime(2024, 5, 6), + reviewer=UserFactory(), + significance=MEDIUM_SIGNIFICANCE, + is_ready_for_localization=True, + ) + ApprovedRevisionFactory( + document=doc5_en, + created=datetime(2024, 6, 7), + reviewed=datetime(2024, 6, 8), + reviewer=UserFactory(), + significance=TYPO_SIGNIFICANCE, + is_ready_for_localization=False, + ) + + doc5_ro = DocumentFactory(parent=doc5_en, locale="ro") + ApprovedRevisionFactory( + document=doc5_ro, + based_on=rev1_en_5, + created=datetime(2024, 5, 3), + reviewed=datetime(2024, 5, 4), + ) + + RedirectRevisionFactory(document=doc5_en) + + WikiDocumentVisits.objects.create(document=self.doc1_en, visits=100, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc1_it, visits=18, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc1_el, visits=17, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc1_ro, visits=16, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc1_es, visits=15, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=self.doc2_en, visits=175, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc2_es, visits=12, period=LAST_30_DAYS) + 
WikiDocumentVisits.objects.create(document=doc2_el, visits=11, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc2_ro, visits=10, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc3_en, visits=50, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc4_en, visits=40, period=LAST_30_DAYS) + WikiDocumentVisits.objects.create(document=doc5_en, visits=30, period=LAST_30_DAYS) + + def assert_calls_for_doc1_only(self, create_machine_translation_mock, report): + create_machine_translation_mock.assert_has_calls( + [ + mock.call("test-model", self.doc1_en, "el", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc1_en, "es", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc1_en, "de", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + ], + any_order=False, + ) + self.assertEqual(create_machine_translation_mock.call_count, 3) + self.assertEqual(set(report.keys()), set(["el", "de", "es"])) + self.assertIn("already_approved", report["el"]) + self.assertIn("already_approved", report["es"]) + self.assertIn("already_approved", report["de"]) + self.assertEqual(len(report["el"]["already_approved"]), 1) + self.assertEqual(len(report["es"]["already_approved"]), 1) + self.assertEqual(len(report["de"]["already_approved"]), 1) + + def assert_calls_for_doc2_only(self, create_machine_translation_mock, report): + create_machine_translation_mock.assert_has_calls( + [ + mock.call("test-model", self.doc2_en, "el", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc2_en, "de", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc2_en, "it", self.l10n_bot, 
THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + ], + any_order=False, + ) + self.assertEqual(create_machine_translation_mock.call_count, 3) + self.assertEqual(set(report.keys()), set(["el", "de", "it"])) + self.assertIn("already_approved", report["el"]) + self.assertIn("already_approved", report["de"]) + self.assertIn("already_approved", report["it"]) + self.assertEqual(len(report["el"]["already_approved"]), 1) + self.assertEqual(len(report["de"]["already_approved"]), 1) + self.assertEqual(len(report["it"]["already_approved"]), 1) + + def assert_calls_for_doc1_and_doc2(self, create_machine_translation_mock, report): + create_machine_translation_mock.assert_has_calls( + [ + mock.call("test-model", self.doc2_en, "el", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc1_en, "el", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc1_en, "es", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc2_en, "de", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc2_en, "it", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + mock.call("test-model", self.doc1_en, "de", self.l10n_bot, THIRTY_DAYS), + mock.call().is_approved.__bool__(), + mock.call().get_absolute_url(), + ], + any_order=False, + ) + self.assertEqual(create_machine_translation_mock.call_count, 6) + self.assertEqual(set(report.keys()), set(["el", "es", "de", "it"])) + self.assertIn("already_approved", report["el"]) + self.assertIn("already_approved", report["es"]) + self.assertIn("already_approved", report["de"]) + self.assertIn("already_approved", report["it"]) + 
self.assertEqual(len(report["el"]["already_approved"]), 2) + self.assertEqual(len(report["es"]["already_approved"]), 1) + self.assertEqual(len(report["de"]["already_approved"]), 2) + self.assertEqual(len(report["it"]["already_approved"]), 1) + + @mock.patch("kitsune.l10n.wiki.create_machine_translation") + def test_create_machine_translations_without_default_document( + self, create_machine_translation_mock + ): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + ) + report = create_machine_translations(mt_config) + self.assert_calls_for_doc1_and_doc2(create_machine_translation_mock, report) + + @mock.patch("kitsune.l10n.wiki.create_machine_translation") + def test_create_machine_translations_with_default_document( + self, create_machine_translation_mock + ): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + ) + + with self.subTest("doc1 as default doc argument"): + report = create_machine_translations(mt_config, self.doc1_en) + self.assert_calls_for_doc1_only(create_machine_translation_mock, report) + + create_machine_translation_mock.reset_mock() + + with self.subTest("doc2 as default doc argument"): + report = create_machine_translations(mt_config, self.doc2_en) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + @mock.patch("kitsune.l10n.wiki.create_machine_translation") + def test_create_machine_translations_with_slug_filtering( + self, create_machine_translation_mock + ): + with self.subTest("case 1"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc1*", "doc2_slug"], + disabled_slugs=["doc*"], + ) + create_machine_translation_mock.reset_mock() + report = 
create_machine_translations(mt_config) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 2"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + disabled_slugs=["doc1*", "doc2_slug"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 3"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["xyz"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 4"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc1_slug"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc2_en) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 5"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc2*", "xyz"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc1_en) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 6"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", 
"ro", "es", "it", "de"], + limit_to_slugs=["doc2*", "xyz"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc2_en) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 7"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + disabled_slugs=["doc1*"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 8"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc2*"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 9"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc2*", "doc1_slug"], + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc1_and_doc2(create_machine_translation_mock, report) + + @mock.patch("kitsune.l10n.wiki.create_machine_translation") + def test_create_machine_translations_with_approved_date_filtering( + self, create_machine_translation_mock + ): + with self.subTest("case 1"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 7), + ) + create_machine_translation_mock.reset_mock() + report = 
create_machine_translations(mt_config) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 2"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_slugs=["doc2_slug"], + limit_to_approved_after=datetime(2024, 5, 5), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 3"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 6), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 4"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 5), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc1_and_doc2(create_machine_translation_mock, report) + + with self.subTest("case 5"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 5), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc2_en) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 6"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + 
post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 5), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc1_en) + self.assert_calls_for_doc1_only(create_machine_translation_mock, report) + + with self.subTest("case 7"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approved_after=datetime(2024, 5, 6), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc1_en) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + @mock.patch("kitsune.l10n.wiki.create_machine_translation") + def test_create_machine_translations_with_approver_filtering( + self, create_machine_translation_mock + ): + with self.subTest("case 1"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=GroupFactory(), + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 2"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=self.group1, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc2_en) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 3"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", 
"ro", "es", "it", "de"], + limit_to_approver_in_group=self.group2, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc1_en) + create_machine_translation_mock.assert_not_called() + self.assertFalse(report) + + with self.subTest("case 4"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=self.group1, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc1_en) + self.assert_calls_for_doc1_only(create_machine_translation_mock, report) + + with self.subTest("case 5"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=self.group2, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config, self.doc2_en) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) + + with self.subTest("case 6"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=self.group1, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc1_only(create_machine_translation_mock, report) + + with self.subTest("case 7"): + mt_config = make_mt_config( + review_grace_period=timedelta(days=3), + post_review_grace_period=timedelta(days=5), + enabled_languages=["el", "ro", "es", "it", "de"], + limit_to_approver_in_group=self.group2, + ) + create_machine_translation_mock.reset_mock() + report = create_machine_translations(mt_config) + self.assert_calls_for_doc2_only(create_machine_translation_mock, report) diff --git 
from datetime import datetime, timedelta

from kitsune.l10n.tests import make_mt_config
from kitsune.l10n.utils import get_l10n_bot
from kitsune.l10n.wiki import manage_existing_machine_translations
from kitsune.sumo.tests import TestCase
from kitsune.users.tests import UserFactory
from kitsune.wiki.config import (
    MAJOR_SIGNIFICANCE,
    MEDIUM_SIGNIFICANCE,
    TYPO_SIGNIFICANCE,
)
from kitsune.wiki.models import Revision
from kitsune.wiki.tests import (
    ApprovedRevisionFactory,
    DocumentFactory,
    RedirectRevisionFactory,
    RevisionFactory,
)


APPROVED_MSG = "Automatically approved because it was not reviewed within 3 days."
REJECTED_MSG = "No longer relevant."
APPROVED_COPY_MSG = (
    "Automatically created and approved because an alternate translation "
    "was not approved within 5 days after the rejection of %(url)s."
)


class ManageExistingMachineTranslationsTests(TestCase):
    """Tests covering how manage_existing_machine_translations rejects,
    approves, and re-creates the L10n bot's existing machine translations."""

    def setUp(self):
        super().setUp()
        self.l10n_bot = get_l10n_bot()
        now = datetime.now()

        # Two structurally identical English documents, each with a tree of
        # translations in various review states. Fixture attributes ending
        # in "_2" belong to the second document.
        first = self._create_doc_with_translations(now)
        self.doc1_en = first["doc_en"]
        self.doc1_el = first["doc_el"]
        self.doc1_ja = first["doc_ja"]
        self.rev2_es = first["rev2_es"]
        self.rev1_ro = first["rev1_ro"]
        self.rev1_el = first["rev1_el"]
        self.rev2_el = first["rev2_el"]
        self.rev1_ja = first["rev1_ja"]
        self.rev2_ja = first["rev2_ja"]
        self.rev1_it = first["rev1_it"]

        second = self._create_doc_with_translations(now)
        self.doc2_en = second["doc_en"]
        self.rev2_es_2 = second["rev2_es"]
        self.rev1_ro_2 = second["rev1_ro"]
        self.rev1_el_2 = second["rev1_el"]
        self.rev2_el_2 = second["rev2_el"]
        self.rev1_ja_2 = second["rev1_ja"]
        self.rev2_ja_2 = second["rev2_ja"]
        self.rev1_it_2 = second["rev1_it"]

    def _create_doc_with_translations(self, now):
        """Create one English document plus es/ro/el/ja/it translations.

        The English document has two localizable approved revisions and a
        typo-level one that is not ready for localization. Each locale's
        translations are staged so a specific management outcome applies.
        """
        one_day_ago = now - timedelta(days=1)
        two_days_ago = now - timedelta(days=2)
        four_days_ago = now - timedelta(days=4)
        six_days_ago = now - timedelta(days=6)
        seven_days_ago = now - timedelta(days=7)

        doc_en = DocumentFactory()
        rev1_en = ApprovedRevisionFactory(
            document=doc_en,
            created=datetime(2024, 5, 1),
            reviewed=datetime(2024, 5, 2),
            significance=MAJOR_SIGNIFICANCE,
            is_ready_for_localization=True,
        )
        rev2_en = ApprovedRevisionFactory(
            document=doc_en,
            created=datetime(2024, 5, 5),
            reviewed=datetime(2024, 5, 6),
            significance=MEDIUM_SIGNIFICANCE,
            is_ready_for_localization=True,
        )
        ApprovedRevisionFactory(
            document=doc_en,
            created=datetime(2024, 6, 7),
            reviewed=datetime(2024, 6, 8),
            significance=TYPO_SIGNIFICANCE,
            is_ready_for_localization=False,
        )

        # Spanish: an up-to-date machine translation still inside the
        # review grace period.
        doc_es = DocumentFactory(parent=doc_en, locale="es")
        ApprovedRevisionFactory(
            document=doc_es,
            based_on=rev1_en,
            created=datetime(2024, 5, 3),
            reviewed=datetime(2024, 5, 4),
        )
        rev2_es = RevisionFactory(
            document=doc_es,
            based_on=rev2_en,
            creator=self.l10n_bot,
            created=two_days_ago,
            reviewed=None,
        )

        # Romanian: an unreviewed machine translation that was superseded
        # by a human-approved revision within the grace period.
        doc_ro = DocumentFactory(parent=doc_en, locale="ro")
        rev1_ro = RevisionFactory(
            document=doc_ro,
            based_on=rev2_en,
            creator=self.l10n_bot,
            created=two_days_ago,
            reviewed=None,
        )
        ApprovedRevisionFactory(
            document=doc_ro,
            based_on=rev2_en,
            created=one_day_ago,
            reviewed=one_day_ago,
        )

        # Greek and Japanese: one out-of-date machine translation and one
        # up-to-date machine translation past the review grace period.
        doc_el = DocumentFactory(parent=doc_en, locale="el")
        rev1_el = RevisionFactory(
            document=doc_el,
            based_on=rev1_en,
            creator=self.l10n_bot,
            created=datetime(2024, 5, 3),
            reviewed=None,
        )
        rev2_el = RevisionFactory(
            document=doc_el,
            based_on=rev2_en,
            creator=self.l10n_bot,
            created=four_days_ago,
            reviewed=None,
        )
        doc_ja = DocumentFactory(parent=doc_en, locale="ja")
        rev1_ja = RevisionFactory(
            document=doc_ja,
            based_on=rev1_en,
            creator=self.l10n_bot,
            created=datetime(2024, 5, 3),
            reviewed=None,
        )
        rev2_ja = RevisionFactory(
            document=doc_ja,
            based_on=rev2_en,
            creator=self.l10n_bot,
            created=four_days_ago,
            reviewed=None,
        )

        # Italian: a rejected machine translation whose human alternative
        # was never approved within the post-review grace period.
        doc_it = DocumentFactory(parent=doc_en, locale="it")
        rev1_it = RevisionFactory(
            document=doc_it,
            based_on=rev2_en,
            creator=self.l10n_bot,
            created=seven_days_ago,
            reviewed=six_days_ago,
            reviewer=UserFactory(),
        )
        RevisionFactory(
            document=doc_it,
            based_on=rev2_en,
            creator=UserFactory(),
            created=four_days_ago,
            reviewed=None,
        )

        return dict(
            doc_en=doc_en,
            doc_el=doc_el,
            doc_ja=doc_ja,
            rev2_es=rev2_es,
            rev1_ro=rev1_ro,
            rev1_el=rev1_el,
            rev2_el=rev2_el,
            rev1_ja=rev1_ja,
            rev2_ja=rev2_ja,
            rev1_it=rev1_it,
        )

    def _machine_revisions(self):
        """All of the machine-translation revisions created in setUp."""
        return (
            self.rev1_el,
            self.rev2_el,
            self.rev2_es,
            self.rev1_ja,
            self.rev2_ja,
            self.rev1_ro,
            self.rev1_it,
            self.rev1_el_2,
            self.rev2_el_2,
            self.rev2_es_2,
            self.rev1_ja_2,
            self.rev2_ja_2,
            self.rev1_ro_2,
            self.rev1_it_2,
        )

    def _refresh(self, revs):
        for rev in revs:
            rev.refresh_from_db()

    def _assert_rejected(self, *revs, since):
        """Each revision was rejected by the L10n bot after ``since``."""
        for rev in revs:
            self.assertFalse(rev.is_approved)
            self.assertTrue(rev.reviewed > since)
            self.assertEqual(rev.reviewer, self.l10n_bot)
            self.assertEqual(rev.comment, REJECTED_MSG)

    def _assert_auto_approved(self, *revs, since):
        """Each revision was auto-approved by the L10n bot after ``since``."""
        for rev in revs:
            self.assertTrue(rev.is_approved)
            self.assertTrue(rev.reviewed > since)
            self.assertEqual(rev.reviewer, self.l10n_bot)
            self.assertEqual(rev.comment, APPROVED_MSG)

    def _assert_untouched(self, *revs):
        """Each revision remains unreviewed and unapproved."""
        for rev in revs:
            self.assertIsNone(rev.reviewed)
            self.assertFalse(rev.is_approved)

    def _approved_copy_exists(self, rev, since):
        """Whether the bot created and approved a copy of ``rev`` after
        ``since`` (the post-rejection re-creation path)."""
        return Revision.objects.filter(
            is_approved=True,
            creator=self.l10n_bot,
            reviewer=self.l10n_bot,
            created__gte=since,
            reviewed__gte=since,
            based_on=rev.based_on,
            document=rev.document,
            content=rev.content,
            keywords=rev.keywords,
            comment=APPROVED_COPY_MSG % dict(url=rev.get_absolute_url()),
        ).exists()

    def test_manage_existing_machine_translations_with_no_argument(self):
        mt_config = make_mt_config(
            review_grace_period=timedelta(days=3),
            post_review_grace_period=timedelta(days=5),
            enabled_languages=["el", "ro", "es", "it", "de"],
        )
        revs = self._machine_revisions()
        before = datetime.now()

        report = manage_existing_machine_translations(mt_config)

        self._refresh(revs)

        # The Japanese locale is included in the report because it had machine
        # translations that required management, and all existing machine
        # translations are managed even if they're within locales that are no
        # longer enabled.
        self.assertEqual(set(report), {"el", "ro", "it", "ja"})
        for locale, rev1, rev1_2, rev2, rev2_2 in (
            ("el", self.rev1_el, self.rev1_el_2, self.rev2_el, self.rev2_el_2),
            ("ja", self.rev1_ja, self.rev1_ja_2, self.rev2_ja, self.rev2_ja_2),
        ):
            self.assertEqual(set(report[locale]), {"rejections", "pre_review_approvals"})
            self.assertEqual(
                set(report[locale]["rejections"]),
                {rev1.get_absolute_url(), rev1_2.get_absolute_url()},
            )
            self.assertEqual(
                set(report[locale]["pre_review_approvals"]),
                {rev2.get_absolute_url(), rev2_2.get_absolute_url()},
            )

        # The first Greek and Japanese revisions were rejected (out-of-date).
        self._assert_rejected(
            self.rev1_el, self.rev1_el_2, self.rev1_ja, self.rev1_ja_2, since=before
        )
        # The second Greek and Japanese revisions were approved (up-to-date,
        # review grace period expired).
        self._assert_auto_approved(
            self.rev2_el, self.rev2_el_2, self.rev2_ja, self.rev2_ja_2, since=before
        )
        # Spanish is still inside its review grace period, so untouched.
        self._assert_untouched(self.rev2_es, self.rev2_es_2)

        self.assertEqual(list(report["ro"]), ["rejections"])
        self.assertEqual(
            set(report["ro"]["rejections"]),
            {self.rev1_ro.get_absolute_url(), self.rev1_ro_2.get_absolute_url()},
        )
        # The Romanian machine translations were rejected because they were
        # superseded within their review grace period.
        self._assert_rejected(self.rev1_ro, self.rev1_ro_2, since=before)

        self.assertEqual(list(report["it"]), ["post_rejection_approvals"])
        self.assertEqual(len(report["it"]["post_rejection_approvals"]), 2)
        # The Italian machine translations were rejected, but no alternate
        # revision was approved within the post-review grace period, so a
        # copy of each should have been created and approved by the L10n bot.
        for rev in (self.rev1_it, self.rev1_it_2):
            self.assertFalse(rev.is_approved)
            self.assertTrue(rev.reviewed)
            self.assertTrue(self._approved_copy_exists(rev, before))

    def test_manage_existing_machine_translations_with_default_doc1(self):
        mt_config = make_mt_config(
            review_grace_period=timedelta(days=3),
            post_review_grace_period=timedelta(days=5),
            enabled_languages=["el", "ro", "es", "it", "de"],
        )
        revs = self._machine_revisions()
        before = datetime.now()

        report = manage_existing_machine_translations(mt_config, self.doc1_en)

        self._refresh(revs)

        # Japanese is managed and reported even though it is no longer an
        # enabled locale (see the no-argument test).
        self.assertEqual(set(report), {"el", "ro", "it", "ja"})
        for locale, rev1, rev2 in (
            ("el", self.rev1_el, self.rev2_el),
            ("ja", self.rev1_ja, self.rev2_ja),
        ):
            self.assertEqual(set(report[locale]), {"rejections", "pre_review_approvals"})
            self.assertEqual(report[locale]["rejections"], [rev1.get_absolute_url()])
            self.assertEqual(
                report[locale]["pre_review_approvals"], [rev2.get_absolute_url()]
            )

        # Out-of-date Greek and Japanese revisions were rejected, but only
        # within the context of "doc1_en".
        self._assert_rejected(self.rev1_el, self.rev1_ja, since=before)
        self._assert_untouched(self.rev1_el_2, self.rev1_ja_2)
        # Up-to-date Greek and Japanese revisions were approved, but only
        # within the context of "doc1_en".
        self._assert_auto_approved(self.rev2_el, self.rev2_ja, since=before)
        self._assert_untouched(self.rev2_el_2, self.rev2_ja_2)
        # Spanish is still inside its review grace period for doc1; the doc2
        # revision is simply out of context.
        self._assert_untouched(self.rev2_es, self.rev2_es_2)

        self.assertEqual(list(report["ro"]), ["rejections"])
        self.assertEqual(report["ro"]["rejections"], [self.rev1_ro.get_absolute_url()])
        # Romanian rejection applies only within the context of "doc1_en".
        self._assert_rejected(self.rev1_ro, since=before)
        self._assert_untouched(self.rev1_ro_2)

        self.assertEqual(list(report["it"]), ["post_rejection_approvals"])
        self.assertEqual(len(report["it"]["post_rejection_approvals"]), 1)
        # Only doc1's rejected Italian revision gets a bot-approved copy.
        self.assertFalse(self.rev1_it.is_approved)
        self.assertTrue(self.rev1_it.reviewed)
        self.assertTrue(self._approved_copy_exists(self.rev1_it, before))
        self.assertFalse(self.rev1_it_2.is_approved)
        self.assertTrue(self.rev1_it_2.reviewed)
        self.assertFalse(self._approved_copy_exists(self.rev1_it_2, before))

    def test_manage_existing_machine_translations_with_default_doc2(self):
        mt_config = make_mt_config(
            review_grace_period=timedelta(days=3),
            post_review_grace_period=timedelta(days=5),
            enabled_languages=["el", "ro", "es", "it", "de"],
        )
        revs = self._machine_revisions()
        before = datetime.now()

        report = manage_existing_machine_translations(mt_config, self.doc2_en)

        self._refresh(revs)

        # Japanese is managed and reported even though it is no longer an
        # enabled locale (see the no-argument test).
        self.assertEqual(set(report), {"el", "ro", "it", "ja"})
        for locale, rev1, rev2 in (
            ("el", self.rev1_el_2, self.rev2_el_2),
            ("ja", self.rev1_ja_2, self.rev2_ja_2),
        ):
            self.assertEqual(set(report[locale]), {"rejections", "pre_review_approvals"})
            self.assertEqual(report[locale]["rejections"], [rev1.get_absolute_url()])
            self.assertEqual(
                report[locale]["pre_review_approvals"], [rev2.get_absolute_url()]
            )

        # Out-of-date Greek and Japanese revisions were rejected, but only
        # within the context of "doc2_en".
        self._assert_rejected(self.rev1_el_2, self.rev1_ja_2, since=before)
        self._assert_untouched(self.rev1_el, self.rev1_ja)
        # Up-to-date Greek and Japanese revisions were approved, but only
        # within the context of "doc2_en".
        self._assert_auto_approved(self.rev2_el_2, self.rev2_ja_2, since=before)
        self._assert_untouched(self.rev2_el, self.rev2_ja)
        # Spanish is still inside its review grace period for doc2; the doc1
        # revision is simply out of context.
        self._assert_untouched(self.rev2_es, self.rev2_es_2)

        self.assertEqual(list(report["ro"]), ["rejections"])
        self.assertEqual(report["ro"]["rejections"], [self.rev1_ro_2.get_absolute_url()])
        # Romanian rejection applies only within the context of "doc2_en".
        self._assert_rejected(self.rev1_ro_2, since=before)
        self._assert_untouched(self.rev1_ro)

        self.assertEqual(list(report["it"]), ["post_rejection_approvals"])
        self.assertEqual(len(report["it"]["post_rejection_approvals"]), 1)
        # Only doc2's rejected Italian revision gets a bot-approved copy.
        self.assertFalse(self.rev1_it_2.is_approved)
        self.assertTrue(self.rev1_it_2.reviewed)
        self.assertTrue(self._approved_copy_exists(self.rev1_it_2, before))
        self.assertFalse(self.rev1_it.is_approved)
        self.assertTrue(self.rev1_it.reviewed)
        self.assertFalse(self._approved_copy_exists(self.rev1_it, before))

    def test_manage_existing_machine_translations_with_doc1_el(self):
        mt_config = make_mt_config(
            review_grace_period=timedelta(days=3),
            post_review_grace_period=timedelta(days=5),
            enabled_languages=["el", "ro", "es", "it", "de"],
        )
        revs = self._machine_revisions()
        before = datetime.now()

        report = manage_existing_machine_translations(mt_config, self.doc1_el)

        self._refresh(revs)

        # Only the Greek translation of doc1 is in scope.
        self.assertEqual(list(report), ["el"])
        self.assertEqual(set(report["el"]), {"rejections", "pre_review_approvals"})
        self.assertEqual(report["el"]["rejections"], [self.rev1_el.get_absolute_url()])
        self.assertEqual(
            report["el"]["pre_review_approvals"], [self.rev2_el.get_absolute_url()]
        )

        # The first Greek revision was rejected (out-of-date) and the second
        # approved (up-to-date, grace period expired) — both within the
        # context of "doc1_el" only.
        self._assert_rejected(self.rev1_el, since=before)
        self._assert_auto_approved(self.rev2_el, since=before)

        # Everything else was left alone (the Italian revisions were already
        # reviewed in setUp, but no approved copies were created for them).
        for rev in revs:
            if rev in (self.rev1_el, self.rev2_el):
                continue
            self.assertFalse(rev.is_approved)
            if rev in (self.rev1_it, self.rev1_it_2):
                self.assertTrue(rev.reviewed)
                self.assertFalse(self._approved_copy_exists(rev, before))
            else:
                self.assertIsNone(rev.reviewed)

    def test_manage_existing_machine_translations_with_unready_document(self):
        mt_config = make_mt_config(
            enabled_languages=["el", "ro", "es", "it", "de"],
        )
        # The document has no current revision.
        doc = DocumentFactory()
        self.assertFalse(manage_existing_machine_translations(mt_config, doc))

        # The document has no latest_localizable_revision.
        ApprovedRevisionFactory(document=doc, is_ready_for_localization=False)
        self.assertFalse(manage_existing_machine_translations(mt_config, doc))

        # The document is a redirect.
        doc = RedirectRevisionFactory().document
        self.assertFalse(manage_existing_machine_translations(mt_config, doc))
+ doc = RedirectRevisionFactory().document + self.assertFalse(manage_existing_machine_translations(mt_config, doc)) diff --git a/kitsune/l10n/utils.py b/kitsune/l10n/utils.py new file mode 100644 index 00000000000..f75f64999b0 --- /dev/null +++ b/kitsune/l10n/utils.py @@ -0,0 +1,227 @@ +from datetime import timedelta +from functools import partial, wraps + +from django.contrib.auth import get_user_model +from django.core.exceptions import ValidationError +from django.db import connection +from django_celery_beat.models import IntervalSchedule, PeriodicTask + +from kitsune.users.models import Profile + + +def text_to_duration(text): + """ + Returns a timedelta object from a simple, readable string format. For + example, it would parse "3 days" into timedelta(days=3). The text must + be expressed as a positive integer and one of the following units of + measurement: "days", "hours", or "minutes". If the integer is zero, the + units can be skipped. For example, "3 days" or "2 hours" or "1 minutes" + or "0". The integer and units must be separated by whitespace. Invalid + text will raise an instance of ValidationError. + """ + parts = text.split() + validation_error = ValidationError( + ( + "Must be expressed as a positive integer and one of the following " + 'units of measurement: "days", "hours", or "minutes". If the integer ' + 'is zero, the units can be skipped. For example, "3 days" or "2 hours" ' + 'or "1 minutes" or "0").' + ) + ) + + if not parts: + raise validation_error + + try: + num = int(parts[0]) + except ValueError: + raise validation_error + else: + if num < 0: + raise validation_error + + if num == 0: + return timedelta(0) + + if not ((len(parts) == 2) and ((units := parts[1]) in ("days", "hours", "minutes"))): + raise validation_error + + return timedelta(**{units: num}) + + +def duration_to_text(duration): + """ + A simpler, more readable version of Django's function that converts a timedelta + instance into a string. 
It only cares about days, hours, and minutes. + """ + days = duration.days + minutes = duration.seconds // 60 + hours = minutes // 60 + minutes %= 60 + + if minutes: + return f"{(days * 24 * 60) + (hours * 60) + minutes} minutes" + + if hours: + return f"{(days * 24) + hours} hours" + + if days: + return f"{days} days" + + return "0" + + +def get_l10n_bot(): + """ + Returns the User instance that is the SUMO L10n Bot. + """ + user, created = get_user_model().objects.get_or_create( + username="sumo-l10n-bot", defaults=dict(email="sumodev@mozilla.com") + ) + if created: + Profile.objects.create(user=user, name="SUMO Localization Bot") + return user + + +def manage_heartbeat(heartbeat_period): + """ + Creates, updates, or deletes the "L10n Heartbeat" periodic task. + """ + name = "L10n Heartbeat" + + if heartbeat_period == timedelta(0): + # Delete the periodic task. + PeriodicTask.objects.filter(name=name).delete() + return + + num, units = duration_to_text(heartbeat_period).split() + + # Get or create an interval schedule. + interval, created_interval = IntervalSchedule.objects.get_or_create( + every=int(num), period=units + ) + + heartbeat, created_heartbeat = PeriodicTask.objects.get_or_create( + name=name, + defaults=dict( + interval=interval, + task="kitsune.l10n.tasks.handle_wiki_localization", + ), + ) + + # Has the interval changed? + if (not created_heartbeat) and ( + created_interval or (str(heartbeat.interval) != str(interval)) + ): + # Update the heartbeat with the new interval. + heartbeat.interval = interval + heartbeat.save() + + +def build_message( + mt_config, + creations_awaiting_review, + creations_already_approved, + rejections, + pre_review_approvals, + post_rejection_approvals, +): + """ + Builds a message given the machine translation URL's that have been automatically + created, rejected, approved after the review grace period, and approved after the + post-review grace period. 
+ """ + msgs = [] + + if creations_awaiting_review: + msgs.append( + ( + "The following machine translations were automatically created " + "and are awaiting review:\n" + ) + ) + msgs.extend(f"{url}\n" for url in creations_awaiting_review) + + if creations_already_approved: + msgs.append( + ( + "The following machine translations were automatically created and also " + "immediately approved because the locale team has not been active within " + f"the last {duration_to_text(mt_config.locale_team_inactivity_grace_period)}:\n" + ) + ) + msgs.extend(f"{url}\n" for url in creations_already_approved) + + if pre_review_approvals or post_rejection_approvals: + if pre_review_approvals: + msgs.append( + ( + "The following machine translations were automatically approved " + "because they were not reviewed within the grace period of " + f"{duration_to_text(mt_config.review_grace_period)}:\n" + ) + ) + msgs.extend(f"{url}\n" for url in pre_review_approvals) + + if post_rejection_approvals: + msgs.append( + ( + "The following machine translations are copies of machine " + "translations that were reviewed and rejected, but because " + "alternate translations were not approved within the post-" + "review grace period of " + f"{duration_to_text(mt_config.post_review_grace_period)}, " + "these copies were automatically created and approved:\n" + ) + ) + msgs.extend(f"{url}\n" for url in post_rejection_approvals) + + if rejections: + msgs.append( + ( + "The following machine translations were automatically " + "rejected because they were out-of-date or superseded " + "by an alternate revision:\n" + ) + ) + msgs.extend(f"{url}\n" for url in rejections) + + return "\n\n".join(msgs) + + +def run_with_pg_lock(func=None, lock_key=None, default=None): + """ + Decorator that only runs the decorated function if it can acquire the + Postgres advisory lock specified by the given "lock_key". 
It does not + block if the lock can't be acquired, so in that case the execution of + the decorated function is skipped and the given default value is + returned. + """ + + if not func: + return partial(run_with_pg_lock, lock_key=lock_key, default=default) + + if not lock_key: + lock_key = 9876543210 + + def run_sql(sql, *args): + with connection.cursor() as cursor: + cursor.execute(sql, args) + return cursor.fetchone()[0] + + @wraps(func) + def wrapper(*args, **kwargs): + # Attempt to acquire the advisory lock without blocking. Immediately + # returns false if the lock is already taken. + lock_acquired = run_sql("SELECT pg_try_advisory_lock(%s);", lock_key) + + if not lock_acquired: + return default + + try: + return func(*args, **kwargs) + finally: + # Release the advisory lock. + run_sql("SELECT pg_advisory_unlock(%s);", lock_key) + + return wrapper diff --git a/kitsune/l10n/wiki.py b/kitsune/l10n/wiki.py new file mode 100644 index 00000000000..5edac7c2ecb --- /dev/null +++ b/kitsune/l10n/wiki.py @@ -0,0 +1,495 @@ +from datetime import datetime +from functools import cache + +from django.conf import settings +from django.contrib.postgres.aggregates import ArrayAgg +from django.db.models import Count, Exists, F, OuterRef, Q, Value +from django.db.models.functions import Now + +from kitsune.dashboards.readouts import get_visits_subquery +from kitsune.l10n.llm import get_localization +from kitsune.l10n.models import RevisionActivityRecord +from kitsune.l10n.utils import duration_to_text, get_l10n_bot +from kitsune.wiki.models import Document, Locale, Revision +from kitsune.wiki.config import REDIRECT_HTML + + +def is_suitable_for_l10n(doc): + """ + Returns a boolean indicating whether the given document is a default + document, localizable, has a current revision, and has a localizable + revision. In other words, are the bare minimum fundamentals in place + to be suitable for localization. 
+ """ + return ( + doc + and (doc.locale == settings.WIKI_DEFAULT_LANGUAGE) + and doc.is_localizable + and doc.current_revision + and doc.latest_localizable_revision + ) + + +def is_allowed_for_machine_translation(mt_config, doc): + """ + Returns a boolean indicating whether the document is allowed for machine translation. + """ + if doc.locale != settings.WIKI_DEFAULT_LANGUAGE: + if not (doc.parent and (doc.locale in mt_config.enabled_languages)): + return False + doc = doc.parent + + return ( + is_suitable_for_l10n(doc) + and not doc.is_archived + and not doc.html.startswith(REDIRECT_HTML) + and mt_config.is_slug_allowed(doc.slug) + and mt_config.is_approved_date_allowed(doc.latest_localizable_revision.reviewed) + and mt_config.is_approver_allowed(doc.latest_localizable_revision.reviewer) + ) + + +@cache +def has_inactive_team(locale, grace_period): + """ + Returns a boolean indicating whether or not the given locale has a + team without an active leader or reviewer within the grace period. + """ + return not ( + Locale.objects.filter(locale=locale) + .filter( + Exists( + Revision.objects.filter( + ( + (Q(creator__in=OuterRef("leaders")) | Q(creator__in=OuterRef("reviewers"))) + & Q(created__gt=Now() - grace_period) + ) + | ( + ( + Q(reviewer__in=OuterRef("leaders")) + | Q(reviewer__in=OuterRef("reviewers")) + ) + & Q(reviewed__gt=Now() - grace_period) + ) + ) + ) + ) + .exists() + ) + + +def current_revision_is_unreviewed_machine_translation(doc): + """ + Convenience function that returns a boolean indicating whether or not + the given document's current revision was machine-generated and not + reviewed by a human. 
+ """ + if ( + (doc.locale == settings.WIKI_DEFAULT_LANGUAGE) + or doc.parent is None + or not (current_rev := doc.current_revision) + ): + return False + l10n_bot = get_l10n_bot() + return (current_rev.creator == l10n_bot) and (current_rev.reviewer == l10n_bot) + + +def create_machine_translations(mt_config, default_doc=None): + """ + Create machine translations, within all locales enabled for machine translations, + for the given default document if one is provided, otherwise all default documents. + + Returns a dict containing the URL's of the machine-translations that were created, + organized by locale and the kind of creation. + For example: + { + "it": { + "awaiting_review": [...], + "already_approved": [...], + } + "es": { + "awaiting_review": [...], + "already_approved": [...], + }, + ... + } + """ + report = dict() + + if not mt_config.is_active(): + return report + + if default_doc and not is_allowed_for_machine_translation(mt_config, default_doc): + return report + + def add_to_report(locale, rev): + kind = "already_approved" if rev.is_approved else "awaiting_review" + report.setdefault(locale, {}).setdefault(kind, []).append(rev.get_absolute_url()) + + l10n_bot = get_l10n_bot() + model_name = mt_config.llm_name + enabled_locales = set(mt_config.enabled_languages) + review_grace_period = mt_config.review_grace_period + limit_to_approved_after = mt_config.limit_to_approved_after + limit_to_approver_in_group = mt_config.limit_to_approver_in_group + team_grace_period = mt_config.locale_team_inactivity_grace_period + + def get_filter_for(what, prefix=""): + if what == "limit_to_slugs": + full_slugs = mt_config.limit_to_full_slugs + slug_prefixes = mt_config.limit_to_slug_prefixes + else: + full_slugs = mt_config.disabled_full_slugs + slug_prefixes = mt_config.disabled_slug_prefixes + filter = Q() + if full_slugs: + filter = Q(**{f"{prefix}slug__in": full_slugs}) + for p in slug_prefixes: + filter |= Q(**{f"{prefix}slug__startswith": p}) + return filter + + 
# We want to cache the calls to "has_inactive_team" while creating machine + # translations below, but always start with a fresh cache. + has_inactive_team.cache_clear() + + if default_doc: + qs_localized_docs = Document.objects.filter(parent=default_doc) + else: + qs_localized_docs = Document.objects.filter( + parent__isnull=False, + parent__is_archived=False, + parent__is_localizable=True, + parent__current_revision__isnull=False, + parent__latest_localizable_revision__isnull=False, + ).exclude(parent__html__startswith=REDIRECT_HTML) + + if slug_limiting := get_filter_for("limit_to_slugs", prefix="parent__"): + qs_localized_docs = qs_localized_docs.filter(slug_limiting) + + if slug_disabling := get_filter_for("disabled_slugs", prefix="parent__"): + qs_localized_docs = qs_localized_docs.exclude(slug_disabling) + + if limit_to_approved_after: + qs_localized_docs = qs_localized_docs.filter( + parent__latest_localizable_revision__reviewed__gt=limit_to_approved_after + ) + + if limit_to_approver_in_group: + qs_localized_docs = qs_localized_docs.filter( + parent__latest_localizable_revision__reviewer__groups=limit_to_approver_in_group + ) + + # Only consider localized documents within locales that have been enabled + # for machine translations. + qs_localized_docs = qs_localized_docs.filter(locale__in=enabled_locales) + + # First, let's find all localized documents that need a machine translation. + for localized_doc in ( + qs_localized_docs.exclude( + # Exclude localized documents that are already up-to-date with their parent. + current_revision__based_on_id__gte=F("parent__latest_localizable_revision_id") + ) + .exclude( + # Exclude localized documents that already have an up-to-date revision + # that is either machine-generated, or contributor-created but still + # awaiting review within the review grace period. 
+ Exists( + Revision.objects.filter( + document=OuterRef("pk"), + ).filter( + Q(based_on_id__gte=OuterRef("parent__latest_localizable_revision_id")) + & ( + Q(creator=l10n_bot) + | (Q(reviewed__isnull=True) & Q(created__gt=Now() - review_grace_period)) + ) + ) + ) + ) + .annotate( + num_visits=get_visits_subquery(document=OuterRef("pk")), + num_visits_parent=get_visits_subquery(document=OuterRef("parent")), + ) + .order_by( + F("num_visits_parent").desc(nulls_last=True), F("num_visits").desc(nulls_last=True) + ) + .select_related("parent") + ): + rev = create_machine_translation( + model_name, localized_doc.parent, localized_doc.locale, l10n_bot, team_grace_period + ) + add_to_report(localized_doc.locale, rev) + + # Finally, find all of the default documents that have a localizable revision but + # don't yet have a localized document in one or more of the locales that have been + # enabled for machine translation. + if default_doc: + existing_locales = set(qs_localized_docs.values_list("locale", flat=True)) + for locale in sorted(enabled_locales - existing_locales): + rev = create_machine_translation( + model_name, default_doc, locale, l10n_bot, team_grace_period + ) + add_to_report(locale, rev) + else: + qs_default_docs = Document.objects.filter( + is_archived=False, + is_localizable=True, + parent__isnull=True, + current_revision__isnull=False, + latest_localizable_revision__isnull=False, + locale=settings.WIKI_DEFAULT_LANGUAGE, + ).exclude(html__startswith=REDIRECT_HTML) + + if slug_limiting := get_filter_for("limit_to_slugs"): + qs_default_docs = qs_default_docs.filter(slug_limiting) + + if slug_disabling := get_filter_for("disabled_slugs"): + qs_default_docs = qs_default_docs.exclude(slug_disabling) + + if limit_to_approved_after: + qs_default_docs = qs_default_docs.filter( + latest_localizable_revision__reviewed__gt=limit_to_approved_after + ) + + if limit_to_approver_in_group: + qs_default_docs = qs_default_docs.filter( + 
latest_localizable_revision__reviewer__groups=limit_to_approver_in_group + ) + + for default_doc in ( + qs_default_docs.annotate( + existing_locales=ArrayAgg( + "translations__locale", + filter=Q(translations__locale__in=enabled_locales), + default=Value([]), + ), + num_existing_locales=Count( + "translations__locale", + filter=Q(translations__locale__in=enabled_locales), + ), + num_visits=get_visits_subquery(document=OuterRef("pk")), + ) + .filter(num_existing_locales__lt=len(enabled_locales)) + .order_by(F("num_visits").desc(nulls_last=True)) + ): + for locale in sorted(enabled_locales - set(default_doc.existing_locales)): + rev = create_machine_translation( + model_name, default_doc, locale, l10n_bot, team_grace_period + ) + add_to_report(locale, rev) + + return report + + +def create_machine_translation(model_name, default_doc, target_locale, creator, team_grace_period): + """ + Create and return the machine translation of the current revision of the given + default document for the given target locale. + """ + content = get_localization(model_name, default_doc, "content", target_locale) + summary = get_localization(model_name, default_doc, "summary", target_locale) + keywords = get_localization(model_name, default_doc, "keywords", target_locale) + + localized_doc = default_doc.translated_to(target_locale) + + if not localized_doc: + if default_doc.is_template: + # Do not translate the title of templates. + title = default_doc.title + else: + title = get_localization(model_name, default_doc, "title", target_locale) + + # Create a new document for the locale if there isn't one already. 
+ localized_doc = Document.objects.create( + title=title, + parent=default_doc, + locale=target_locale, + slug=default_doc.slug, + category=default_doc.category, + allow_discussion=default_doc.allow_discussion, + ) + + now = datetime.now() + + extra_kwargs = {} + if publish_now := has_inactive_team(target_locale, team_grace_period): + extra_kwargs.update( + reviewed=now, + reviewer=creator, + comment=( + "Approved immediately because its locale team had not been " + f"active within the past {duration_to_text(team_grace_period)}." + ), + ) + + # Create the localized revision. + rev = Revision.objects.create( + created=now, + creator=creator, + content=content, + summary=summary, + keywords=keywords, + document=localized_doc, + is_approved=publish_now, + based_on=default_doc.latest_localizable_revision, + **extra_kwargs, + ) + # Record the action. + RevisionActivityRecord.objects.create( + revision=rev, + action=( + RevisionActivityRecord.MT_CREATED_AS_APPROVED + if publish_now + else RevisionActivityRecord.MT_CREATED_AS_AWAITING_REVIEW + ), + ) + return rev + + +def manage_existing_machine_translations(mt_config, doc=None): + """ + This function manages pending machine translations. It does two things, + both of which operate within the context of the provided document if one + is provided, otherwise all documents. First, it "cleans" pending machine + translations, which means it marks as reviewed the machine translations + that are either out-of-date or no longer needed. Second, it approves pending + machine translations that are still relevant, and have either been awaiting + review for longer than the review grace period, or have been rejected but no + other translation has been approved within the post-review grace period. + + NOTE: This function will manage all existing machine translations, even + those within locales that were initially enabled and then later disabled. 
+ + Returns a dict containing the URL's of the machine-translated revisions + that were modified, organized by locale and the kind of modification. + For example: + { + "it": { + "rejections": [...], + "pre_review_approvals": [...], + "post_rejection_approvals": [...], + } + "es": { + "rejections": [...], + "pre_review_approvals": [...], + "post_rejection_approvals": [...], + }, + ... + } + """ + l10n_bot = get_l10n_bot() + review_grace_period = mt_config.review_grace_period + post_review_grace_period = mt_config.post_review_grace_period + + report = dict() + + def add_to_report(locale, kind, rev): + report.setdefault(locale, {}).setdefault(kind, []).append(rev.get_absolute_url()) + + if doc: + if not is_suitable_for_l10n(doc.original): + return report + if doc.locale == settings.WIKI_DEFAULT_LANGUAGE: + # Consider the revisions of all of this document's localized documents. + qs = Revision.objects.filter(document__parent=doc) + else: + # Only consider the revisions of this localized document. + qs = doc.revisions + else: + # Consider the revisions of all localized documents. + qs = Revision.objects.filter( + document__parent__isnull=False, + document__parent__is_localizable=True, + document__parent__current_revision__isnull=False, + document__parent__latest_localizable_revision__isnull=False, + ) + + qs = qs.filter(creator=l10n_bot, is_approved=False).annotate(locale=F("document__locale")) + + no_longer_needed = ( + Q( + document__current_revision__based_on_id__gte=F( + "document__parent__latest_localizable_revision_id" + ) + ) + | Q(document__parent__is_archived=True) + | Q(document__parent__html__startswith=REDIRECT_HTML) + ) + + # First, mark as reviewed any irrelevant machine translations that were awaiting review. + for rev in qs.filter( + reviewed__isnull=True, + ).filter( + # This machine translation is either out-of-date or no longer needed. 
+ Q(based_on_id__lt=F("document__parent__latest_localizable_revision_id")) + | no_longer_needed + ): + rev.is_approved = False + rev.reviewer = l10n_bot + rev.reviewed = datetime.now() + rev.comment = "No longer relevant." + rev.save() + # Record the action. + RevisionActivityRecord.objects.create( + revision=rev, action=RevisionActivityRecord.MT_REJECTED + ) + add_to_report(rev.locale, "rejections", rev) + + # Next, publish pending machine translations, which are still-relevant machine + # translations that have either not been reviewed and approved within the review + # grace period, or were reviewed and not approved but an alternate translation + # was not approved within the post-review grace period. We can't do an SQL update + # because we need to trigger the Revision.save method. + for rev in ( + qs.filter( + based_on_id__gte=F("document__parent__latest_localizable_revision_id"), + ) + .exclude(no_longer_needed) + .filter( + (Q(reviewed__isnull=True) & Q(created__lt=Now() - review_grace_period)) + | ( + Q(reviewed__isnull=False) + & ~Q(reviewer=l10n_bot) + & Q(reviewed__lt=Now() - post_review_grace_period) + ) + ) + ): + if rev.reviewed is None: + rev.is_approved = True + rev.reviewed = datetime.now() + rev.reviewer = l10n_bot + rev.comment = ( + "Automatically approved because it was not reviewed " + f"within {duration_to_text(review_grace_period)}." + ) + rev.save() + # Record the action. 
+ RevisionActivityRecord.objects.create( + revision=rev, action=RevisionActivityRecord.MT_APPROVED_PRE_REVIEW + ) + add_to_report(rev.locale, "pre_review_approvals", rev) + else: + now = datetime.now() + copy = Revision.objects.create( + created=now, + reviewed=now, + is_approved=True, + creator=l10n_bot, + reviewer=l10n_bot, + document_id=rev.document_id, + based_on_id=rev.based_on_id, + summary=rev.summary, + content=rev.content, + keywords=rev.keywords, + comment=( + "Automatically created and approved because an alternate translation " + f"was not approved within {duration_to_text(post_review_grace_period)} " + f"after the rejection of {rev.get_absolute_url()}." + ), + ) + # Record the action. + RevisionActivityRecord.objects.create( + revision=copy, action=RevisionActivityRecord.MT_CREATED_AS_APPROVED_POST_REJECTION + ) + add_to_report(rev.locale, "post_rejection_approvals", copy) + + return report diff --git a/kitsune/settings.py b/kitsune/settings.py index 605d9470455..d8bdaba8402 100644 --- a/kitsune/settings.py +++ b/kitsune/settings.py @@ -635,6 +635,7 @@ def _username_algo(email): "django.contrib.sites", "django.contrib.messages", "django.contrib.staticfiles", + "django_celery_beat", "django_jinja", "graphene_django", "mozilla_django_oidc", @@ -674,6 +675,7 @@ def _username_algo(email): "kitsune.notifications", "kitsune.journal", "kitsune.tidings", + "kitsune.l10n", "rest_framework", "statici18n", "watchman", @@ -875,6 +877,7 @@ def JINJA_CONFIG(): CELERY_WORKER_HIJACK_ROOT_LOGGER = config( "CELERY_WORKER_HIJACK_ROOT_LOGGER", default=False, cast=bool ) +CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler" # Wiki rebuild settings WIKI_REBUILD_TOKEN = "sumo:wiki:full-rebuild" diff --git a/kitsune/wiki/badges.py b/kitsune/wiki/badges.py index 897ec3c1aab..290beee9098 100644 --- a/kitsune/wiki/badges.py +++ b/kitsune/wiki/badges.py @@ -2,6 +2,8 @@ from django.db.models.signals import post_save from kitsune.wiki.models import Revision 
+from kitsune.l10n.utils import get_l10n_bot + # Yo ******! These are year-agnostic badge templates which code uses # to get-or-create the actual Badge instances. These strings should @@ -33,8 +35,8 @@ def on_revision_save(sender, instance, **kwargs): year = rev.created.year creator = rev.creator - # We only care about approved revisions. - if not rev.is_approved: + # We only care about approved revisions by real users. + if (not rev.is_approved) or (creator == get_l10n_bot()): return # The badge to be awarded depends on the locale. diff --git a/kitsune/wiki/jinja2/wiki/includes/document_macros.html b/kitsune/wiki/jinja2/wiki/includes/document_macros.html index 26d43b4e809..12a8ab9a133 100644 --- a/kitsune/wiki/jinja2/wiki/includes/document_macros.html +++ b/kitsune/wiki/jinja2/wiki/includes/document_macros.html @@ -21,8 +21,8 @@

{{ document.title }}

{% if created_date %} - - {% if is_first_revision %}{{ _("Created") }}{% else %}{{ _("Last updated") }}{% endif %}: + + {% if is_first_revision %}{{ _("Created") }}{% else %}{{ _("Last updated") }}{% endif %}: {{ datetimeformat(created_date, format="shortdate", use_naturaltime=True) }} @@ -75,6 +75,18 @@

{{ document.title }}

{% macro document_content(document, fallback_reason, request, settings, css_class='', any_localizable_revision=False, full_locale_name=None) -%}
{% if not fallback_reason %} + {% if current_revision_is_unreviewed_machine_translation(document) %} +
+ +
+ {% trans edit_link=url('wiki.edit_document', document.slug, locale=document.locale) %} + This is a machine-generated translation of the English content. It has not been + reviewed by a human, and may contain errors. If you would like to revise this + content, you can start here. + {% endtrans %} +
+
+ {% endif %} {{ document.html|safe }} {% elif fallback_reason == 'no_translation' %} {% if document.is_localizable and any_localizable_revision and not document.is_archived %} diff --git a/kitsune/wiki/signals.py b/kitsune/wiki/signals.py new file mode 100644 index 00000000000..bb46db3559a --- /dev/null +++ b/kitsune/wiki/signals.py @@ -0,0 +1,4 @@ +from django.dispatch import Signal + + +revision_approved = Signal() diff --git a/kitsune/wiki/views.py b/kitsune/wiki/views.py index 4b8b3c37f05..5e832716864 100644 --- a/kitsune/wiki/views.py +++ b/kitsune/wiki/views.py @@ -74,6 +74,7 @@ doc_html_cache_key, ) from kitsune.wiki.parser import wiki_to_html +from kitsune.wiki.signals import revision_approved from kitsune.wiki.tasks import ( render_document_cascade, schedule_rebuild_kb, @@ -802,8 +803,10 @@ def review_revision(request, document_slug, revision_id): doc.save() - # Send notifications of approvedness and readiness: if rev.is_ready_for_localization or rev.is_approved: + # Send the "revision_approved" signal. 
+ revision_approved.send(sender="kitsune.wiki.views.review_revision", revision=rev) + # Send notifications of approvedness and readiness: ApprovedOrReadyUnion(rev).fire(exclude=[rev.creator, request.user]) # Send an email (not really a "notification" in the sense that diff --git a/poetry.lock b/poetry.lock index 3504a212434..6bc43a7ae48 100644 --- a/poetry.lock +++ b/poetry.lock @@ -69,6 +69,38 @@ files = [ [package.extras] dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.8.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +files = [ + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] +trio = ["trio (>=0.26.1)"] + [[package]] name = "appnope" version = "0.1.3" @@ -701,6 +733,20 @@ files = [ {file = "commonware-0.6.0.tar.gz", hash 
= "sha256:0e9520986e292f2bf8cdf80b32f21ef01e4058fd7baa61d2d282d21ed7085b1f"}, ] +[[package]] +name = "cron-descriptor" +version = "1.4.5" +description = "A Python library that converts cron expressions into human readable strings." +optional = false +python-versions = "*" +files = [ + {file = "cron_descriptor-1.4.5-py3-none-any.whl", hash = "sha256:736b3ae9d1a99bc3dbfc5b55b5e6e7c12031e7ba5de716625772f8b02dcd6013"}, + {file = "cron_descriptor-1.4.5.tar.gz", hash = "sha256:f51ce4ffc1d1f2816939add8524f206c376a42c87a5fca3091ce26725b3b1bca"}, +] + +[package.extras] +dev = ["polib"] + [[package]] name = "cryptography" version = "43.0.1" @@ -855,6 +901,17 @@ files = [ {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, ] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "dj-database-url" version = "1.3.0" @@ -953,6 +1010,25 @@ files = [ {file = "django_cache_url-3.4.5-py2.py3-none-any.whl", hash = "sha256:5f350759978483ab85dc0e3e17b3d53eed3394a28148f6bf0f53d11d0feb5b3c"}, ] +[[package]] +name = "django-celery-beat" +version = "2.7.0" +description = "Database-backed Periodic Tasks." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "django_celery_beat-2.7.0-py3-none-any.whl", hash = "sha256:851c680d8fbf608ca5fecd5836622beea89fa017bc2b3f94a5b8c648c32d84b1"}, + {file = "django_celery_beat-2.7.0.tar.gz", hash = "sha256:8482034925e09b698c05ad61c36ed2a8dbc436724a3fe119215193a4ca6dc967"}, +] + +[package.dependencies] +celery = ">=5.2.3,<6.0" +cron-descriptor = ">=1.2.32" +Django = ">=2.2,<5.2" +django-timezone-field = ">=5.0" +python-crontab = ">=2.3.4" +tzdata = "*" + [[package]] name = "django-cors-headers" version = "3.14.0" @@ -1309,6 +1385,17 @@ django = ["django"] flask = ["blinker", "flask"] sanic = ["sanic"] +[[package]] +name = "docstring-parser" +version = "0.16" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"}, + {file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"}, +] + [[package]] name = "docutils" version = "0.18.1" @@ -1588,13 +1675,13 @@ grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] [[package]] name = "google-auth" -version = "2.26.2" +version = "2.37.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" files = [ - {file = "google-auth-2.26.2.tar.gz", hash = "sha256:97327dbbf58cccb58fc5a1712bba403ae76668e64814eb30f7316f7e27126b81"}, - {file = "google_auth-2.26.2-py2.py3-none-any.whl", hash = "sha256:3f445c8ce9b61ed6459aad86d8ccdba4a9afed841b2d1451a11ef4db08957424"}, + {file = "google_auth-2.37.0-py2.py3-none-any.whl", hash = "sha256:42664f18290a6be591be5329a96fe30184be1a1badb7292a7f686a9659de9ca0"}, + {file = "google_auth-2.37.0.tar.gz", hash = "sha256:0054623abf1f9c83492c63d3f47e77f0a544caa3d40b2d98e099a611c2dd5d00"}, ] [package.dependencies] @@ -1604,11 +1691,91 @@ rsa = ">=3.1.4,<5" 
[package.extras] aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (>=38.0.3)", "pyjwt (>=2.0)"] pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +[[package]] +name = "google-cloud-aiplatform" +version = "1.76.0" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "google_cloud_aiplatform-1.76.0-py2.py3-none-any.whl", hash = "sha256:0b0348525b9528db7b69538ff6e86289ea2ce0d80f3784a42865fc994fe10dd1"}, + {file = "google_cloud_aiplatform-1.76.0.tar.gz", hash = "sha256:910fb7fb6ef7ec73a48523872d669370755f59ac6d764dc8bf2fc91e7c0b2fca"}, +] + +[package.dependencies] +docstring-parser = "<1" +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-bigquery = ">=1.15.0,<3.20.0 || >3.20.0,<4.0.0dev" +google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" +google-cloud-storage = ">=1.32.0,<3.0.0dev" +packaging = ">=14.3" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" +pydantic = "<3" +shapely = "<3.0.0dev" +typing-extensions = "*" + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.16.0)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +evaluation = ["pandas (>=1.0.0)", "tqdm (>=4.23.0)"] +full = ["docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-cloud-bigquery", 
"google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)"] +langchain = ["langchain (>=0.1.16,<0.4)", "langchain-core (<0.4)", "langchain-google-vertexai (<3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)"] +langchain-testing = ["absl-py", "cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "langchain (>=0.1.16,<0.4)", "langchain-core (<0.4)", "langchain-google-vertexai (<3)", "openinference-instrumentation-langchain (>=0.1.19,<0.2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "pytest-xdist", "typing-extensions"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (>=5.3.1,<7)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<=0.114.0)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "setuptools 
(<70.0.0)"] +ray-testing = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "immutabledict", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pytest-xdist", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "ray[train]", "scikit-learn (<1.6.0)", "setuptools (<70.0.0)", "tensorflow", "torch (>=2.0.0,<2.1.0)", "xgboost", "xgboost-ray"] +reasoningengine = ["cloudpickle (>=3.0,<4.0)", "google-cloud-trace (<2)", "opentelemetry-exporter-gcp-trace (<2)", "opentelemetry-sdk (<2)", "pydantic (>=2.6.3,<3)", "typing-extensions"] +tensorboard = ["tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +testing = ["aiohttp", "bigframes", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<=0.114.0)", "google-api-core (>=2.11,<3.0.0)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "immutabledict", "ipython", "kfp (>=2.6.0,<3.0.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.16.0)", "nltk", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=14.0.0)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pytest-asyncio", "pytest-xdist", "pyyaml (>=5.3.1,<7)", "ray[default] (>=2.4,<2.5.dev0 || >2.9.0,!=2.9.1,!=2.9.2,<2.10.dev0 || >=2.33.dev0,<=2.33.0)", "ray[default] (>=2.5,<=2.33.0)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "scikit-learn (<1.6.0)", "sentencepiece (>=0.2.0)", "setuptools (<70.0.0)", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<2.18.0)", "tensorflow (==2.13.0)", "tensorflow (==2.16.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "torch (>=2.2.0)", "tqdm (>=4.23.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug 
(>=2.0.0,<2.1.0dev)", "xgboost"] +tokenization = ["sentencepiece (>=0.2.0)"] +vizier = ["google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-bigquery" +version = "3.27.0" +description = "Google BigQuery API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_bigquery-3.27.0-py2.py3-none-any.whl", hash = "sha256:b53b0431e5ba362976a4cd8acce72194b4116cdf8115030c7b339b884603fcc3"}, + {file = "google_cloud_bigquery-3.27.0.tar.gz", hash = "sha256:379c524054d7b090fa56d0c22662cc6e6458a6229b6754c0e7177e3a73421d2c"}, +] + +[package.dependencies] +google-api-core = {version = ">=2.11.1,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<3.0.0dev" +google-cloud-core = ">=2.4.1,<3.0.0dev" +google-resumable-media = ">=2.0.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.3,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "bigquery-magics (>=0.1.0)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.22.3,<2.0.0dev)", "protobuf (>=3.20.2,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] +ipython = ["bigquery-magics (>=0.1.0)"] +ipywidgets = ["ipykernel 
(>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + [[package]] name = "google-cloud-core" version = "2.4.1" @@ -1627,27 +1794,64 @@ google-auth = ">=1.25.0,<3.0dev" [package.extras] grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] +[[package]] +name = "google-cloud-resource-manager" +version = "1.12.5" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_resource_manager-1.12.5-py2.py3-none-any.whl", hash = "sha256:2708a718b45c79464b7b21559c701b5c92e6b0b1ab2146d0a256277a623dc175"}, + {file = "google_cloud_resource_manager-1.12.5.tar.gz", hash = "sha256:b7af4254401ed4efa3aba3a929cb3ddb803fa6baf91a78485e45583597de5891"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + +[[package]] +name = "google-cloud-resource-manager" +version = "1.14.0" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google_cloud_resource_manager-1.14.0-py2.py3-none-any.whl", hash = "sha256:4860c3ea9ace760b317ea90d4e27f1b32e54ededdcc340a7cb70c8ef238d8f7c"}, + {file = "google_cloud_resource_manager-1.14.0.tar.gz", hash = "sha256:daa70a3a4704759d31f812ed221e3b6f7b660af30c7862e4a0060ea91291db30"}, +] + +[package.dependencies] +google-api-core = {version = 
">=1.34.1,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +google-auth = ">=2.14.1,<2.24.0 || >2.24.0,<2.25.0 || >2.25.0,<3.0.0dev" +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = {version = ">=1.22.3,<2.0.0dev", markers = "python_version < \"3.13\""} +protobuf = ">=3.20.2,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + [[package]] name = "google-cloud-storage" -version = "2.14.0" +version = "2.19.0" description = "Google Cloud Storage API client library" optional = false python-versions = ">=3.7" files = [ - {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, - {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, + {file = "google_cloud_storage-2.19.0-py2.py3-none-any.whl", hash = "sha256:aeb971b5c29cf8ab98445082cbfe7b161a1f48ed275822f59ed3f1524ea54fba"}, + {file = "google_cloud_storage-2.19.0.tar.gz", hash = "sha256:cd05e9e7191ba6cb68934d8eb76054d9be4562aa89dbc4236feee4d7d51342b2"}, ] [package.dependencies] -google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" -google-auth = ">=2.23.3,<3.0dev" +google-api-core = ">=2.15.0,<3.0.0dev" +google-auth = ">=2.26.1,<3.0dev" google-cloud-core = ">=2.3.0,<3.0dev" google-crc32c = ">=1.0,<2.0dev" -google-resumable-media = ">=2.6.0" +google-resumable-media = ">=2.7.2" requests = ">=2.18.0,<3.0.0dev" [package.extras] -protobuf = ["protobuf (<5.0.0dev)"] +protobuf = ["protobuf (<6.0.0dev)"] +tracing = ["opentelemetry-api (>=1.1.0)"] [[package]] name = "google-crc32c" @@ -1731,13 +1935,13 @@ testing = ["pytest"] [[package]] name = "google-resumable-media" -version = "2.7.0" +version = "2.7.2" description = "Utilities for Google Media Downloads and Resumable Uploads" optional = false -python-versions = ">= 3.7" +python-versions = ">=3.7" files = [ - {file = 
"google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, - {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, + {file = "google_resumable_media-2.7.2-py2.py3-none-any.whl", hash = "sha256:3ce7551e9fe6d99e9a126101d2536612bb73486721951e9562fee0f90c6ababa"}, + {file = "google_resumable_media-2.7.2.tar.gz", hash = "sha256:5280aed4629f2b60b847b0d42f9857fd4935c11af266744df33d8074cae92fe0"}, ] [package.dependencies] @@ -1759,6 +1963,7 @@ files = [ ] [package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" [package.extras] @@ -1930,6 +2135,22 @@ files = [ docs = ["Sphinx", "furo"] test = ["objgraph", "psutil"] +[[package]] +name = "grpc-google-iam-v1" +version = "0.14.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc_google_iam_v1-0.14.0-py2.py3-none-any.whl", hash = "sha256:fb4a084b30099ba3ab07d61d620a0d4429570b13ff53bd37bac75235f98b7da4"}, + {file = "grpc_google_iam_v1-0.14.0.tar.gz", hash = "sha256:c66e07aa642e39bb37950f9e7f491f70dad150ac9801263b42b2814307c2df99"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0dev" + [[package]] name = "grpcio" version = "1.63.0" @@ -2025,6 +2246,17 @@ setproctitle = ["setproctitle"] testing = ["coverage", "eventlet", "gevent", "pytest", "pytest-cov"] tornado = ["tornado (>=0.2)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O 
implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "html5lib" version = "1.1" @@ -2046,6 +2278,63 @@ chardet = ["chardet (>=2.2)"] genshi = ["genshi"] lxml = ["lxml"] +[[package]] +name = "httpcore" +version = "1.0.7" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, + {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "httpx-sse" +version = "0.4.0" +description = "Consume Server-Sent Event (SSE) messages with HTTPX." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721"}, + {file = "httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f"}, +] + [[package]] name = "identify" version = "2.5.33" @@ -2193,6 +2482,91 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jiter" +version = "0.8.2" +description = "Fast iterable JSON parser." +optional = false +python-versions = ">=3.8" +files = [ + {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, + {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, + {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, + {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, + {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, + {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, + {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, + {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, + {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, + {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, + {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, + {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, + {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, + {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, + {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = 
"sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, + {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, + {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, + {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, + {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, + {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, + {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, + {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, + {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, + {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, + {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, + {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, + {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, + {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, + {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, + {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, + {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, + {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, + {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = 
"sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, + {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, + {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, +] + [[package]] name = "jmespath" version = "1.0.1" @@ -2233,6 +2607,31 @@ pyopenssl = ">=0.13" [package.extras] docs = ["sphinx (>=4.3.0)", "sphinx-rtd-theme (>=1.0)"] +[[package]] +name = "jsonpatch" +version = "1.33" +description = "Apply JSON-Patches (RFC 6902)" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, !=3.6.*" +files = [ + {file = "jsonpatch-1.33-py2.py3-none-any.whl", hash = "sha256:0ae28c0cd062bbd8b8ecc26d7d164fbbea9652a1a3693f3b956c1eae5145dade"}, + {file = "jsonpatch-1.33.tar.gz", hash = "sha256:9fcd4009c41e6d12348b4a0ff2563ba56a2923a7dfee731d004e212e1ee5030c"}, +] + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jsonschema" version = "4.20.0" @@ -2300,6 +2699,93 @@ sqs = ["boto3 (>=1.26.143)", "pycurl (>=7.43.0.5)", "urllib3 (>=1.26.16)"] yaml = ["PyYAML (>=3.10)"] zookeeper = ["kazoo (>=2.8.0)"] +[[package]] +name = "langchain-core" +version = "0.3.29" +description = "Building applications with LLMs through composability" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_core-0.3.29-py3-none-any.whl", hash = "sha256:817db1474871611a81105594a3e4d11704949661008e455a10e38ca9ff601a1a"}, + {file = 
"langchain_core-0.3.29.tar.gz", hash = "sha256:773d6aeeb612e7ce3d996c0be403433d8c6a91e77bbb7a7461c13e15cfbe5b06"}, +] + +[package.dependencies] +jsonpatch = ">=1.33,<2.0" +langsmith = ">=0.1.125,<0.3" +packaging = ">=23.2,<25" +pydantic = [ + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, + {version = ">=2.5.2,<3.0.0", markers = "python_full_version < \"3.12.4\""}, +] +PyYAML = ">=5.3" +tenacity = ">=8.1.0,<8.4.0 || >8.4.0,<10.0.0" +typing-extensions = ">=4.7" + +[[package]] +name = "langchain-google-vertexai" +version = "2.0.10" +description = "An integration package connecting Google VertexAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_google_vertexai-2.0.10-py3-none-any.whl", hash = "sha256:192a5ac1c9165652a9b7bd936740be5d9efbecc6589f9865530d62c9054b3a1c"}, + {file = "langchain_google_vertexai-2.0.10.tar.gz", hash = "sha256:02e3dca590d1f20e63a6e4aa48693183c551a1e98a0476cdbbdba1e5f80b30f9"}, +] + +[package.dependencies] +google-cloud-aiplatform = ">=1.75.0,<2.0.0" +google-cloud-storage = ">=2.18.0,<3.0.0" +httpx = ">=0.27.0,<0.28.0" +httpx-sse = ">=0.4.0,<0.5.0" +langchain-core = ">=0.3.27,<0.4" +pydantic = ">=2.9,<2.10" + +[package.extras] +anthropic = ["anthropic[vertexai] (>=0.35.0,<1)"] +mistral = ["langchain-mistralai (>=0.2.0,<1)"] + +[[package]] +name = "langchain-openai" +version = "0.2.14" +description = "An integration package connecting OpenAI and LangChain" +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langchain_openai-0.2.14-py3-none-any.whl", hash = "sha256:d232496662f79ece9a11caf7d798ba863e559c771bc366814f7688e0fe664fe8"}, + {file = "langchain_openai-0.2.14.tar.gz", hash = "sha256:7a514f309e356b182a337c0ed36ab3fbe34d9834a235a3b85cb7f91ae775d978"}, +] + +[package.dependencies] +langchain-core = ">=0.3.27,<0.4.0" +openai = ">=1.58.1,<2.0.0" +tiktoken = ">=0.7,<1" + +[[package]] +name = "langsmith" +version = "0.2.10" +description = "Client 
library to connect to the LangSmith LLM Tracing and Evaluation Platform." +optional = false +python-versions = "<4.0,>=3.9" +files = [ + {file = "langsmith-0.2.10-py3-none-any.whl", hash = "sha256:b02f2f174189ff72e54c88b1aa63343defd6f0f676c396a690c63a4b6495dcc2"}, + {file = "langsmith-0.2.10.tar.gz", hash = "sha256:153c7b3ccbd823528ff5bec84801e7e50a164e388919fc583252df5b27dd7830"}, +] + +[package.dependencies] +httpx = ">=0.23.0,<1" +orjson = {version = ">=3.9.14,<4.0.0", markers = "platform_python_implementation != \"PyPy\""} +pydantic = [ + {version = ">=2.7.4,<3.0.0", markers = "python_full_version >= \"3.12.4\""}, + {version = ">=1,<3", markers = "python_full_version < \"3.12.4\""}, +] +requests = ">=2,<3" +requests-toolbelt = ">=1.0.0,<2.0.0" + +[package.extras] +compression = ["zstandard (>=0.23.0,<0.24.0)"] +langsmith-pyo3 = ["langsmith-pyo3 (>=0.1.0rc2,<0.2.0)"] + [[package]] name = "lazy-object-proxy" version = "1.10.0" @@ -2752,6 +3238,70 @@ files = [ [package.dependencies] setuptools = "*" +[[package]] +name = "numpy" +version = "2.2.1" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.2.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5edb4e4caf751c1518e6a26a83501fda79bff41cc59dac48d70e6d65d4ec4440"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa3017c40d513ccac9621a2364f939d39e550c542eb2a894b4c8da92b38896ab"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:61048b4a49b1c93fe13426e04e04fdf5a03f456616f6e98c7576144677598675"}, + {file = "numpy-2.2.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7671dc19c7019103ca44e8d94917eba8534c76133523ca8406822efdd19c9308"}, + {file = "numpy-2.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4250888bcb96617e00bfa28ac24850a83c9f3a16db471eca2ee1f1714df0f957"}, + {file = 
"numpy-2.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a7746f235c47abc72b102d3bce9977714c2444bdfaea7888d241b4c4bb6a78bf"}, + {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:059e6a747ae84fce488c3ee397cee7e5f905fd1bda5fb18c66bc41807ff119b2"}, + {file = "numpy-2.2.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f62aa6ee4eb43b024b0e5a01cf65a0bb078ef8c395e8713c6e8a12a697144528"}, + {file = "numpy-2.2.1-cp310-cp310-win32.whl", hash = "sha256:48fd472630715e1c1c89bf1feab55c29098cb403cc184b4859f9c86d4fcb6a95"}, + {file = "numpy-2.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:b541032178a718c165a49638d28272b771053f628382d5e9d1c93df23ff58dbf"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40f9e544c1c56ba8f1cf7686a8c9b5bb249e665d40d626a23899ba6d5d9e1484"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9b57eaa3b0cd8db52049ed0330747b0364e899e8a606a624813452b8203d5f7"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:bc8a37ad5b22c08e2dbd27df2b3ef7e5c0864235805b1e718a235bcb200cf1cb"}, + {file = "numpy-2.2.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9036d6365d13b6cbe8f27a0eaf73ddcc070cae584e5ff94bb45e3e9d729feab5"}, + {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51faf345324db860b515d3f364eaa93d0e0551a88d6218a7d61286554d190d73"}, + {file = "numpy-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38efc1e56b73cc9b182fe55e56e63b044dd26a72128fd2fbd502f75555d92591"}, + {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:31b89fa67a8042e96715c68e071a1200c4e172f93b0fbe01a14c0ff3ff820fc8"}, + {file = "numpy-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4c86e2a209199ead7ee0af65e1d9992d1dce7e1f63c4b9a616500f93820658d0"}, + {file = "numpy-2.2.1-cp311-cp311-win32.whl", hash = 
"sha256:b34d87e8a3090ea626003f87f9392b3929a7bbf4104a05b6667348b6bd4bf1cd"}, + {file = "numpy-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:360137f8fb1b753c5cde3ac388597ad680eccbbbb3865ab65efea062c4a1fd16"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:694f9e921a0c8f252980e85bce61ebbd07ed2b7d4fa72d0e4246f2f8aa6642ab"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3683a8d166f2692664262fd4900f207791d005fb088d7fdb973cc8d663626faa"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:780077d95eafc2ccc3ced969db22377b3864e5b9a0ea5eb347cc93b3ea900315"}, + {file = "numpy-2.2.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:55ba24ebe208344aa7a00e4482f65742969a039c2acfcb910bc6fcd776eb4355"}, + {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b1d07b53b78bf84a96898c1bc139ad7f10fda7423f5fd158fd0f47ec5e01ac7"}, + {file = "numpy-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5062dc1a4e32a10dc2b8b13cedd58988261416e811c1dc4dbdea4f57eea61b0d"}, + {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fce4f615f8ca31b2e61aa0eb5865a21e14f5629515c9151850aa936c02a1ee51"}, + {file = "numpy-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:67d4cda6fa6ffa073b08c8372aa5fa767ceb10c9a0587c707505a6d426f4e046"}, + {file = "numpy-2.2.1-cp312-cp312-win32.whl", hash = "sha256:32cb94448be47c500d2c7a95f93e2f21a01f1fd05dd2beea1ccd049bb6001cd2"}, + {file = "numpy-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:ba5511d8f31c033a5fcbda22dd5c813630af98c70b2661f2d2c654ae3cdfcfc8"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f1d09e520217618e76396377c81fba6f290d5f926f50c35f3a5f72b01a0da780"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3ecc47cd7f6ea0336042be87d9e7da378e5c7e9b3c8ad0f7c966f714fc10d821"}, + {file = 
"numpy-2.2.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f419290bc8968a46c4933158c91a0012b7a99bb2e465d5ef5293879742f8797e"}, + {file = "numpy-2.2.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:5b6c390bfaef8c45a260554888966618328d30e72173697e5cabe6b285fb2348"}, + {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:526fc406ab991a340744aad7e25251dd47a6720a685fa3331e5c59fef5282a59"}, + {file = "numpy-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f74e6fdeb9a265624ec3a3918430205dff1df7e95a230779746a6af78bc615af"}, + {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:53c09385ff0b72ba79d8715683c1168c12e0b6e84fb0372e97553d1ea91efe51"}, + {file = "numpy-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f3eac17d9ec51be534685ba877b6ab5edc3ab7ec95c8f163e5d7b39859524716"}, + {file = "numpy-2.2.1-cp313-cp313-win32.whl", hash = "sha256:9ad014faa93dbb52c80d8f4d3dcf855865c876c9660cb9bd7553843dd03a4b1e"}, + {file = "numpy-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:164a829b6aacf79ca47ba4814b130c4020b202522a93d7bff2202bfb33b61c60"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4dfda918a13cc4f81e9118dea249e192ab167a0bb1966272d5503e39234d694e"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:733585f9f4b62e9b3528dd1070ec4f52b8acf64215b60a845fa13ebd73cd0712"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:89b16a18e7bba224ce5114db863e7029803c179979e1af6ad6a6b11f70545008"}, + {file = "numpy-2.2.1-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:676f4eebf6b2d430300f1f4f4c2461685f8269f94c89698d832cdf9277f30b84"}, + {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f5cdf9f493b35f7e41e8368e7d7b4bbafaf9660cba53fb21d2cd174ec09631"}, + {file = "numpy-2.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:c1ad395cf254c4fbb5b2132fee391f361a6e8c1adbd28f2cd8e79308a615fe9d"}, + {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:08ef779aed40dbc52729d6ffe7dd51df85796a702afbf68a4f4e41fafdc8bda5"}, + {file = "numpy-2.2.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:26c9c4382b19fcfbbed3238a14abf7ff223890ea1936b8890f058e7ba35e8d71"}, + {file = "numpy-2.2.1-cp313-cp313t-win32.whl", hash = "sha256:93cf4e045bae74c90ca833cba583c14b62cb4ba2cba0abd2b141ab52548247e2"}, + {file = "numpy-2.2.1-cp313-cp313t-win_amd64.whl", hash = "sha256:bff7d8ec20f5f42607599f9994770fa65d76edca264a87b5e4ea5629bce12268"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7ba9cc93a91d86365a5d270dee221fdc04fb68d7478e6bf6af650de78a8339e3"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:3d03883435a19794e41f147612a77a8f56d4e52822337844fff3d4040a142964"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4511d9e6071452b944207c8ce46ad2f897307910b402ea5fa975da32e0102800"}, + {file = "numpy-2.2.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5c5cc0cbabe9452038ed984d05ac87910f89370b9242371bd9079cb4af61811e"}, + {file = "numpy-2.2.1.tar.gz", hash = "sha256:45681fd7128c8ad1c379f0ca0776a8b0c6583d2f69889ddac01559dfe4390918"}, +] + [[package]] name = "oauthlib" version = "3.2.2" @@ -2768,6 +3318,115 @@ rsa = ["cryptography (>=3.0.0)"] signals = ["blinker (>=1.4.0)"] signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] +[[package]] +name = "openai" +version = "1.59.6" +description = "The official Python library for the openai API" +optional = false +python-versions = ">=3.8" +files = [ + {file = "openai-1.59.6-py3-none-any.whl", hash = "sha256:b28ed44eee3d5ebe1a3ea045ee1b4b50fea36ecd50741aaa5ce5a5559c900cb6"}, + {file = "openai-1.59.6.tar.gz", hash = "sha256:c7670727c2f1e4473f62fea6fa51475c8bc098c9ffb47bfb9eef5be23c747934"}, +] + 
+[package.dependencies] +anyio = ">=3.5.0,<5" +distro = ">=1.7.0,<2" +httpx = ">=0.23.0,<1" +jiter = ">=0.4.0,<1" +pydantic = ">=1.9.0,<3" +sniffio = "*" +tqdm = ">4" +typing-extensions = ">=4.11,<5" + +[package.extras] +datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] +realtime = ["websockets (>=13,<15)"] + +[[package]] +name = "orjson" +version = "3.10.14" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +optional = false +python-versions = ">=3.8" +files = [ + {file = "orjson-3.10.14-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:849ea7845a55f09965826e816cdc7689d6cf74fe9223d79d758c714af955bcb6"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5947b139dfa33f72eecc63f17e45230a97e741942955a6c9e650069305eb73d"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cde6d76910d3179dae70f164466692f4ea36da124d6fb1a61399ca589e81d69a"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c6dfbaeb7afa77ca608a50e2770a0461177b63a99520d4928e27591b142c74b1"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa45e489ef80f28ff0e5ba0a72812b8cfc7c1ef8b46a694723807d1b07c89ebb"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f5007abfdbb1d866e2aa8990bd1c465f0f6da71d19e695fc278282be12cffa5"}, + {file = "orjson-3.10.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1b49e2af011c84c3f2d541bb5cd1e3c7c2df672223e7e3ea608f09cf295e5f8a"}, + {file = "orjson-3.10.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:164ac155109226b3a2606ee6dda899ccfbe6e7e18b5bdc3fbc00f79cc074157d"}, + {file = "orjson-3.10.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = 
"sha256:6b1225024cf0ef5d15934b5ffe9baf860fe8bc68a796513f5ea4f5056de30bca"}, + {file = "orjson-3.10.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6546e8073dc382e60fcae4a001a5a1bc46da5eab4a4878acc2d12072d6166d5"}, + {file = "orjson-3.10.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9f1d2942605c894162252d6259b0121bf1cb493071a1ea8cb35d79cb3e6ac5bc"}, + {file = "orjson-3.10.14-cp310-cp310-win32.whl", hash = "sha256:397083806abd51cf2b3bbbf6c347575374d160331a2d33c5823e22249ad3118b"}, + {file = "orjson-3.10.14-cp310-cp310-win_amd64.whl", hash = "sha256:fa18f949d3183a8d468367056be989666ac2bef3a72eece0bade9cdb733b3c28"}, + {file = "orjson-3.10.14-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:f506fd666dd1ecd15a832bebc66c4df45c1902fd47526292836c339f7ba665a9"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efe5fd254cfb0eeee13b8ef7ecb20f5d5a56ddda8a587f3852ab2cedfefdb5f6"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4ddc8c866d7467f5ee2991397d2ea94bcf60d0048bdd8ca555740b56f9042725"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af8e42ae4363773658b8d578d56dedffb4f05ceeb4d1d4dd3fb504950b45526"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84dd83110503bc10e94322bf3ffab8bc49150176b49b4984dc1cce4c0a993bf9"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f5bfc0399cd4811bf10ec7a759c7ab0cd18080956af8ee138097d5b5296a95"}, + {file = "orjson-3.10.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:868943660fb2a1e6b6b965b74430c16a79320b665b28dd4511d15ad5038d37d5"}, + {file = "orjson-3.10.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:33449c67195969b1a677533dee9d76e006001213a24501333624623e13c7cc8e"}, + {file 
= "orjson-3.10.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e4c9f60f9fb0b5be66e416dcd8c9d94c3eabff3801d875bdb1f8ffc12cf86905"}, + {file = "orjson-3.10.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0de4d6315cfdbd9ec803b945c23b3a68207fd47cbe43626036d97e8e9561a436"}, + {file = "orjson-3.10.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:83adda3db595cb1a7e2237029b3249c85afbe5c747d26b41b802e7482cb3933e"}, + {file = "orjson-3.10.14-cp311-cp311-win32.whl", hash = "sha256:998019ef74a4997a9d741b1473533cdb8faa31373afc9849b35129b4b8ec048d"}, + {file = "orjson-3.10.14-cp311-cp311-win_amd64.whl", hash = "sha256:9d034abdd36f0f0f2240f91492684e5043d46f290525d1117712d5b8137784eb"}, + {file = "orjson-3.10.14-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:2ad4b7e367efba6dc3f119c9a0fcd41908b7ec0399a696f3cdea7ec477441b09"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f496286fc85e93ce0f71cc84fc1c42de2decf1bf494094e188e27a53694777a7"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c7f189bbfcded40e41a6969c1068ba305850ba016665be71a217918931416fbf"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cc8204f0b75606869c707da331058ddf085de29558b516fc43c73ee5ee2aadb"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:deaa2899dff7f03ab667e2ec25842d233e2a6a9e333efa484dfe666403f3501c"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1c3ea52642c9714dc6e56de8a451a066f6d2707d273e07fe8a9cc1ba073813d"}, + {file = "orjson-3.10.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9d3f9ed72e7458ded9a1fb1b4d4ed4c4fdbaf82030ce3f9274b4dc1bff7ace2b"}, + {file = "orjson-3.10.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:07520685d408a2aba514c17ccc16199ff2934f9f9e28501e676c557f454a37fe"}, + {file = "orjson-3.10.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:76344269b550ea01488d19a2a369ab572c1ac4449a72e9f6ac0d70eb1cbfb953"}, + {file = "orjson-3.10.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e2979d0f2959990620f7e62da6cd954e4620ee815539bc57a8ae46e2dacf90e3"}, + {file = "orjson-3.10.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:03f61ca3674555adcb1aa717b9fc87ae936aa7a63f6aba90a474a88701278780"}, + {file = "orjson-3.10.14-cp312-cp312-win32.whl", hash = "sha256:d5075c54edf1d6ad81d4c6523ce54a748ba1208b542e54b97d8a882ecd810fd1"}, + {file = "orjson-3.10.14-cp312-cp312-win_amd64.whl", hash = "sha256:175cafd322e458603e8ce73510a068d16b6e6f389c13f69bf16de0e843d7d406"}, + {file = "orjson-3.10.14-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:0905ca08a10f7e0e0c97d11359609300eb1437490a7f32bbaa349de757e2e0c7"}, + {file = "orjson-3.10.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92d13292249f9f2a3e418cbc307a9fbbef043c65f4bd8ba1eb620bc2aaba3d15"}, + {file = "orjson-3.10.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90937664e776ad316d64251e2fa2ad69265e4443067668e4727074fe39676414"}, + {file = "orjson-3.10.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9ed3d26c4cb4f6babaf791aa46a029265850e80ec2a566581f5c2ee1a14df4f1"}, + {file = "orjson-3.10.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:56ee546c2bbe9599aba78169f99d1dc33301853e897dbaf642d654248280dc6e"}, + {file = "orjson-3.10.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:901e826cb2f1bdc1fcef3ef59adf0c451e8f7c0b5deb26c1a933fb66fb505eae"}, + {file = "orjson-3.10.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:26336c0d4b2d44636e1e1e6ed1002f03c6aae4a8a9329561c8883f135e9ff010"}, + {file = "orjson-3.10.14-cp313-cp313-win32.whl", hash = 
"sha256:e2bc525e335a8545c4e48f84dd0328bc46158c9aaeb8a1c2276546e94540ea3d"}, + {file = "orjson-3.10.14-cp313-cp313-win_amd64.whl", hash = "sha256:eca04dfd792cedad53dc9a917da1a522486255360cb4e77619343a20d9f35364"}, + {file = "orjson-3.10.14-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9a0fba3b8a587a54c18585f077dcab6dd251c170d85cfa4d063d5746cd595a0f"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175abf3d20e737fec47261d278f95031736a49d7832a09ab684026528c4d96db"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:29ca1a93e035d570e8b791b6c0feddd403c6a5388bfe870bf2aa6bba1b9d9b8e"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f77202c80e8ab5a1d1e9faf642343bee5aaf332061e1ada4e9147dbd9eb00c46"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e2ec73b7099b6a29b40a62e08a23b936423bd35529f8f55c42e27acccde7954"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2d1679df9f9cd9504f8dff24555c1eaabba8aad7f5914f28dab99e3c2552c9d"}, + {file = "orjson-3.10.14-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:691ab9a13834310a263664313e4f747ceb93662d14a8bdf20eb97d27ed488f16"}, + {file = "orjson-3.10.14-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b11ed82054fce82fb74cea33247d825d05ad6a4015ecfc02af5fbce442fbf361"}, + {file = "orjson-3.10.14-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:e70a1d62b8288677d48f3bea66c21586a5f999c64ecd3878edb7393e8d1b548d"}, + {file = "orjson-3.10.14-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:16642f10c1ca5611251bd835de9914a4b03095e28a34c8ba6a5500b5074338bd"}, + {file = "orjson-3.10.14-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3871bad546aa66c155e3f36f99c459780c2a392d502a64e23fb96d9abf338511"}, + {file = 
"orjson-3.10.14-cp38-cp38-win32.whl", hash = "sha256:0293a88815e9bb5c90af4045f81ed364d982f955d12052d989d844d6c4e50945"}, + {file = "orjson-3.10.14-cp38-cp38-win_amd64.whl", hash = "sha256:6169d3868b190d6b21adc8e61f64e3db30f50559dfbdef34a1cd6c738d409dfc"}, + {file = "orjson-3.10.14-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:06d4ec218b1ec1467d8d64da4e123b4794c781b536203c309ca0f52819a16c03"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:962c2ec0dcaf22b76dee9831fdf0c4a33d4bf9a257a2bc5d4adc00d5c8ad9034"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:21d3be4132f71ef1360385770474f29ea1538a242eef72ac4934fe142800e37f"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c28ed60597c149a9e3f5ad6dd9cebaee6fb2f0e3f2d159a4a2b9b862d4748860"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e947f70167fe18469f2023644e91ab3d24f9aed69a5e1c78e2c81b9cea553fb"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64410696c97a35af2432dea7bdc4ce32416458159430ef1b4beb79fd30093ad6"}, + {file = "orjson-3.10.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8050a5d81c022561ee29cd2739de5b4445f3c72f39423fde80a63299c1892c52"}, + {file = "orjson-3.10.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b49a28e30d3eca86db3fe6f9b7f4152fcacbb4a467953cd1b42b94b479b77956"}, + {file = "orjson-3.10.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ca041ad20291a65d853a9523744eebc3f5a4b2f7634e99f8fe88320695ddf766"}, + {file = "orjson-3.10.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:d313a2998b74bb26e9e371851a173a9b9474764916f1fc7971095699b3c6e964"}, + {file = "orjson-3.10.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7796692136a67b3e301ef9052bde6fe8e7bd5200da766811a3a608ffa62aaff0"}, + {file = "orjson-3.10.14-cp39-cp39-win32.whl", hash = "sha256:eee4bc767f348fba485ed9dc576ca58b0a9eac237f0e160f7a59bce628ed06b3"}, + {file = "orjson-3.10.14-cp39-cp39-win_amd64.whl", hash = "sha256:96a1c0ee30fb113b3ae3c748fd75ca74a157ff4c58476c47db4d61518962a011"}, + {file = "orjson-3.10.14.tar.gz", hash = "sha256:cf31f6f071a6b8e7aa1ead1fa27b935b48d00fbfa6a28ce856cfff2d5dd68eed"}, +] + [[package]] name = "packaging" version = "23.2" @@ -3219,6 +3878,130 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "2.9.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, + {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.23.4" +typing-extensions = [ + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, + {version = ">=4.6.1", markers = "python_version < \"3.13\""}, +] + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.23.4" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, + {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, + {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, + {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, + {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, + {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, + {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, + {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, + {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, + {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, + {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, + {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, + {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, + {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, + {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, + {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, + {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, + {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, + {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, + {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, + {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, + {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, + {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, + {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, + {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, + {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, + {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, + 
{file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, + {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, + {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, + {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, + {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, + {file = 
"pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, + {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, + {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, + {file 
= "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, + {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + [[package]] name = "pyee" version = "12.0.0" @@ -3523,6 +4306,24 @@ psutil = ["psutil (>=3.0)"] setproctitle = ["setproctitle"] testing = ["filelock"] +[[package]] +name = "python-crontab" +version = "3.2.0" +description = "Python Crontab API" +optional = false +python-versions = "*" +files = [ + {file = "python_crontab-3.2.0-py3-none-any.whl", hash = "sha256:82cb9b6a312d41ff66fd3caf3eed7115c28c195bfb50711bc2b4b9592feb9fe5"}, + {file = "python_crontab-3.2.0.tar.gz", hash = "sha256:40067d1dd39ade3460b2ad8557c7651514cd3851deffff61c5c60e1227c5c36b"}, +] + +[package.dependencies] +python-dateutil = "*" + +[package.extras] +cron-description = ["cron-descriptor"] +cron-schedule = ["croniter"] + [[package]] name = "python-dateutil" version = "2.8.2" @@ -3905,6 +4706,20 @@ requests = ">=2.0.0" [package.extras] rsa = ["oauthlib[signedtoken] (>=3.0.0)"] +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + [[package]] name = "rope" version = "1.11.0" @@ -4128,6 +4943,64 @@ files = [ docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx 
(>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +[[package]] +name = "shapely" +version = "2.0.6" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29a34e068da2d321e926b5073539fd2a1d4429a2c656bd63f0bd4c8f5b236d0b"}, + {file = "shapely-2.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c84c3f53144febf6af909d6b581bc05e8785d57e27f35ebaa5c1ab9baba13b"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad2fae12dca8d2b727fa12b007e46fbc522148a584f5d6546c539f3464dccde"}, + {file = "shapely-2.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3304883bd82d44be1b27a9d17f1167fda8c7f5a02a897958d86c59ec69b705e"}, + {file = "shapely-2.0.6-cp310-cp310-win32.whl", hash = "sha256:3ec3a0eab496b5e04633a39fa3d5eb5454628228201fb24903d38174ee34565e"}, + {file = "shapely-2.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:28f87cdf5308a514763a5c38de295544cb27429cfa655d50ed8431a4796090c4"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5aeb0f51a9db176da9a30cb2f4329b6fbd1e26d359012bb0ac3d3c7781667a9e"}, + {file = "shapely-2.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:9a7a78b0d51257a367ee115f4d41ca4d46edbd0dd280f697a8092dd3989867b2"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f32c23d2f43d54029f986479f7c1f6e09c6b3a19353a3833c2ffb226fb63a855"}, + {file = "shapely-2.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3dc9fb0eb56498912025f5eb352b5126f04801ed0e8bdbd867d21bdbfd7cbd0"}, + {file = "shapely-2.0.6-cp311-cp311-win32.whl", hash = "sha256:d93b7e0e71c9f095e09454bf18dad5ea716fb6ced5df3cb044564a00723f339d"}, + {file = "shapely-2.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:c02eb6bf4cfb9fe6568502e85bb2647921ee49171bcd2d4116c7b3109724ef9b"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cec9193519940e9d1b86a3b4f5af9eb6910197d24af02f247afbfb47bcb3fab0"}, + {file = "shapely-2.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83b94a44ab04a90e88be69e7ddcc6f332da7c0a0ebb1156e1c4f568bbec983c3"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:537c4b2716d22c92036d00b34aac9d3775e3691f80c7aa517c2c290351f42cd8"}, + {file = "shapely-2.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98fea108334be345c283ce74bf064fa00cfdd718048a8af7343c59eb40f59726"}, + {file = "shapely-2.0.6-cp312-cp312-win32.whl", hash = "sha256:42fd4cd4834747e4990227e4cbafb02242c0cffe9ce7ef9971f53ac52d80d55f"}, + {file = "shapely-2.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:665990c84aece05efb68a21b3523a6b2057e84a1afbef426ad287f0796ef8a48"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:42805ef90783ce689a4dde2b6b2f261e2c52609226a0438d882e3ced40bb3013"}, + {file = "shapely-2.0.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6d2cb146191a47bd0cee8ff5f90b47547b82b6345c0d02dd8b25b88b68af62d7"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e3fdef0a1794a8fe70dc1f514440aa34426cc0ae98d9a1027fb299d45741c381"}, + {file = "shapely-2.0.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c665a0301c645615a107ff7f52adafa2153beab51daf34587170d85e8ba6805"}, + {file = "shapely-2.0.6-cp313-cp313-win32.whl", hash = "sha256:0334bd51828f68cd54b87d80b3e7cee93f249d82ae55a0faf3ea21c9be7b323a"}, + {file = "shapely-2.0.6-cp313-cp313-win_amd64.whl", hash = "sha256:d37d070da9e0e0f0a530a621e17c0b8c3c9d04105655132a87cfff8bd77cc4c2"}, + {file = "shapely-2.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fa7468e4f5b92049c0f36d63c3e309f85f2775752e076378e36c6387245c5462"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed5867e598a9e8ac3291da6cc9baa62ca25706eea186117034e8ec0ea4355653"}, + {file = "shapely-2.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:81d9dfe155f371f78c8d895a7b7f323bb241fb148d848a2bf2244f79213123fe"}, + {file = "shapely-2.0.6-cp37-cp37m-win32.whl", hash = "sha256:fbb7bf02a7542dba55129062570211cfb0defa05386409b3e306c39612e7fbcc"}, + {file = "shapely-2.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:837d395fac58aa01aa544495b97940995211e3e25f9aaf87bc3ba5b3a8cd1ac7"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c6d88ade96bf02f6bfd667ddd3626913098e243e419a0325ebef2bbd481d1eb6"}, + {file = "shapely-2.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b3b818c4407eaa0b4cb376fd2305e20ff6df757bf1356651589eadc14aab41b"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbc783529a21f2bd50c79cef90761f72d41c45622b3e57acf78d984c50a5d13"}, + {file = "shapely-2.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2423f6c0903ebe5df6d32e0066b3d94029aab18425ad4b07bf98c3972a6e25a1"}, + {file = "shapely-2.0.6-cp38-cp38-win32.whl", hash = 
"sha256:2de00c3bfa80d6750832bde1d9487e302a6dd21d90cb2f210515cefdb616e5f5"}, + {file = "shapely-2.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:3a82d58a1134d5e975f19268710e53bddd9c473743356c90d97ce04b73e101ee"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:392f66f458a0a2c706254f473290418236e52aa4c9b476a072539d63a2460595"}, + {file = "shapely-2.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eba5bae271d523c938274c61658ebc34de6c4b33fdf43ef7e938b5776388c1be"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7060566bc4888b0c8ed14b5d57df8a0ead5c28f9b69fb6bed4476df31c51b0af"}, + {file = "shapely-2.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b02154b3e9d076a29a8513dffcb80f047a5ea63c897c0cd3d3679f29363cf7e5"}, + {file = "shapely-2.0.6-cp39-cp39-win32.whl", hash = "sha256:44246d30124a4f1a638a7d5419149959532b99dfa25b54393512e6acc9c211ac"}, + {file = "shapely-2.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:2b542d7f1dbb89192d3512c52b679c822ba916f93479fa5d4fc2fe4fa0b3c9e8"}, + {file = "shapely-2.0.6.tar.gz", hash = "sha256:997f6159b1484059ec239cacaa53467fd8b5564dabe186cd84ac2944663b0bf6"}, +] + +[package.dependencies] +numpy = ">=1.14,<3" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + [[package]] name = "simplejson" version = "3.19.2" @@ -4246,6 +5119,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = 
"sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -4491,6 +5375,21 @@ files = [ [package.extras] widechars = ["wcwidth"] +[[package]] +name = "tenacity" +version = "9.0.0" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, + {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + [[package]] name = "testfixtures" version = "7.2.2" @@ -4518,6 +5417,53 @@ files = [ {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, ] +[[package]] +name = "tiktoken" +version = "0.8.0" +description = "tiktoken is a fast BPE tokeniser for use with OpenAI's models" +optional = false +python-versions = ">=3.9" +files = [ + {file = "tiktoken-0.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b07e33283463089c81ef1467180e3e00ab00d46c2c4bbcef0acab5f771d6695e"}, + {file = "tiktoken-0.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9269348cb650726f44dd3bbb3f9110ac19a8dcc8f54949ad3ef652ca22a38e21"}, + {file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e13f37bc4ef2d012731e93e0fef21dc3b7aea5bb9009618de9a4026844e560"}, + {file = "tiktoken-0.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f13d13c981511331eac0d01a59b5df7c0d4060a8be1e378672822213da51e0a2"}, + {file = "tiktoken-0.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6b2ddbc79a22621ce8b1166afa9f9a888a664a579350dc7c09346a3b5de837d9"}, + {file = "tiktoken-0.8.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:d8c2d0e5ba6453a290b86cd65fc51fedf247e1ba170191715b049dac1f628005"}, + {file = "tiktoken-0.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d622d8011e6d6f239297efa42a2657043aaed06c4f68833550cac9e9bc723ef1"}, + {file = "tiktoken-0.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2efaf6199717b4485031b4d6edb94075e4d79177a172f38dd934d911b588d54a"}, + {file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5637e425ce1fc49cf716d88df3092048359a4b3bbb7da762840426e937ada06d"}, + {file = "tiktoken-0.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb0e352d1dbe15aba082883058b3cce9e48d33101bdaac1eccf66424feb5b47"}, + {file = "tiktoken-0.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:56edfefe896c8f10aba372ab5706b9e3558e78db39dd497c940b47bf228bc419"}, + {file = "tiktoken-0.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:326624128590def898775b722ccc327e90b073714227175ea8febbc920ac0a99"}, + {file = "tiktoken-0.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:881839cfeae051b3628d9823b2e56b5cc93a9e2efb435f4cf15f17dc45f21586"}, + {file = "tiktoken-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fe9399bdc3f29d428f16a2f86c3c8ec20be3eac5f53693ce4980371c3245729b"}, + {file = "tiktoken-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a58deb7075d5b69237a3ff4bb51a726670419db6ea62bdcd8bd80c78497d7ab"}, + {file = "tiktoken-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2908c0d043a7d03ebd80347266b0e58440bdef5564f84f4d29fb235b5df3b04"}, + {file = "tiktoken-0.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:294440d21a2a51e12d4238e68a5972095534fe9878be57d905c476017bff99fc"}, + {file = "tiktoken-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:d8f3192733ac4d77977432947d563d7e1b310b96497acd3c196c9bddb36ed9db"}, + {file = "tiktoken-0.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:02be1666096aff7da6cbd7cdaa8e7917bfed3467cd64b38b1f112e96d3b06a24"}, + {file = "tiktoken-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c94ff53c5c74b535b2cbf431d907fc13c678bbd009ee633a2aca269a04389f9a"}, + {file = "tiktoken-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b231f5e8982c245ee3065cd84a4712d64692348bc609d84467c57b4b72dcbc5"}, + {file = "tiktoken-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4177faa809bd55f699e88c96d9bb4635d22e3f59d635ba6fd9ffedf7150b9953"}, + {file = "tiktoken-0.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5376b6f8dc4753cd81ead935c5f518fa0fbe7e133d9e25f648d8c4dabdd4bad7"}, + {file = "tiktoken-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:18228d624807d66c87acd8f25fc135665617cab220671eb65b50f5d70fa51f69"}, + {file = "tiktoken-0.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17807445f0cf1f25771c9d86496bd8b5c376f7419912519699f3cc4dc5c12e"}, + {file = "tiktoken-0.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:886f80bd339578bbdba6ed6d0567a0d5c6cfe198d9e587ba6c447654c65b8edc"}, + {file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6adc8323016d7758d6de7313527f755b0fc6c72985b7d9291be5d96d73ecd1e1"}, + {file = "tiktoken-0.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b591fb2b30d6a72121a80be24ec7a0e9eb51c5500ddc7e4c2496516dd5e3816b"}, + {file = "tiktoken-0.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:845287b9798e476b4d762c3ebda5102be87ca26e5d2c9854002825d60cdb815d"}, + {file = "tiktoken-0.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:1473cfe584252dc3fa62adceb5b1c763c1874e04511b197da4e6de51d6ce5a02"}, + {file = "tiktoken-0.8.0.tar.gz", hash = "sha256:9ccbb2740f24542534369c5635cfd9b2b3c2490754a78ac8831d99f89f94eeb2"}, +] + +[package.dependencies] +regex = ">=2022.1.18" +requests = ">=2.26.0" + +[package.extras] +blobfile = ["blobfile (>=2)"] + 
[[package]] name = "tinycss2" version = "1.2.1" @@ -4629,13 +5575,13 @@ requests-oauthlib = ">=0.4.0" [[package]] name = "typing-extensions" -version = "4.9.0" +version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -5038,4 +5984,4 @@ testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "fd7b33e6de0a2ca52f8d603c007322ccf27c6cbfb9304d3e9e0bb70e627ee57e" +content-hash = "8da65b463e164a631f6d62c591402e3e161c6c1f06eb9d6f3628d7bd58de89c3" diff --git a/pyproject.toml b/pyproject.toml index 41db760c44e..ec55d46ccf2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,7 +78,7 @@ oauthlib = "^3.2.2" protobuf = "^4.25.2" Babel = "^2.14.0" Jinja2 = "^3.1.5" -google-auth = "^2.26.2" +google-auth = "^2.37.0" graphene-django = "^3.2.0" django-guardian = "^2.4.0" django-email-bandit = "^2.0" @@ -91,6 +91,9 @@ pyparsing = "3.1.2" django-silk = "^5.1.0" requests = "^2.32.3" fuzzywuzzy = "^0.18.0" +langchain-openai = "^0.2.14" +langchain-google-vertexai = "^2.0.10" +django-celery-beat = "^2.7.0" [tool.poetry.group.dev.dependencies] ipdb = "^0.13.11"