diff --git a/.python-version b/.python-version
index a5c4c7633..1635d0f5a 100644
--- a/.python-version
+++ b/.python-version
@@ -1 +1 @@
-3.9.0
+3.9.6
diff --git a/CONTRIBUTE.md b/CONTRIBUTE.md
index fda4aa395..762700414 100644
--- a/CONTRIBUTE.md
+++ b/CONTRIBUTE.md
@@ -15,7 +15,7 @@ Use LLMs to eliminate manual processes involving unstructured data.
Just run the `run-platform.sh` launch script to get started in few minutes.
-The launch script does env setup with default values, pulls public docker images or builds them locally and finally runs them in containers.
+The launch script configures the env with sane defaults, pulls public docker images or builds them locally and finally runs them in containers.
```bash
# Pull and run entire Unstract platform with default env config.
@@ -45,6 +45,7 @@ The launch script does env setup with default values, pulls public docker images
Now visit [http://frontend.unstract.localhost](http://frontend.unstract.localhost) in your browser.
+NOTE: Modify the `.env` files present in each service folder to update its runtime behaviour. Run docker compose up again for the changes to take effect.
That's all. Enjoy!
## Authentication
diff --git a/README.md b/README.md
index 900514fda..95024bd24 100644
--- a/README.md
+++ b/README.md
@@ -84,7 +84,7 @@ Unstract comes well documented. You can get introduced to the [basics of Unstrac
 |            | Weaviate   | ✅ Working |
 |            | Pinecone   | ✅ Working |
 |            | PostgreSQL | ✅ Working |
-|            | Milvus     | 🗓️ Coming soon! |
+|            | Milvus     | ✅ Working |
@@ -142,4 +142,4 @@ Contributions are welcome! Please read [CONTRIBUTE.md](CONTRIBUTE.md) for furthe
## π A note on analytics
-In full disclosure, Unstract integrates Posthog to track usage analytics. As you can inspect the relevant code here, we collect the minimum possible metrics.
+In full disclosure, Unstract integrates Posthog to track usage analytics. As you can inspect the relevant code here, we collect the minimum possible metrics. Posthog can be disabled if desired by setting `REACT_APP_ENABLE_POSTHOG` to `false` in the frontend's .env file.
diff --git a/backend/.python-version b/backend/.python-version
index a5c4c7633..1635d0f5a 100644
--- a/backend/.python-version
+++ b/backend/.python-version
@@ -1 +1 @@
-3.9.0
+3.9.6
diff --git a/backend/account/authentication_controller.py b/backend/account/authentication_controller.py
index de0734cee..f8b41e921 100644
--- a/backend/account/authentication_controller.py
+++ b/backend/account/authentication_controller.py
@@ -184,7 +184,9 @@ def set_user_organization(self, request: Request, organization_id: str) -> Respo
f"{ErrorMessage.ORGANIZATION_EXIST}, \
{ErrorMessage.DUPLICATE_API}"
)
- self.create_tenant_user(organization=organization, user=user)
+ organization_member = self.create_tenant_user(
+ organization=organization, user=user
+ )
if new_organization:
try:
@@ -209,6 +211,7 @@ def set_user_organization(self, request: Request, organization_id: str) -> Respo
response: Response = Response(
status=status.HTTP_200_OK,
data={
+ "is_new_org": new_organization,
"user": serialized_user_info,
"organization": organization_info,
f"{Common.LOG_EVENTS_ID}": StateStore.get(Common.LOG_EVENTS_ID),
@@ -221,6 +224,7 @@ def set_user_organization(self, request: Request, organization_id: str) -> Respo
organization_id=current_organization_id,
)
UserSessionUtils.set_organization_id(request, organization_id)
+ UserSessionUtils.set_organization_member_role(request, organization_member)
OrganizationMemberService.set_user_membership_in_organization_cache(
user_id=user.user_id, organization_id=organization_id
)
@@ -419,12 +423,14 @@ def save_orgnanization_user_role(self, user_id: str, role: str) -> None:
organization_user.role = role
organization_user.save()
- def create_tenant_user(self, organization: Organization, user: User) -> None:
+ def create_tenant_user(
+ self, organization: Organization, user: User
+ ) -> OrganizationMember:
with tenant_context(organization):
existing_tenant_user = OrganizationMemberService.get_user_by_id(id=user.id)
if existing_tenant_user:
Logger.info(f"{existing_tenant_user.user.email} Already exist")
-
+ return existing_tenant_user
else:
account_user = self.get_or_create_user(user=user)
if account_user:
@@ -441,7 +447,7 @@ def create_tenant_user(self, organization: Organization, user: User) -> None:
is_prompt_studio_onboarding_msg=False,
)
tenant_user.save()
-
+ return tenant_user
else:
raise UserNotExistError()
diff --git a/backend/account/dto.py b/backend/account/dto.py
index 1554901fb..66cf6c1aa 100644
--- a/backend/account/dto.py
+++ b/backend/account/dto.py
@@ -59,6 +59,7 @@ class UserSessionInfo:
email: str
organization_id: str
user: UserInfo
+ role: str
@staticmethod
def from_dict(data: dict[str, Any]) -> "UserSessionInfo":
@@ -67,6 +68,7 @@ def from_dict(data: dict[str, Any]) -> "UserSessionInfo":
user_id=data["user_id"],
email=data["email"],
organization_id=data["organization_id"],
+ role=data["role"],
)
def to_dict(self) -> Any:
@@ -75,6 +77,7 @@ def to_dict(self) -> Any:
"user_id": self.user_id,
"email": self.email,
"organization_id": self.organization_id,
+ "role": self.role,
}
diff --git a/backend/account/serializer.py b/backend/account/serializer.py
index 57d166fe7..c40c0b993 100644
--- a/backend/account/serializer.py
+++ b/backend/account/serializer.py
@@ -116,3 +116,4 @@ class UserSessionResponseSerializer(serializers.Serializer):
user_id = serializers.CharField()
email = serializers.CharField()
organization_id = serializers.CharField()
+ role = serializers.CharField()
diff --git a/backend/account/subscription_loader.py b/backend/account/subscription_loader.py
index d380ed19d..133a4d9eb 100644
--- a/backend/account/subscription_loader.py
+++ b/backend/account/subscription_loader.py
@@ -4,6 +4,7 @@
from typing import Any
from django.apps import apps
+from django.utils import timezone
logger = logging.getLogger(__name__)
@@ -75,3 +76,32 @@ def load_plugins() -> list[Any]:
logger.info("No subscription plugins found.")
return subscription_plugins
+
+
+def validate_etl_run(org_id: str) -> bool:
+ """Method to check subscription status before ETL runs.
+
+ Args:
+ org_id: The ID of the organization.
+
+ Returns:
+ A boolean indicating whether the pre-run check passed or not.
+ """
+ try:
+ from pluggable_apps.subscription.subscription_helper import SubscriptionHelper
+ except ModuleNotFoundError:
+ logger.error("Subscription plugin not found.")
+ return False
+
+ org_plans = SubscriptionHelper.get_subscription(org_id)
+ if not org_plans or not org_plans.is_active:
+ return False
+
+ if org_plans.is_paid:
+ return True
+
+ if timezone.now() >= org_plans.end_date:
+ logger.debug(f"Trial expired for org {org_id}")
+ return False
+
+ return True
diff --git a/backend/account/views.py b/backend/account/views.py
index 0f7eb8719..0e6582821 100644
--- a/backend/account/views.py
+++ b/backend/account/views.py
@@ -142,6 +142,7 @@ def make_session_response(
email=request.user.email,
user=auth_controller.get_user_info(request),
organization_id=UserSessionUtils.get_organization_id(request),
+ role=UserSessionUtils.get_organization_member_role(request),
)
).data
diff --git a/backend/adapter_processor/serializers.py b/backend/adapter_processor/serializers.py
index 80637206e..a3f281dcb 100644
--- a/backend/adapter_processor/serializers.py
+++ b/backend/adapter_processor/serializers.py
@@ -124,6 +124,10 @@ def to_representation(self, instance: AdapterInstance) -> dict[str, str]:
rep[common.ICON] = AdapterProcessor.get_adapter_data_with_key(
instance.adapter_id, common.ICON
)
+ adapter_metadata = instance.get_adapter_meta_data()
+ model = adapter_metadata.get("model")
+ if model:
+ rep["model"] = model
if instance.is_friction_less:
rep["created_by_email"] = "Unstract"
diff --git a/backend/api/api_deployment_views.py b/backend/api/api_deployment_views.py
index 6c8db8249..7217117ca 100644
--- a/backend/api/api_deployment_views.py
+++ b/backend/api/api_deployment_views.py
@@ -50,7 +50,9 @@ def post(
serializer = ExecutionRequestSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
timeout = serializer.get_timeout(serializer.validated_data)
-
+ include_metadata = (
+ request.data.get(ApiExecution.INCLUDE_METADATA, "false").lower() == "true"
+ )
if not file_objs or len(file_objs) == 0:
raise InvalidAPIRequest("File shouldn't be empty")
response = DeploymentHelper.execute_workflow(
@@ -58,6 +60,7 @@ def post(
api=api,
file_objs=file_objs,
timeout=timeout,
+ include_metadata=include_metadata,
)
if "error" in response and response["error"]:
return Response(
diff --git a/backend/api/constants.py b/backend/api/constants.py
index c0f056091..0ec324cc9 100644
--- a/backend/api/constants.py
+++ b/backend/api/constants.py
@@ -1,6 +1,6 @@
class ApiExecution:
PATH: str = "deployment/api"
MAXIMUM_TIMEOUT_IN_SEC: int = 300 # 5 minutes
- DEFAULT_TIMEOUT_IN_SEC: int = 80
FILES_FORM_DATA: str = "files"
TIMEOUT_FORM_DATA: str = "timeout"
+ INCLUDE_METADATA: str = "include_metadata"
diff --git a/backend/api/deployment_helper.py b/backend/api/deployment_helper.py
index 3a484dbc2..59f7b3b3a 100644
--- a/backend/api/deployment_helper.py
+++ b/backend/api/deployment_helper.py
@@ -188,6 +188,7 @@ def execute_workflow(
api: APIDeployment,
file_objs: list[UploadedFile],
timeout: int,
+ include_metadata: bool = False,
) -> ReturnDict:
"""Execute workflow by api.
@@ -215,6 +216,7 @@ def execute_workflow(
hash_values_of_files=hash_values_of_files,
timeout=timeout,
execution_id=execution_id,
+ include_metadata=include_metadata,
)
result.status_api = DeploymentHelper.construct_status_endpoint(
api_endpoint=api.api_endpoint, execution_id=execution_id
diff --git a/backend/api/postman_collection/dto.py b/backend/api/postman_collection/dto.py
index e905e64f8..c862baf81 100644
--- a/backend/api/postman_collection/dto.py
+++ b/backend/api/postman_collection/dto.py
@@ -99,7 +99,12 @@ def create(
FormDataItem(
key=ApiExecution.TIMEOUT_FORM_DATA,
type="text",
- value=ApiExecution.DEFAULT_TIMEOUT_IN_SEC,
+ value=ApiExecution.MAXIMUM_TIMEOUT_IN_SEC,
+ ),
+ FormDataItem(
+ key=ApiExecution.INCLUDE_METADATA,
+ type="text",
+ value=False,
),
]
)
diff --git a/backend/apps/__init__.py b/backend/apps/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/backend/apps/constants.py b/backend/apps/constants.py
deleted file mode 100644
index 15297aefc..000000000
--- a/backend/apps/constants.py
+++ /dev/null
@@ -1,2 +0,0 @@
-class AppConstants:
- """Constants for Apps."""
diff --git a/backend/apps/exceptions.py b/backend/apps/exceptions.py
deleted file mode 100644
index fb1980ae9..000000000
--- a/backend/apps/exceptions.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from rest_framework.exceptions import APIException
-
-
-class FetchAppListFailed(APIException):
- status_code = 400
- default_detail = "Failed to fetch App list."
diff --git a/backend/apps/urls.py b/backend/apps/urls.py
deleted file mode 100644
index d60c9b2bb..000000000
--- a/backend/apps/urls.py
+++ /dev/null
@@ -1,9 +0,0 @@
-from apps import views
-from django.urls import path
-from rest_framework.urlpatterns import format_suffix_patterns
-
-urlpatterns = format_suffix_patterns(
- [
- path("app/", views.get_app_list, name="app-list"),
- ]
-)
diff --git a/backend/apps/views.py b/backend/apps/views.py
deleted file mode 100644
index 944987319..000000000
--- a/backend/apps/views.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import logging
-
-from apps.exceptions import FetchAppListFailed
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.request import Request
-from rest_framework.response import Response
-
-logger = logging.getLogger(__name__)
-
-
-@api_view(("GET",))
-def get_app_list(request: Request) -> Response:
- """API to fetch List of Apps."""
- if request.method == "GET":
- try:
- return Response(data=[], status=status.HTTP_200_OK)
- # Refactored dated: 19/12/2023
- # ( Removed -> backend/apps/app_processor.py )
- except Exception as exe:
- logger.error(f"Error occured while fetching app list {exe}")
- raise FetchAppListFailed()
diff --git a/backend/backend/constants.py b/backend/backend/constants.py
index b11ada55d..365222e30 100644
--- a/backend/backend/constants.py
+++ b/backend/backend/constants.py
@@ -34,3 +34,4 @@ class FeatureFlag:
"""Temporary feature flags."""
MULTI_TENANCY_V2 = "multi_tenancy_v2"
+ APP_DEPLOYMENT = "app_deployment"
diff --git a/backend/backend/settings/base.py b/backend/backend/settings/base.py
index ae25fe6b2..902a7d012 100644
--- a/backend/backend/settings/base.py
+++ b/backend/backend/settings/base.py
@@ -167,6 +167,7 @@ def get_required_setting(
"CELERY_BROKER_URL", f"redis://{REDIS_HOST}:{REDIS_PORT}"
)
+INDEXING_FLAG_TTL = int(get_required_setting("INDEXING_FLAG_TTL"))
# Flag to Enable django admin
ADMIN_ENABLED = False
diff --git a/backend/backend/urls.py b/backend/backend/urls.py
index 9a4f70bc5..8b299ecc6 100644
--- a/backend/backend/urls.py
+++ b/backend/backend/urls.py
@@ -29,7 +29,6 @@
path("", include("file_management.urls")),
path("", include("tool_instance.urls")),
path("", include("pipeline.urls")),
- path("", include("apps.urls")),
path("", include("feature_flag.urls")),
path("workflow/", include("workflow_manager.urls")),
path("platform/", include("platform_settings.urls")),
@@ -62,11 +61,42 @@
),
]
+
+# APP deployment urls
+try:
+ import pluggable_apps.apps.app_deployment.urls # noqa # pylint: disable=unused-import
+ import pluggable_apps.apps.canned_question.urls # noqa # pylint: disable=unused-import
+ import pluggable_apps.apps.chat_history.urls # noqa # pylint: disable=unused-import
+ import pluggable_apps.apps.chat_transcript.urls # noqa # pylint: disable=unused-import
+
+ urlpatterns += [
+ path(
+ "canned_question/",
+ include("pluggable_apps.apps.canned_question.urls"),
+ ),
+ path("app/", include("pluggable_apps.apps.app_deployment.urls")),
+ path("chat_history/", include("pluggable_apps.apps.chat_history.urls")),
+ path("chat/", include("pluggable_apps.apps.chat_transcript.urls")),
+ ]
+except ImportError:
+ pass
+
+# Subscription urls
try:
- import pluggable_apps.subscription.urls # noqa: F401
+
+ import pluggable_apps.subscription.urls # noqa # pylint: disable=unused-import
urlpatterns += [
path("", include("pluggable_apps.subscription.urls")),
]
except ImportError:
pass
+
+try:
+ import pluggable_apps.manual_review.urls # noqa: F401
+
+ urlpatterns += [
+ path("manual_review/", include("pluggable_apps.manual_review.urls")),
+ ]
+except ImportError:
+ pass
diff --git a/backend/connector/migrations/0003_alter_connectorinstance_connector_mode.py b/backend/connector/migrations/0003_alter_connectorinstance_connector_mode.py
new file mode 100644
index 000000000..5e5f1d50e
--- /dev/null
+++ b/backend/connector/migrations/0003_alter_connectorinstance_connector_mode.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.2.1 on 2024-06-24 12:51
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("connector", "0002_connectorinstance_connector_metadata_b"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="connectorinstance",
+ name="connector_mode",
+ field=models.CharField(
+ choices=[
+ (0, "UNKNOWN"),
+ (1, "FILE_SYSTEM"),
+ (2, "DATABASE"),
+ (3, "APPDEPLOYMENT"),
+ ],
+ db_comment="Choices of connectors",
+ default=0,
+ ),
+ ),
+ ]
diff --git a/backend/connector/migrations/0004_alter_connectorinstance_connector_mode.py b/backend/connector/migrations/0004_alter_connectorinstance_connector_mode.py
new file mode 100644
index 000000000..6feec997f
--- /dev/null
+++ b/backend/connector/migrations/0004_alter_connectorinstance_connector_mode.py
@@ -0,0 +1,28 @@
+# Generated by Django 4.2.1 on 2024-07-04 05:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("connector", "0003_alter_connectorinstance_connector_mode"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="connectorinstance",
+ name="connector_mode",
+ field=models.CharField(
+ choices=[
+ (0, "UNKNOWN"),
+ (1, "FILE_SYSTEM"),
+ (2, "DATABASE"),
+ (3, "APPDEPLOYMENT"),
+ (4, "MANUAL_REVIEW"),
+ ],
+ db_comment="Choices of connectors",
+ default=0,
+ ),
+ ),
+ ]
diff --git a/backend/connector/models.py b/backend/connector/models.py
index 352ee3694..55c228e47 100644
--- a/backend/connector/models.py
+++ b/backend/connector/models.py
@@ -29,6 +29,8 @@ class ConnectorMode(models.IntegerChoices):
UNKNOWN = 0, "UNKNOWN"
FILE_SYSTEM = 1, "FILE_SYSTEM"
DATABASE = 2, "DATABASE"
+ APPDEPLOYMENT = 3, "APPDEPLOYMENT"
+ MANUAL_REVIEW = 4, "MANUAL_REVIEW"
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
connector_name = models.TextField(
@@ -62,7 +64,7 @@ class ConnectorMode(models.IntegerChoices):
connector_mode = models.CharField(
choices=ConnectorMode.choices,
default=ConnectorMode.UNKNOWN,
- db_comment="0: UNKNOWN, 1: FILE_SYSTEM, 2: DATABASE",
+ db_comment="Choices of connectors",
)
created_by = models.ForeignKey(
diff --git a/backend/pdm.lock b/backend/pdm.lock
index 01d9ae0c7..230c6db84 100644
--- a/backend/pdm.lock
+++ b/backend/pdm.lock
@@ -2,17 +2,17 @@
# It is not intended for manual editing.
[metadata]
-groups = ["default", "deploy", "test"]
+groups = ["default", "deploy", "dev", "test"]
strategy = ["cross_platform", "inherit_metadata"]
lock_version = "4.4.1"
-content_hash = "sha256:62e6144be344e2e9499394e656f83fffa0c5a18ff001045d5fb5dc8c33d6d15a"
+content_hash = "sha256:833399304360347050cb95f45e62bcb0977b835e9e1087689c248e0e281c5958"
[[package]]
name = "adlfs"
version = "2023.8.0"
requires_python = ">=3.8"
summary = "Access Azure Datalake Gen1 with fsspec and dask"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiohttp>=3.7.0",
"azure-core<2.0.0,>=1.23.1",
@@ -31,7 +31,7 @@ name = "aiobotocore"
version = "2.5.4"
requires_python = ">=3.7"
summary = "Async client for aws services using botocore and aiohttp"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiohttp<4.0.0,>=3.3.1",
"aioitertools<1.0.0,>=0.5.1",
@@ -49,7 +49,7 @@ version = "2.5.4"
extras = ["boto3"]
requires_python = ">=3.7"
summary = "Async client for aws services using botocore and aiohttp"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiobotocore==2.5.4",
"boto3<1.28.18,>=1.28.17",
@@ -64,7 +64,7 @@ name = "aiohttp"
version = "3.9.5"
requires_python = ">=3.8"
summary = "Async http client/server framework (asyncio)"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiosignal>=1.1.2",
"async-timeout<5.0,>=4.0; python_version < \"3.11\"",
@@ -127,7 +127,7 @@ name = "aioitertools"
version = "0.11.0"
requires_python = ">=3.6"
summary = "itertools and builtins for AsyncIO and mixed iterables"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"typing-extensions>=4.0; python_version < \"3.10\"",
]
@@ -141,7 +141,7 @@ name = "aiosignal"
version = "1.3.1"
requires_python = ">=3.7"
summary = "aiosignal: a list of registered asynchronous callbacks"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"frozenlist>=1.1.0",
]
@@ -155,7 +155,7 @@ name = "amqp"
version = "5.2.0"
requires_python = ">=3.6"
summary = "Low-level AMQP client for Python (fork of amqplib)."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"vine<6.0.0,>=5.0.0",
]
@@ -169,7 +169,7 @@ name = "annotated-types"
version = "0.7.0"
requires_python = ">=3.8"
summary = "Reusable constraint types to use with typing.Annotated"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
{file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
@@ -180,7 +180,7 @@ name = "anthropic"
version = "0.23.1"
requires_python = ">=3.7"
summary = "The official Python library for the anthropic API"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"anyio<5,>=3.5.0",
"distro<2,>=1.7.0",
@@ -200,7 +200,7 @@ name = "anyio"
version = "4.4.0"
requires_python = ">=3.8"
summary = "High level compatibility layer for multiple asynchronous event loop implementations"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"exceptiongroup>=1.0.2; python_version < \"3.11\"",
"idna>=2.8",
@@ -216,7 +216,7 @@ files = [
name = "appdirs"
version = "1.4.4"
summary = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
{file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
@@ -240,7 +240,7 @@ files = [
name = "asn1crypto"
version = "1.5.1"
summary = "Fast ASN.1 parser and serializer with definitions for private keys, public keys, certificates, CRL, OCSP, CMS, PKCS#3, PKCS#7, PKCS#8, PKCS#12, PKCS#5, X.509 and TSP"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "asn1crypto-1.5.1-py2.py3-none-any.whl", hash = "sha256:db4e40728b728508912cbb3d44f19ce188f218e9eba635821bb4b68564f8fd67"},
{file = "asn1crypto-1.5.1.tar.gz", hash = "sha256:13ae38502be632115abf8a24cbe5f4da52e3b5231990aff31123c805306ccb9c"},
@@ -251,7 +251,7 @@ name = "async-timeout"
version = "4.0.3"
requires_python = ">=3.7"
summary = "Timeout context manager for asyncio programs"
-groups = ["default"]
+groups = ["default", "dev"]
marker = "python_version < \"3.12.0\""
files = [
{file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"},
@@ -263,7 +263,7 @@ name = "asyncpg"
version = "0.29.0"
requires_python = ">=3.8.0"
summary = "An asyncio PostgreSQL driver"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"async-timeout>=4.0.3; python_version < \"3.12.0\"",
]
@@ -300,7 +300,7 @@ name = "attrs"
version = "23.2.0"
requires_python = ">=3.7"
summary = "Classes Without Boilerplate"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
@@ -310,7 +310,7 @@ files = [
name = "authlib"
version = "1.2.1"
summary = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"cryptography>=3.2",
]
@@ -334,7 +334,7 @@ name = "azure-core"
version = "1.30.2"
requires_python = ">=3.8"
summary = "Microsoft Azure Core Library for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"requests>=2.21.0",
"six>=1.11.0",
@@ -349,7 +349,7 @@ files = [
name = "azure-datalake-store"
version = "0.0.53"
summary = "Azure Data Lake Store Filesystem Client Library for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"cffi",
"msal<2,>=1.16.0",
@@ -365,7 +365,7 @@ name = "azure-identity"
version = "1.16.0"
requires_python = ">=3.8"
summary = "Microsoft Azure Identity Library for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"azure-core>=1.23.0",
"cryptography>=2.5",
@@ -412,7 +412,7 @@ name = "azure-storage-blob"
version = "12.20.0"
requires_python = ">=3.8"
summary = "Microsoft Azure Blob Storage Client Library for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"azure-core>=1.28.0",
"cryptography>=2.1.4",
@@ -429,7 +429,7 @@ name = "beautifulsoup4"
version = "4.12.3"
requires_python = ">=3.6.0"
summary = "Screen-scraping library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"soupsieve>1.2",
]
@@ -465,7 +465,7 @@ name = "boto3"
version = "1.28.17"
requires_python = ">= 3.7"
summary = "The AWS SDK for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"botocore<1.32.0,>=1.31.17",
"jmespath<2.0.0,>=0.7.1",
@@ -481,7 +481,7 @@ name = "botocore"
version = "1.31.17"
requires_python = ">= 3.7"
summary = "Low-level, data-driven core of boto 3."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"jmespath<2.0.0,>=0.7.1",
"python-dateutil<3.0.0,>=2.1",
@@ -497,7 +497,7 @@ name = "boxfs"
version = "0.2.1"
requires_python = ">=3.8,<4.0"
summary = "Implementation of fsspec for Box file storage"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"boxsdk[jwt]<4.0,>=3.7",
"fsspec>=2023.4",
@@ -511,7 +511,7 @@ files = [
name = "boxsdk"
version = "3.11.0"
summary = "Official Box Python SDK"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"attrs>=17.3.0",
"python-dateutil",
@@ -529,7 +529,7 @@ name = "boxsdk"
version = "3.11.0"
extras = ["jwt"]
summary = "Official Box Python SDK"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"boxsdk==3.11.0",
"cryptography>=3",
@@ -545,7 +545,7 @@ name = "cachetools"
version = "5.3.3"
requires_python = ">=3.7"
summary = "Extensible memoizing collections and decorators"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"},
{file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"},
@@ -575,13 +575,13 @@ files = [
[[package]]
name = "certifi"
-version = "2024.6.2"
+version = "2024.7.4"
requires_python = ">=3.6"
summary = "Python package for providing Mozilla's CA Bundle."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
- {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
- {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
+ {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"},
+ {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"},
]
[[package]]
@@ -589,7 +589,7 @@ name = "cffi"
version = "1.16.0"
requires_python = ">=3.8"
summary = "Foreign Function Interface for Python calling C code."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pycparser",
]
@@ -635,7 +635,7 @@ name = "charset-normalizer"
version = "3.3.2"
requires_python = ">=3.7.0"
summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
{file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
@@ -691,7 +691,7 @@ name = "click"
version = "8.1.7"
requires_python = ">=3.7"
summary = "Composable command line interface toolkit"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"colorama; platform_system == \"Windows\"",
]
@@ -747,7 +747,7 @@ name = "colorama"
version = "0.4.6"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7"
summary = "Cross-platform colored terminal text."
-groups = ["default", "test"]
+groups = ["default", "dev", "test"]
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
files = [
{file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"},
@@ -768,7 +768,7 @@ name = "cryptography"
version = "41.0.7"
requires_python = ">=3.7"
summary = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"cffi>=1.12",
]
@@ -803,7 +803,7 @@ name = "dataclasses-json"
version = "0.6.7"
requires_python = "<4.0,>=3.7"
summary = "Easily serialize dataclasses to and from JSON."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"marshmallow<4.0.0,>=3.18.0",
"typing-inspect<1,>=0.4.0",
@@ -818,7 +818,7 @@ name = "decorator"
version = "5.1.1"
requires_python = ">=3.5"
summary = "Decorators for Humans"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"},
{file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"},
@@ -840,7 +840,7 @@ name = "deprecated"
version = "1.2.14"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
summary = "Python @deprecated decorator to deprecate old python classes, functions or methods."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"wrapt<2,>=1.10",
]
@@ -853,7 +853,7 @@ files = [
name = "dirtyjson"
version = "1.0.8"
summary = "JSON decoder for Python that can extract data from the muck"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "dirtyjson-1.0.8-py3-none-any.whl", hash = "sha256:125e27248435a58acace26d5c2c4c11a1c0de0a9c5124c5a94ba78e517d74f53"},
{file = "dirtyjson-1.0.8.tar.gz", hash = "sha256:90ca4a18f3ff30ce849d100dcf4a003953c79d3a2348ef056f1d9c22231a25fd"},
@@ -864,7 +864,7 @@ name = "distro"
version = "1.9.0"
requires_python = ">=3.6"
summary = "Distro - an OS platform information API"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
{file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
@@ -960,16 +960,16 @@ files = [
[[package]]
name = "django-timezone-field"
-version = "6.1.0"
-requires_python = ">=3.8,<4.0"
+version = "7.0"
+requires_python = "<4.0,>=3.8"
summary = "A Django app providing DB, form, and REST framework fields for zoneinfo and pytz timezone objects."
groups = ["default"]
dependencies = [
"Django<6.0,>=3.2",
]
files = [
- {file = "django_timezone_field-6.1.0-py3-none-any.whl", hash = "sha256:0095f43da716552fcc606783cfb42cb025892514f1ec660ebfa96186eb83b74c"},
- {file = "django_timezone_field-6.1.0.tar.gz", hash = "sha256:d40f7059d7bae4075725d04a9dae601af9fe3c7f0119a69b0e2c6194a782f797"},
+ {file = "django_timezone_field-7.0-py3-none-any.whl", hash = "sha256:3232e7ecde66ba4464abb6f9e6b8cc739b914efb9b29dc2cf2eee451f7cc2acb"},
+ {file = "django_timezone_field-7.0.tar.gz", hash = "sha256:aa6f4965838484317b7f08d22c0d91a53d64e7bbbd34264468ae83d4023898a7"},
]
[[package]]
@@ -992,7 +992,7 @@ name = "docker"
version = "6.1.3"
requires_python = ">=3.7"
summary = "A Python library for the Docker Engine API."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"packaging>=14.0",
"pywin32>=304; sys_platform == \"win32\"",
@@ -1010,7 +1010,7 @@ name = "docstring-parser"
version = "0.16"
requires_python = ">=3.6,<4.0"
summary = "Parse Python docstrings in reST, Google and Numpydoc format"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637"},
{file = "docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e"},
@@ -1055,7 +1055,7 @@ files = [
name = "dropbox"
version = "12.0.2"
summary = "Official Dropbox API Client"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"requests>=2.16.2",
"six>=1.12.0",
@@ -1071,7 +1071,7 @@ name = "dropboxdrivefs"
version = "1.3.1"
requires_python = ">=3.5"
summary = "Dropbox implementation for fsspec module"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"dropbox",
"fsspec",
@@ -1086,7 +1086,7 @@ name = "environs"
version = "9.5.0"
requires_python = ">=3.6"
summary = "simplified environment variable parsing"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"marshmallow>=3.0.0",
"python-dotenv",
@@ -1101,7 +1101,7 @@ name = "exceptiongroup"
version = "1.2.1"
requires_python = ">=3.7"
summary = "Backport of PEP 654 (exception groups)"
-groups = ["default", "test"]
+groups = ["default", "dev", "test"]
marker = "python_version < \"3.11\""
files = [
{file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"},
@@ -1113,7 +1113,7 @@ name = "filelock"
version = "3.15.4"
requires_python = ">=3.8"
summary = "A platform independent file lock."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"},
{file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"},
@@ -1123,7 +1123,7 @@ files = [
name = "filetype"
version = "1.2.0"
summary = "Infer file type and MIME type of any file/buffer. No external dependencies."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "filetype-1.2.0-py2.py3-none-any.whl", hash = "sha256:7ce71b6880181241cf7ac8697a2f1eb6a8bd9b429f7ad6d27b8db9ba5f1c2d25"},
{file = "filetype-1.2.0.tar.gz", hash = "sha256:66b56cd6474bf41d8c54660347d37afcc3f7d1970648de365c102ef77548aadb"},
@@ -1152,7 +1152,7 @@ name = "frozenlist"
version = "1.4.1"
requires_python = ">=3.8"
summary = "A list-like structure which implements collections.abc.MutableSequence"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"},
{file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"},
@@ -1208,7 +1208,7 @@ name = "fsspec"
version = "2023.6.0"
requires_python = ">=3.8"
summary = "File-system specification"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "fsspec-2023.6.0-py3-none-any.whl", hash = "sha256:1cbad1faef3e391fba6dc005ae9b5bdcbf43005c9167ce78c915549c352c869a"},
{file = "fsspec-2023.6.0.tar.gz", hash = "sha256:d0b2f935446169753e7a5c5c55681c54ea91996cc67be93c39a154fb3a2742af"},
@@ -1218,7 +1218,7 @@ files = [
name = "funcy"
version = "2.0"
summary = "A fancy and practical functional tools"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "funcy-2.0-py2.py3-none-any.whl", hash = "sha256:53df23c8bb1651b12f095df764bfb057935d49537a56de211b098f4c79614bb0"},
{file = "funcy-2.0.tar.gz", hash = "sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb"},
@@ -1229,7 +1229,7 @@ name = "gcsfs"
version = "2023.6.0"
requires_python = ">=3.8"
summary = "Convenient Filesystem interface over GCS"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiohttp!=4.0.0a0,!=4.0.0a1",
"decorator>4.1.2",
@@ -1249,7 +1249,7 @@ name = "google-ai-generativelanguage"
version = "0.4.0"
requires_python = ">=3.7"
summary = "Google Ai Generativelanguage API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0dev,>=1.34.0",
"proto-plus<2.0.0dev,>=1.22.3",
@@ -1265,7 +1265,7 @@ name = "google-api-core"
version = "2.19.1"
requires_python = ">=3.7"
summary = "Google API client core library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-auth<3.0.dev0,>=2.14.1",
"googleapis-common-protos<2.0.dev0,>=1.56.2",
@@ -1284,7 +1284,7 @@ version = "2.19.1"
extras = ["grpc"]
requires_python = ">=3.7"
summary = "Google API client core library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core==2.19.1",
"grpcio-status<2.0.dev0,>=1.33.2",
@@ -1299,10 +1299,10 @@ files = [
[[package]]
name = "google-api-python-client"
-version = "2.135.0"
+version = "2.136.0"
requires_python = ">=3.7"
summary = "Google API Client Library for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0.dev0,>=1.31.5",
"google-auth!=2.24.0,!=2.25.0,<3.0.0.dev0,>=1.32.0",
@@ -1311,8 +1311,8 @@ dependencies = [
"uritemplate<5,>=3.0.1",
]
files = [
- {file = "google-api-python-client-2.135.0.tar.gz", hash = "sha256:b552a28123ed95493035698db80e8ed78c9106a8b422e63a175150b9b55b704e"},
- {file = "google_api_python_client-2.135.0-py2.py3-none-any.whl", hash = "sha256:91742fa4c779d48456c0256ef346fa1cc185ba427176d3277e35141fa3268026"},
+ {file = "google-api-python-client-2.136.0.tar.gz", hash = "sha256:161c722c8864e7ed39393e2b7eea76ef4e1c933a6a59f9d7c70409b6635f225d"},
+ {file = "google_api_python_client-2.136.0-py2.py3-none-any.whl", hash = "sha256:5a554c8b5edf0a609b905d89d7ced82e8f6ac31da1e4d8d5684ef63dbc0e49f5"},
]
[[package]]
@@ -1320,7 +1320,7 @@ name = "google-auth"
version = "2.20.0"
requires_python = ">=3.6"
summary = "Google Authentication Library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"cachetools<6.0,>=2.0.0",
"pyasn1-modules>=0.2.1",
@@ -1337,7 +1337,7 @@ files = [
name = "google-auth-httplib2"
version = "0.2.0"
summary = "Google Authentication Library: httplib2 transport"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-auth",
"httplib2>=0.19.0",
@@ -1349,25 +1349,25 @@ files = [
[[package]]
name = "google-auth-oauthlib"
-version = "1.2.0"
+version = "1.2.1"
requires_python = ">=3.6"
summary = "Google Authentication Library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-auth>=2.15.0",
"requests-oauthlib>=0.7.0",
]
files = [
- {file = "google-auth-oauthlib-1.2.0.tar.gz", hash = "sha256:292d2d3783349f2b0734a0a0207b1e1e322ac193c2c09d8f7c613fb7cc501ea8"},
- {file = "google_auth_oauthlib-1.2.0-py2.py3-none-any.whl", hash = "sha256:297c1ce4cb13a99b5834c74a1fe03252e1e499716718b190f56bcb9c4abc4faf"},
+ {file = "google_auth_oauthlib-1.2.1-py2.py3-none-any.whl", hash = "sha256:2d58a27262d55aa1b87678c3ba7142a080098cbc2024f903c62355deb235d91f"},
+ {file = "google_auth_oauthlib-1.2.1.tar.gz", hash = "sha256:afd0cad092a2eaa53cd8e8298557d6de1034c6cb4a740500b5357b648af97263"},
]
[[package]]
name = "google-cloud-aiplatform"
-version = "1.57.0"
+version = "1.58.0"
requires_python = ">=3.8"
summary = "Vertex AI API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"docstring-parser<1",
"google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,<3.0.0dev,>=1.34.1",
@@ -1382,8 +1382,8 @@ dependencies = [
"shapely<3.0.0dev",
]
files = [
- {file = "google-cloud-aiplatform-1.57.0.tar.gz", hash = "sha256:113905f100cb0a9ad744a2445a7675f92f28600233ba499614aa704d11a809b7"},
- {file = "google_cloud_aiplatform-1.57.0-py2.py3-none-any.whl", hash = "sha256:ca5391a56e0cc8f4ed39a2beb7be02f51936ff04fd5304775a72a86c345d0e47"},
+ {file = "google-cloud-aiplatform-1.58.0.tar.gz", hash = "sha256:7a05aceac4a6c7eaa26e684e9f202b829cc7e57f82bffe7281684275a553fcad"},
+ {file = "google_cloud_aiplatform-1.58.0-py2.py3-none-any.whl", hash = "sha256:21f1320860f4916183ec939fdf2ff3fc1d7fdde97fe5795974257ab21f9458ec"},
]
[[package]]
@@ -1391,7 +1391,7 @@ name = "google-cloud-bigquery"
version = "3.11.4"
requires_python = ">=3.7"
summary = "Google BigQuery API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0dev,>=1.31.5",
"google-cloud-core<3.0.0dev,>=1.6.0",
@@ -1414,7 +1414,7 @@ name = "google-cloud-core"
version = "2.4.1"
requires_python = ">=3.7"
summary = "Google Cloud API client core library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0dev,>=1.31.6",
"google-auth<3.0dev,>=1.25.0",
@@ -1426,20 +1426,20 @@ files = [
[[package]]
name = "google-cloud-resource-manager"
-version = "1.12.3"
+version = "1.12.4"
requires_python = ">=3.7"
summary = "Google Cloud Resource Manager API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0dev,>=1.34.1",
"google-auth!=2.24.0,!=2.25.0,<3.0.0dev,>=2.14.1",
"grpc-google-iam-v1<1.0.0dev,>=0.12.4",
"proto-plus<2.0.0dev,>=1.22.3",
- "protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5",
+ "protobuf!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0dev,>=3.20.2",
]
files = [
- {file = "google-cloud-resource-manager-1.12.3.tar.gz", hash = "sha256:809851824119834e4f2310b2c4f38621c1d16b2bb14d5b9f132e69c79d355e7f"},
- {file = "google_cloud_resource_manager-1.12.3-py2.py3-none-any.whl", hash = "sha256:92be7d6959927b76d90eafc4028985c37975a46ded5466a018f02e8649e113d4"},
+ {file = "google-cloud-resource-manager-1.12.4.tar.gz", hash = "sha256:3eda914a925e92465ef80faaab7e0f7a9312d486dd4e123d2c76e04bac688ff0"},
+ {file = "google_cloud_resource_manager-1.12.4-py2.py3-none-any.whl", hash = "sha256:0b6663585f7f862166c0fb4c55fdda721fce4dc2dc1d5b52d03ee4bf2653a85f"},
]
[[package]]
@@ -1447,7 +1447,7 @@ name = "google-cloud-secret-manager"
version = "2.16.1"
requires_python = ">=3.7"
summary = "Google Cloud Secret Manager API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core[grpc]!=2.0.*,!=2.1.*,!=2.10.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,<3.0.0dev,>=1.34.0",
"grpc-google-iam-v1<1.0.0dev,>=0.12.4",
@@ -1465,7 +1465,7 @@ name = "google-cloud-storage"
version = "2.9.0"
requires_python = ">=3.7"
summary = "Google Cloud Storage API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-api-core!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0,<3.0.0dev,>=1.31.5",
"google-auth<3.0dev,>=1.25.0",
@@ -1483,7 +1483,7 @@ name = "google-crc32c"
version = "1.5.0"
requires_python = ">=3.7"
summary = "A python wrapper of the C library 'Google CRC32C'"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"},
{file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"},
@@ -1539,7 +1539,7 @@ name = "google-generativeai"
version = "0.4.1"
requires_python = ">=3.9"
summary = "Google Generative AI High level API client library and tools."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-ai-generativelanguage==0.4.0",
"google-api-core",
@@ -1558,7 +1558,7 @@ name = "google-resumable-media"
version = "2.7.1"
requires_python = ">=3.7"
summary = "Utilities for Google Media Downloads and Resumable Uploads"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-crc32c<2.0dev,>=1.0",
]
@@ -1572,7 +1572,7 @@ name = "googleapis-common-protos"
version = "1.63.2"
requires_python = ">=3.7"
summary = "Common protobufs used in Google APIs"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"protobuf!=3.20.0,!=3.20.1,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<6.0.0.dev0,>=3.20.2",
]
@@ -1587,7 +1587,7 @@ version = "1.63.2"
extras = ["grpc"]
requires_python = ">=3.7"
summary = "Common protobufs used in Google APIs"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"googleapis-common-protos==1.63.2",
"grpcio<2.0.0.dev0,>=1.44.0",
@@ -1602,7 +1602,7 @@ name = "greenlet"
version = "3.0.3"
requires_python = ">=3.7"
summary = "Lightweight in-process concurrent programming"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
{file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"},
@@ -1640,7 +1640,7 @@ name = "grpc-google-iam-v1"
version = "0.13.1"
requires_python = ">=3.7"
summary = "IAM API client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"googleapis-common-protos[grpc]<2.0.0dev,>=1.56.0",
"grpcio<2.0.0dev,>=1.44.0",
@@ -1656,7 +1656,7 @@ name = "grpcio"
version = "1.60.0"
requires_python = ">=3.7"
summary = "HTTP/2-based RPC framework"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"},
{file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"},
@@ -1693,7 +1693,7 @@ name = "grpcio-status"
version = "1.60.0"
requires_python = ">=3.6"
summary = "Status proto mapping for gRPC"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"googleapis-common-protos>=1.5.5",
"grpcio>=1.60.0",
@@ -1709,7 +1709,7 @@ name = "grpcio-tools"
version = "1.60.0"
requires_python = ">=3.7"
summary = "Protobuf code generator for gRPC"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"grpcio>=1.60.0",
"protobuf<5.0dev,>=4.21.6",
@@ -1765,7 +1765,7 @@ name = "h11"
version = "0.14.0"
requires_python = ">=3.7"
summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
{file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
@@ -1776,7 +1776,7 @@ name = "h2"
version = "4.1.0"
requires_python = ">=3.6.1"
summary = "HTTP/2 State-Machine based protocol implementation"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"hpack<5,>=4.0",
"hyperframe<7,>=6.0",
@@ -1791,7 +1791,7 @@ name = "hpack"
version = "4.0.0"
requires_python = ">=3.6.1"
summary = "Pure-Python HPACK header compression"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"},
{file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"},
@@ -1802,7 +1802,7 @@ name = "httpcore"
version = "1.0.5"
requires_python = ">=3.8"
summary = "A minimal low-level HTTP client."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"certifi",
"h11<0.15,>=0.13",
@@ -1817,7 +1817,7 @@ name = "httplib2"
version = "0.22.0"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
summary = "A comprehensive HTTP client library."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pyparsing!=3.0.0,!=3.0.1,!=3.0.2,!=3.0.3,<4,>=2.4.2; python_version > \"3.0\"",
]
@@ -1831,7 +1831,7 @@ name = "httpx"
version = "0.27.0"
requires_python = ">=3.8"
summary = "The next generation HTTP client."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"anyio",
"certifi",
@@ -1850,7 +1850,7 @@ version = "0.27.0"
extras = ["http2"]
requires_python = ">=3.8"
summary = "The next generation HTTP client."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"h2<5,>=3",
"httpx==0.27.0",
@@ -1865,7 +1865,7 @@ name = "huggingface-hub"
version = "0.23.4"
requires_python = ">=3.8.0"
summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"filelock",
"fsspec>=2023.5.0",
@@ -1882,13 +1882,13 @@ files = [
[[package]]
name = "humanize"
-version = "4.9.0"
+version = "4.10.0"
requires_python = ">=3.8"
summary = "Python humanize utilities"
groups = ["default"]
files = [
- {file = "humanize-4.9.0-py3-none-any.whl", hash = "sha256:ce284a76d5b1377fd8836733b983bfb0b76f1aa1c090de2566fcf008d7f6ab16"},
- {file = "humanize-4.9.0.tar.gz", hash = "sha256:582a265c931c683a7e9b8ed9559089dea7edcf6cc95be39a3cbc2c5d5ac2bcfa"},
+ {file = "humanize-4.10.0-py3-none-any.whl", hash = "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6"},
+ {file = "humanize-4.10.0.tar.gz", hash = "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978"},
]
[[package]]
@@ -1896,7 +1896,7 @@ name = "hyperframe"
version = "6.0.1"
requires_python = ">=3.6.1"
summary = "HTTP/2 framing layer for Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"},
{file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"},
@@ -1907,7 +1907,7 @@ name = "idna"
version = "3.7"
requires_python = ">=3.5"
summary = "Internationalized Domain Names in Applications (IDNA)"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"},
{file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"},
@@ -1939,7 +1939,7 @@ files = [
name = "isodate"
version = "0.6.1"
summary = "An ISO 8601 date/time/duration parser and formatter"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"six",
]
@@ -1953,7 +1953,7 @@ name = "jmespath"
version = "1.0.1"
requires_python = ">=3.7"
summary = "JSON Matching Expressions"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
@@ -1964,7 +1964,7 @@ name = "joblib"
version = "1.4.2"
requires_python = ">=3.8"
summary = "Lightweight pipelining with Python functions"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"},
{file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"},
@@ -1975,7 +1975,7 @@ name = "jsonschema"
version = "4.18.6"
requires_python = ">=3.8"
summary = "An implementation of JSON Schema validation for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"attrs>=22.2.0",
"jsonschema-specifications>=2023.03.6",
@@ -1992,7 +1992,7 @@ name = "jsonschema-specifications"
version = "2023.12.1"
requires_python = ">=3.8"
summary = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"referencing>=0.31.0",
]
@@ -2006,7 +2006,7 @@ name = "kombu"
version = "5.3.7"
requires_python = ">=3.8"
summary = "Messaging library for Python."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"amqp<6.0.0,>=5.1.1",
"typing-extensions; python_version < \"3.10\"",
@@ -2022,7 +2022,7 @@ name = "llama-cloud"
version = "0.0.6"
requires_python = "<4,>=3.8"
summary = ""
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"httpx>=0.20.0",
"pydantic>=1.10",
@@ -2037,7 +2037,7 @@ name = "llama-index"
version = "0.10.38"
requires_python = "<4.0,>=3.8.1"
summary = "Interface between LLMs and your data"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-agent-openai<0.3.0,>=0.1.4",
"llama-index-cli<0.2.0,>=0.1.2",
@@ -2059,18 +2059,18 @@ files = [
[[package]]
name = "llama-index-agent-openai"
-version = "0.2.7"
+version = "0.2.8"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index agent openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.41",
"llama-index-llms-openai<0.2.0,>=0.1.5",
"openai>=1.14.0",
]
files = [
- {file = "llama_index_agent_openai-0.2.7-py3-none-any.whl", hash = "sha256:34be65011a508dd8cab0c9a606594f28075b98b0cebe69e3c543adc8564fee0d"},
- {file = "llama_index_agent_openai-0.2.7.tar.gz", hash = "sha256:13ce535f03e32c821763c01e26af4222f3981178622414d3868013a1946e8124"},
+ {file = "llama_index_agent_openai-0.2.8-py3-none-any.whl", hash = "sha256:e3e5c58cb1347d336a41391c60539098655f38fa57c1023063df5cfc5f468ef6"},
+ {file = "llama_index_agent_openai-0.2.8.tar.gz", hash = "sha256:7f123e86992d6c33f310d36331c32989a921cb76abe9d0fd1cf1e40a7eb65d2e"},
]
[[package]]
@@ -2078,7 +2078,7 @@ name = "llama-index-cli"
version = "0.1.12"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index cli"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.11.post1",
"llama-index-embeddings-openai<0.2.0,>=0.1.1",
@@ -2091,10 +2091,10 @@ files = [
[[package]]
name = "llama-index-core"
-version = "0.10.50.post1"
+version = "0.10.53.post1"
requires_python = "<4.0,>=3.8.1"
summary = "Interface between LLMs and your data"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyYAML>=6.0.1",
"SQLAlchemy[asyncio]>=1.4.49",
@@ -2121,8 +2121,8 @@ dependencies = [
"wrapt",
]
files = [
- {file = "llama_index_core-0.10.50.post1-py3-none-any.whl", hash = "sha256:b97cbb26675bcd9318747479529ce2f74cc75ed83852900ba053f4a980cb26f6"},
- {file = "llama_index_core-0.10.50.post1.tar.gz", hash = "sha256:cb5999fc09a951b1c5f1118ddbb8573da20116510f070f689231f471ca38aa3f"},
+ {file = "llama_index_core-0.10.53.post1-py3-none-any.whl", hash = "sha256:565d0967dd8f05456c66f5aca6ee6ee3dbc5645b6a55c81957f776ff029d6a99"},
+ {file = "llama_index_core-0.10.53.post1.tar.gz", hash = "sha256:6219a737b66c887b406814b0d9db6e24addd35f3136ffb6a879e54ac3f133406"},
]
[[package]]
@@ -2130,7 +2130,7 @@ name = "llama-index-embeddings-azure-openai"
version = "0.1.6"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index embeddings azure openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.11.post1",
"llama-index-embeddings-openai<0.2.0,>=0.1.3",
@@ -2146,7 +2146,7 @@ name = "llama-index-embeddings-google"
version = "0.1.5"
requires_python = "<4.0,>=3.9"
summary = "llama-index embeddings google integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-generativeai<0.5.0,>=0.4.1",
"llama-index-core<0.11.0,>=0.10.11.post1",
@@ -2161,7 +2161,7 @@ name = "llama-index-embeddings-ollama"
version = "0.1.2"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index embeddings ollama integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
]
@@ -2175,7 +2175,7 @@ name = "llama-index-embeddings-openai"
version = "0.1.10"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index embeddings openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
]
@@ -2189,7 +2189,7 @@ name = "llama-index-indices-managed-llama-cloud"
version = "0.1.6"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index indices llama-cloud integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.0",
"llamaindex-py-client<0.2.0,>=0.1.19",
@@ -2204,7 +2204,7 @@ name = "llama-index-legacy"
version = "0.9.48"
requires_python = ">=3.8.1,<4.0"
summary = "Interface between LLMs and your data"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"SQLAlchemy[asyncio]>=1.4.49",
"aiohttp<4.0.0,>=3.8.6",
@@ -2235,7 +2235,7 @@ name = "llama-index-llms-anthropic"
version = "0.1.11"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index llms anthropic integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"anthropic<0.24.0,>=0.23.1",
"llama-index-core<0.11.0,>=0.10.1",
@@ -2250,7 +2250,7 @@ name = "llama-index-llms-anyscale"
version = "0.1.3"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index llms anyscale integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
"llama-index-llms-openai<0.2.0,>=0.1.1",
@@ -2265,7 +2265,7 @@ name = "llama-index-llms-azure-openai"
version = "0.1.5"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index llms azure openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"azure-identity<2.0.0,>=1.15.0",
"httpx",
@@ -2282,7 +2282,7 @@ name = "llama-index-llms-mistralai"
version = "0.1.10"
requires_python = "<4.0,>=3.9"
summary = "llama-index llms mistral ai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.24",
"mistralai>=0.1.3",
@@ -2297,7 +2297,7 @@ name = "llama-index-llms-ollama"
version = "0.1.3"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index llms ollama integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
]
@@ -2308,16 +2308,16 @@ files = [
[[package]]
name = "llama-index-llms-openai"
-version = "0.1.23"
+version = "0.1.25"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index llms openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.24",
]
files = [
- {file = "llama_index_llms_openai-0.1.23-py3-none-any.whl", hash = "sha256:38753baac823a0459b8f6511258d84020219cb6b223a9866ec526e83ddbc94e1"},
- {file = "llama_index_llms_openai-0.1.23.tar.gz", hash = "sha256:b40289c47fda9df86c8177999d6af0a47fce14fe4324572ea2fe25bbdbd05021"},
+ {file = "llama_index_llms_openai-0.1.25-py3-none-any.whl", hash = "sha256:d1922ad2f2bb4697a6ee2f61793aeb2f5c5606302639559dd9bb0a1d6ab9e73f"},
+ {file = "llama_index_llms_openai-0.1.25.tar.gz", hash = "sha256:49750f71d58e762a597ce639a2ccb119195c47aefa8a48c55c77be8a5cec4bc5"},
]
[[package]]
@@ -2325,7 +2325,7 @@ name = "llama-index-llms-palm"
version = "0.1.5"
requires_python = "<4.0,>=3.9"
summary = "llama-index llms palm integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-generativeai<0.5.0,>=0.4.1",
"llama-index-core<0.11.0,>=0.10.11.post1",
@@ -2340,7 +2340,7 @@ name = "llama-index-llms-replicate"
version = "0.1.3"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index llms replicate integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
]
@@ -2354,7 +2354,7 @@ name = "llama-index-llms-vertex"
version = "0.1.8"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index llms vertex integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"google-cloud-aiplatform<2.0.0,>=1.39.0",
"llama-index-core<0.11.0,>=0.10.1",
@@ -2367,17 +2367,16 @@ files = [
[[package]]
name = "llama-index-multi-modal-llms-openai"
-version = "0.1.6"
+version = "0.1.7"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index multi-modal-llms openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
"llama-index-llms-openai<0.2.0,>=0.1.1",
]
files = [
- {file = "llama_index_multi_modal_llms_openai-0.1.6-py3-none-any.whl", hash = "sha256:0b6950a6cf98d16ade7d3b9dd0821ecfe457ca103819ae6c3e66cfc9634ca646"},
- {file = "llama_index_multi_modal_llms_openai-0.1.6.tar.gz", hash = "sha256:10de75a877a444af35306385faad9b9f0624391e55309970564114a080a0578c"},
+ {file = "llama_index_multi_modal_llms_openai-0.1.7-py3-none-any.whl", hash = "sha256:a86fa4a8f8372da31b978cc28d14da75ce6a39f42b1eea90cd3ac93017644766"},
]
[[package]]
@@ -2385,7 +2384,7 @@ name = "llama-index-program-openai"
version = "0.1.6"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index program openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-agent-openai<0.3.0,>=0.1.1",
"llama-index-core<0.11.0,>=0.10.1",
@@ -2401,7 +2400,7 @@ name = "llama-index-question-gen-openai"
version = "0.1.3"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index question_gen openai integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
"llama-index-llms-openai<0.2.0,>=0.1.1",
@@ -2414,10 +2413,10 @@ files = [
[[package]]
name = "llama-index-readers-file"
-version = "0.1.25"
+version = "0.1.29"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index readers file integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"beautifulsoup4<5.0.0,>=4.12.3",
"llama-index-core<0.11.0,>=0.10.37.post1",
@@ -2425,23 +2424,23 @@ dependencies = [
"striprtf<0.0.27,>=0.0.26",
]
files = [
- {file = "llama_index_readers_file-0.1.25-py3-none-any.whl", hash = "sha256:bc659e432d441c445e110580340675aa60abae1d82add4f65e559dfe8add541b"},
- {file = "llama_index_readers_file-0.1.25.tar.gz", hash = "sha256:238ddd98aa377d6a44322013eb848056037c80ad84571ea5bf451a640fff4d5c"},
+ {file = "llama_index_readers_file-0.1.29-py3-none-any.whl", hash = "sha256:b25f3dbf7bf3e0635290e499e808db5ba955eab67f205a3ff1cea6a4eb93556a"},
+ {file = "llama_index_readers_file-0.1.29.tar.gz", hash = "sha256:f9f696e738383e7d14078e75958fba5a7030f7994a20586e3140e1ca41395a54"},
]
[[package]]
name = "llama-index-readers-llama-parse"
-version = "0.1.4"
+version = "0.1.6"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index readers llama-parse integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.7",
- "llama-parse<0.5.0,>=0.4.0",
+ "llama-parse>=0.4.0",
]
files = [
- {file = "llama_index_readers_llama_parse-0.1.4-py3-none-any.whl", hash = "sha256:c4914b37d12cceee56fbd185cca80f87d60acbf8ea7a73f9719610180be1fcdd"},
- {file = "llama_index_readers_llama_parse-0.1.4.tar.gz", hash = "sha256:78608b193c818894aefeee0aa303f02b7f80f2e4caf13866c2fd3b0b1023e2c0"},
+ {file = "llama_index_readers_llama_parse-0.1.6-py3-none-any.whl", hash = "sha256:71d445a2357ce4c632e0fada7c913ac62790e77c062f12d916dd86378380ff1f"},
+ {file = "llama_index_readers_llama_parse-0.1.6.tar.gz", hash = "sha256:04f2dcfbb0fb87ce70890f5a2f4f89941d79be6a818b43738f053560e4b451cf"},
]
[[package]]
@@ -2449,7 +2448,7 @@ name = "llama-index-vector-stores-milvus"
version = "0.1.18"
requires_python = "<4.0,>=3.8.1"
summary = "llama-index vector_stores milvus integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
"pymilvus<3.0.0,>=2.3.6",
@@ -2464,7 +2463,7 @@ name = "llama-index-vector-stores-pinecone"
version = "0.1.4"
requires_python = ">=3.8.1,<3.13"
summary = "llama-index vector_stores pinecone integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.11.post1",
"pinecone-client<4.0.0,>=3.0.2",
@@ -2479,7 +2478,7 @@ name = "llama-index-vector-stores-postgres"
version = "0.1.3"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index vector_stores postgres integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"asyncpg<0.30.0,>=0.29.0",
"llama-index-core<0.11.0,>=0.10.1",
@@ -2497,7 +2496,7 @@ name = "llama-index-vector-stores-qdrant"
version = "0.2.8"
requires_python = "<3.13,>=3.9"
summary = "llama-index vector_stores qdrant integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"grpcio<2.0.0,>=1.60.0",
"llama-index-core<0.11.0,>=0.10.1",
@@ -2513,7 +2512,7 @@ name = "llama-index-vector-stores-weaviate"
version = "0.1.4"
requires_python = ">=3.8.1,<4.0"
summary = "llama-index vector_stores weaviate integration"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core<0.11.0,>=0.10.1",
"weaviate-client<4.0.0,>=3.26.2",
@@ -2528,7 +2527,7 @@ name = "llama-parse"
version = "0.4.1"
requires_python = "<4.0,>=3.8.1"
summary = "Parse files into RAG-Optimized formats."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"llama-index-core>=0.10.29",
]
@@ -2542,7 +2541,7 @@ name = "llamaindex-py-client"
version = "0.1.19"
requires_python = "<4,>=3.8"
summary = ""
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"httpx>=0.20.0",
"pydantic>=1.10",
@@ -2557,7 +2556,7 @@ name = "marshmallow"
version = "3.21.3"
requires_python = ">=3.8"
summary = "A lightweight library for converting complex datatypes to and from native Python datatypes."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"packaging>=17.0",
]
@@ -2568,32 +2567,35 @@ files = [
[[package]]
name = "milvus-lite"
-version = "2.4.7"
+version = "2.4.8"
requires_python = ">=3.7"
summary = "A lightweight version of Milvus wrapped with Python."
-groups = ["default"]
+groups = ["default", "dev"]
marker = "sys_platform != \"win32\""
+dependencies = [
+ "tqdm",
+]
files = [
- {file = "milvus_lite-2.4.7-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:c828190118b104b05b8c8e0b5a4147811c86b54b8fb67bc2e726ad10fc0b544e"},
- {file = "milvus_lite-2.4.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e1537633c39879714fb15082be56a4b97f74c905a6e98e302ec01320561081af"},
- {file = "milvus_lite-2.4.7-py3-none-manylinux2014_aarch64.whl", hash = "sha256:fcb909d38c83f21478ca9cb500c84264f988c69f62715ae9462e966767fb76dd"},
- {file = "milvus_lite-2.4.7-py3-none-manylinux2014_x86_64.whl", hash = "sha256:f016474d663045787dddf1c3aad13b7d8b61fd329220318f858184918143dcbf"},
+ {file = "milvus_lite-2.4.8-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:b7e90b34b214884cd44cdc112ab243d4cb197b775498355e2437b6cafea025fe"},
+ {file = "milvus_lite-2.4.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:519dfc62709d8f642d98a1c5b1dcde7080d107e6e312d677fef5a3412a40ac08"},
+ {file = "milvus_lite-2.4.8-py3-none-manylinux2014_aarch64.whl", hash = "sha256:b21f36d24cbb0e920b4faad607019bb28c1b2c88b4d04680ac8c7697a4ae8a4d"},
+ {file = "milvus_lite-2.4.8-py3-none-manylinux2014_x86_64.whl", hash = "sha256:08332a2b9abfe7c4e1d7926068937e46f8fb81f2707928b7bc02c9dc99cebe41"},
]
[[package]]
name = "mistralai"
-version = "0.4.1"
+version = "0.4.2"
requires_python = "<4.0,>=3.9"
summary = ""
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"httpx<1,>=0.25",
"orjson<3.11,>=3.9.10",
"pydantic<3,>=2.5.2",
]
files = [
- {file = "mistralai-0.4.1-py3-none-any.whl", hash = "sha256:c11d636093c9eec923f00ac9dff13e4619eb751d44d7a3fea5b665a0e8f99f93"},
- {file = "mistralai-0.4.1.tar.gz", hash = "sha256:22a88c24b9e3176021b466c1d78e6582eef700688803460fd449254fb7647979"},
+ {file = "mistralai-0.4.2-py3-none-any.whl", hash = "sha256:63c98eea139585f0a3b2c4c6c09c453738bac3958055e6f2362d3866e96b0168"},
+ {file = "mistralai-0.4.2.tar.gz", hash = "sha256:5eb656710517168ae053f9847b0bb7f617eda07f1f93f946ad6c91a4d407fd93"},
]
[[package]]
@@ -2601,7 +2603,7 @@ name = "msal"
version = "1.29.0"
requires_python = ">=3.7"
summary = "The Microsoft Authentication Library (MSAL) for Python library enables your app to access the Microsoft Cloud by supporting authentication of users with Microsoft Azure Active Directory accounts (AAD) and Microsoft Accounts (MSA) using industry standard OAuth2 and OpenID Connect."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyJWT[crypto]<3,>=1.0.0",
"cryptography<45,>=2.5",
@@ -2617,7 +2619,7 @@ name = "msal-extensions"
version = "1.2.0"
requires_python = ">=3.7"
summary = "Microsoft Authentication Library extensions (MSAL EX) provides a persistence API that can save your data on disk, encrypted on Windows, macOS and Linux. Concurrent data access will be coordinated by a file lock mechanism."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"msal<2,>=1.29",
"portalocker<3,>=1.4",
@@ -2650,7 +2652,7 @@ name = "multidict"
version = "6.0.5"
requires_python = ">=3.7"
summary = "multidict implementation"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
{file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
@@ -2706,7 +2708,7 @@ name = "mypy-extensions"
version = "1.0.0"
requires_python = ">=3.5"
summary = "Type system extensions for programs checked with the mypy type checker."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
{file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
@@ -2717,7 +2719,7 @@ name = "nest-asyncio"
version = "1.6.0"
requires_python = ">=3.5"
summary = "Patch asyncio to allow nested event loops"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
{file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
@@ -2728,7 +2730,7 @@ name = "networkx"
version = "3.2.1"
requires_python = ">=3.9"
summary = "Python package for creating and manipulating graphs and networks"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"},
{file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"},
@@ -2739,7 +2741,7 @@ name = "nltk"
version = "3.8.1"
requires_python = ">=3.7"
summary = "Natural Language Toolkit"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"click",
"joblib",
@@ -2756,7 +2758,7 @@ name = "numpy"
version = "1.26.4"
requires_python = ">=3.9"
summary = "Fundamental package for array computing in Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"},
{file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"},
@@ -2792,7 +2794,7 @@ files = [
name = "oauth2client"
version = "4.1.3"
summary = "OAuth 2.0 client library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"httplib2>=0.9.1",
"pyasn1-modules>=0.0.5",
@@ -2810,7 +2812,7 @@ name = "oauthlib"
version = "3.2.2"
requires_python = ">=3.6"
summary = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
{file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
@@ -2821,7 +2823,7 @@ name = "openai"
version = "1.21.2"
requires_python = ">=3.7.1"
summary = "The official Python library for the openai API"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"anyio<5,>=3.5.0",
"distro<2,>=1.7.0",
@@ -2838,39 +2840,42 @@ files = [
[[package]]
name = "orjson"
-version = "3.10.5"
+version = "3.10.6"
requires_python = ">=3.8"
summary = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy"
-groups = ["default"]
-files = [
- {file = "orjson-3.10.5-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:545d493c1f560d5ccfc134803ceb8955a14c3fcb47bbb4b2fee0232646d0b932"},
- {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4324929c2dd917598212bfd554757feca3e5e0fa60da08be11b4aa8b90013c1"},
- {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c13ca5e2ddded0ce6a927ea5a9f27cae77eee4c75547b4297252cb20c4d30e6"},
- {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b6c8e30adfa52c025f042a87f450a6b9ea29649d828e0fec4858ed5e6caecf63"},
- {file = "orjson-3.10.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:338fd4f071b242f26e9ca802f443edc588fa4ab60bfa81f38beaedf42eda226c"},
- {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6970ed7a3126cfed873c5d21ece1cd5d6f83ca6c9afb71bbae21a0b034588d96"},
- {file = "orjson-3.10.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:235dadefb793ad12f7fa11e98a480db1f7c6469ff9e3da5e73c7809c700d746b"},
- {file = "orjson-3.10.5-cp310-none-win32.whl", hash = "sha256:be79e2393679eda6a590638abda16d167754393f5d0850dcbca2d0c3735cebe2"},
- {file = "orjson-3.10.5-cp310-none-win_amd64.whl", hash = "sha256:c4a65310ccb5c9910c47b078ba78e2787cb3878cdded1702ac3d0da71ddc5228"},
- {file = "orjson-3.10.5-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:cdf7365063e80899ae3a697def1277c17a7df7ccfc979990a403dfe77bb54d40"},
- {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b68742c469745d0e6ca5724506858f75e2f1e5b59a4315861f9e2b1df77775a"},
- {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7d10cc1b594951522e35a3463da19e899abe6ca95f3c84c69e9e901e0bd93d38"},
- {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcbe82b35d1ac43b0d84072408330fd3295c2896973112d495e7234f7e3da2e1"},
- {file = "orjson-3.10.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c0eb7e0c75e1e486c7563fe231b40fdd658a035ae125c6ba651ca3b07936f5"},
- {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:53ed1c879b10de56f35daf06dbc4a0d9a5db98f6ee853c2dbd3ee9d13e6f302f"},
- {file = "orjson-3.10.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:099e81a5975237fda3100f918839af95f42f981447ba8f47adb7b6a3cdb078fa"},
- {file = "orjson-3.10.5-cp311-none-win32.whl", hash = "sha256:1146bf85ea37ac421594107195db8bc77104f74bc83e8ee21a2e58596bfb2f04"},
- {file = "orjson-3.10.5-cp311-none-win_amd64.whl", hash = "sha256:36a10f43c5f3a55c2f680efe07aa93ef4a342d2960dd2b1b7ea2dd764fe4a37c"},
- {file = "orjson-3.10.5-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:85c89131d7b3218db1b24c4abecea92fd6c7f9fab87441cfc342d3acc725d807"},
- {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66215277a230c456f9038d5e2d84778141643207f85336ef8d2a9da26bd7ca"},
- {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:51bbcdea96cdefa4a9b4461e690c75ad4e33796530d182bdd5c38980202c134a"},
- {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbead71dbe65f959b7bd8cf91e0e11d5338033eba34c114f69078d59827ee139"},
- {file = "orjson-3.10.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df58d206e78c40da118a8c14fc189207fffdcb1f21b3b4c9c0c18e839b5a214"},
- {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c4057c3b511bb8aef605616bd3f1f002a697c7e4da6adf095ca5b84c0fd43595"},
- {file = "orjson-3.10.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b39e006b00c57125ab974362e740c14a0c6a66ff695bff44615dcf4a70ce2b86"},
- {file = "orjson-3.10.5-cp39-none-win32.whl", hash = "sha256:eded5138cc565a9d618e111c6d5c2547bbdd951114eb822f7f6309e04db0fb47"},
- {file = "orjson-3.10.5-cp39-none-win_amd64.whl", hash = "sha256:cc28e90a7cae7fcba2493953cff61da5a52950e78dc2dacfe931a317ee3d8de7"},
- {file = "orjson-3.10.5.tar.gz", hash = "sha256:7a5baef8a4284405d96c90c7c62b755e9ef1ada84c2406c24a9ebec86b89f46d"},
+groups = ["default", "dev"]
+files = [
+ {file = "orjson-3.10.6-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:fb0ee33124db6eaa517d00890fc1a55c3bfe1cf78ba4a8899d71a06f2d6ff5c7"},
+ {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c1c4b53b24a4c06547ce43e5fee6ec4e0d8fe2d597f4647fc033fd205707365"},
+ {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eadc8fd310edb4bdbd333374f2c8fec6794bbbae99b592f448d8214a5e4050c0"},
+ {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:61272a5aec2b2661f4fa2b37c907ce9701e821b2c1285d5c3ab0207ebd358d38"},
+ {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57985ee7e91d6214c837936dc1608f40f330a6b88bb13f5a57ce5257807da143"},
+ {file = "orjson-3.10.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:633a3b31d9d7c9f02d49c4ab4d0a86065c4a6f6adc297d63d272e043472acab5"},
+ {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:1c680b269d33ec444afe2bdc647c9eb73166fa47a16d9a75ee56a374f4a45f43"},
+ {file = "orjson-3.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f759503a97a6ace19e55461395ab0d618b5a117e8d0fbb20e70cfd68a47327f2"},
+ {file = "orjson-3.10.6-cp310-none-win32.whl", hash = "sha256:95a0cce17f969fb5391762e5719575217bd10ac5a189d1979442ee54456393f3"},
+ {file = "orjson-3.10.6-cp310-none-win_amd64.whl", hash = "sha256:df25d9271270ba2133cc88ee83c318372bdc0f2cd6f32e7a450809a111efc45c"},
+ {file = "orjson-3.10.6-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b1ec490e10d2a77c345def52599311849fc063ae0e67cf4f84528073152bb2ba"},
+ {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55d43d3feb8f19d07e9f01e5b9be4f28801cf7c60d0fa0d279951b18fae1932b"},
+ {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3045267e98fe749408eee1593a142e02357c5c99be0802185ef2170086a863"},
+ {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c27bc6a28ae95923350ab382c57113abd38f3928af3c80be6f2ba7eb8d8db0b0"},
+ {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d27456491ca79532d11e507cadca37fb8c9324a3976294f68fb1eff2dc6ced5a"},
+ {file = "orjson-3.10.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05ac3d3916023745aa3b3b388e91b9166be1ca02b7c7e41045da6d12985685f0"},
+ {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1335d4ef59ab85cab66fe73fd7a4e881c298ee7f63ede918b7faa1b27cbe5212"},
+ {file = "orjson-3.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4bbc6d0af24c1575edc79994c20e1b29e6fb3c6a570371306db0993ecf144dc5"},
+ {file = "orjson-3.10.6-cp311-none-win32.whl", hash = "sha256:450e39ab1f7694465060a0550b3f6d328d20297bf2e06aa947b97c21e5241fbd"},
+ {file = "orjson-3.10.6-cp311-none-win_amd64.whl", hash = "sha256:227df19441372610b20e05bdb906e1742ec2ad7a66ac8350dcfd29a63014a83b"},
+ {file = "orjson-3.10.6-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:697a35a083c4f834807a6232b3e62c8b280f7a44ad0b759fd4dce748951e70db"},
+ {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd502f96bf5ea9a61cbc0b2b5900d0dd68aa0da197179042bdd2be67e51a1e4b"},
+ {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f215789fb1667cdc874c1b8af6a84dc939fd802bf293a8334fce185c79cd359b"},
+ {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2debd8ddce948a8c0938c8c93ade191d2f4ba4649a54302a7da905a81f00b56"},
+ {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5410111d7b6681d4b0d65e0f58a13be588d01b473822483f77f513c7f93bd3b2"},
+ {file = "orjson-3.10.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb1f28a137337fdc18384079fa5726810681055b32b92253fa15ae5656e1dddb"},
+ {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:bf2fbbce5fe7cd1aa177ea3eab2b8e6a6bc6e8592e4279ed3db2d62e57c0e1b2"},
+ {file = "orjson-3.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:79b9b9e33bd4c517445a62b90ca0cc279b0f1f3970655c3df9e608bc3f91741a"},
+ {file = "orjson-3.10.6-cp39-none-win32.whl", hash = "sha256:30b0a09a2014e621b1adf66a4f705f0809358350a757508ee80209b2d8dae219"},
+ {file = "orjson-3.10.6-cp39-none-win_amd64.whl", hash = "sha256:49e3bc615652617d463069f91b867a4458114c5b104e13b7ae6872e5f79d0844"},
+ {file = "orjson-3.10.6.tar.gz", hash = "sha256:e54b63d0a7c6c54a5f5f726bc93a2078111ef060fec4ecbf34c5db800ca3b3a7"},
]
[[package]]
@@ -2878,7 +2883,7 @@ name = "packaging"
version = "24.1"
requires_python = ">=3.8"
summary = "Core utilities for Python packages"
-groups = ["default", "deploy", "test"]
+groups = ["default", "deploy", "dev", "test"]
files = [
{file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
{file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
@@ -2889,7 +2894,7 @@ name = "pandas"
version = "2.1.4"
requires_python = ">=3.9"
summary = "Powerful data structures for data analysis, time series, and statistics"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"numpy<2,>=1.22.4; python_version < \"3.11\"",
"numpy<2,>=1.23.2; python_version == \"3.11\"",
@@ -2924,7 +2929,7 @@ name = "pgvector"
version = "0.2.5"
requires_python = ">=3.8"
summary = "pgvector support for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"numpy",
]
@@ -2934,59 +2939,59 @@ files = [
[[package]]
name = "pillow"
-version = "10.3.0"
+version = "10.4.0"
requires_python = ">=3.8"
summary = "Python Imaging Library (Fork)"
-groups = ["default"]
-files = [
- {file = "pillow-10.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:90b9e29824800e90c84e4022dd5cc16eb2d9605ee13f05d47641eb183cd73d45"},
- {file = "pillow-10.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a2c405445c79c3f5a124573a051062300936b0281fee57637e706453e452746c"},
- {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78618cdbccaa74d3f88d0ad6cb8ac3007f1a6fa5c6f19af64b55ca170bfa1edf"},
- {file = "pillow-10.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261ddb7ca91fcf71757979534fb4c128448b5b4c55cb6152d280312062f69599"},
- {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ce49c67f4ea0609933d01c0731b34b8695a7a748d6c8d186f95e7d085d2fe475"},
- {file = "pillow-10.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b14f16f94cbc61215115b9b1236f9c18403c15dd3c52cf629072afa9d54c1cbf"},
- {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d33891be6df59d93df4d846640f0e46f1a807339f09e79a8040bc887bdcd7ed3"},
- {file = "pillow-10.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b50811d664d392f02f7761621303eba9d1b056fb1868c8cdf4231279645c25f5"},
- {file = "pillow-10.3.0-cp310-cp310-win32.whl", hash = "sha256:ca2870d5d10d8726a27396d3ca4cf7976cec0f3cb706debe88e3a5bd4610f7d2"},
- {file = "pillow-10.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f0d0591a0aeaefdaf9a5e545e7485f89910c977087e7de2b6c388aec32011e9f"},
- {file = "pillow-10.3.0-cp310-cp310-win_arm64.whl", hash = "sha256:ccce24b7ad89adb5a1e34a6ba96ac2530046763912806ad4c247356a8f33a67b"},
- {file = "pillow-10.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:5f77cf66e96ae734717d341c145c5949c63180842a545c47a0ce7ae52ca83795"},
- {file = "pillow-10.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4b878386c4bf293578b48fc570b84ecfe477d3b77ba39a6e87150af77f40c57"},
- {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdcbb4068117dfd9ce0138d068ac512843c52295ed996ae6dd1faf537b6dbc27"},
- {file = "pillow-10.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9797a6c8fe16f25749b371c02e2ade0efb51155e767a971c61734b1bf6293994"},
- {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:9e91179a242bbc99be65e139e30690e081fe6cb91a8e77faf4c409653de39451"},
- {file = "pillow-10.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:1b87bd9d81d179bd8ab871603bd80d8645729939f90b71e62914e816a76fc6bd"},
- {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:81d09caa7b27ef4e61cb7d8fbf1714f5aec1c6b6c5270ee53504981e6e9121ad"},
- {file = "pillow-10.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:048ad577748b9fa4a99a0548c64f2cb8d672d5bf2e643a739ac8faff1164238c"},
- {file = "pillow-10.3.0-cp311-cp311-win32.whl", hash = "sha256:7161ec49ef0800947dc5570f86568a7bb36fa97dd09e9827dc02b718c5643f09"},
- {file = "pillow-10.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:8eb0908e954d093b02a543dc963984d6e99ad2b5e36503d8a0aaf040505f747d"},
- {file = "pillow-10.3.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e6f7d1c414191c1199f8996d3f2282b9ebea0945693fb67392c75a3a320941f"},
- {file = "pillow-10.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2ed854e716a89b1afcedea551cd85f2eb2a807613752ab997b9974aaa0d56936"},
- {file = "pillow-10.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dc1a390a82755a8c26c9964d457d4c9cbec5405896cba94cf51f36ea0d855002"},
- {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4203efca580f0dd6f882ca211f923168548f7ba334c189e9eab1178ab840bf60"},
- {file = "pillow-10.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3102045a10945173d38336f6e71a8dc71bcaeed55c3123ad4af82c52807b9375"},
- {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:6fb1b30043271ec92dc65f6d9f0b7a830c210b8a96423074b15c7bc999975f57"},
- {file = "pillow-10.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:1dfc94946bc60ea375cc39cff0b8da6c7e5f8fcdc1d946beb8da5c216156ddd8"},
- {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b09b86b27a064c9624d0a6c54da01c1beaf5b6cadfa609cf63789b1d08a797b9"},
- {file = "pillow-10.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d3b2348a78bc939b4fed6552abfd2e7988e0f81443ef3911a4b8498ca084f6eb"},
- {file = "pillow-10.3.0-cp39-cp39-win32.whl", hash = "sha256:45ebc7b45406febf07fef35d856f0293a92e7417ae7933207e90bf9090b70572"},
- {file = "pillow-10.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:0ba26351b137ca4e0db0342d5d00d2e355eb29372c05afd544ebf47c0956ffeb"},
- {file = "pillow-10.3.0-cp39-cp39-win_arm64.whl", hash = "sha256:50fd3f6b26e3441ae07b7c979309638b72abc1a25da31a81a7fbd9495713ef4f"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:6b02471b72526ab8a18c39cb7967b72d194ec53c1fd0a70b050565a0f366d355"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8ab74c06ffdab957d7670c2a5a6e1a70181cd10b727cd788c4dd9005b6a8acd9"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:048eeade4c33fdf7e08da40ef402e748df113fd0b4584e32c4af74fe78baaeb2"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e2ec1e921fd07c7cda7962bad283acc2f2a9ccc1b971ee4b216b75fad6f0463"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c8e73e99da7db1b4cad7f8d682cf6abad7844da39834c288fbfa394a47bbced"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:16563993329b79513f59142a6b02055e10514c1a8e86dca8b48a893e33cf91e3"},
- {file = "pillow-10.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dd78700f5788ae180b5ee8902c6aea5a5726bac7c364b202b4b3e3ba2d293170"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:aff76a55a8aa8364d25400a210a65ff59d0168e0b4285ba6bf2bd83cf675ba32"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b7bc2176354defba3edc2b9a777744462da2f8e921fbaf61e52acb95bafa9828"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:793b4e24db2e8742ca6423d3fde8396db336698c55cd34b660663ee9e45ed37f"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93480005693d247f8346bc8ee28c72a2191bdf1f6b5db469c096c0c867ac015"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c83341b89884e2b2e55886e8fbbf37c3fa5efd6c8907124aeb72f285ae5696e5"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1a1d1915db1a4fdb2754b9de292642a39a7fb28f1736699527bb649484fb966a"},
- {file = "pillow-10.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a0eaa93d054751ee9964afa21c06247779b90440ca41d184aeb5d410f20ff591"},
- {file = "pillow-10.3.0.tar.gz", hash = "sha256:9d2455fbf44c914840c793e89aa82d0e1763a14253a000743719ae5946814b2d"},
+groups = ["default", "dev"]
+files = [
+ {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"},
+ {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"},
+ {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"},
+ {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"},
+ {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"},
+ {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"},
+ {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"},
+ {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"},
+ {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"},
+ {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"},
+ {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"},
+ {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"},
+ {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"},
+ {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"},
+ {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"},
+ {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"},
+ {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"},
+ {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"},
+ {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"},
+ {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"},
+ {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"},
+ {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"},
+ {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"},
+ {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"},
+ {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"},
+ {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"},
+ {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"},
+ {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"},
+ {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"},
+ {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"},
+ {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"},
+ {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"},
+ {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"},
+ {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"},
+ {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"},
+ {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"},
]
[[package]]
@@ -2994,7 +2999,7 @@ name = "pinecone-client"
version = "3.2.2"
requires_python = "<4.0,>=3.8"
summary = "Pinecone client and SDK"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"certifi>=2019.11.17",
"tqdm>=4.64.1",
@@ -3011,7 +3016,7 @@ name = "platformdirs"
version = "3.11.0"
requires_python = ">=3.7"
summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "platformdirs-3.11.0-py3-none-any.whl", hash = "sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"},
{file = "platformdirs-3.11.0.tar.gz", hash = "sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3"},
@@ -3032,7 +3037,7 @@ files = [
name = "ply"
version = "3.11"
summary = "Python Lex & Yacc"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "ply-3.11-py2.py3-none-any.whl", hash = "sha256:096f9b8350b65ebd2fd1346b12452efe5b9607f7482813ffca50c22722a807ce"},
{file = "ply-3.11.tar.gz", hash = "sha256:00c7c1aaa88358b9c765b6d3000c6eec0ba42abca5351b095321aef446081da3"},
@@ -3043,7 +3048,7 @@ name = "portalocker"
version = "2.10.0"
requires_python = ">=3.8"
summary = "Wraps the portalocker recipe for easy usage"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pywin32>=226; platform_system == \"Windows\"",
]
@@ -3082,7 +3087,7 @@ name = "proto-plus"
version = "1.24.0"
requires_python = ">=3.7"
summary = "Beautiful, Pythonic protocol buffers."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"protobuf<6.0.0dev,>=3.19.0",
]
@@ -3096,7 +3101,7 @@ name = "protobuf"
version = "4.25.3"
requires_python = ">=3.8"
summary = ""
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "protobuf-4.25.3-cp310-abi3-win32.whl", hash = "sha256:d4198877797a83cbfe9bffa3803602bbe1625dc30d8a097365dbc762e5790faa"},
{file = "protobuf-4.25.3-cp310-abi3-win_amd64.whl", hash = "sha256:209ba4cc916bab46f64e56b85b090607a676f66b473e6b762e6f1d9d591eb2e8"},
@@ -3114,7 +3119,7 @@ name = "psycopg2-binary"
version = "2.9.9"
requires_python = ">=3.7"
summary = "psycopg2 - Python-PostgreSQL Database Adapter"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"},
{file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"},
@@ -3160,7 +3165,7 @@ name = "pyarrow"
version = "15.0.2"
requires_python = ">=3.8"
summary = "Python library for Apache Arrow"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"numpy<2,>=1.16.6",
]
@@ -3194,7 +3199,7 @@ name = "pyasn1"
version = "0.6.0"
requires_python = ">=3.8"
summary = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
{file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
@@ -3205,7 +3210,7 @@ name = "pyasn1-modules"
version = "0.4.0"
requires_python = ">=3.8"
summary = "A collection of ASN.1-based protocols modules"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pyasn1<0.7.0,>=0.4.6",
]
@@ -3219,7 +3224,7 @@ name = "pycparser"
version = "2.22"
requires_python = ">=3.8"
summary = "C parser in Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
@@ -3227,84 +3232,83 @@ files = [
[[package]]
name = "pydantic"
-version = "2.7.4"
+version = "2.8.2"
requires_python = ">=3.8"
summary = "Data validation using Python type hints"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"annotated-types>=0.4.0",
- "pydantic-core==2.18.4",
- "typing-extensions>=4.6.1",
+ "pydantic-core==2.20.1",
+ "typing-extensions>=4.6.1; python_version < \"3.13\"",
]
files = [
- {file = "pydantic-2.7.4-py3-none-any.whl", hash = "sha256:ee8538d41ccb9c0a9ad3e0e5f07bf15ed8015b481ced539a1759d8cc89ae90d0"},
- {file = "pydantic-2.7.4.tar.gz", hash = "sha256:0c84efd9548d545f63ac0060c1e4d39bb9b14db8b3c0652338aecc07b5adec52"},
+ {file = "pydantic-2.8.2-py3-none-any.whl", hash = "sha256:73ee9fddd406dc318b885c7a2eab8a6472b68b8fb5ba8150949fc3db939f23c8"},
+ {file = "pydantic-2.8.2.tar.gz", hash = "sha256:6f62c13d067b0755ad1c21a34bdd06c0c12625a22b0fc09c6b149816604f7c2a"},
]
[[package]]
name = "pydantic-core"
-version = "2.18.4"
+version = "2.20.1"
requires_python = ">=3.8"
summary = "Core functionality for Pydantic validation and serialization"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"typing-extensions!=4.7.0,>=4.6.0",
]
files = [
- {file = "pydantic_core-2.18.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:f76d0ad001edd426b92233d45c746fd08f467d56100fd8f30e9ace4b005266e4"},
- {file = "pydantic_core-2.18.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:59ff3e89f4eaf14050c8022011862df275b552caef8082e37b542b066ce1ff26"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a55b5b16c839df1070bc113c1f7f94a0af4433fcfa1b41799ce7606e5c79ce0a"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4d0dcc59664fcb8974b356fe0a18a672d6d7cf9f54746c05f43275fc48636851"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8951eee36c57cd128f779e641e21eb40bc5073eb28b2d23f33eb0ef14ffb3f5d"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4701b19f7e3a06ea655513f7938de6f108123bf7c86bbebb1196eb9bd35cf724"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00a3f196329e08e43d99b79b286d60ce46bed10f2280d25a1718399457e06be"},
- {file = "pydantic_core-2.18.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:97736815b9cc893b2b7f663628e63f436018b75f44854c8027040e05230eeddb"},
- {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6891a2ae0e8692679c07728819b6e2b822fb30ca7445f67bbf6509b25a96332c"},
- {file = "pydantic_core-2.18.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bc4ff9805858bd54d1a20efff925ccd89c9d2e7cf4986144b30802bf78091c3e"},
- {file = "pydantic_core-2.18.4-cp310-none-win32.whl", hash = "sha256:1b4de2e51bbcb61fdebd0ab86ef28062704f62c82bbf4addc4e37fa4b00b7cbc"},
- {file = "pydantic_core-2.18.4-cp310-none-win_amd64.whl", hash = "sha256:6a750aec7bf431517a9fd78cb93c97b9b0c496090fee84a47a0d23668976b4b0"},
- {file = "pydantic_core-2.18.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:942ba11e7dfb66dc70f9ae66b33452f51ac7bb90676da39a7345e99ffb55402d"},
- {file = "pydantic_core-2.18.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2ebef0e0b4454320274f5e83a41844c63438fdc874ea40a8b5b4ecb7693f1c4"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a642295cd0c8df1b86fc3dced1d067874c353a188dc8e0f744626d49e9aa51c4"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f09baa656c904807e832cf9cce799c6460c450c4ad80803517032da0cd062e2"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98906207f29bc2c459ff64fa007afd10a8c8ac080f7e4d5beff4c97086a3dabd"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:19894b95aacfa98e7cb093cd7881a0c76f55731efad31073db4521e2b6ff5b7d"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fbbdc827fe5e42e4d196c746b890b3d72876bdbf160b0eafe9f0334525119c8"},
- {file = "pydantic_core-2.18.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f85d05aa0918283cf29a30b547b4df2fbb56b45b135f9e35b6807cb28bc47951"},
- {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e85637bc8fe81ddb73fda9e56bab24560bdddfa98aa64f87aaa4e4b6730c23d2"},
- {file = "pydantic_core-2.18.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2f5966897e5461f818e136b8451d0551a2e77259eb0f73a837027b47dc95dab9"},
- {file = "pydantic_core-2.18.4-cp311-none-win32.whl", hash = "sha256:44c7486a4228413c317952e9d89598bcdfb06399735e49e0f8df643e1ccd0558"},
- {file = "pydantic_core-2.18.4-cp311-none-win_amd64.whl", hash = "sha256:8a7164fe2005d03c64fd3b85649891cd4953a8de53107940bf272500ba8a788b"},
- {file = "pydantic_core-2.18.4-cp311-none-win_arm64.whl", hash = "sha256:4e99bc050fe65c450344421017f98298a97cefc18c53bb2f7b3531eb39bc7805"},
- {file = "pydantic_core-2.18.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:44a688331d4a4e2129140a8118479443bd6f1905231138971372fcde37e43528"},
- {file = "pydantic_core-2.18.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a2fdd81edd64342c85ac7cf2753ccae0b79bf2dfa063785503cb85a7d3593223"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86110d7e1907ab36691f80b33eb2da87d780f4739ae773e5fc83fb272f88825f"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:46387e38bd641b3ee5ce247563b60c5ca098da9c56c75c157a05eaa0933ed154"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:123c3cec203e3f5ac7b000bd82235f1a3eced8665b63d18be751f115588fea30"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dc1803ac5c32ec324c5261c7209e8f8ce88e83254c4e1aebdc8b0a39f9ddb443"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53db086f9f6ab2b4061958d9c276d1dbe3690e8dd727d6abf2321d6cce37fa94"},
- {file = "pydantic_core-2.18.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:abc267fa9837245cc28ea6929f19fa335f3dc330a35d2e45509b6566dc18be23"},
- {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a0d829524aaefdebccb869eed855e2d04c21d2d7479b6cada7ace5448416597b"},
- {file = "pydantic_core-2.18.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:509daade3b8649f80d4e5ff21aa5673e4ebe58590b25fe42fac5f0f52c6f034a"},
- {file = "pydantic_core-2.18.4-cp39-none-win32.whl", hash = "sha256:ca26a1e73c48cfc54c4a76ff78df3727b9d9f4ccc8dbee4ae3f73306a591676d"},
- {file = "pydantic_core-2.18.4-cp39-none-win_amd64.whl", hash = "sha256:c67598100338d5d985db1b3d21f3619ef392e185e71b8d52bceacc4a7771ea7e"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:574d92eac874f7f4db0ca653514d823a0d22e2354359d0759e3f6a406db5d55d"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1f4d26ceb5eb9eed4af91bebeae4b06c3fb28966ca3a8fb765208cf6b51102ab"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77450e6d20016ec41f43ca4a6c63e9fdde03f0ae3fe90e7c27bdbeaece8b1ed4"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d323a01da91851a4f17bf592faf46149c9169d68430b3146dcba2bb5e5719abc"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43d447dd2ae072a0065389092a231283f62d960030ecd27565672bd40746c507"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:578e24f761f3b425834f297b9935e1ce2e30f51400964ce4801002435a1b41ef"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:81b5efb2f126454586d0f40c4d834010979cb80785173d1586df845a632e4e6d"},
- {file = "pydantic_core-2.18.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ab86ce7c8f9bea87b9d12c7f0af71102acbf5ecbc66c17796cff45dae54ef9a5"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:90afc12421df2b1b4dcc975f814e21bc1754640d502a2fbcc6d41e77af5ec312"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:51991a89639a912c17bef4b45c87bd83593aee0437d8102556af4885811d59f5"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:293afe532740370aba8c060882f7d26cfd00c94cae32fd2e212a3a6e3b7bc15e"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48ece5bde2e768197a2d0f6e925f9d7e3e826f0ad2271120f8144a9db18d5c8"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:eae237477a873ab46e8dd748e515c72c0c804fb380fbe6c85533c7de51f23a8f"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:834b5230b5dfc0c1ec37b2fda433b271cbbc0e507560b5d1588e2cc1148cf1ce"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e858ac0a25074ba4bce653f9b5d0a85b7456eaddadc0ce82d3878c22489fa4ee"},
- {file = "pydantic_core-2.18.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2fd41f6eff4c20778d717af1cc50eca52f5afe7805ee530a4fbd0bae284f16e9"},
- {file = "pydantic_core-2.18.4.tar.gz", hash = "sha256:ec3beeada09ff865c344ff3bc2f427f5e6c26401cc6113d77e372c3fdac73864"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3acae97ffd19bf091c72df4d726d552c473f3576409b2a7ca36b2f535ffff4a3"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:41f4c96227a67a013e7de5ff8f20fb496ce573893b7f4f2707d065907bffdbd6"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f239eb799a2081495ea659d8d4a43a8f42cd1fe9ff2e7e436295c38a10c286a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53e431da3fc53360db73eedf6f7124d1076e1b4ee4276b36fb25514544ceb4a3"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1f62b2413c3a0e846c3b838b2ecd6c7a19ec6793b2a522745b0869e37ab5bc1"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d41e6daee2813ecceea8eda38062d69e280b39df793f5a942fa515b8ed67953"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d482efec8b7dc6bfaedc0f166b2ce349df0011f5d2f1f25537ced4cfc34fd98"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e93e1a4b4b33daed65d781a57a522ff153dcf748dee70b40c7258c5861e1768a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7c4ea22b6739b162c9ecaaa41d718dfad48a244909fe7ef4b54c0b530effc5a"},
+ {file = "pydantic_core-2.20.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4f2790949cf385d985a31984907fecb3896999329103df4e4983a4a41e13e840"},
+ {file = "pydantic_core-2.20.1-cp310-none-win32.whl", hash = "sha256:5e999ba8dd90e93d57410c5e67ebb67ffcaadcea0ad973240fdfd3a135506250"},
+ {file = "pydantic_core-2.20.1-cp310-none-win_amd64.whl", hash = "sha256:512ecfbefef6dac7bc5eaaf46177b2de58cdf7acac8793fe033b24ece0b9566c"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d2a8fa9d6d6f891f3deec72f5cc668e6f66b188ab14bb1ab52422fe8e644f312"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:175873691124f3d0da55aeea1d90660a6ea7a3cfea137c38afa0a5ffabe37b88"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37eee5b638f0e0dcd18d21f59b679686bbd18917b87db0193ae36f9c23c355fc"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:25e9185e2d06c16ee438ed39bf62935ec436474a6ac4f9358524220f1b236e43"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:150906b40ff188a3260cbee25380e7494ee85048584998c1e66df0c7a11c17a6"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ad4aeb3e9a97286573c03df758fc7627aecdd02f1da04516a86dc159bf70121"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3f3ed29cd9f978c604708511a1f9c2fdcb6c38b9aae36a51905b8811ee5cbf1"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0dae11d8f5ded51699c74d9548dcc5938e0804cc8298ec0aa0da95c21fff57b"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:faa6b09ee09433b87992fb5a2859efd1c264ddc37280d2dd5db502126d0e7f27"},
+ {file = "pydantic_core-2.20.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9dc1b507c12eb0481d071f3c1808f0529ad41dc415d0ca11f7ebfc666e66a18b"},
+ {file = "pydantic_core-2.20.1-cp311-none-win32.whl", hash = "sha256:fa2fddcb7107e0d1808086ca306dcade7df60a13a6c347a7acf1ec139aa6789a"},
+ {file = "pydantic_core-2.20.1-cp311-none-win_amd64.whl", hash = "sha256:40a783fb7ee353c50bd3853e626f15677ea527ae556429453685ae32280c19c2"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b03f7941783b4c4a26051846dea594628b38f6940a2fdc0df00b221aed39314c"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1eedfeb6089ed3fad42e81a67755846ad4dcc14d73698c120a82e4ccf0f1f9f6"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:635fee4e041ab9c479e31edda27fcf966ea9614fff1317e280d99eb3e5ab6fe2"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:77bf3ac639c1ff567ae3b47f8d4cc3dc20f9966a2a6dd2311dcc055d3d04fb8a"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7ed1b0132f24beeec5a78b67d9388656d03e6a7c837394f99257e2d55b461611"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6514f963b023aeee506678a1cf821fe31159b925c4b76fe2afa94cc70b3222b"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d4204d8ca33146e761c79f83cc861df20e7ae9f6487ca290a97702daf56006"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2d036c7187b9422ae5b262badb87a20a49eb6c5238b2004e96d4da1231badef1"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ebfef07dbe1d93efb94b4700f2d278494e9162565a54f124c404a5656d7ff09"},
+ {file = "pydantic_core-2.20.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6b9d9bb600328a1ce523ab4f454859e9d439150abb0906c5a1983c146580ebab"},
+ {file = "pydantic_core-2.20.1-cp39-none-win32.whl", hash = "sha256:784c1214cb6dd1e3b15dd8b91b9a53852aed16671cc3fbe4786f4f1db07089e2"},
+ {file = "pydantic_core-2.20.1-cp39-none-win_amd64.whl", hash = "sha256:d2fe69c5434391727efa54b47a1e7986bb0186e72a41b203df8f5b0a19a4f669"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a45f84b09ac9c3d35dfcf6a27fd0634d30d183205230a0ebe8373a0e8cfa0906"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d02a72df14dfdbaf228424573a07af10637bd490f0901cee872c4f434a735b94"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2b27e6af28f07e2f195552b37d7d66b150adbaa39a6d327766ffd695799780f"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084659fac3c83fd674596612aeff6041a18402f1e1bc19ca39e417d554468482"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:242b8feb3c493ab78be289c034a1f659e8826e2233786e36f2893a950a719bb6"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:38cf1c40a921d05c5edc61a785c0ddb4bed67827069f535d794ce6bcded919fc"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e0bbdd76ce9aa5d4209d65f2b27fc6e5ef1312ae6c5333c26db3f5ade53a1e99"},
+ {file = "pydantic_core-2.20.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:254ec27fdb5b1ee60684f91683be95e5133c994cc54e86a0b0963afa25c8f8a6"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:407653af5617f0757261ae249d3fba09504d7a71ab36ac057c938572d1bc9331"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:c693e916709c2465b02ca0ad7b387c4f8423d1db7b4649c551f27a529181c5ad"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b5ff4911aea936a47d9376fd3ab17e970cc543d1b68921886e7f64bd28308d1"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177f55a886d74f1808763976ac4efd29b7ed15c69f4d838bbd74d9d09cf6fa86"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:964faa8a861d2664f0c7ab0c181af0bea66098b1919439815ca8803ef136fc4e"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4dd484681c15e6b9a977c785a345d3e378d72678fd5f1f3c0509608da24f2ac0"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f6d6cff3538391e8486a431569b77921adfcdef14eb18fbf19b7c0a5294d4e6a"},
+ {file = "pydantic_core-2.20.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6d511cc297ff0883bc3708b465ff82d7560193169a8b93260f74ecb0a5e08a7"},
+ {file = "pydantic_core-2.20.1.tar.gz", hash = "sha256:26ca695eeee5f9f1aeeb211ffc12f10bcb6f71e2989988fda61dabd65db878d4"},
]
[[package]]
@@ -3312,7 +3316,7 @@ name = "pydrive2"
version = "1.15.4"
requires_python = ">=3.7"
summary = "Google Drive API made easy. Maintained fork of PyDrive."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyYAML>=3.0",
"google-api-python-client>=1.12.5",
@@ -3330,7 +3334,7 @@ version = "1.15.4"
extras = ["fsspec"]
requires_python = ">=3.7"
summary = "Google Drive API made easy. Maintained fork of PyDrive."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyDrive2==1.15.4",
"appdirs>=1.4.3",
@@ -3348,7 +3352,7 @@ name = "pyjwt"
version = "2.8.0"
requires_python = ">=3.7"
summary = "JSON Web Token implementation in Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"},
{file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"},
@@ -3360,7 +3364,7 @@ version = "2.8.0"
extras = ["crypto"]
requires_python = ">=3.7"
summary = "JSON Web Token implementation in Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyJWT==2.8.0",
"cryptography>=3.4.0",
@@ -3375,7 +3379,7 @@ name = "pymilvus"
version = "2.4.4"
requires_python = ">=3.8"
summary = "Python Sdk for Milvus"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"environs<=9.5.0",
"grpcio<=1.63.0,>=1.49.1",
@@ -3394,7 +3398,7 @@ files = [
name = "pymssql"
version = "2.2.8"
summary = "DB-API interface to Microsoft SQL Server for Python. (new Cython-based version)"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "pymssql-2.2.8-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bfd7b8edef78097ccd3f52ac3f3a5c3cf0019f8a280f306cacbbb165caaf63"},
{file = "pymssql-2.2.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:049f2e3de919e8e02504780a21ebbf235e21ca8ed5c7538c5b6e705aa6c43d8c"},
@@ -3419,7 +3423,7 @@ name = "pymysql"
version = "1.1.0"
requires_python = ">=3.7"
summary = "Pure Python MySQL Driver"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "PyMySQL-1.1.0-py3-none-any.whl", hash = "sha256:8969ec6d763c856f7073c4c64662882675702efcb114b4bcbb955aea3a069fa7"},
{file = "PyMySQL-1.1.0.tar.gz", hash = "sha256:4f13a7df8bf36a51e81dd9f3605fede45a4878fe02f9236349fd82a3f0612f96"},
@@ -3430,7 +3434,7 @@ name = "pyopenssl"
version = "23.3.0"
requires_python = ">=3.7"
summary = "Python wrapper module around the OpenSSL library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"cryptography<42,>=41.0.5",
]
@@ -3444,7 +3448,7 @@ name = "pyparsing"
version = "3.1.2"
requires_python = ">=3.6.8"
summary = "pyparsing module - Classes and methods to define and execute parsing grammars"
-groups = ["default"]
+groups = ["default", "dev"]
marker = "python_version > \"3.0\""
files = [
{file = "pyparsing-3.1.2-py3-none-any.whl", hash = "sha256:f9db75911801ed778fe61bb643079ff86601aca99fcae6345aa67292038fb742"},
@@ -3456,7 +3460,7 @@ name = "pypdf"
version = "4.2.0"
requires_python = ">=3.6"
summary = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"typing-extensions>=4.0; python_version < \"3.11\"",
]
@@ -3500,14 +3504,15 @@ files = [
[[package]]
name = "python-crontab"
-version = "3.1.0"
+version = "3.2.0"
summary = "Python Crontab API"
groups = ["default"]
dependencies = [
"python-dateutil",
]
files = [
- {file = "python-crontab-3.1.0.tar.gz", hash = "sha256:f4ea1605d24533b67fa7a634ef26cb59a5f2e7954f6e677d2d7a2229959a2fc8"},
+ {file = "python_crontab-3.2.0-py3-none-any.whl", hash = "sha256:82cb9b6a312d41ff66fd3caf3eed7115c28c195bfb50711bc2b4b9592feb9fe5"},
+ {file = "python_crontab-3.2.0.tar.gz", hash = "sha256:40067d1dd39ade3460b2ad8557c7651514cd3851deffff61c5c60e1227c5c36b"},
]
[[package]]
@@ -3515,7 +3520,7 @@ name = "python-dateutil"
version = "2.9.0.post0"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
summary = "Extensions to the standard Python datetime module"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"six>=1.5",
]
@@ -3529,7 +3534,7 @@ name = "python-dotenv"
version = "1.0.0"
requires_python = ">=3.8"
summary = "Read key-value pairs from a .env file and set them as environment variables"
-groups = ["default", "test"]
+groups = ["default", "dev", "test"]
files = [
{file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"},
{file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"},
@@ -3592,7 +3597,7 @@ files = [
name = "pytz"
version = "2024.1"
summary = "World timezone definitions, modern and historical"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
{file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
@@ -3602,7 +3607,7 @@ files = [
name = "pywin32"
version = "306"
summary = "Python for Window Extensions"
-groups = ["default"]
+groups = ["default", "dev"]
marker = "sys_platform == \"win32\" or platform_system == \"Windows\""
files = [
{file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"},
@@ -3619,7 +3624,7 @@ name = "pyyaml"
version = "6.0.1"
requires_python = ">=3.6"
summary = "YAML parser and emitter for Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
{file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
@@ -3650,10 +3655,10 @@ files = [
[[package]]
name = "qdrant-client"
-version = "1.9.2"
+version = "1.10.1"
requires_python = ">=3.8"
summary = "Client library for the Qdrant vector search engine"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"grpcio-tools>=1.41.0",
"grpcio>=1.41.0",
@@ -3664,8 +3669,8 @@ dependencies = [
"urllib3<3,>=1.26.14",
]
files = [
- {file = "qdrant_client-1.9.2-py3-none-any.whl", hash = "sha256:0f49a4a6a47f62bc2c9afc69f9e1fb7790e4861ffe083d2de78dda30eb477d0e"},
- {file = "qdrant_client-1.9.2.tar.gz", hash = "sha256:35ba55a8484a4b817f985749d11fe6b5d2acf617fec07dd8bc01f3e9b4e9fa79"},
+ {file = "qdrant_client-1.10.1-py3-none-any.whl", hash = "sha256:b9fb8fe50dd168d92b2998be7c6135d5a229b3a3258ad158cc69c8adf9ff1810"},
+ {file = "qdrant_client-1.10.1.tar.gz", hash = "sha256:2284c8c5bb1defb0d9dbacb07d16f344972f395f4f2ed062318476a7951fd84c"},
]
[[package]]
@@ -3673,7 +3678,7 @@ name = "redis"
version = "5.0.7"
requires_python = ">=3.7"
summary = "Python client for Redis database and key-value store"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"async-timeout>=4.0.3; python_full_version < \"3.11.3\"",
]
@@ -3687,7 +3692,7 @@ name = "referencing"
version = "0.35.1"
requires_python = ">=3.8"
summary = "JSON Referencing + Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"attrs>=22.2.0",
"rpds-py>=0.7.0",
@@ -3702,7 +3707,7 @@ name = "regex"
version = "2024.5.15"
requires_python = ">=3.8"
summary = "Alternative regular expression module, to replace re."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "regex-2024.5.15-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a81e3cfbae20378d75185171587cbf756015ccb14840702944f014e0d93ea09f"},
{file = "regex-2024.5.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7b59138b219ffa8979013be7bc85bb60c6f7b7575df3d56dc1e403a438c7a3f6"},
@@ -3759,7 +3764,7 @@ name = "requests"
version = "2.31.0"
requires_python = ">=3.7"
summary = "Python HTTP for Humans."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"certifi>=2017.4.17",
"charset-normalizer<4,>=2",
@@ -3776,7 +3781,7 @@ name = "requests-oauthlib"
version = "2.0.0"
requires_python = ">=3.4"
summary = "OAuthlib authentication support for Requests."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"oauthlib>=3.0.0",
"requests>=2.0.0",
@@ -3791,7 +3796,7 @@ name = "requests-toolbelt"
version = "1.0.0"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
summary = "A utility belt for advanced users of python-requests"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"requests<3.0.0,>=2.0.1",
]
@@ -3802,84 +3807,84 @@ files = [
[[package]]
name = "rpds-py"
-version = "0.18.1"
+version = "0.19.0"
requires_python = ">=3.8"
summary = "Python bindings to Rust's persistent data structures (rpds)"
-groups = ["default"]
-files = [
- {file = "rpds_py-0.18.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d31dea506d718693b6b2cffc0648a8929bdc51c70a311b2770f09611caa10d53"},
- {file = "rpds_py-0.18.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:732672fbc449bab754e0b15356c077cc31566df874964d4801ab14f71951ea80"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a98a1f0552b5f227a3d6422dbd61bc6f30db170939bd87ed14f3c339aa6c7c9"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7f1944ce16401aad1e3f7d312247b3d5de7981f634dc9dfe90da72b87d37887d"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38e14fb4e370885c4ecd734f093a2225ee52dc384b86fa55fe3f74638b2cfb09"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08d74b184f9ab6289b87b19fe6a6d1a97fbfea84b8a3e745e87a5de3029bf944"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d70129cef4a8d979caa37e7fe957202e7eee8ea02c5e16455bc9808a59c6b2f0"},
- {file = "rpds_py-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0bb20e3a11bd04461324a6a798af34d503f8d6f1aa3d2aa8901ceaf039176d"},
- {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81c5196a790032e0fc2464c0b4ab95f8610f96f1f2fa3d4deacce6a79852da60"},
- {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f3027be483868c99b4985fda802a57a67fdf30c5d9a50338d9db646d590198da"},
- {file = "rpds_py-0.18.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:d44607f98caa2961bab4fa3c4309724b185b464cdc3ba6f3d7340bac3ec97cc1"},
- {file = "rpds_py-0.18.1-cp310-none-win32.whl", hash = "sha256:c273e795e7a0f1fddd46e1e3cb8be15634c29ae8ff31c196debb620e1edb9333"},
- {file = "rpds_py-0.18.1-cp310-none-win_amd64.whl", hash = "sha256:8352f48d511de5f973e4f2f9412736d7dea76c69faa6d36bcf885b50c758ab9a"},
- {file = "rpds_py-0.18.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6b5ff7e1d63a8281654b5e2896d7f08799378e594f09cf3674e832ecaf396ce8"},
- {file = "rpds_py-0.18.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8927638a4d4137a289e41d0fd631551e89fa346d6dbcfc31ad627557d03ceb6d"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:154bf5c93d79558b44e5b50cc354aa0459e518e83677791e6adb0b039b7aa6a7"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:07f2139741e5deb2c5154a7b9629bc5aa48c766b643c1a6750d16f865a82c5fc"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8c7672e9fba7425f79019db9945b16e308ed8bc89348c23d955c8c0540da0a07"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:489bdfe1abd0406eba6b3bb4fdc87c7fa40f1031de073d0cfb744634cc8fa261"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c20f05e8e3d4fc76875fc9cb8cf24b90a63f5a1b4c5b9273f0e8225e169b100"},
- {file = "rpds_py-0.18.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:967342e045564cef76dfcf1edb700b1e20838d83b1aa02ab313e6a497cf923b8"},
- {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2cc7c1a47f3a63282ab0f422d90ddac4aa3034e39fc66a559ab93041e6505da7"},
- {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f7afbfee1157e0f9376c00bb232e80a60e59ed716e3211a80cb8506550671e6e"},
- {file = "rpds_py-0.18.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e6934d70dc50f9f8ea47081ceafdec09245fd9f6032669c3b45705dea096b88"},
- {file = "rpds_py-0.18.1-cp311-none-win32.whl", hash = "sha256:c69882964516dc143083d3795cb508e806b09fc3800fd0d4cddc1df6c36e76bb"},
- {file = "rpds_py-0.18.1-cp311-none-win_amd64.whl", hash = "sha256:70a838f7754483bcdc830444952fd89645569e7452e3226de4a613a4c1793fb2"},
- {file = "rpds_py-0.18.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:19e515b78c3fc1039dd7da0a33c28c3154458f947f4dc198d3c72db2b6b5dc93"},
- {file = "rpds_py-0.18.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7b28c5b066bca9a4eb4e2f2663012debe680f097979d880657f00e1c30875a0"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:673fdbbf668dd958eff750e500495ef3f611e2ecc209464f661bc82e9838991e"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d960de62227635d2e61068f42a6cb6aae91a7fe00fca0e3aeed17667c8a34611"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:352a88dc7892f1da66b6027af06a2e7e5d53fe05924cc2cfc56495b586a10b72"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4e0ee01ad8260184db21468a6e1c37afa0529acc12c3a697ee498d3c2c4dcaf3"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4c39ad2f512b4041343ea3c7894339e4ca7839ac38ca83d68a832fc8b3748ab"},
- {file = "rpds_py-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aaa71ee43a703c321906813bb252f69524f02aa05bf4eec85f0c41d5d62d0f4c"},
- {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6cd8098517c64a85e790657e7b1e509b9fe07487fd358e19431cb120f7d96338"},
- {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:4adec039b8e2928983f885c53b7cc4cda8965b62b6596501a0308d2703f8af1b"},
- {file = "rpds_py-0.18.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32b7daaa3e9389db3695964ce8e566e3413b0c43e3394c05e4b243a4cd7bef26"},
- {file = "rpds_py-0.18.1-cp39-none-win32.whl", hash = "sha256:2625f03b105328729f9450c8badda34d5243231eef6535f80064d57035738360"},
- {file = "rpds_py-0.18.1-cp39-none-win_amd64.whl", hash = "sha256:bf18932d0003c8c4d51a39f244231986ab23ee057d235a12b2684ea26a353590"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cbfbea39ba64f5e53ae2915de36f130588bba71245b418060ec3330ebf85678e"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3d456ff2a6a4d2adcdf3c1c960a36f4fd2fec6e3b4902a42a384d17cf4e7a65"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7700936ef9d006b7ef605dc53aa364da2de5a3aa65516a1f3ce73bf82ecfc7ae"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:51584acc5916212e1bf45edd17f3a6b05fe0cbb40482d25e619f824dccb679de"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:942695a206a58d2575033ff1e42b12b2aece98d6003c6bc739fbf33d1773b12f"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b906b5f58892813e5ba5c6056d6a5ad08f358ba49f046d910ad992196ea61397"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f8e3fecca256fefc91bb6765a693d96692459d7d4c644660a9fff32e517843"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7732770412bab81c5a9f6d20aeb60ae943a9b36dcd990d876a773526468e7163"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bd1105b50ede37461c1d51b9698c4f4be6e13e69a908ab7751e3807985fc0346"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:618916f5535784960f3ecf8111581f4ad31d347c3de66d02e728de460a46303c"},
- {file = "rpds_py-0.18.1-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:17c6d2155e2423f7e79e3bb18151c686d40db42d8645e7977442170c360194d4"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6c4c4c3f878df21faf5fac86eda32671c27889e13570645a9eea0a1abdd50922"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:fab6ce90574645a0d6c58890e9bcaac8d94dff54fb51c69e5522a7358b80ab64"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:531796fb842b53f2695e94dc338929e9f9dbf473b64710c28af5a160b2a8927d"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:740884bc62a5e2bbb31e584f5d23b32320fd75d79f916f15a788d527a5e83644"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:998125738de0158f088aef3cb264a34251908dd2e5d9966774fdab7402edfab7"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e2be6e9dd4111d5b31ba3b74d17da54a8319d8168890fbaea4b9e5c3de630ae5"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0cee71bc618cd93716f3c1bf56653740d2d13ddbd47673efa8bf41435a60daa"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2c3caec4ec5cd1d18e5dd6ae5194d24ed12785212a90b37f5f7f06b8bedd7139"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:27bba383e8c5231cd559affe169ca0b96ec78d39909ffd817f28b166d7ddd4d8"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:a888e8bdb45916234b99da2d859566f1e8a1d2275a801bb8e4a9644e3c7e7909"},
- {file = "rpds_py-0.18.1-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6031b25fb1b06327b43d841f33842b383beba399884f8228a6bb3df3088485ff"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:48c2faaa8adfacefcbfdb5f2e2e7bdad081e5ace8d182e5f4ade971f128e6bb3"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:d85164315bd68c0806768dc6bb0429c6f95c354f87485ee3593c4f6b14def2bd"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6afd80f6c79893cfc0574956f78a0add8c76e3696f2d6a15bca2c66c415cf2d4"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa242ac1ff583e4ec7771141606aafc92b361cd90a05c30d93e343a0c2d82a89"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21be4770ff4e08698e1e8e0bce06edb6ea0626e7c8f560bc08222880aca6a6f"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c45a639e93a0c5d4b788b2613bd637468edd62f8f95ebc6fcc303d58ab3f0a8"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e71711d1055b2768181efa0a17537b2622afeb0424116619817007f8a2b10"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9bb1f182a97880f6078283b3505a707057c42bf55d8fca604f70dedfdc0772a"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:1d54f74f40b1f7aaa595a02ff42ef38ca654b1469bef7d52867da474243cc633"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8d2e182c9ee01135e11e9676e9a62dfad791a7a467738f06726872374a83db49"},
- {file = "rpds_py-0.18.1-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:636a15acc588f70fda1661234761f9ed9ad79ebed3f2125d44be0862708b666e"},
- {file = "rpds_py-0.18.1.tar.gz", hash = "sha256:dc48b479d540770c811fbd1eb9ba2bb66951863e448efec2e2c102625328e92f"},
+groups = ["default", "dev"]
+files = [
+ {file = "rpds_py-0.19.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:fb37bd599f031f1a6fb9e58ec62864ccf3ad549cf14bac527dbfa97123edcca4"},
+ {file = "rpds_py-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3384d278df99ec2c6acf701d067147320b864ef6727405d6470838476e44d9e8"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e54548e0be3ac117595408fd4ca0ac9278fde89829b0b518be92863b17ff67a2"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8eb488ef928cdbc05a27245e52de73c0d7c72a34240ef4d9893fdf65a8c1a955"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5da93debdfe27b2bfc69eefb592e1831d957b9535e0943a0ee8b97996de21b5"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79e205c70afddd41f6ee79a8656aec738492a550247a7af697d5bd1aee14f766"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:959179efb3e4a27610e8d54d667c02a9feaa86bbabaf63efa7faa4dfa780d4f1"},
+ {file = "rpds_py-0.19.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a6e605bb9edcf010f54f8b6a590dd23a4b40a8cb141255eec2a03db249bc915b"},
+ {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9133d75dc119a61d1a0ded38fb9ba40a00ef41697cc07adb6ae098c875195a3f"},
+ {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:dd36b712d35e757e28bf2f40a71e8f8a2d43c8b026d881aa0c617b450d6865c9"},
+ {file = "rpds_py-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:354f3a91718489912f2e0fc331c24eaaf6a4565c080e00fbedb6015857c00582"},
+ {file = "rpds_py-0.19.0-cp310-none-win32.whl", hash = "sha256:ebcbf356bf5c51afc3290e491d3722b26aaf5b6af3c1c7f6a1b757828a46e336"},
+ {file = "rpds_py-0.19.0-cp310-none-win_amd64.whl", hash = "sha256:75a6076289b2df6c8ecb9d13ff79ae0cad1d5fb40af377a5021016d58cd691ec"},
+ {file = "rpds_py-0.19.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6d45080095e585f8c5097897313def60caa2046da202cdb17a01f147fb263b81"},
+ {file = "rpds_py-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5c9581019c96f865483d031691a5ff1cc455feb4d84fc6920a5ffc48a794d8a"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1540d807364c84516417115c38f0119dfec5ea5c0dd9a25332dea60b1d26fc4d"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e65489222b410f79711dc3d2d5003d2757e30874096b2008d50329ea4d0f88c"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9da6f400eeb8c36f72ef6646ea530d6d175a4f77ff2ed8dfd6352842274c1d8b"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37f46bb11858717e0efa7893c0f7055c43b44c103e40e69442db5061cb26ed34"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:071d4adc734de562bd11d43bd134330fb6249769b2f66b9310dab7460f4bf714"},
+ {file = "rpds_py-0.19.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9625367c8955e4319049113ea4f8fee0c6c1145192d57946c6ffcd8fe8bf48dd"},
+ {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e19509145275d46bc4d1e16af0b57a12d227c8253655a46bbd5ec317e941279d"},
+ {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d438e4c020d8c39961deaf58f6913b1bf8832d9b6f62ec35bd93e97807e9cbc"},
+ {file = "rpds_py-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:90bf55d9d139e5d127193170f38c584ed3c79e16638890d2e36f23aa1630b952"},
+ {file = "rpds_py-0.19.0-cp311-none-win32.whl", hash = "sha256:8d6ad132b1bc13d05ffe5b85e7a01a3998bf3a6302ba594b28d61b8c2cf13aaf"},
+ {file = "rpds_py-0.19.0-cp311-none-win_amd64.whl", hash = "sha256:7ec72df7354e6b7f6eb2a17fa6901350018c3a9ad78e48d7b2b54d0412539a67"},
+ {file = "rpds_py-0.19.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5039e3cef7b3e7a060de468a4a60a60a1f31786da94c6cb054e7a3c75906111c"},
+ {file = "rpds_py-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab1932ca6cb8c7499a4d87cb21ccc0d3326f172cfb6a64021a889b591bb3045c"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2afd2164a1e85226fcb6a1da77a5c8896c18bfe08e82e8ceced5181c42d2179"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1c30841f5040de47a0046c243fc1b44ddc87d1b12435a43b8edff7e7cb1e0d0"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f757f359f30ec7dcebca662a6bd46d1098f8b9fb1fcd661a9e13f2e8ce343ba1"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15e65395a59d2e0e96caf8ee5389ffb4604e980479c32742936ddd7ade914b22"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb0f6eb3a320f24b94d177e62f4074ff438f2ad9d27e75a46221904ef21a7b05"},
+ {file = "rpds_py-0.19.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b228e693a2559888790936e20f5f88b6e9f8162c681830eda303bad7517b4d5a"},
+ {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2575efaa5d949c9f4e2cdbe7d805d02122c16065bfb8d95c129372d65a291a0b"},
+ {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5c872814b77a4e84afa293a1bee08c14daed1068b2bb1cc312edbf020bbbca2b"},
+ {file = "rpds_py-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850720e1b383df199b8433a20e02b25b72f0fded28bc03c5bd79e2ce7ef050be"},
+ {file = "rpds_py-0.19.0-cp39-none-win32.whl", hash = "sha256:ce84a7efa5af9f54c0aa7692c45861c1667080814286cacb9958c07fc50294fb"},
+ {file = "rpds_py-0.19.0-cp39-none-win_amd64.whl", hash = "sha256:1c26da90b8d06227d7769f34915913911222d24ce08c0ab2d60b354e2d9c7aff"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:75969cf900d7be665ccb1622a9aba225cf386bbc9c3bcfeeab9f62b5048f4a07"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8445f23f13339da640d1be8e44e5baf4af97e396882ebbf1692aecd67f67c479"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5a7c1062ef8aea3eda149f08120f10795835fc1c8bc6ad948fb9652a113ca55"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:462b0c18fbb48fdbf980914a02ee38c423a25fcc4cf40f66bacc95a2d2d73bc8"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3208f9aea18991ac7f2b39721e947bbd752a1abbe79ad90d9b6a84a74d44409b"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3444fe52b82f122d8a99bf66777aed6b858d392b12f4c317da19f8234db4533"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb4bac7185a9f0168d38c01d7a00addece9822a52870eee26b8d5b61409213"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6b130bd4163c93798a6b9bb96be64a7c43e1cec81126ffa7ffaa106e1fc5cef5"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:a707b158b4410aefb6b054715545bbb21aaa5d5d0080217290131c49c2124a6e"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:dc9ac4659456bde7c567107556ab065801622396b435a3ff213daef27b495388"},
+ {file = "rpds_py-0.19.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:81ea573aa46d3b6b3d890cd3c0ad82105985e6058a4baed03cf92518081eec8c"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3f148c3f47f7f29a79c38cc5d020edcb5ca780020fab94dbc21f9af95c463581"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0906357f90784a66e89ae3eadc2654f36c580a7d65cf63e6a616e4aec3a81be"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f629ecc2db6a4736b5ba95a8347b0089240d69ad14ac364f557d52ad68cf94b0"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c6feacd1d178c30e5bc37184526e56740342fd2aa6371a28367bad7908d454fc"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae8b6068ee374fdfab63689be0963333aa83b0815ead5d8648389a8ded593378"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d57546bad81e0da13263e4c9ce30e96dcbe720dbff5ada08d2600a3502e526"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8b6683a37338818646af718c9ca2a07f89787551057fae57c4ec0446dc6224b"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e8481b946792415adc07410420d6fc65a352b45d347b78fec45d8f8f0d7496f0"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:bec35eb20792ea64c3c57891bc3ca0bedb2884fbac2c8249d9b731447ecde4fa"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:aa5476c3e3a402c37779e95f7b4048db2cb5b0ed0b9d006983965e93f40fe05a"},
+ {file = "rpds_py-0.19.0-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:19d02c45f2507b489fd4df7b827940f1420480b3e2e471e952af4d44a1ea8e34"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:a3e2fd14c5d49ee1da322672375963f19f32b3d5953f0615b175ff7b9d38daed"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:93a91c2640645303e874eada51f4f33351b84b351a689d470f8108d0e0694210"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5b9fc03bf76a94065299d4a2ecd8dfbae4ae8e2e8098bbfa6ab6413ca267709"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a4b07cdf3f84310c08c1de2c12ddadbb7a77568bcb16e95489f9c81074322ed"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba0ed0dc6763d8bd6e5de5cf0d746d28e706a10b615ea382ac0ab17bb7388633"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:474bc83233abdcf2124ed3f66230a1c8435896046caa4b0b5ab6013c640803cc"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:329c719d31362355a96b435f4653e3b4b061fcc9eba9f91dd40804ca637d914e"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef9101f3f7b59043a34f1dccbb385ca760467590951952d6701df0da9893ca0c"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:0121803b0f424ee2109d6e1f27db45b166ebaa4b32ff47d6aa225642636cd834"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:8344127403dea42f5970adccf6c5957a71a47f522171fafaf4c6ddb41b61703a"},
+ {file = "rpds_py-0.19.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:443cec402ddd650bb2b885113e1dcedb22b1175c6be223b14246a714b61cd521"},
+ {file = "rpds_py-0.19.0.tar.gz", hash = "sha256:4fdc9afadbeb393b4bbbad75481e0ea78e4469f2e1d713a90811700830b553a9"},
]
[[package]]
@@ -3887,7 +3892,7 @@ name = "rsa"
version = "4.9"
requires_python = ">=3.6,<4"
summary = "Pure-Python RSA implementation"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pyasn1>=0.1.3",
]
@@ -3901,7 +3906,7 @@ name = "s3fs"
version = "2023.6.0"
requires_python = ">= 3.8"
summary = "Convenient Filesystem interface over S3"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiobotocore~=2.5.0",
"aiohttp!=4.0.0a0,!=4.0.0a1",
@@ -3918,7 +3923,7 @@ version = "2023.6.0"
extras = ["boto3"]
requires_python = ">= 3.8"
summary = "Convenient Filesystem interface over S3"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"aiobotocore[boto3]~=2.5.0",
"s3fs==2023.6.0",
@@ -3933,7 +3938,7 @@ name = "s3transfer"
version = "0.6.2"
requires_python = ">= 3.7"
summary = "An Amazon S3 Transfer Manager"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"botocore<2.0a.0,>=1.12.36",
]
@@ -4017,13 +4022,13 @@ files = [
[[package]]
name = "setuptools"
-version = "70.1.1"
+version = "70.2.0"
requires_python = ">=3.8"
summary = "Easily download, build, install, upgrade, and uninstall Python packages"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
- {file = "setuptools-70.1.1-py3-none-any.whl", hash = "sha256:a58a8fde0541dab0419750bcc521fbdf8585f6e5cb41909df3a472ef7b81ca95"},
- {file = "setuptools-70.1.1.tar.gz", hash = "sha256:937a48c7cdb7a21eb53cd7f9b59e525503aa8abaf3584c730dc5f7a5bec3a650"},
+ {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"},
+ {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"},
]
[[package]]
@@ -4031,7 +4036,7 @@ name = "shapely"
version = "2.0.4"
requires_python = ">=3.7"
summary = "Manipulation and analysis of geometric objects"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"numpy<3,>=1.14",
]
@@ -4078,7 +4083,7 @@ files = [
name = "singleton-decorator"
version = "1.0.0"
summary = "A testable singleton decorator"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "singleton-decorator-1.0.0.tar.gz", hash = "sha256:1a90ad8a8a738be591c9c167fdd677c5d4a43d1bc6b1c128227be1c5e03bee07"},
]
@@ -4088,7 +4093,7 @@ name = "six"
version = "1.16.0"
requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
summary = "Python 2 and 3 compatibility utilities"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
@@ -4099,7 +4104,7 @@ name = "sniffio"
version = "1.3.1"
requires_python = ">=3.7"
summary = "Sniff out which async library your code is running under"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
{file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
@@ -4110,7 +4115,7 @@ name = "snowflake-connector-python"
version = "3.6.0"
requires_python = ">=3.8"
summary = "Snowflake Connector for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"asn1crypto<2.0.0,>0.24.0",
"certifi>=2017.4.17",
@@ -4155,7 +4160,7 @@ version = "3.6.0"
extras = ["pandas"]
requires_python = ">=3.8"
summary = "Snowflake Connector for Python"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"pandas<2.2.0,>=1.0.0",
"pyarrow",
@@ -4219,7 +4224,7 @@ files = [
name = "sortedcontainers"
version = "2.4.0"
summary = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
@@ -4230,7 +4235,7 @@ name = "soupsieve"
version = "2.5"
requires_python = ">=3.8"
summary = "A modern CSS selector implementation for Beautiful Soup."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"},
{file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"},
@@ -4241,7 +4246,7 @@ name = "sqlalchemy"
version = "2.0.31"
requires_python = ">=3.7"
summary = "Database Abstraction Library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"greenlet!=0.4.17; (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\") and python_version < \"3.13\"",
"typing-extensions>=4.6.0",
@@ -4281,7 +4286,7 @@ version = "2.0.31"
extras = ["asyncio"]
requires_python = ">=3.7"
summary = "Database Abstraction Library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"greenlet!=0.4.17",
"sqlalchemy==2.0.31",
@@ -4330,7 +4335,7 @@ files = [
name = "stone"
version = "3.3.1"
summary = "Stone is an interface description language (IDL) for APIs."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"ply>=3.4",
"six>=1.12.0",
@@ -4344,7 +4349,7 @@ files = [
name = "striprtf"
version = "0.0.26"
summary = "A simple library to convert rtf to text"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "striprtf-0.0.26-py3-none-any.whl", hash = "sha256:8c8f9d32083cdc2e8bfb149455aa1cc5a4e0a035893bedc75db8b73becb3a1bb"},
{file = "striprtf-0.0.26.tar.gz", hash = "sha256:fdb2bba7ac440072d1c41eab50d8d74ae88f60a8b6575c6e2c7805dc462093aa"},
@@ -4352,13 +4357,13 @@ files = [
[[package]]
name = "tenacity"
-version = "8.4.2"
+version = "8.5.0"
requires_python = ">=3.8"
summary = "Retry code until it succeeds"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
- {file = "tenacity-8.4.2-py3-none-any.whl", hash = "sha256:9e6f7cf7da729125c7437222f8a522279751cdfbe6b67bfe64f75d3a348661b2"},
- {file = "tenacity-8.4.2.tar.gz", hash = "sha256:cd80a53a79336edba8489e767f729e4f391c896956b57140b5d7511a64bbd3ef"},
+ {file = "tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687"},
+ {file = "tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78"},
]
[[package]]
@@ -4366,7 +4371,7 @@ name = "tiktoken"
version = "0.4.0"
requires_python = ">=3.8"
summary = "tiktoken is a fast BPE tokeniser for use with OpenAI's models"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"regex>=2022.1.18",
"requests>=2.26.0",
@@ -4401,7 +4406,7 @@ name = "tokenizers"
version = "0.15.2"
requires_python = ">=3.7"
summary = ""
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"huggingface-hub<1.0,>=0.16.4",
]
@@ -4489,7 +4494,7 @@ name = "tomlkit"
version = "0.12.5"
requires_python = ">=3.7"
summary = "Style preserving TOML library"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"},
{file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"},
@@ -4520,7 +4525,7 @@ name = "tqdm"
version = "4.66.4"
requires_python = ">=3.7"
summary = "Fast, Extensible Progress Meter"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"colorama; platform_system == \"Windows\"",
]
@@ -4557,7 +4562,7 @@ name = "typing-extensions"
version = "4.12.2"
requires_python = ">=3.8"
summary = "Backported and Experimental Type Hints for Python 3.8+"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
{file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
@@ -4567,7 +4572,7 @@ files = [
name = "typing-inspect"
version = "0.9.0"
summary = "Runtime inspection utilities for typing module."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"mypy-extensions>=0.3.0",
"typing-extensions>=3.7.4",
@@ -4582,7 +4587,7 @@ name = "tzdata"
version = "2024.1"
requires_python = ">=2"
summary = "Provider of IANA time zone data"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"},
{file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"},
@@ -4593,7 +4598,7 @@ name = "ujson"
version = "5.10.0"
requires_python = ">=3.8"
summary = "Ultra fast JSON encoder and decoder for Python"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"},
{file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"},
@@ -4647,10 +4652,10 @@ files = [
[[package]]
name = "unstract-adapters"
-version = "0.20.1"
+version = "0.21.0"
requires_python = "<3.12,>=3.9"
summary = "Unstract interface for LLMs, Embeddings and VectorDBs"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"filetype~=1.2.0",
"httpx>=0.25.2",
@@ -4676,17 +4681,18 @@ dependencies = [
"singleton-decorator~=1.0.0",
]
files = [
- {file = "unstract_adapters-0.20.1-py3-none-any.whl", hash = "sha256:e1f291016ab81f2fa46bc5ac0629905a9977198a8e5ad3077b8b188383fbd0c3"},
- {file = "unstract_adapters-0.20.1.tar.gz", hash = "sha256:4c9b10bc5ae6f0d395088ba4f3d2b27bcfade1b2f1666ccf2611723336d33571"},
+ {file = "unstract_adapters-0.21.0-py3-none-any.whl", hash = "sha256:6c4f597602f55b80ba176a29a930755abd3494ff1c085f406536e7463902d655"},
+ {file = "unstract_adapters-0.21.0.tar.gz", hash = "sha256:ebb8f80b7f26f04874bb5466fe0be8c3e1b51d44ed85c4e62289a7751c996af6"},
]
[[package]]
name = "unstract-connectors"
version = "0.0.3"
requires_python = ">=3.9"
+editable = true
path = "../unstract/connectors"
summary = "All connectors that are part of the Unstract platform"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyDrive2[fsspec]==1.15.4",
"PyMySQL==1.1.0",
@@ -4709,9 +4715,10 @@ dependencies = [
name = "unstract-core"
version = "0.0.1"
requires_python = ">=3.9,<3.11.1"
+editable = true
path = "../unstract/core"
summary = "Core library that helps with executing workflows."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"boto3~=1.28.17",
"botocore~=1.31.17",
@@ -4726,9 +4733,10 @@ dependencies = [
name = "unstract-flags"
version = "0.0.1"
requires_python = ">=3.9"
+editable = true
path = "../unstract/flags"
summary = "Unstract's feature flags package."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"grpcio-tools<=1.60.0",
"grpcio<=1.60.0",
@@ -4736,7 +4744,7 @@ dependencies = [
[[package]]
name = "unstract-sdk"
-version = "0.35.0"
+version = "0.37.0"
requires_python = "<3.11.1,>=3.9"
summary = "A framework for writing Unstract Tools/Apps"
groups = ["default"]
@@ -4748,25 +4756,26 @@ dependencies = [
"python-magic~=0.4.27",
"tiktoken~=0.4.0",
"transformers==4.37.0",
- "unstract-adapters~=0.20.1",
+ "unstract-adapters~=0.21.0",
]
files = [
- {file = "unstract_sdk-0.35.0-py3-none-any.whl", hash = "sha256:d91338a8f48c421f58ad6ae7337480eae39fd468b44a6a8f6bdc4784d1dac478"},
- {file = "unstract_sdk-0.35.0.tar.gz", hash = "sha256:863622d5c785aba2bb549e61959bd1d85cd21376310a340031a41fb78eab47ed"},
+ {file = "unstract_sdk-0.37.0-py3-none-any.whl", hash = "sha256:3bd83dfb7a760e73f35cc9ed4bdadf645bd77f03f77d25578e842156645e6f1d"},
+ {file = "unstract_sdk-0.37.0.tar.gz", hash = "sha256:a00b81f014508db0130dd21d594e3d8e0034dcafd3f01461ce30d37e68646944"},
]
[[package]]
name = "unstract-tool-registry"
version = "0.0.1"
requires_python = ">=3.9,<3.11.1"
+editable = true
path = "../unstract/tool-registry"
summary = "Unstract's registry of tools to be used in workflows."
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"PyYAML~=6.0.1",
"docker~=6.1.3",
"jsonschema~=4.18.2",
- "unstract-adapters~=0.20.1",
+ "unstract-adapters~=0.21.0",
"unstract-tool-sandbox",
]
@@ -4774,9 +4783,10 @@ dependencies = [
name = "unstract-tool-sandbox"
version = "0.0.1"
requires_python = ">=3.9"
+editable = true
path = "../unstract/tool-sandbox"
summary = "Unstract Tool Sandbox is a package to communicate with tool worker"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"requests==2.31.0",
]
@@ -4785,9 +4795,10 @@ dependencies = [
name = "unstract-workflow-execution"
version = "0.0.1"
requires_python = ">=3.9"
+editable = true
path = "../unstract/workflow-execution"
summary = "Unstract workflow execution package"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"unstract-core",
"unstract-tool-registry",
@@ -4799,7 +4810,7 @@ name = "uritemplate"
version = "4.1.1"
requires_python = ">=3.6"
summary = "Implementation of RFC 6570 URI Templates"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "uritemplate-4.1.1-py2.py3-none-any.whl", hash = "sha256:830c08b8d99bdd312ea4ead05994a38e8936266f84b9a7878232db50b044e02e"},
{file = "uritemplate-4.1.1.tar.gz", hash = "sha256:4346edfc5c3b79f694bccd6d6099a322bbeb628dbf2cd86eea55a456ce5124f0"},
@@ -4810,7 +4821,7 @@ name = "urllib3"
version = "1.26.19"
requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7"
summary = "HTTP library with thread-safe connection pooling, file post, and more."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "urllib3-1.26.19-py2.py3-none-any.whl", hash = "sha256:37a0344459b199fce0e80b0d3569837ec6b6937435c5244e7fd73fa6006830f3"},
{file = "urllib3-1.26.19.tar.gz", hash = "sha256:3e3d753a8618b86d7de333b4223005f68720bcd6a7d2bcb9fbd2229ec7c1e429"},
@@ -4818,13 +4829,13 @@ files = [
[[package]]
name = "validators"
-version = "0.28.3"
+version = "0.31.0"
requires_python = ">=3.8"
summary = "Python Data Validation for Humansβ’"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
- {file = "validators-0.28.3-py3-none-any.whl", hash = "sha256:53cafa854f13850156259d9cc479b864ee901f6a96e6b109e6fc33f98f37d99f"},
- {file = "validators-0.28.3.tar.gz", hash = "sha256:c6c79840bcde9ba77b19f6218f7738188115e27830cbaff43264bc4ed24c429d"},
+ {file = "validators-0.31.0-py3-none-any.whl", hash = "sha256:e15a600d81555a4cd409b17bf55946c5edec7748e776afc85ed0a19bdee54e56"},
+ {file = "validators-0.31.0.tar.gz", hash = "sha256:de7574fc56a231c788162f3e7da15bc2053c5ff9e0281d9ff1afb3a7b69498df"},
]
[[package]]
@@ -4832,7 +4843,7 @@ name = "vine"
version = "5.1.0"
requires_python = ">=3.6"
summary = "Python promises."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc"},
{file = "vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0"},
@@ -4850,18 +4861,18 @@ files = [
[[package]]
name = "weaviate-client"
-version = "3.26.2"
+version = "3.26.5"
requires_python = ">=3.8"
summary = "A python native Weaviate client"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"authlib<2.0.0,>=1.2.1",
"requests<3.0.0,>=2.30.0",
"validators<1.0.0,>=0.21.2",
]
files = [
- {file = "weaviate-client-3.26.2.tar.gz", hash = "sha256:63ec70839b64909810a64aa7b3e5b85088462e93c7e2ed3c32ebefb702f36723"},
- {file = "weaviate_client-3.26.2-py3-none-any.whl", hash = "sha256:ca43bfb9c06b8ae3fd938dc9158acd93d4cbf4622192e173333e1ff63cf97164"},
+ {file = "weaviate_client-3.26.5-py3-none-any.whl", hash = "sha256:76327ba93bdfff293e7e299e90ea28ad0e489cfff7c7a6be82c72d1159b60e4f"},
+ {file = "weaviate_client-3.26.5.tar.gz", hash = "sha256:f9dc0e42656e3458b12aa59b73e08da0e0f6301f3cd368473c9f5242821854d6"},
]
[[package]]
@@ -4869,7 +4880,7 @@ name = "websocket-client"
version = "1.8.0"
requires_python = ">=3.8"
summary = "WebSocket client for Python with low level API options"
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"},
{file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"},
@@ -4880,7 +4891,7 @@ name = "wrapt"
version = "1.16.0"
requires_python = ">=3.6"
summary = "Module for decorators, wrappers and monkey patching."
-groups = ["default"]
+groups = ["default", "dev"]
files = [
{file = "wrapt-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ffa565331890b90056c01db69c0fe634a776f8019c143a5ae265f9c6bc4bd6d4"},
{file = "wrapt-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4fdb9275308292e880dcbeb12546df7f3e0f96c6b41197e0cf37d2826359020"},
@@ -4935,7 +4946,7 @@ name = "yarl"
version = "1.9.4"
requires_python = ">=3.7"
summary = "Yet another URL library"
-groups = ["default"]
+groups = ["default", "dev"]
dependencies = [
"idna>=2.0",
"multidict>=4.0",
diff --git a/backend/prompt_studio/prompt_profile_manager/constants.py b/backend/prompt_studio/prompt_profile_manager/constants.py
index 70cf34019..6540b58ee 100644
--- a/backend/prompt_studio/prompt_profile_manager/constants.py
+++ b/backend/prompt_studio/prompt_profile_manager/constants.py
@@ -7,6 +7,8 @@ class ProfileManagerKeys:
VECTOR_STORE = "vector_store"
EMBEDDING_MODEL = "embedding_model"
X2TEXT = "x2text"
+ PROMPT_STUDIO_TOOL = "prompt_studio_tool"
+ MAX_PROFILE_COUNT = 4
class ProfileManagerErrors:
diff --git a/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py b/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py
new file mode 100644
index 000000000..68783b551
--- /dev/null
+++ b/backend/prompt_studio/prompt_profile_manager/profile_manager_helper.py
@@ -0,0 +1,11 @@
+from prompt_studio.prompt_profile_manager.models import ProfileManager
+
+
+class ProfileManagerHelper:
+
+ @classmethod
+ def get_profile_manager(cls, profile_manager_id: str) -> ProfileManager:
+ try:
+ return ProfileManager.objects.get(profile_id=profile_manager_id)
+ except ProfileManager.DoesNotExist:
+ raise ValueError("ProfileManager does not exist.")
diff --git a/backend/prompt_studio/prompt_profile_manager/serializers.py b/backend/prompt_studio/prompt_profile_manager/serializers.py
index 4d4753561..fc83aaab4 100644
--- a/backend/prompt_studio/prompt_profile_manager/serializers.py
+++ b/backend/prompt_studio/prompt_profile_manager/serializers.py
@@ -2,6 +2,7 @@
from adapter_processor.adapter_processor import AdapterProcessor
from prompt_studio.prompt_profile_manager.constants import ProfileManagerKeys
+from prompt_studio.prompt_studio_core.exceptions import MaxProfilesReachedError
from backend.serializers import AuditSerializer
@@ -38,3 +39,15 @@ def to_representation(self, instance): # type: ignore
AdapterProcessor.get_adapter_instance_by_id(x2text)
)
return rep
+
+ def validate(self, data):
+ prompt_studio_tool = data.get(ProfileManagerKeys.PROMPT_STUDIO_TOOL)
+
+ profile_count = ProfileManager.objects.filter(
+ prompt_studio_tool=prompt_studio_tool
+ ).count()
+
+ if profile_count >= ProfileManagerKeys.MAX_PROFILE_COUNT:
+ raise MaxProfilesReachedError()
+
+ return data
diff --git a/backend/prompt_studio/prompt_studio_core/constants.py b/backend/prompt_studio/prompt_studio_core/constants.py
index 934d9b530..213fc066f 100644
--- a/backend/prompt_studio/prompt_studio_core/constants.py
+++ b/backend/prompt_studio/prompt_studio_core/constants.py
@@ -85,6 +85,10 @@ class ToolStudioPromptKeys:
NOTES = "NOTES"
OUTPUT = "output"
SEQUENCE_NUMBER = "sequence_number"
+ PROFILE_MANAGER_ID = "profile_manager"
+ CONTEXT = "context"
+ METADATA = "metadata"
+ INCLUDE_METADATA = "include_metadata"
class FileViewTypes:
@@ -108,6 +112,13 @@ class LogLevel(Enum):
FATAL = "FATAL"
+class IndexingStatus(Enum):
+ PENDING_STATUS = "pending"
+ COMPLETED_STATUS = "completed"
+ STARTED_STATUS = "started"
+ DOCUMENT_BEING_INDEXED = "Document is being indexed"
+
+
class DefaultPrompts:
PREAMBLE = (
"Your ability to extract and summarize this context accurately "
diff --git a/backend/prompt_studio/prompt_studio_core/document_indexing_service.py b/backend/prompt_studio/prompt_studio_core/document_indexing_service.py
new file mode 100644
index 000000000..539c5a2dc
--- /dev/null
+++ b/backend/prompt_studio/prompt_studio_core/document_indexing_service.py
@@ -0,0 +1,53 @@
+from typing import Optional
+
+from django.conf import settings
+from prompt_studio.prompt_studio_core.constants import IndexingStatus
+from utils.cache_service import CacheService
+
+
+class DocumentIndexingService:
+ CACHE_PREFIX = "document_indexing:"
+
+ @classmethod
+ def set_document_indexing(cls, org_id: str, user_id: str, doc_id_key: str) -> None:
+ CacheService.set_key(
+ cls._cache_key(org_id, user_id, doc_id_key),
+ IndexingStatus.STARTED_STATUS.value,
+ expire=settings.INDEXING_FLAG_TTL,
+ )
+
+ @classmethod
+ def is_document_indexing(cls, org_id: str, user_id: str, doc_id_key: str) -> bool:
+ return (
+ CacheService.get_key(cls._cache_key(org_id, user_id, doc_id_key))
+ == IndexingStatus.STARTED_STATUS.value
+ )
+
+ @classmethod
+ def mark_document_indexed(
+ cls, org_id: str, user_id: str, doc_id_key: str, doc_id: str
+ ) -> None:
+ CacheService.set_key(
+ cls._cache_key(org_id, user_id, doc_id_key),
+ doc_id,
+ expire=settings.INDEXING_FLAG_TTL,
+ )
+
+ @classmethod
+ def get_indexed_document_id(
+ cls, org_id: str, user_id: str, doc_id_key: str
+ ) -> Optional[str]:
+ result = CacheService.get_key(cls._cache_key(org_id, user_id, doc_id_key))
+ if result and result != IndexingStatus.STARTED_STATUS.value:
+ return result
+ return None
+
+ @classmethod
+ def remove_document_indexing(
+ cls, org_id: str, user_id: str, doc_id_key: str
+ ) -> None:
+ CacheService.delete_a_key(cls._cache_key(org_id, user_id, doc_id_key))
+
+ @classmethod
+ def _cache_key(cls, org_id: str, user_id: str, doc_id_key: str) -> str:
+ return f"{cls.CACHE_PREFIX}{org_id}:{user_id}:{doc_id_key}"
diff --git a/backend/prompt_studio/prompt_studio_core/exceptions.py b/backend/prompt_studio/prompt_studio_core/exceptions.py
index 666d41241..241418060 100644
--- a/backend/prompt_studio/prompt_studio_core/exceptions.py
+++ b/backend/prompt_studio/prompt_studio_core/exceptions.py
@@ -1,3 +1,4 @@
+from prompt_studio.prompt_profile_manager.constants import ProfileManagerKeys
from prompt_studio.prompt_studio_core.constants import ToolStudioErrors
from rest_framework.exceptions import APIException
@@ -58,3 +59,11 @@ class PermissionError(APIException):
class EmptyPromptError(APIException):
status_code = 422
default_detail = "Prompt(s) cannot be empty"
+
+
+class MaxProfilesReachedError(APIException):
+ status_code = 403
+ default_detail = (
+ f"Maximum number of profiles (max {ProfileManagerKeys.MAX_PROFILE_COUNT})"
+ " per prompt studio project has been reached."
+ )
diff --git a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py b/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py
index c279bd8c4..8f07ac336 100644
--- a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py
+++ b/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py
@@ -13,9 +13,15 @@
from django.db.models.manager import BaseManager
from file_management.file_management_helper import FileManagerHelper
from prompt_studio.prompt_profile_manager.models import ProfileManager
+from prompt_studio.prompt_profile_manager.profile_manager_helper import (
+ ProfileManagerHelper,
+)
from prompt_studio.prompt_studio.models import ToolStudioPrompt
-from prompt_studio.prompt_studio_core.constants import LogLevels
+from prompt_studio.prompt_studio_core.constants import IndexingStatus, LogLevels
from prompt_studio.prompt_studio_core.constants import ToolStudioPromptKeys as TSPKeys
+from prompt_studio.prompt_studio_core.document_indexing_service import (
+ DocumentIndexingService,
+)
from prompt_studio.prompt_studio_core.exceptions import (
AnswerFetchError,
DefaultProfileError,
@@ -159,21 +165,21 @@ def validate_profile_manager_owner_access(
).exists()
)
is_vector_store_owned = (
- profile_manager.llm.shared_to_org
+ profile_manager.vector_store.shared_to_org
or profile_manager.vector_store.created_by == profile_manager_owner
or profile_manager.vector_store.shared_users.filter(
pk=profile_manager_owner.pk
).exists()
)
is_embedding_model_owned = (
- profile_manager.llm.shared_to_org
+ profile_manager.embedding_model.shared_to_org
or profile_manager.embedding_model.created_by == profile_manager_owner
or profile_manager.embedding_model.shared_users.filter(
pk=profile_manager_owner.pk
).exists()
)
is_x2text_owned = (
- profile_manager.llm.shared_to_org
+ profile_manager.x2text.shared_to_org
or profile_manager.x2text.created_by == profile_manager_owner
or profile_manager.x2text.shared_users.filter(
pk=profile_manager_owner.pk
@@ -344,6 +350,7 @@ def index_document(
is_summary=is_summary,
reindex=True,
run_id=run_id,
+ user_id=user_id,
)
logger.info(f"[{tool_id}] Indexing successful for doc: {file_name}")
@@ -354,7 +361,7 @@ def index_document(
"Indexing successful",
)
- return doc_id
+ return doc_id.get("output")
@staticmethod
def prompt_responder(
@@ -364,6 +371,7 @@ def prompt_responder(
document_id: str,
id: Optional[str] = None,
run_id: str = None,
+ profile_manager_id: Optional[str] = None,
) -> Any:
"""Execute chain/single run of the prompts. Makes a call to prompt
service and returns the dict of response.
@@ -374,6 +382,7 @@ def prompt_responder(
user_id (str): User's ID
document_id (str): UUID of the document uploaded
id (Optional[str]): ID of the prompt
+ profile_manager_id (Optional[str]): UUID of the profile manager
Raises:
AnswerFetchError: Error from prompt-service
@@ -383,44 +392,94 @@ def prompt_responder(
"""
document: DocumentManager = DocumentManager.objects.get(pk=document_id)
doc_name: str = document.document_name
-
- doc_path = FileManagerHelper.handle_sub_directory_for_tenants(
- org_id=org_id,
- user_id=user_id,
- tool_id=tool_id,
- is_create=False,
+ doc_path = PromptStudioHelper._get_document_path(
+ org_id, user_id, tool_id, doc_name
)
- doc_path = str(Path(doc_path) / doc_name)
if id:
- prompt_instance = PromptStudioHelper._fetch_prompt_from_id(id)
- prompt_name = prompt_instance.prompt_key
- logger.info(f"[{tool_id}] Executing single prompt {id}")
- PromptStudioHelper._publish_log(
- {
- "tool_id": tool_id,
- "run_id": run_id,
- "prompt_key": prompt_name,
- "doc_name": doc_name,
- },
- LogLevels.INFO,
- LogLevels.RUN,
- "Executing single prompt",
+ return PromptStudioHelper._execute_single_prompt(
+ id,
+ doc_path,
+ doc_name,
+ tool_id,
+ org_id,
+ user_id,
+ document_id,
+ run_id,
+ profile_manager_id,
)
+ else:
+ return PromptStudioHelper._execute_prompts_in_single_pass(
+ doc_path, tool_id, org_id, user_id, document_id, run_id
+ )
+
+ @staticmethod
+ def _execute_single_prompt(
+ id,
+ doc_path,
+ doc_name,
+ tool_id,
+ org_id,
+ user_id,
+ document_id,
+ run_id,
+ profile_manager_id,
+ ):
+ prompt_instance = PromptStudioHelper._fetch_prompt_from_id(id)
+ prompt_name = prompt_instance.prompt_key
+ PromptStudioHelper._publish_log(
+ {
+ "tool_id": tool_id,
+ "run_id": run_id,
+ "prompt_key": prompt_name,
+ "doc_name": doc_name,
+ },
+ LogLevels.INFO,
+ LogLevels.RUN,
+ "Executing single prompt",
+ )
+ prompts = [prompt_instance]
+ tool = prompt_instance.tool_id
- prompts: list[ToolStudioPrompt] = []
- prompts.append(prompt_instance)
- tool: CustomTool = prompt_instance.tool_id
+ if tool.summarize_as_source:
+ directory, filename = os.path.split(doc_path)
+ doc_path = os.path.join(
+ directory, TSPKeys.SUMMARIZE, os.path.splitext(filename)[0] + ".txt"
+ )
- if tool.summarize_as_source:
- directory, filename = os.path.split(doc_path)
- doc_path = os.path.join(
- directory,
- TSPKeys.SUMMARIZE,
- os.path.splitext(filename)[0] + ".txt",
- )
+ PromptStudioHelper._publish_log(
+ {
+ "tool_id": tool_id,
+ "run_id": run_id,
+ "prompt_key": prompt_name,
+ "doc_name": doc_name,
+ },
+ LogLevels.DEBUG,
+ LogLevels.RUN,
+ "Invoking prompt service",
+ )
- logger.info(f"[{tool.tool_id}] Invoking prompt service for prompt {id}")
+ try:
+ response = PromptStudioHelper._fetch_response(
+ doc_path=doc_path,
+ doc_name=doc_name,
+ tool=tool,
+ prompt=prompt_instance,
+ org_id=org_id,
+ document_id=document_id,
+ run_id=run_id,
+ profile_manager_id=profile_manager_id,
+ user_id=user_id,
+ )
+ return PromptStudioHelper._handle_response(
+ response, run_id, prompts, document_id, False, profile_manager_id
+ )
+ except Exception as e:
+ logger.error(
+ f"[{tool.tool_id}] Error while fetching response for "
+ f"prompt {id} and doc {document_id}: {e}"
+ )
+ msg = str(e)
PromptStudioHelper._publish_log(
{
"tool_id": tool_id,
@@ -428,130 +487,89 @@ def prompt_responder(
"prompt_key": prompt_name,
"doc_name": doc_name,
},
- LogLevels.DEBUG,
+ LogLevels.ERROR,
LogLevels.RUN,
- "Invoking prompt service",
+ msg,
)
+ raise e
- try:
- response = PromptStudioHelper._fetch_response(
- doc_path=doc_path,
- doc_name=doc_name,
- tool=tool,
- prompt=prompt_instance,
- org_id=org_id,
- document_id=document_id,
- run_id=run_id,
- )
+ @staticmethod
+ def _execute_prompts_in_single_pass(
+ doc_path, tool_id, org_id, user_id, document_id, run_id
+ ):
+ prompts = PromptStudioHelper.fetch_prompt_from_tool(tool_id)
+ prompts = [prompt for prompt in prompts if prompt.prompt_type != TSPKeys.NOTES]
+ if not prompts:
+ logger.error(f"[{tool_id or 'NA'}] No prompts found for id: {id}")
+ raise NoPromptsFound()
- OutputManagerHelper.handle_prompt_output_update(
- run_id=run_id,
- prompts=prompts,
- outputs=response["output"],
- document_id=document_id,
- is_single_pass_extract=False,
- )
- # TODO: Review if this catch-all is required
- except Exception as e:
- logger.error(
- f"[{tool.tool_id}] Error while fetching response for "
- f"prompt {id} and doc {document_id}: {e}"
- )
- msg: str = (
- f"Error while fetching response for "
- f"'{prompt_name}' with '{doc_name}'. {e}"
- )
- if isinstance(e, AnswerFetchError):
- msg = str(e)
- PromptStudioHelper._publish_log(
- {
- "tool_id": tool_id,
- "run_id": run_id,
- "prompt_key": prompt_name,
- "doc_name": doc_name,
- },
- LogLevels.ERROR,
- LogLevels.RUN,
- msg,
- )
- raise e
+ PromptStudioHelper._publish_log(
+ {"tool_id": tool_id, "run_id": run_id, "prompt_id": str(id)},
+ LogLevels.INFO,
+ LogLevels.RUN,
+ "Executing prompts in single pass",
+ )
- logger.info(
- f"[{tool.tool_id}] Response fetched successfully for prompt {id}"
+ try:
+ tool = prompts[0].tool_id
+ response = PromptStudioHelper._fetch_single_pass_response(
+ file_path=doc_path,
+ tool=tool,
+ prompts=prompts,
+ org_id=org_id,
+ document_id=document_id,
+ run_id=run_id,
+ user_id=user_id,
+ )
+ return PromptStudioHelper._handle_response(
+ response, run_id, prompts, document_id, True
+ )
+ except Exception as e:
+ logger.error(
+ f"[{tool.tool_id}] Error while fetching single pass response: {e}"
)
PromptStudioHelper._publish_log(
{
"tool_id": tool_id,
"run_id": run_id,
- "prompt_key": prompt_name,
- "doc_name": doc_name,
+ "prompt_id": str(id),
},
- LogLevels.INFO,
- LogLevels.RUN,
- "Single prompt execution completed",
- )
-
- return response
- else:
- prompts = PromptStudioHelper.fetch_prompt_from_tool(tool_id)
- prompts = [
- prompt for prompt in prompts if prompt.prompt_type != TSPKeys.NOTES
- ]
- if not prompts:
- logger.error(f"[{tool_id or 'NA'}] No prompts found for id: {id}")
- raise NoPromptsFound()
-
- logger.info(f"[{tool_id}] Executing prompts in single pass")
- PromptStudioHelper._publish_log(
- {"tool_id": tool_id, "run_id": run_id, "prompt_id": str(id)},
- LogLevels.INFO,
+ LogLevels.ERROR,
LogLevels.RUN,
- "Executing prompts in single pass",
+ f"Failed to fetch single pass response. {e}",
)
+ raise e
- try:
- tool = prompts[0].tool_id
- response = PromptStudioHelper._fetch_single_pass_response(
- file_path=doc_path,
- tool=tool,
- prompts=prompts,
- org_id=org_id,
- document_id=document_id,
- run_id=run_id,
- )
-
- OutputManagerHelper.handle_prompt_output_update(
- run_id=run_id,
- prompts=prompts,
- outputs=response[TSPKeys.OUTPUT],
- document_id=document_id,
- is_single_pass_extract=True,
- )
- except Exception as e:
- logger.error(
- f"[{tool.tool_id}] Error while fetching single pass response: {e}" # noqa: E501
- )
- PromptStudioHelper._publish_log(
- {
- "tool_id": tool_id,
- "run_id": run_id,
- "prompt_id": str(id),
- },
- LogLevels.ERROR,
- LogLevels.RUN,
- f"Failed to fetch single pass response. {e}",
- )
- raise e
-
- logger.info(f"[{tool.tool_id}] Single pass response fetched successfully")
- PromptStudioHelper._publish_log(
- {"tool_id": tool_id, "run_id": run_id, "prompt_id": str(id)},
- LogLevels.INFO,
- LogLevels.RUN,
- "Single pass execution completed",
- )
+ @staticmethod
+ def _get_document_path(org_id, user_id, tool_id, doc_name):
+ doc_path = FileManagerHelper.handle_sub_directory_for_tenants(
+ org_id=org_id,
+ user_id=user_id,
+ tool_id=tool_id,
+ is_create=False,
+ )
+ return str(Path(doc_path) / doc_name)
- return response
+ @staticmethod
+ def _handle_response(
+ response, run_id, prompts, document_id, is_single_pass, profile_manager_id=None
+ ):
+ if response.get("status") == IndexingStatus.PENDING_STATUS.value:
+ return {
+ "status": IndexingStatus.PENDING_STATUS.value,
+ "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value,
+ }
+
+ OutputManagerHelper.handle_prompt_output_update(
+ run_id=run_id,
+ prompts=prompts,
+ outputs=response["output"],
+ document_id=document_id,
+ is_single_pass_extract=is_single_pass,
+ profile_manager_id=profile_manager_id,
+ context=response["metadata"].get("context"),
+ )
+ return response
@staticmethod
def _fetch_response(
@@ -562,6 +580,8 @@ def _fetch_response(
org_id: str,
document_id: str,
run_id: str,
+ user_id: str,
+ profile_manager_id: Optional[str] = None,
) -> Any:
"""Utility function to invoke prompt service. Used internally.
@@ -572,6 +592,9 @@ def _fetch_response(
prompt (ToolStudioPrompt): ToolStudioPrompt instance to fetch response
org_id (str): UUID of the organization
document_id (str): UUID of the document
+ profile_manager_id (Optional[str]): UUID of the profile manager
+ user_id (str): The ID of the user who uploaded the document
+
Raises:
DefaultProfileError: If no default profile is selected
@@ -580,6 +603,14 @@ def _fetch_response(
Returns:
Any: Output from LLM
"""
+
+ # Fetch the ProfileManager instance using the profile_manager_id if provided
+ profile_manager = prompt.profile_manager
+ if profile_manager_id:
+ profile_manager = ProfileManagerHelper.get_profile_manager(
+ profile_manager_id=profile_manager_id
+ )
+
monitor_llm_instance: Optional[AdapterInstance] = tool.monitor_llm
monitor_llm: Optional[str] = None
challenge_llm_instance: Optional[AdapterInstance] = tool.challenge_llm
@@ -600,28 +631,33 @@ def _fetch_response(
challenge_llm = str(default_profile.llm.id)
# Need to check the user who created profile manager
- PromptStudioHelper.validate_adapter_status(prompt.profile_manager)
+ PromptStudioHelper.validate_adapter_status(profile_manager)
# Need to check the user who created profile manager
# has access to adapters
- PromptStudioHelper.validate_profile_manager_owner_access(prompt.profile_manager)
+ PromptStudioHelper.validate_profile_manager_owner_access(profile_manager)
# Not checking reindex here as there might be
# change in Profile Manager
- vector_db = str(prompt.profile_manager.vector_store.id)
- embedding_model = str(prompt.profile_manager.embedding_model.id)
- llm = str(prompt.profile_manager.llm.id)
- x2text = str(prompt.profile_manager.x2text.id)
- prompt_profile_manager: ProfileManager = prompt.profile_manager
- if not prompt_profile_manager:
+ vector_db = str(profile_manager.vector_store.id)
+ embedding_model = str(profile_manager.embedding_model.id)
+ llm = str(profile_manager.llm.id)
+ x2text = str(profile_manager.x2text.id)
+ if not profile_manager:
raise DefaultProfileError()
- PromptStudioHelper.dynamic_indexer(
- profile_manager=prompt_profile_manager,
+ index_result = PromptStudioHelper.dynamic_indexer(
+ profile_manager=profile_manager,
file_path=doc_path,
tool_id=str(tool.tool_id),
org_id=org_id,
document_id=document_id,
is_summary=tool.summarize_as_source,
run_id=run_id,
+ user_id=user_id,
)
+ if index_result.get("status") == IndexingStatus.PENDING_STATUS.value:
+ return {
+ "status": IndexingStatus.PENDING_STATUS.value,
+ "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value,
+ }
output: dict[str, Any] = {}
outputs: list[dict[str, Any]] = []
@@ -639,16 +675,16 @@ def _fetch_response(
output[TSPKeys.PROMPT] = prompt.prompt
output[TSPKeys.ACTIVE] = prompt.active
- output[TSPKeys.CHUNK_SIZE] = prompt.profile_manager.chunk_size
+ output[TSPKeys.CHUNK_SIZE] = profile_manager.chunk_size
output[TSPKeys.VECTOR_DB] = vector_db
output[TSPKeys.EMBEDDING] = embedding_model
- output[TSPKeys.CHUNK_OVERLAP] = prompt.profile_manager.chunk_overlap
+ output[TSPKeys.CHUNK_OVERLAP] = profile_manager.chunk_overlap
output[TSPKeys.LLM] = llm
output[TSPKeys.TYPE] = prompt.enforce_type
output[TSPKeys.NAME] = prompt.prompt_key
- output[TSPKeys.RETRIEVAL_STRATEGY] = prompt.profile_manager.retrieval_strategy
- output[TSPKeys.SIMILARITY_TOP_K] = prompt.profile_manager.similarity_top_k
- output[TSPKeys.SECTION] = prompt.profile_manager.section
+ output[TSPKeys.RETRIEVAL_STRATEGY] = profile_manager.retrieval_strategy
+ output[TSPKeys.SIMILARITY_TOP_K] = profile_manager.similarity_top_k
+ output[TSPKeys.SECTION] = profile_manager.section
output[TSPKeys.X2TEXT_ADAPTER] = x2text
# Eval settings for the prompt
output[TSPKeys.EVAL_SETTINGS] = {}
@@ -695,8 +731,9 @@ def _fetch_response(
prompt_host=settings.PROMPT_HOST,
prompt_port=settings.PROMPT_PORT,
)
+ include_metadata = {TSPKeys.INCLUDE_METADATA: True}
- answer = responder.answer_prompt(payload)
+ answer = responder.answer_prompt(payload, include_metadata)
# TODO: Make use of dataclasses
if answer["status"] == "ERROR":
# TODO: Publish to FE logs from here
@@ -715,10 +752,11 @@ def dynamic_indexer(
file_path: str,
org_id: str,
document_id: str,
+ user_id: str,
is_summary: bool = False,
reindex: bool = False,
run_id: str = None,
- ) -> str:
+ ) -> Any:
"""Used to index a file based on the passed arguments.
This is useful when a file needs to be indexed dynamically as the
@@ -732,6 +770,7 @@ def dynamic_indexer(
org_id (str): ID of the organization
is_summary (bool, optional): Flag to ensure if extracted contents
need to be persisted. Defaults to False.
+ user_id (str): The ID of the user who uploaded the document
Returns:
str: Index key for the combination of arguments
@@ -750,9 +789,42 @@ def dynamic_indexer(
profile_manager.chunk_size = 0
try:
+
usage_kwargs = {"run_id": run_id}
util = PromptIdeBaseTool(log_level=LogLevel.INFO, org_id=org_id)
tool_index = Index(tool=util)
+ doc_id_key = tool_index.generate_file_id(
+ tool_id=tool_id,
+ vector_db=vector_db,
+ embedding=embedding_model,
+ x2text=x2text_adapter,
+ chunk_size=str(profile_manager.chunk_size),
+ chunk_overlap=str(profile_manager.chunk_overlap),
+ file_path=file_path,
+ file_hash=None,
+ )
+ if not reindex:
+ indexed_doc_id = DocumentIndexingService.get_indexed_document_id(
+ org_id=org_id, user_id=user_id, doc_id_key=doc_id_key
+ )
+ if indexed_doc_id:
+ return {
+ "status": IndexingStatus.COMPLETED_STATUS.value,
+ "output": indexed_doc_id,
+ }
+ # Polling if document is already being indexed
+ if DocumentIndexingService.is_document_indexing(
+ org_id=org_id, user_id=user_id, doc_id_key=doc_id_key
+ ):
+ return {
+ "status": IndexingStatus.PENDING_STATUS.value,
+ "output": IndexingStatus.DOCUMENT_BEING_INDEXED.value,
+ }
+
+ # Set the document as being indexed
+ DocumentIndexingService.set_document_indexing(
+ org_id=org_id, user_id=user_id, doc_id_key=doc_id_key
+ )
doc_id: str = tool_index.index(
tool_id=tool_id,
embedding_instance_id=embedding_model,
@@ -772,7 +844,10 @@ def dynamic_indexer(
profile_manager=profile_manager,
doc_id=doc_id,
)
- return doc_id
+ DocumentIndexingService.mark_document_indexed(
+ org_id=org_id, user_id=user_id, doc_id_key=doc_id_key, doc_id=doc_id
+ )
+ return {"status": IndexingStatus.COMPLETED_STATUS.value, "output": doc_id}
except (IndexingError, IndexingAPIError, SdkError) as e:
doc_name = os.path.split(file_path)[1]
PromptStudioHelper._publish_log(
@@ -791,6 +866,7 @@ def _fetch_single_pass_response(
file_path: str,
prompts: list[ToolStudioPrompt],
org_id: str,
+ user_id: str,
document_id: str,
run_id: str = None,
) -> Any:
@@ -819,7 +895,7 @@ def _fetch_single_pass_response(
if not default_profile:
raise DefaultProfileError()
- PromptStudioHelper.dynamic_indexer(
+ index_result = PromptStudioHelper.dynamic_indexer(
profile_manager=default_profile,
file_path=file_path,
tool_id=tool_id,
@@ -827,7 +903,13 @@ def _fetch_single_pass_response(
is_summary=tool.summarize_as_source,
document_id=document_id,
run_id=run_id,
+ user_id=user_id,
)
+ if index_result.get("status") == IndexingStatus.PENDING_STATUS.value:
+ return {
+ "status": IndexingStatus.PENDING_STATUS.value,
+ "message": IndexingStatus.DOCUMENT_BEING_INDEXED.value,
+ }
vector_db = str(default_profile.vector_store.id)
embedding_model = str(default_profile.embedding_model.id)
@@ -877,8 +959,9 @@ def _fetch_single_pass_response(
prompt_host=settings.PROMPT_HOST,
prompt_port=settings.PROMPT_PORT,
)
+ include_metadata = {TSPKeys.INCLUDE_METADATA: True}
- answer = responder.single_pass_extraction(payload)
+ answer = responder.single_pass_extraction(payload, include_metadata)
# TODO: Make use of dataclasses
if answer["status"] == "ERROR":
error_message = answer.get("error", None)
diff --git a/backend/prompt_studio/prompt_studio_core/views.py b/backend/prompt_studio/prompt_studio_core/views.py
index 9093efd42..8db0a3ef5 100644
--- a/backend/prompt_studio/prompt_studio_core/views.py
+++ b/backend/prompt_studio/prompt_studio_core/views.py
@@ -21,6 +21,9 @@
ToolStudioKeys,
ToolStudioPromptKeys,
)
+from prompt_studio.prompt_studio_core.document_indexing_service import (
+ DocumentIndexingService,
+)
from prompt_studio.prompt_studio_core.exceptions import (
IndexingAPIError,
ToolDeleteError,
@@ -30,6 +33,7 @@
from prompt_studio.prompt_studio_document_manager.prompt_studio_document_helper import ( # noqa: E501
PromptStudioDocumentHelper,
)
+from prompt_studio.prompt_studio_index_manager.models import IndexManager
from prompt_studio.prompt_studio_registry.prompt_studio_registry_helper import (
PromptStudioRegistryHelper,
)
@@ -264,6 +268,7 @@ def fetch_response(self, request: HttpRequest, pk: Any = None) -> Response:
document_id: str = request.data.get(ToolStudioPromptKeys.DOCUMENT_ID)
id: str = request.data.get(ToolStudioPromptKeys.ID)
run_id: str = request.data.get(ToolStudioPromptKeys.RUN_ID)
+ profile_manager: str = request.data.get(ToolStudioPromptKeys.PROFILE_MANAGER_ID)
if not run_id:
# Generate a run_id
run_id = CommonUtils.generate_uuid()
@@ -275,6 +280,7 @@ def fetch_response(self, request: HttpRequest, pk: Any = None) -> Response:
user_id=custom_tool.created_by.user_id,
document_id=document_id,
run_id=run_id,
+ profile_manager_id=profile_manager,
)
return Response(response, status=status.HTTP_200_OK)
@@ -446,17 +452,26 @@ def delete_for_ide(self, request: HttpRequest, pk: uuid) -> Response:
document_id: str = serializer.validated_data.get(
ToolStudioPromptKeys.DOCUMENT_ID
)
+ org_id = UserSessionUtils.get_organization_id(request)
+ user_id = custom_tool.created_by.user_id
document: DocumentManager = DocumentManager.objects.get(pk=document_id)
file_name: str = document.document_name
file_path = FileManagerHelper.handle_sub_directory_for_tenants(
- UserSessionUtils.get_organization_id(request),
+ org_id=org_id,
is_create=False,
- user_id=custom_tool.created_by.user_id,
+ user_id=user_id,
tool_id=str(custom_tool.tool_id),
)
path = file_path
file_system = LocalStorageFS(settings={"path": path})
try:
+            # Delete indexed flags in Redis
+ index_managers = IndexManager.objects.filter(document_manager=document_id)
+ for index_manager in index_managers:
+ raw_index_id = index_manager.raw_index_id
+ DocumentIndexingService.remove_document_indexing(
+ org_id=org_id, user_id=user_id, doc_id_key=raw_index_id
+ )
# Delete the document record
document.delete()
# Delete the files
diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py
new file mode 100644
index 000000000..9d72dbd4d
--- /dev/null
+++ b/backend/prompt_studio/prompt_studio_output_manager/migrations/0013_promptstudiooutputmanager_context.py
@@ -0,0 +1,20 @@
+# Generated by Django 4.2.1 on 2024-06-27 18:27
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("prompt_studio_output_manager", "0012_promptstudiooutputmanager_run_id"),
+ ]
+
+ operations = [
+ migrations.AddField(
+ model_name="promptstudiooutputmanager",
+ name="context",
+ field=models.CharField(
+ blank=True, db_comment="Field to store chucks used", null=True
+ ),
+ ),
+ ]
diff --git a/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py b/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py
new file mode 100644
index 000000000..9d7844eaa
--- /dev/null
+++ b/backend/prompt_studio/prompt_studio_output_manager/migrations/0014_alter_promptstudiooutputmanager_context.py
@@ -0,0 +1,20 @@
+# Generated by Django 4.2.1 on 2024-06-30 17:17
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("prompt_studio_output_manager", "0013_promptstudiooutputmanager_context"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="promptstudiooutputmanager",
+ name="context",
+ field=models.TextField(
+ blank=True, db_comment="Field to store chunks used", null=True
+ ),
+ ),
+ ]
diff --git a/backend/prompt_studio/prompt_studio_output_manager/models.py b/backend/prompt_studio/prompt_studio_output_manager/models.py
index 14febf634..e1f7f5b86 100644
--- a/backend/prompt_studio/prompt_studio_output_manager/models.py
+++ b/backend/prompt_studio/prompt_studio_output_manager/models.py
@@ -21,6 +21,9 @@ class PromptStudioOutputManager(BaseModel):
output = models.CharField(
db_comment="Field to store output", editable=True, null=True, blank=True
)
+ context = models.TextField(
+ db_comment="Field to store chunks used", editable=True, null=True, blank=True
+ )
eval_metrics = models.JSONField(
db_column="eval_metrics",
null=False,
diff --git a/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py b/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py
index 6f942e3b7..b88a25602 100644
--- a/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py
+++ b/backend/prompt_studio/prompt_studio_output_manager/output_manager_helper.py
@@ -1,10 +1,14 @@
import json
import logging
-from typing import Any
+from typing import Any, Optional
from prompt_studio.prompt_profile_manager.models import ProfileManager
-from prompt_studio.prompt_studio.exceptions import AnswerFetchError
from prompt_studio.prompt_studio.models import ToolStudioPrompt
+from prompt_studio.prompt_studio_core.exceptions import (
+ AnswerFetchError,
+ DefaultProfileError,
+)
+from prompt_studio.prompt_studio_core.models import CustomTool
from prompt_studio.prompt_studio_document_manager.models import DocumentManager
from prompt_studio.prompt_studio_output_manager.constants import (
PromptStudioOutputManagerKeys as PSOMKeys,
@@ -20,42 +24,32 @@ def handle_prompt_output_update(
run_id: str,
prompts: list[ToolStudioPrompt],
outputs: Any,
+ context: Any,
document_id: str,
is_single_pass_extract: bool,
+ profile_manager_id: Optional[str] = None,
) -> None:
"""Handles updating prompt outputs in the database.
Args:
+ run_id (str): ID of the run.
prompts (list[ToolStudioPrompt]): List of prompts to update.
outputs (Any): Outputs corresponding to the prompts.
document_id (str): ID of the document.
+ profile_manager_id (Optional[str]): UUID of the profile manager.
is_single_pass_extract (bool):
Flag indicating if single pass extract is active.
"""
- # Check if prompts list is empty
- if not prompts:
- return # Return early if prompts list is empty
- tool = prompts[0].tool_id
- document_manager = DocumentManager.objects.get(pk=document_id)
- default_profile = ProfileManager.get_default_llm_profile(tool=tool)
- # Iterate through each prompt in the list
- for prompt in prompts:
- if prompt.prompt_type == PSOMKeys.NOTES:
- continue
- if is_single_pass_extract:
- profile_manager = default_profile
- else:
- profile_manager = prompt.profile_manager
- output = json.dumps(outputs.get(prompt.prompt_key))
- eval_metrics = outputs.get(f"{prompt.prompt_key}__evaluation", [])
-
- # Attempt to update an existing output manager,
- # for the given criteria,
- # or create a new one if it doesn't exist
+ def update_or_create_prompt_output(
+ prompt: ToolStudioPrompt,
+ profile_manager: ProfileManager,
+ output: str,
+ eval_metrics: list[Any],
+ tool: CustomTool,
+ context: str,
+ ):
try:
- # Create or get the existing record for this document, prompt and
- # profile combo
_, success = PromptStudioOutputManager.objects.get_or_create(
document_manager=document_manager,
tool_id=tool,
@@ -65,6 +59,7 @@ def handle_prompt_output_update(
defaults={
"output": output,
"eval_metrics": eval_metrics,
+ "context": context,
},
)
@@ -79,11 +74,12 @@ def handle_prompt_output_update(
f"profile {profile_manager.profile_id}"
)
- args: dict[str, str] = dict()
- args["run_id"] = run_id
- args["output"] = output
- args["eval_metrics"] = eval_metrics
- # Update the record with the run id and other params
+ args: dict[str, str] = {
+ "run_id": run_id,
+ "output": output,
+ "eval_metrics": eval_metrics,
+ "context": context,
+ }
PromptStudioOutputManager.objects.filter(
document_manager=document_manager,
tool_id=tool,
@@ -94,3 +90,57 @@ def handle_prompt_output_update(
except Exception as e:
raise AnswerFetchError(f"Error updating prompt output {e}") from e
+
+ if not prompts:
+ return # Return early if prompts list is empty
+
+ tool = prompts[0].tool_id
+ default_profile = OutputManagerHelper.get_default_profile(
+ profile_manager_id, tool
+ )
+ document_manager = DocumentManager.objects.get(pk=document_id)
+
+ for prompt in prompts:
+ if prompt.prompt_type == PSOMKeys.NOTES or not prompt.active:
+ continue
+
+ if not is_single_pass_extract:
+ context = json.dumps(context.get(prompt.prompt_key))
+
+ output = json.dumps(outputs.get(prompt.prompt_key))
+ profile_manager = default_profile
+ eval_metrics = outputs.get(f"{prompt.prompt_key}__evaluation", [])
+
+ update_or_create_prompt_output(
+ prompt=prompt,
+ profile_manager=profile_manager,
+ output=output,
+ eval_metrics=eval_metrics,
+ tool=tool,
+ context=context,
+ )
+
+ @staticmethod
+ def get_default_profile(
+ profile_manager_id: Optional[str], tool: CustomTool
+ ) -> ProfileManager:
+ if profile_manager_id:
+ return OutputManagerHelper.fetch_profile_manager(profile_manager_id)
+ else:
+ return OutputManagerHelper.fetch_default_llm_profile(tool)
+
+ @staticmethod
+ def fetch_profile_manager(profile_manager_id: str) -> ProfileManager:
+ try:
+ return ProfileManager.objects.get(profile_id=profile_manager_id)
+ except ProfileManager.DoesNotExist:
+ raise DefaultProfileError(
+ f"ProfileManager with ID {profile_manager_id} does not exist."
+ )
+
+ @staticmethod
+ def fetch_default_llm_profile(tool: CustomTool) -> ProfileManager:
+ try:
+ return ProfileManager.get_default_llm_profile(tool=tool)
+ except DefaultProfileError:
+ raise DefaultProfileError("Default ProfileManager does not exist.")
diff --git a/backend/pyproject.toml b/backend/pyproject.toml
index 865de53c5..5b454b5a5 100644
--- a/backend/pyproject.toml
+++ b/backend/pyproject.toml
@@ -31,8 +31,8 @@ dependencies = [
"python-socketio==5.9.0", # For log_events
"social-auth-app-django==5.3.0", # For OAuth
"social-auth-core==4.4.2", # For OAuth
- "unstract-sdk~=0.35.0",
- "unstract-adapters~=0.20.1",
+ "unstract-sdk~=0.37.0",
+ "unstract-adapters~=0.21.0",
# ! IMPORTANT!
# Indirect local dependencies usually need to be added in their own projects
# as: https://pdm-project.org/latest/usage/dependency/#local-dependencies.
@@ -68,6 +68,14 @@ test = [
"pytest>=8.0.1",
"pytest-dotenv==0.5.2",
]
+dev = [
+ "-e unstract-connectors @ file:///${PROJECT_ROOT}/../unstract/connectors",
+ "-e unstract-core @ file:///${PROJECT_ROOT}/../unstract/core",
+ "-e unstract-flags @ file:///${PROJECT_ROOT}/../unstract/flags",
+ "-e unstract-tool-registry @ file:///${PROJECT_ROOT}/../unstract/tool-registry",
+ "-e unstract-tool-sandbox @ file:///${PROJECT_ROOT}/../unstract/tool-sandbox",
+ "-e unstract-workflow-execution @ file:///${PROJECT_ROOT}/../unstract/workflow-execution",
+]
[tool.pytest.ini_options]
env_files = "test.env" # Load env from particular env file
diff --git a/backend/sample.env b/backend/sample.env
index e5f2ebe3e..2cca7fc19 100644
--- a/backend/sample.env
+++ b/backend/sample.env
@@ -88,10 +88,11 @@ PROMPT_PORT=3003
#Prompt Studio
PROMPT_STUDIO_FILE_PATH=/app/prompt-studio-data
-# Structure Tool
-STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.28"
+# Structure Tool Image (Runs prompt studio exported tools)
+# https://hub.docker.com/r/unstract/tool-structure
+STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.30"
STRUCTURE_TOOL_IMAGE_NAME="unstract/tool-structure"
-STRUCTURE_TOOL_IMAGE_TAG="0.0.28"
+STRUCTURE_TOOL_IMAGE_TAG="0.0.30"
# Feature Flags
EVALUATION_SERVER_IP=unstract-flipt
@@ -139,3 +140,6 @@ LOGS_BATCH_LIMIT=30
# Celery Configuration
CELERY_BROKER_URL = "redis://unstract-redis:6379"
+
+# TTL (in seconds) of the indexing flag that prevents duplicate re-indexing
+INDEXING_FLAG_TTL=1800
diff --git a/backend/scheduler/tasks.py b/backend/scheduler/tasks.py
index e03dc3b28..dbc6f6101 100644
--- a/backend/scheduler/tasks.py
+++ b/backend/scheduler/tasks.py
@@ -3,6 +3,7 @@
from typing import Any
from account.models import Organization
+from account.subscription_loader import load_plugins, validate_etl_run
from celery import shared_task
from django_celery_beat.models import CrontabSchedule, PeriodicTask
from django_tenants.utils import get_tenant_model, tenant_context
@@ -12,6 +13,7 @@
from workflow_manager.workflow.workflow_helper import WorkflowHelper
logger = logging.getLogger(__name__)
+subscription_loader = load_plugins()
def create_periodic_task(
@@ -57,13 +59,25 @@ def execute_pipeline_task(
name: Any,
) -> None:
logger.info(f"Executing pipeline name: {name}")
-
try:
logger.info(f"Executing workflow id: {workflow_id}")
tenant: Organization = (
get_tenant_model().objects.filter(schema_name=org_schema).first()
)
with tenant_context(tenant):
+ if (
+ subscription_loader
+ and subscription_loader[0]
+ and not validate_etl_run(org_schema)
+ ):
+ try:
+ logger.info(f"Disabling ETL task: {pipepline_id}")
+ disable_task(pipepline_id)
+ except Exception as e:
+ logger.warning(
+ f"Failed to disable task: {pipepline_id}. Error: {e}"
+ )
+ return
workflow = Workflow.objects.get(id=workflow_id)
logger.info(f"Executing workflow: {workflow}")
PipelineProcessor.update_pipeline(
@@ -88,7 +102,6 @@ def delete_periodic_task(task_name: str) -> None:
logger.error(f"Periodic task does not exist: {task_name}")
-@shared_task
def disable_task(task_name: str) -> None:
task = PeriodicTask.objects.get(name=task_name)
task.enabled = False
@@ -96,7 +109,6 @@ def disable_task(task_name: str) -> None:
PipelineProcessor.update_pipeline(task_name, Pipeline.PipelineStatus.PAUSED, False)
-@shared_task
def enable_task(task_name: str) -> None:
task = PeriodicTask.objects.get(name=task_name)
task.enabled = True
diff --git a/backend/tenant_account/users_view.py b/backend/tenant_account/users_view.py
index 9e8ffd21a..ebcebd14a 100644
--- a/backend/tenant_account/users_view.py
+++ b/backend/tenant_account/users_view.py
@@ -31,8 +31,6 @@ def assign_organization_role_to_user(self, request: Request) -> Response:
if not (user_email and role):
raise BadRequestException
org_id: str = UserSessionUtils.get_organization_id(request)
- auth_controller = AuthenticationController()
-
auth_controller = AuthenticationController()
update_status = auth_controller.add_user_role(
request.user, org_id, user_email, role
diff --git a/backend/usage/constants.py b/backend/usage/constants.py
index d28074e1e..8da54da05 100644
--- a/backend/usage/constants.py
+++ b/backend/usage/constants.py
@@ -4,3 +4,4 @@ class UsageKeys:
PROMPT_TOKENS = "prompt_tokens"
COMPLETION_TOKENS = "completion_tokens"
TOTAL_TOKENS = "total_tokens"
+ COST_IN_DOLLARS = "cost_in_dollars"
diff --git a/backend/usage/helper.py b/backend/usage/helper.py
index b91fae556..0bfab7556 100644
--- a/backend/usage/helper.py
+++ b/backend/usage/helper.py
@@ -36,6 +36,7 @@ def get_aggregated_token_count(run_id: str) -> dict:
prompt_tokens=Sum(UsageKeys.PROMPT_TOKENS),
completion_tokens=Sum(UsageKeys.COMPLETION_TOKENS),
total_tokens=Sum(UsageKeys.TOTAL_TOKENS),
+ cost_in_dollars=Sum(UsageKeys.COST_IN_DOLLARS),
)
logger.info(f"Token counts aggregated successfully for run_id: {run_id}")
@@ -50,6 +51,7 @@ def get_aggregated_token_count(run_id: str) -> dict:
UsageKeys.COMPLETION_TOKENS
),
UsageKeys.TOTAL_TOKENS: usage_summary.get(UsageKeys.TOTAL_TOKENS),
+ UsageKeys.COST_IN_DOLLARS: usage_summary.get(UsageKeys.COST_IN_DOLLARS),
}
return result
except Usage.DoesNotExist:
diff --git a/backend/utils/user_session.py b/backend/utils/user_session.py
index d8758b25e..888c6c353 100644
--- a/backend/utils/user_session.py
+++ b/backend/utils/user_session.py
@@ -1,6 +1,7 @@
from typing import Optional
from django.http import HttpRequest
+from tenant_account.models import OrganizationMember
from utils.constants import FeatureFlag
from unstract.flags.feature_flag import check_feature_flag_status
@@ -23,3 +24,13 @@ def set_organization_id(request: HttpRequest, organization_id: str) -> None:
@staticmethod
def get_user_id(request: HttpRequest) -> Optional[str]:
return request.session.get("user_id")
+
+ @staticmethod
+ def set_organization_member_role(
+ request: HttpRequest, member: OrganizationMember
+ ) -> None:
+ request.session["role"] = member.role
+
+ @staticmethod
+ def get_organization_member_role(request: HttpRequest) -> Optional[str]:
+ return request.session.get("role")
diff --git a/backend/workflow_manager/endpoint/constants.py b/backend/workflow_manager/endpoint/constants.py
index 1674c5725..d9553245d 100644
--- a/backend/workflow_manager/endpoint/constants.py
+++ b/backend/workflow_manager/endpoint/constants.py
@@ -92,3 +92,14 @@ class BigQuery:
"""
TABLE_NAME_SIZE = 3
+ COLUMN_TYPES = [
+ "DATE",
+ "DATETIME",
+ "TIME",
+ "TIMESTAMP",
+ ]
+
+
+class QueueResultStatus:
+ SUCCESS = "Success"
+ FAILED = "Failed"
diff --git a/backend/workflow_manager/endpoint/destination.py b/backend/workflow_manager/endpoint/destination.py
index 464652d7f..6b88dd86c 100644
--- a/backend/workflow_manager/endpoint/destination.py
+++ b/backend/workflow_manager/endpoint/destination.py
@@ -1,4 +1,5 @@
import ast
+import base64
import json
import logging
import os
@@ -15,6 +16,7 @@
from workflow_manager.endpoint.constants import (
ApiDeploymentResultStatus,
DestinationKey,
+ QueueResultStatus,
WorkflowFileType,
)
from workflow_manager.endpoint.database_utils import DatabaseUtils
@@ -26,6 +28,7 @@
ToolOutputTypeMismatch,
)
from workflow_manager.endpoint.models import WorkflowEndpoint
+from workflow_manager.endpoint.queue_utils import QueueResult, QueueUtils
from workflow_manager.workflow.enums import ExecutionStatus
from workflow_manager.workflow.file_history_helper import FileHistoryHelper
from workflow_manager.workflow.models.file_history import FileHistory
@@ -55,8 +58,10 @@ def __init__(self, workflow: Workflow, execution_id: str) -> None:
organization_id = connection.tenant.schema_name
super().__init__(workflow.id, execution_id, organization_id)
self.endpoint = self._get_endpoint_for_workflow(workflow=workflow)
+ self.source_endpoint = self._get_source_endpoint_for_workflow(workflow=workflow)
self.execution_id = execution_id
self.api_results: list[dict[str, Any]] = []
+ self.queue_results: list[dict[str, Any]] = []
def _get_endpoint_for_workflow(
self,
@@ -81,6 +86,29 @@ def _get_endpoint_for_workflow(
)
return endpoint
+ def _get_source_endpoint_for_workflow(
+ self,
+ workflow: Workflow,
+ ) -> WorkflowEndpoint:
+ """Get WorkflowEndpoint instance.
+
+ Args:
+ workflow (Workflow): Workflow associated with the
+ destination connector.
+
+ Returns:
+ WorkflowEndpoint: WorkflowEndpoint instance.
+ """
+ endpoint: WorkflowEndpoint = WorkflowEndpoint.objects.get(
+ workflow=workflow,
+ endpoint_type=WorkflowEndpoint.EndpointType.SOURCE,
+ )
+ if endpoint.connector_instance:
+ endpoint.connector_instance.connector_metadata = (
+ endpoint.connector_instance.metadata
+ )
+ return endpoint
+
def validate(self) -> None:
connection_type = self.endpoint.connection_type
connector: ConnectorInstance = self.endpoint.connector_instance
@@ -88,7 +116,11 @@ def validate(self) -> None:
raise MissingDestinationConnectionType()
if connection_type not in WorkflowEndpoint.ConnectionType.values:
raise InvalidDestinationConnectionType()
- if connection_type != WorkflowEndpoint.ConnectionType.API and connector is None:
+ if (
+ connection_type != WorkflowEndpoint.ConnectionType.API
+ and connection_type != WorkflowEndpoint.ConnectionType.MANUALREVIEW
+ and connector is None
+ ):
raise DestinationConnectorNotConfigured()
def handle_output(
@@ -98,6 +130,7 @@ def handle_output(
workflow: Workflow,
file_history: Optional[FileHistory] = None,
error: Optional[str] = None,
+ input_file_path: Optional[str] = None,
) -> None:
"""Handle the output based on the connection type."""
connection_type = self.endpoint.connection_type
@@ -114,10 +147,19 @@ def handle_output(
elif connection_type == WorkflowEndpoint.ConnectionType.API:
result = self.get_result(file_history)
meta_data = self.get_metadata(file_history)
-
self._handle_api_result(
file_name=file_name, error=error, result=result, meta_data=meta_data
)
+ elif connection_type == WorkflowEndpoint.ConnectionType.MANUALREVIEW:
+ result = self.get_result(file_history)
+ meta_data = self.get_metadata(file_history)
+ self._push_to_queue(
+ file_name=file_name,
+ workflow=workflow,
+ result=result,
+ input_file_path=input_file_path,
+ meta_data=meta_data,
+ )
if not file_history:
FileHistoryHelper.create_file_history(
cache_key=file_hash,
@@ -433,3 +475,58 @@ def get_json_schema_for_api(cls) -> dict[str, Any]:
os.path.dirname(__file__), "static", "dest", "api.json"
)
return cls.get_json_schema(file_path=schema_path)
+
+ def _push_to_queue(
+ self,
+ file_name: str,
+ workflow: Workflow,
+ result: Optional[str] = None,
+ input_file_path: Optional[str] = None,
+ meta_data: Optional[dict[str, Any]] = None,
+ ) -> None:
+ """Handle the Manual Review QUEUE result.
+
+ This method is responsible for pushing the input file and result to
+ review queue.
+ Args:
+ file_name (str): The name of the file.
+ workflow (Workflow): The workflow object containing
+ details about the workflow.
+            result (Optional[str], optional): The execution result to enqueue for review.
+ Defaults to None.
+ input_file_path (Optional[str], optional):
+ The path to the input file.
+ Defaults to None.
+ meta_data (Optional[dict[str, Any]], optional):
+ A dictionary containing additional
+ metadata related to the file. Defaults to None.
+
+ Returns:
+ None
+ """
+ if not result:
+ return
+ connector: ConnectorInstance = self.source_endpoint.connector_instance
+ connector_settings: dict[str, Any] = connector.connector_metadata
+
+ source_fs = self.get_fsspec(
+ settings=connector_settings, connector_id=connector.connector_id
+ )
+ with source_fs.open(input_file_path, "rb") as remote_file:
+ file_content = remote_file.read()
+ # Convert file content to a base64 encoded string
+ file_content_base64 = base64.b64encode(file_content).decode("utf-8")
+ q_name = f"review_queue_{self.organization_id}_{workflow.workflow_name}"
+ queue_result = QueueResult(
+ file=file_name,
+ whisper_hash=meta_data["whisper-hash"],
+ status=QueueResultStatus.SUCCESS,
+ result=result,
+ workflow_id=str(self.workflow_id),
+ file_content=file_content_base64,
+ )
+        # Serialize the queue result to a JSON string
+ queue_result_json = json.dumps(queue_result)
+ conn = QueueUtils.get_queue_inst()
+ # Enqueue the JSON string
+ conn.enqueue(queue_name=q_name, message=queue_result_json)
diff --git a/backend/workflow_manager/endpoint/exceptions.py b/backend/workflow_manager/endpoint/exceptions.py
index 69e332cdd..7028eec28 100644
--- a/backend/workflow_manager/endpoint/exceptions.py
+++ b/backend/workflow_manager/endpoint/exceptions.py
@@ -77,3 +77,11 @@ class UnstractDBException(APIException):
def __init__(self, detail: str = default_detail) -> None:
status_code = 500
super().__init__(detail=detail, code=status_code)
+
+
+class UnstractQueueException(APIException):
+ default_detail = "Error creating/inserting to Queue. "
+
+ def __init__(self, detail: str = default_detail) -> None:
+ status_code = 500
+ super().__init__(detail=detail, code=status_code)
diff --git a/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py b/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py
new file mode 100644
index 000000000..d0e56e146
--- /dev/null
+++ b/backend/workflow_manager/endpoint/migrations/0002_alter_workflowendpoint_connection_type.py
@@ -0,0 +1,27 @@
+# Generated by Django 4.2.1 on 2024-06-06 06:26
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("endpoint", "0001_initial"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="workflowendpoint",
+ name="connection_type",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("FILESYSTEM", "FileSystem connector"),
+ ("DATABASE", "Database Connector"),
+ ("API", "API Connector"),
+ ("APPDEPLOYMENT", "App Deployment"),
+ ],
+ db_comment="Connection type (Filesystem, Database or API)",
+ ),
+ ),
+ ]
diff --git a/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py b/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py
new file mode 100644
index 000000000..c2da4a302
--- /dev/null
+++ b/backend/workflow_manager/endpoint/migrations/0003_alter_workflowendpoint_connection_type.py
@@ -0,0 +1,28 @@
+# Generated by Django 4.2.1 on 2024-07-04 05:44
+
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("endpoint", "0002_alter_workflowendpoint_connection_type"),
+ ]
+
+ operations = [
+ migrations.AlterField(
+ model_name="workflowendpoint",
+ name="connection_type",
+ field=models.CharField(
+ blank=True,
+ choices=[
+ ("FILESYSTEM", "FileSystem connector"),
+ ("DATABASE", "Database Connector"),
+ ("API", "API Connector"),
+ ("APPDEPLOYMENT", "App Deployment"),
+ ("MANUALREVIEW", "Manual Review Queue Connector"),
+ ],
+ db_comment="Connection type (Filesystem, Database, API or Manualreview)",
+ ),
+ ),
+ ]
diff --git a/backend/workflow_manager/endpoint/models.py b/backend/workflow_manager/endpoint/models.py
index a90c583ac..f49ca1474 100644
--- a/backend/workflow_manager/endpoint/models.py
+++ b/backend/workflow_manager/endpoint/models.py
@@ -15,6 +15,8 @@ class ConnectionType(models.TextChoices):
FILESYSTEM = "FILESYSTEM", "FileSystem connector"
DATABASE = "DATABASE", "Database Connector"
API = "API", "API Connector"
+ APPDEPLOYMENT = "APPDEPLOYMENT", "App Deployment"
+ MANUALREVIEW = "MANUALREVIEW", "Manual Review Queue Connector"
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
workflow = models.ForeignKey(
@@ -32,7 +34,7 @@ class ConnectionType(models.TextChoices):
connection_type = models.CharField(
choices=ConnectionType.choices,
blank=True,
- db_comment="Connection type (Filesystem, Database or API)",
+ db_comment="Connection type (Filesystem, Database, API or Manualreview)",
)
configuration = models.JSONField(
blank=True, null=True, db_comment="Configuration in JSON format"
diff --git a/backend/workflow_manager/endpoint/queue_utils.py b/backend/workflow_manager/endpoint/queue_utils.py
new file mode 100644
index 000000000..bbb4dddb3
--- /dev/null
+++ b/backend/workflow_manager/endpoint/queue_utils.py
@@ -0,0 +1,41 @@
+import logging
+from dataclasses import dataclass
+from enum import Enum
+from typing import Any
+
+from utils.constants import Common
+from workflow_manager.endpoint.exceptions import UnstractQueueException
+
+from unstract.connectors.queues import connectors as queue_connectors
+from unstract.connectors.queues.unstract_queue import UnstractQueue
+
+logger = logging.getLogger(__name__)
+
+
+class QueueResultStatus(Enum):
+ SUCCESS = "success"
+ FAILURE = "failure"
+ # Add other statuses as needed
+
+
+class QueueUtils:
+ @staticmethod
+ def get_queue_inst(connector_settings: dict[str, Any] = {}) -> UnstractQueue:
+ if not queue_connectors:
+ raise UnstractQueueException(detail="Queue connector not exists")
+ queue_connector_key = next(iter(queue_connectors))
+ connector = queue_connectors[queue_connector_key][Common.METADATA][
+ Common.CONNECTOR
+ ]
+ connector_class: UnstractQueue = connector(connector_settings)
+ return connector_class
+
+
+@dataclass
+class QueueResult:
+ file: str
+ whisper_hash: str
+ status: QueueResultStatus
+ result: Any
+ workflow_id: str
+ file_content: str
diff --git a/backend/workflow_manager/endpoint/source.py b/backend/workflow_manager/endpoint/source.py
index e755a697c..4dd096044 100644
--- a/backend/workflow_manager/endpoint/source.py
+++ b/backend/workflow_manager/endpoint/source.py
@@ -3,6 +3,7 @@
import os
import shutil
from hashlib import md5, sha256
+from io import BytesIO
from pathlib import Path
from typing import Any, Optional
@@ -384,6 +385,18 @@ def handle_final_result(
if connection_type == WorkflowEndpoint.ConnectionType.API:
results.append({"file": file_name, "result": result})
+ def load_file(self, input_file_path: str) -> tuple[str, BytesIO]:
+ connector: ConnectorInstance = self.endpoint.connector_instance
+ connector_settings: dict[str, Any] = connector.connector_metadata
+ source_fs: fsspec.AbstractFileSystem = self.get_fsspec(
+ settings=connector_settings, connector_id=connector.connector_id
+ )
+ with source_fs.open(input_file_path, "rb") as remote_file:
+ file_content = remote_file.read()
+ file_stream = BytesIO(file_content)
+
+ return remote_file.key, file_stream
+
@classmethod
def add_input_file_to_api_storage(
cls, workflow_id: str, execution_id: str, file_objs: list[UploadedFile]
diff --git a/backend/workflow_manager/workflow/execution.py b/backend/workflow_manager/workflow/execution.py
index b249e53dc..81b94a236 100644
--- a/backend/workflow_manager/workflow/execution.py
+++ b/backend/workflow_manager/workflow/execution.py
@@ -37,6 +37,7 @@ def __init__(
scheduled: bool = False,
mode: tuple[str, str] = WorkflowExecution.Mode.INSTANT,
workflow_execution: Optional[WorkflowExecution] = None,
+ include_metadata: bool = False,
) -> None:
tool_instances_as_dto = []
for tool_instance in tool_instances:
@@ -58,6 +59,7 @@ def __init__(
tool_instances=tool_instances_as_dto,
platform_service_api_key=str(platform_key.key),
ignore_processed_entities=False,
+ include_metadata=include_metadata,
)
if not workflow_execution:
# Use pipline_id for pipelines / API deployment
@@ -227,7 +229,7 @@ def execute(self, single_step: bool = False) -> None:
execution_time = end_time - start_time
message = str(exception)[:EXECUTION_ERROR_LENGTH]
logger.info(
- f"Execution {self.execution_id} in {execution_time}s, "
+ f"Execution {self.execution_id} ran for {execution_time:.4f}s, "
f" Error {exception}"
)
raise WorkflowExecutionError(message) from exception
diff --git a/backend/workflow_manager/workflow/views.py b/backend/workflow_manager/workflow/views.py
index 7438890b8..c02569355 100644
--- a/backend/workflow_manager/workflow/views.py
+++ b/backend/workflow_manager/workflow/views.py
@@ -173,6 +173,9 @@ def execute(
execution_id = serializer.get_execution_id(serializer.validated_data)
execution_action = serializer.get_execution_action(serializer.validated_data)
file_objs = request.FILES.getlist("files")
+ include_metadata = (
+ request.data.get("include_metadata", "false").lower() == "true"
+ )
hashes_of_files = {}
if file_objs and execution_id and workflow_id:
hashes_of_files = SourceConnector.add_input_file_to_api_storage(
@@ -191,6 +194,7 @@ def execute(
execution_id=execution_id,
pipeline_guid=pipeline_guid,
hash_values_of_files=hashes_of_files,
+ include_metadata=include_metadata,
)
return Response(
make_execution_response(execution_response),
@@ -211,6 +215,7 @@ def execute_workflow(
execution_id: Optional[str] = None,
pipeline_guid: Optional[str] = None,
hash_values_of_files: dict[str, str] = {},
+ include_metadata: bool = False,
) -> ExecutionResponse:
if execution_action is not None:
# Step execution
@@ -219,6 +224,7 @@ def execute_workflow(
execution_action,
execution_id=execution_id,
hash_values_of_files=hash_values_of_files,
+ include_metadata=include_metadata,
)
elif pipeline_guid:
# pipeline execution
@@ -236,6 +242,7 @@ def execute_workflow(
workflow=workflow,
execution_id=execution_id,
hash_values_of_files=hash_values_of_files,
+ include_metadata=include_metadata,
)
return execution_response
diff --git a/backend/workflow_manager/workflow/workflow_helper.py b/backend/workflow_manager/workflow/workflow_helper.py
index 8b94d8edd..9f4e434cc 100644
--- a/backend/workflow_manager/workflow/workflow_helper.py
+++ b/backend/workflow_manager/workflow/workflow_helper.py
@@ -89,6 +89,7 @@ def build_workflow_execution_service(
scheduled: bool,
execution_mode: tuple[str, str],
workflow_execution: Optional[WorkflowExecution],
+ include_metadata: bool = False,
) -> WorkflowExecutionServiceHelper:
workflow_execution_service = WorkflowExecutionServiceHelper(
organization_id=organization_id,
@@ -99,6 +100,7 @@ def build_workflow_execution_service(
scheduled=scheduled,
mode=execution_mode,
workflow_execution=workflow_execution,
+ include_metadata=include_metadata,
)
workflow_execution_service.build()
return workflow_execution_service
@@ -203,6 +205,7 @@ def process_file(
workflow=workflow,
file_history=file_history,
error=error,
+ input_file_path=input_file,
)
execution_service.publish_update_log(
LogState.SUCCESS,
@@ -232,6 +235,7 @@ def run_workflow(
single_step: bool = False,
workflow_execution: Optional[WorkflowExecution] = None,
execution_mode: Optional[tuple[str, str]] = None,
+ include_metadata: bool = False,
) -> ExecutionResponse:
tool_instances: list[ToolInstance] = (
ToolInstanceHelper.get_tool_instances_by_workflow(
@@ -250,6 +254,7 @@ def run_workflow(
scheduled=scheduled,
execution_mode=execution_mode,
workflow_execution=workflow_execution,
+ include_metadata=include_metadata,
)
execution_id = execution_service.execution_id
source = SourceConnector(
@@ -349,18 +354,19 @@ def execute_workflow_async(
hash_values_of_files: dict[str, str],
timeout: int = -1,
pipeline_id: Optional[str] = None,
+ include_metadata: bool = False,
) -> ExecutionResponse:
"""Adding a workflow to the queue for execution.
Args:
workflow_id (str): workflowId
- execution_id (str): _description_
- timeout (int): celery timeout (timeout -1 : async execution)
- pipeline_id (Optional[str], optional): optional pipeline.
- Defaults to None.
+ execution_id (str): Execution ID
+ timeout (int): Celery timeout (timeout -1 : async execution)
+ pipeline_id (Optional[str], optional): Optional pipeline. Defaults to None.
+ include_metadata (bool): Whether to include metadata in the prompt output
Returns:
- ExecutionStatus: Existing status of execution
+ ExecutionResponse: Existing status of execution
"""
try:
org_schema = connection.tenant.schema_name
@@ -372,6 +378,7 @@ def execute_workflow_async(
execution_id=execution_id,
pipeline_id=pipeline_id,
log_events_id=log_events_id,
+ include_metadata=include_metadata,
)
if timeout > -1:
async_execution.wait(
@@ -426,6 +433,7 @@ def execute_bin(
scheduled: bool = False,
execution_mode: Optional[tuple[str, str]] = None,
pipeline_id: Optional[str] = None,
+ include_metadata: bool = False,
**kwargs: dict[str, Any],
) -> Optional[list[Any]]:
"""Asynchronous Execution By celery.
@@ -433,22 +441,20 @@ def execute_bin(
Args:
schema_name (str): schema name to get Data
workflow_id (str): Workflow Id
- execution_id (Optional[str], optional): Id of the execution.
- Defaults to None.
- scheduled (bool, optional): Represents if it is a scheduled
- execution. Defaults to False.
- execution_mode (Optional[WorkflowExecution.Mode]):
- WorkflowExecution Mode. Defaults to None.
- pipeline_id (Optional[str], optional): Id of pipeline.
- Defaults to None.
+ execution_id (str): Id of the execution
+ scheduled (bool, optional): Represents if it is a scheduled execution
+ Defaults to False
+ execution_mode (Optional[WorkflowExecution.Mode]): WorkflowExecution Mode
+ Defaults to None
+ pipeline_id (Optional[str], optional): Id of pipeline. Defaults to None
+ include_metadata (bool): Whether to include metadata in the prompt output
Kwargs:
- log_events_id (str): Session ID of the user, helps establish
- WS connection for streaming logs to the FE
+ log_events_id (str): Session ID of the user,
+ helps establish WS connection for streaming logs to the FE
Returns:
- dict[str, list[Any]]: Returns a dict with result from
- workflow execution
+ dict[str, list[Any]]: Returns a dict with result from workflow execution
"""
task_id = current_task.request.id
tenant: Organization = (
@@ -481,6 +487,7 @@ def execute_bin(
workflow_execution=workflow_execution,
execution_mode=execution_mode,
hash_values_of_files=hash_values_of_files,
+ include_metadata=include_metadata,
).result
return result
@@ -490,6 +497,7 @@ def complete_execution(
execution_id: Optional[str] = None,
pipeline_id: Optional[str] = None,
hash_values_of_files: dict[str, str] = {},
+ include_metadata: bool = False,
) -> ExecutionResponse:
if pipeline_id:
logger.info(f"Executing pipeline: {pipeline_id}")
@@ -517,6 +525,7 @@ def complete_execution(
workflow=workflow,
workflow_execution=workflow_execution,
hash_values_of_files=hash_values_of_files,
+ include_metadata=include_metadata,
)
except WorkflowExecution.DoesNotExist:
return WorkflowHelper.create_and_make_execution_response(
@@ -544,6 +553,7 @@ def step_execution(
execution_action: str,
execution_id: Optional[str] = None,
hash_values_of_files: dict[str, str] = {},
+ include_metadata: bool = False,
) -> ExecutionResponse:
if execution_action is Workflow.ExecutionAction.START.value: # type: ignore
if execution_id is None:
@@ -557,6 +567,7 @@ def step_execution(
single_step=True,
workflow_execution=workflow_execution,
hash_values_of_files=hash_values_of_files,
+ include_metadata=include_metadata,
)
except WorkflowExecution.DoesNotExist:
return WorkflowHelper.create_and_make_execution_response(
diff --git a/frontend/package-lock.json b/frontend/package-lock.json
index aee9ec808..2c7eec1d6 100644
--- a/frontend/package-lock.json
+++ b/frontend/package-lock.json
@@ -25,10 +25,13 @@
"cronstrue": "^2.48.0",
"emoji-picker-react": "^4.8.0",
"emoji-regex": "^10.3.0",
+ "file-saver": "^2.0.5",
+ "framer-motion": "^11.2.10",
"handlebars": "^4.7.8",
"http-proxy-middleware": "^2.0.6",
"js-cookie": "^3.0.5",
"js-yaml": "^4.1.0",
+ "json-2-csv": "^5.5.4",
"markdown-to-jsx": "^7.2.1",
"moment": "^2.29.4",
"moment-timezone": "^0.5.45",
@@ -7519,6 +7522,14 @@
"resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz",
"integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA=="
},
+ "node_modules/deeks": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/deeks/-/deeks-3.1.0.tgz",
+ "integrity": "sha512-e7oWH1LzIdv/prMQ7pmlDlaVoL64glqzvNgkgQNgyec9ORPHrT2jaOqMtRyqJuwWjtfb6v+2rk9pmaHj+F137A==",
+ "engines": {
+ "node": ">= 16"
+ }
+ },
"node_modules/deep-equal": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.1.tgz",
@@ -7747,6 +7758,14 @@
"node": ">=6"
}
},
+ "node_modules/doc-path": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/doc-path/-/doc-path-4.1.1.tgz",
+ "integrity": "sha512-h1ErTglQAVv2gCnOpD3sFS6uolDbOKHDU1BZq+Kl3npPqroU3dYL42lUgMfd5UimlwtRgp7C9dLGwqQ5D2HYgQ==",
+ "engines": {
+ "node": ">=16"
+ }
+ },
"node_modules/doctrine": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
@@ -9044,6 +9063,11 @@
"webpack": "^4.0.0 || ^5.0.0"
}
},
+ "node_modules/file-saver": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz",
+ "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA=="
+ },
"node_modules/filelist": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz",
@@ -9383,6 +9407,30 @@
"url": "https://www.patreon.com/infusion"
}
},
+ "node_modules/framer-motion": {
+ "version": "11.2.10",
+ "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.2.10.tgz",
+ "integrity": "sha512-/gr3PLZUVFCc86a9MqCUboVrALscrdluzTb3yew+2/qKBU8CX6nzs918/SRBRCqaPbx0TZP10CB6yFgK2C5cYQ==",
+ "dependencies": {
+ "tslib": "^2.4.0"
+ },
+ "peerDependencies": {
+ "@emotion/is-prop-valid": "*",
+ "react": "^18.0.0",
+ "react-dom": "^18.0.0"
+ },
+ "peerDependenciesMeta": {
+ "@emotion/is-prop-valid": {
+ "optional": true
+ },
+ "react": {
+ "optional": true
+ },
+ "react-dom": {
+ "optional": true
+ }
+ }
+ },
"node_modules/fresh": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
@@ -12724,6 +12772,18 @@
"node": ">=4"
}
},
+ "node_modules/json-2-csv": {
+ "version": "5.5.4",
+ "resolved": "https://registry.npmjs.org/json-2-csv/-/json-2-csv-5.5.4.tgz",
+ "integrity": "sha512-gB24IF5SvZn7QhEh6kp9QwFhRnI3FVEEXAGyq0xtPxqOQ4odYU3PU9pFKRoR1SGABxunQlBP6VFv0c8EnLbsLQ==",
+ "dependencies": {
+ "deeks": "3.1.0",
+ "doc-path": "4.1.1"
+ },
+ "engines": {
+ "node": ">= 16"
+ }
+ },
"node_modules/json-parse-even-better-errors": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
@@ -25853,6 +25913,11 @@
"resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz",
"integrity": "sha512-Q6fKUPqnAHAyhiUgFU7BUzLiv0kd8saH9al7tnu5Q/okj6dnupxyTgFIBjVzJATdfIAm9NAsvXNzjaKa+bxVyA=="
},
+ "deeks": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/deeks/-/deeks-3.1.0.tgz",
+ "integrity": "sha512-e7oWH1LzIdv/prMQ7pmlDlaVoL64glqzvNgkgQNgyec9ORPHrT2jaOqMtRyqJuwWjtfb6v+2rk9pmaHj+F137A=="
+ },
"deep-equal": {
"version": "2.2.1",
"resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.2.1.tgz",
@@ -26027,6 +26092,11 @@
"@leichtgewicht/ip-codec": "^2.0.1"
}
},
+ "doc-path": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/doc-path/-/doc-path-4.1.1.tgz",
+ "integrity": "sha512-h1ErTglQAVv2gCnOpD3sFS6uolDbOKHDU1BZq+Kl3npPqroU3dYL42lUgMfd5UimlwtRgp7C9dLGwqQ5D2HYgQ=="
+ },
"doctrine": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
@@ -26993,6 +27063,11 @@
"schema-utils": "^3.0.0"
}
},
+ "file-saver": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/file-saver/-/file-saver-2.0.5.tgz",
+ "integrity": "sha512-P9bmyZ3h/PRG+Nzga+rbdI4OEpNDzAVyy74uVO9ATgzLK6VtAsYybF/+TOCvrc0MO793d6+42lLyZTw7/ArVzA=="
+ },
"filelist": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz",
@@ -27235,6 +27310,14 @@
"resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-4.2.0.tgz",
"integrity": "sha512-MhLuK+2gUcnZe8ZHlaaINnQLl0xRIGRfcGk2yl8xoQAfHrSsL3rYu6FCmBdkdbhc9EPlwyGHewaRsvwRMJtAlA=="
},
+ "framer-motion": {
+ "version": "11.2.10",
+ "resolved": "https://registry.npmjs.org/framer-motion/-/framer-motion-11.2.10.tgz",
+ "integrity": "sha512-/gr3PLZUVFCc86a9MqCUboVrALscrdluzTb3yew+2/qKBU8CX6nzs918/SRBRCqaPbx0TZP10CB6yFgK2C5cYQ==",
+ "requires": {
+ "tslib": "^2.4.0"
+ }
+ },
"fresh": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
@@ -29761,6 +29844,15 @@
"resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz",
"integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA=="
},
+ "json-2-csv": {
+ "version": "5.5.4",
+ "resolved": "https://registry.npmjs.org/json-2-csv/-/json-2-csv-5.5.4.tgz",
+ "integrity": "sha512-gB24IF5SvZn7QhEh6kp9QwFhRnI3FVEEXAGyq0xtPxqOQ4odYU3PU9pFKRoR1SGABxunQlBP6VFv0c8EnLbsLQ==",
+ "requires": {
+ "deeks": "3.1.0",
+ "doc-path": "4.1.1"
+ }
+ },
"json-parse-even-better-errors": {
"version": "2.3.1",
"resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
diff --git a/frontend/package.json b/frontend/package.json
index a01f53738..9a9bd21ae 100644
--- a/frontend/package.json
+++ b/frontend/package.json
@@ -20,10 +20,13 @@
"cronstrue": "^2.48.0",
"emoji-picker-react": "^4.8.0",
"emoji-regex": "^10.3.0",
+ "file-saver": "^2.0.5",
+ "framer-motion": "^11.2.10",
"handlebars": "^4.7.8",
"http-proxy-middleware": "^2.0.6",
"js-cookie": "^3.0.5",
"js-yaml": "^4.1.0",
+ "json-2-csv": "^5.5.4",
"markdown-to-jsx": "^7.2.1",
"moment": "^2.29.4",
"moment-timezone": "^0.5.45",
diff --git a/frontend/public/icons/connector-icons/Redis.png b/frontend/public/icons/connector-icons/Redis.png
new file mode 100644
index 000000000..b8deff6cc
Binary files /dev/null and b/frontend/public/icons/connector-icons/Redis.png differ
diff --git a/frontend/src/components/agency/actions/Actions.jsx b/frontend/src/components/agency/actions/Actions.jsx
index fe82adaca..faa973bf0 100644
--- a/frontend/src/components/agency/actions/Actions.jsx
+++ b/frontend/src/components/agency/actions/Actions.jsx
@@ -38,7 +38,7 @@ function Actions({ statusBarMsg, initializeWfComp, stepLoader }) {
const [openAddApiModal, setOpenAddApiModal] = useState(false);
const [apiOpsPresent, setApiOpsPresent] = useState(false);
const [canAddTaskPipeline, setCanAddTaskPipeline] = useState(false);
- const [canAddETLPipeline, setCanAddETAPipeline] = useState(false);
+ const [canAddETLPipeline, setCanAddETLPipeline] = useState(false);
const [openAddTaskModal, setOpenAddTaskModal] = useState(false);
const [openAddETLModal, setOpenAddETLModal] = useState(false);
const [deploymentName, setDeploymentName] = useState("");
@@ -76,8 +76,9 @@ function Actions({ statusBarMsg, initializeWfComp, stepLoader }) {
);
// Enable Deploy as ETL Pipeline only when
// destination connection_type is DATABASE and Source & Destination are Configured
- setCanAddETAPipeline(
- destination?.connection_type === "DATABASE" &&
+ setCanAddETLPipeline(
+ (destination?.connection_type === "DATABASE" ||
+ destination.connection_type === "MANUALREVIEW") &&
source?.connector_instance &&
destination.connector_instance
);
@@ -302,6 +303,12 @@ function Actions({ statusBarMsg, initializeWfComp, stepLoader }) {
) {
return false;
}
+ if (
+ source?.connection_type === "FILESYSTEM" &&
+ destination?.connection_type === "MANUALREVIEW"
+ ) {
+ return false;
+ }
return !source?.connector_instance || !destination?.connector_instance;
};
diff --git a/frontend/src/components/agency/configure-connector-modal/ConfigureConnectorModal.jsx b/frontend/src/components/agency/configure-connector-modal/ConfigureConnectorModal.jsx
index 0f3977546..e781869e4 100644
--- a/frontend/src/components/agency/configure-connector-modal/ConfigureConnectorModal.jsx
+++ b/frontend/src/components/agency/configure-connector-modal/ConfigureConnectorModal.jsx
@@ -41,7 +41,8 @@ function ConfigureConnectorModal({
disabled:
!connectorId ||
connDetails?.connector_id !== selectedId ||
- connType === "DATABASE",
+ connType === "DATABASE" ||
+ connType === "MANUALREVIEW",
},
];
diff --git a/frontend/src/components/agency/configure-forms-layout/ConfigureFormsLayout.jsx b/frontend/src/components/agency/configure-forms-layout/ConfigureFormsLayout.jsx
index 74a9e916d..9f6a4ed9b 100644
--- a/frontend/src/components/agency/configure-forms-layout/ConfigureFormsLayout.jsx
+++ b/frontend/src/components/agency/configure-forms-layout/ConfigureFormsLayout.jsx
@@ -50,26 +50,28 @@ function ConfigureFormsLayout({
-
-
- {JSON.stringify(combinedOutput, null, 2)}
-
-
- )}
-
+
+ {JSON.stringify(combinedOutput, null, 2)}
+
+
+ )}
+ Profile not found
; + } + + return ( +No chunks founds
; + } + return ( + <> + {chunk?.map((line) => ( +