From 7c513ff86b2a7b28f06fafbdd1760b77d4003594 Mon Sep 17 00:00:00 2001
From: Chandrasekharan M <117059509+chandrasekharan-zipstack@users.noreply.github.com>
Date: Wed, 17 Jul 2024 18:55:48 +0530
Subject: [PATCH 1/3] fix: Test connector error handling improvements (#496)

* Connector test error handling improvements

* Updated error messages to hint its from client

---------

Co-authored-by: Rahul Johny <116638720+johnyrahul@users.noreply.github.com>
---
 .../connector_processor.py                   |  17 +-
 backend/connector_processor/exceptions.py    |   4 +-
 backend/connector_processor/views.py         |   4 +-
 backend/pdm.lock                             | 147 ++++++++----
 unstract/connectors/pyproject.toml           |   2 +-
 .../connectors/databases/unstract_db.py      |   2 +-
 .../azure_cloud_storage.py                   |   8 +-
 .../connectors/filesystems/box/box.py        |  13 +-
 .../google_cloud_storage.py                  |  10 +-
 .../filesystems/google_drive/google_drive.py |  12 +-
 .../connectors/filesystems/http/http.py      |  12 +-
 .../local_storage/local_storage.py           |  14 +-
 .../filesystems/minio/exceptions.py          |  26 ++++
 .../connectors/filesystems/minio/minio.py    |  13 +-
 .../filesystems/zs_dropbox/exceptions.py     |  16 +-
 .../filesystems/zs_dropbox/zs_dropbox.py     |   6 +-
 16 files changed, 177 insertions(+), 129 deletions(-)
 create mode 100644 unstract/connectors/src/unstract/connectors/filesystems/minio/exceptions.py

diff --git a/backend/connector_processor/connector_processor.py b/backend/connector_processor/connector_processor.py
index dcababce3..b2346a9d4 100644
--- a/backend/connector_processor/connector_processor.py
+++ b/backend/connector_processor/connector_processor.py
@@ -12,8 +12,7 @@
     InValidConnectorId,
     InValidConnectorMode,
     OAuthTimeOut,
-    TestConnectorException,
-    TestConnectorInputException,
+    TestConnectorInputError,
 )
 
 from unstract.connectors.base import UnstractConnector
@@ -100,15 +99,15 @@ def get_all_supported_connectors(
         return supported_connectors
 
     @staticmethod
-    def test_connectors(connector_id: str, cred_string: dict[str, Any]) -> bool:
+    def test_connectors(connector_id: str, credentials: dict[str, Any]) -> bool:
         logger.info(f"Testing connector: {connector_id}")
         connector: dict[str, Any] = fetch_connectors_by_key_value(
             ConnectorKeys.ID, connector_id
         )[0]
         if connector.get(ConnectorKeys.OAUTH):
             try:
-                oauth_key = cred_string.get(ConnectorAuthKey.OAUTH_KEY)
-                cred_string = ConnectorAuthHelper.get_oauth_creds_from_cache(
+                oauth_key = credentials.get(ConnectorAuthKey.OAUTH_KEY)
+                credentials = ConnectorAuthHelper.get_oauth_creds_from_cache(
                     cache_key=oauth_key, delete_key=False
                 )
             except Exception as exc:
@@ -120,17 +119,13 @@ def test_connectors(connector_id: str, cred_string: dict[str, Any]) -> bool:
 
         try:
             connector_impl = Connectorkit().get_connector_by_id(
-                connector_id, cred_string
+                connector_id, credentials
             )
             test_result = connector_impl.test_credentials()
             logger.info(f"{connector_id} test result: {test_result}")
             return test_result
         except ConnectorError as e:
-            logger.error(f"Error while testing {connector_id}: {e}")
-            raise TestConnectorInputException(core_err=e)
-        except Exception as e:
-            logger.error(f"Error while testing {connector_id}: {e}")
-            raise TestConnectorException
+            raise TestConnectorInputError(core_err=e)
 
 def get_connector_data_with_key(connector_id: str, key_value: str) -> Any:
     """Generic Function to get connector data with provided key."""
diff --git a/backend/connector_processor/exceptions.py b/backend/connector_processor/exceptions.py
index 1df8079d8..22a1e2966 100644
--- a/backend/connector_processor/exceptions.py
+++ b/backend/connector_processor/exceptions.py
@@
-31,7 +31,7 @@ class JSONParseException(APIException): class OAuthTimeOut(APIException): status_code = 408 - default_detail = "Timed Out. Please re authenticate." + default_detail = "Timed out. Please re-authenticate." class InternalServiceError(APIException): @@ -44,7 +44,7 @@ class TestConnectorException(APIException): default_detail = "Error while testing connector." -class TestConnectorInputException(UnstractBaseException): +class TestConnectorInputError(UnstractBaseException): def __init__(self, core_err: ConnectorError) -> None: super().__init__(detail=core_err.message, core_err=core_err) self.default_detail = core_err.message diff --git a/backend/connector_processor/views.py b/backend/connector_processor/views.py index 55367c40d..edca86ba1 100644 --- a/backend/connector_processor/views.py +++ b/backend/connector_processor/views.py @@ -67,10 +67,10 @@ def test(self, request: Request) -> Response: """Tests the connector against the credentials passed.""" serializer: TestConnectorSerializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - connector_id = serializer.validated_data.get(ConnectorKeys.CONNECTOR_ID) + connector_id = serializer.validated_data.get(CIKey.CONNECTOR_ID) cred_string = serializer.validated_data.get(CIKey.CONNECTOR_METADATA) test_result = ConnectorProcessor.test_connectors( - connector_id=connector_id, cred_string=cred_string + connector_id=connector_id, credentials=cred_string ) return Response( {ConnectorKeys.IS_VALID: test_result}, diff --git a/backend/pdm.lock b/backend/pdm.lock index 230c6db84..4b896fa3e 100644 --- a/backend/pdm.lock +++ b/backend/pdm.lock @@ -542,13 +542,13 @@ files = [ [[package]] name = "cachetools" -version = "5.3.3" +version = "5.4.0" requires_python = ">=3.7" summary = "Extensible memoizing collections and decorators" groups = ["default", "dev"] files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, + {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, ] [[package]] @@ -1068,7 +1068,7 @@ files = [ [[package]] name = "dropboxdrivefs" -version = "1.3.1" +version = "1.4.1" requires_python = ">=3.5" summary = "Dropbox implementation for fsspec module" groups = ["default", "dev"] @@ -1078,7 +1078,7 @@ dependencies = [ "requests", ] files = [ - {file = "dropboxdrivefs-1.3.1.tar.gz", hash = "sha256:892ee9017c59648736d79c3989cadb9e129b469fcec0c68d12e42bd6826a962d"}, + {file = "dropboxdrivefs-1.4.1.tar.gz", hash = "sha256:6f3c6061d045813553ce91ed0e2b682f1d70bec74011943c92b3181faacefd34"}, ] [[package]] @@ -1098,14 +1098,14 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.2.1" +version = "1.2.2" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" groups = ["default", "dev", "test"] marker = "python_version < \"3.11\"" files = [ - {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, - {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = 
"sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [[package]] @@ -1299,7 +1299,7 @@ files = [ [[package]] name = "google-api-python-client" -version = "2.136.0" +version = "2.137.0" requires_python = ">=3.7" summary = "Google API Client Library for Python" groups = ["default", "dev"] @@ -1311,8 +1311,8 @@ dependencies = [ "uritemplate<5,>=3.0.1", ] files = [ - {file = "google-api-python-client-2.136.0.tar.gz", hash = "sha256:161c722c8864e7ed39393e2b7eea76ef4e1c933a6a59f9d7c70409b6635f225d"}, - {file = "google_api_python_client-2.136.0-py2.py3-none-any.whl", hash = "sha256:5a554c8b5edf0a609b905d89d7ced82e8f6ac31da1e4d8d5684ef63dbc0e49f5"}, + {file = "google_api_python_client-2.137.0-py2.py3-none-any.whl", hash = "sha256:a8b5c5724885e5be9f5368739aa0ccf416627da4ebd914b410a090c18f84d692"}, + {file = "google_api_python_client-2.137.0.tar.gz", hash = "sha256:e739cb74aac8258b1886cb853b0722d47c81fe07ad649d7f2206f06530513c04"}, ] [[package]] @@ -1364,7 +1364,7 @@ files = [ [[package]] name = "google-cloud-aiplatform" -version = "1.58.0" +version = "1.59.0" requires_python = ">=3.8" summary = "Vertex AI API client library" groups = ["default", "dev"] @@ -1376,14 +1376,14 @@ dependencies = [ "google-cloud-resource-manager<3.0.0dev,>=1.3.3", "google-cloud-storage<3.0.0dev,>=1.32.0", "packaging>=14.3", - "proto-plus<2.0.0dev,>=1.22.0", + "proto-plus<2.0.0dev,>=1.22.3", "protobuf!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev,>=3.19.5", "pydantic<3", "shapely<3.0.0dev", ] files = [ - {file = "google-cloud-aiplatform-1.58.0.tar.gz", hash = "sha256:7a05aceac4a6c7eaa26e684e9f202b829cc7e57f82bffe7281684275a553fcad"}, - {file = "google_cloud_aiplatform-1.58.0-py2.py3-none-any.whl", hash = "sha256:21f1320860f4916183ec939fdf2ff3fc1d7fdde97fe5795974257ab21f9458ec"}, + {file = "google-cloud-aiplatform-1.59.0.tar.gz", hash = "sha256:2bebb59c0ba3e3b4b568305418ca1b021977988adbee8691a5bed09b037e7e63"}, + {file = "google_cloud_aiplatform-1.59.0-py2.py3-none-any.whl", hash = "sha256:549e6eb1844b0f853043309138ebe2db00de4bbd8197b3bde26804ac163ef52a"}, ] [[package]] @@ -1862,7 +1862,7 @@ files = [ [[package]] name = "huggingface-hub" -version = "0.23.4" +version = "0.23.5" requires_python = ">=3.8.0" summary = "Client library to download and publish models, datasets and other repos on the huggingface.co hub" groups = ["default", "dev"] @@ -1876,8 +1876,8 @@ dependencies = [ "typing-extensions>=3.7.4.3", ] files = [ - {file = "huggingface_hub-0.23.4-py3-none-any.whl", hash = "sha256:3a0b957aa87150addf0cc7bd71b4d954b78e749850e1e7fb29ebbd2db64ca037"}, - {file = "huggingface_hub-0.23.4.tar.gz", hash = "sha256:35d99016433900e44ae7efe1c209164a5a81dbbcd53a52f99c281dcd7ce22431"}, + {file = "huggingface_hub-0.23.5-py3-none-any.whl", hash = "sha256:d7a7d337615e11a45cc14a0ce5a605db6b038dc24af42866f731684825226e90"}, + {file = "huggingface_hub-0.23.5.tar.gz", hash = "sha256:67a9caba79b71235be3752852ca27da86bd54311d2424ca8afdb8dda056edf98"}, ] [[package]] @@ -2017,21 +2017,6 @@ files = [ {file = "kombu-5.3.7.tar.gz", hash = "sha256:011c4cd9a355c14a1de8d35d257314a1d2456d52b7140388561acac3cf1a97bf"}, ] -[[package]] -name = "llama-cloud" -version = "0.0.6" -requires_python = "<4,>=3.8" -summary = "" -groups = ["default", "dev"] -dependencies = [ - "httpx>=0.20.0", - "pydantic>=1.10", -] -files = [ - {file = "llama_cloud-0.0.6-py3-none-any.whl", hash = 
"sha256:0f07c8a865be632b543dec2bcad350a68a61f13413a7421b4b03de32c36f0194"}, - {file = "llama_cloud-0.0.6.tar.gz", hash = "sha256:33b94cd119133dcb2899c9b69e8e1c36aec7bc7e80062c55c65f15618722e091"}, -] - [[package]] name = "llama-index" version = "0.10.38" @@ -2091,7 +2076,7 @@ files = [ [[package]] name = "llama-index-core" -version = "0.10.53.post1" +version = "0.10.55" requires_python = "<4.0,>=3.8.1" summary = "Interface between LLMs and your data" groups = ["default", "dev"] @@ -2104,7 +2089,6 @@ dependencies = [ "dirtyjson<2.0.0,>=1.0.8", "fsspec>=2023.5.0", "httpx", - "llama-cloud<0.0.7,>=0.0.6", "nest-asyncio<2.0.0,>=1.5.8", "networkx>=3.0", "nltk<4.0.0,>=3.8.1", @@ -2121,8 +2105,8 @@ dependencies = [ "wrapt", ] files = [ - {file = "llama_index_core-0.10.53.post1-py3-none-any.whl", hash = "sha256:565d0967dd8f05456c66f5aca6ee6ee3dbc5645b6a55c81957f776ff029d6a99"}, - {file = "llama_index_core-0.10.53.post1.tar.gz", hash = "sha256:6219a737b66c887b406814b0d9db6e24addd35f3136ffb6a879e54ac3f133406"}, + {file = "llama_index_core-0.10.55-py3-none-any.whl", hash = "sha256:e2f7dbc9c992d4487dabad6a7b0f40ed145cce0ab99e52cc78e9caf0cd4c1c08"}, + {file = "llama_index_core-0.10.55.tar.gz", hash = "sha256:b02d46595c17805221a8f404c04a97609d1ce22e5be24ad7b7c4ac30e5181561"}, ] [[package]] @@ -2413,7 +2397,7 @@ files = [ [[package]] name = "llama-index-readers-file" -version = "0.1.29" +version = "0.1.30" requires_python = "<4.0,>=3.8.1" summary = "llama-index readers file integration" groups = ["default", "dev"] @@ -2424,8 +2408,8 @@ dependencies = [ "striprtf<0.0.27,>=0.0.26", ] files = [ - {file = "llama_index_readers_file-0.1.29-py3-none-any.whl", hash = "sha256:b25f3dbf7bf3e0635290e499e808db5ba955eab67f205a3ff1cea6a4eb93556a"}, - {file = "llama_index_readers_file-0.1.29.tar.gz", hash = "sha256:f9f696e738383e7d14078e75958fba5a7030f7994a20586e3140e1ca41395a54"}, + {file = "llama_index_readers_file-0.1.30-py3-none-any.whl", hash = "sha256:d5f6cdd4685ee73103c68b9bc0dfb0d05439033133fc6bd45ef31ff41519e723"}, + {file = "llama_index_readers_file-0.1.30.tar.gz", hash = "sha256:32f40465f2a8a65fa5773e03c9f4dd55164be934ae67fad62113680436787d91"}, ] [[package]] @@ -3045,7 +3029,7 @@ files = [ [[package]] name = "portalocker" -version = "2.10.0" +version = "2.10.1" requires_python = ">=3.8" summary = "Wraps the portalocker recipe for easy usage" groups = ["default", "dev"] @@ -3053,8 +3037,8 @@ dependencies = [ "pywin32>=226; platform_system == \"Windows\"", ] files = [ - {file = "portalocker-2.10.0-py3-none-any.whl", hash = "sha256:48944147b2cd42520549bc1bb8fe44e220296e56f7c3d551bc6ecce69d9b0de1"}, - {file = "portalocker-2.10.0.tar.gz", hash = "sha256:49de8bc0a2f68ca98bf9e219c81a3e6b27097c7bf505a87c5a112ce1aaeb9b81"}, + {file = "portalocker-2.10.1-py3-none-any.whl", hash = "sha256:53a5984ebc86a025552264b459b46a2086e269b21823cb572f8f28ee759e45bf"}, + {file = "portalocker-2.10.1.tar.gz", hash = "sha256:ef1bf844e878ab08aee7e40184156e1151f228f103aa5c6bd0724cc330960f8f"}, ] [[package]] @@ -3457,7 +3441,7 @@ files = [ [[package]] name = "pypdf" -version = "4.2.0" +version = "4.3.0" requires_python = ">=3.6" summary = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" groups = ["default", "dev"] @@ -3465,8 +3449,8 @@ dependencies = [ "typing-extensions>=4.0; python_version < \"3.11\"", ] files = [ - {file = "pypdf-4.2.0-py3-none-any.whl", hash = "sha256:dc035581664e0ad717e3492acebc1a5fc23dba759e788e3d4a9fc9b1a32e72c1"}, - {file = "pypdf-4.2.0.tar.gz", hash = 
"sha256:fe63f3f7d1dcda1c9374421a94c1bba6c6f8c4a62173a59b64ffd52058f846b1"}, + {file = "pypdf-4.3.0-py3-none-any.whl", hash = "sha256:eeea4d019b57c099d02a0e1692eaaab23341ae3f255c1dafa3c8566b4636496d"}, + {file = "pypdf-4.3.0.tar.gz", hash = "sha256:0d7a4c67fd03782f5a09d3f48c11c7a31e0bb9af78861a25229bb49259ed0504"}, ] [[package]] @@ -4022,18 +4006,18 @@ files = [ [[package]] name = "setuptools" -version = "70.2.0" +version = "70.3.0" requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["default", "dev"] files = [ - {file = "setuptools-70.2.0-py3-none-any.whl", hash = "sha256:b8b8060bb426838fbe942479c90296ce976249451118ef566a5a0b7d8b78fb05"}, - {file = "setuptools-70.2.0.tar.gz", hash = "sha256:bd63e505105011b25c3c11f753f7e3b8465ea739efddaccef8f0efac2137bac1"}, + {file = "setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc"}, + {file = "setuptools-70.3.0.tar.gz", hash = "sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5"}, ] [[package]] name = "shapely" -version = "2.0.4" +version = "2.0.5" requires_python = ">=3.7" summary = "Manipulation and analysis of geometric objects" groups = ["default", "dev"] @@ -4041,28 +4025,25 @@ dependencies = [ "numpy<3,>=1.14", ] files = [ - {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:011b77153906030b795791f2fdfa2d68f1a8d7e40bce78b029782ade3afe4f2f"}, - {file = "shapely-2.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9831816a5d34d5170aa9ed32a64982c3d6f4332e7ecfe62dc97767e163cb0b17"}, - {file = "shapely-2.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5c4849916f71dc44e19ed370421518c0d86cf73b26e8656192fcfcda08218fbd"}, - {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:841f93a0e31e4c64d62ea570d81c35de0f6cea224568b2430d832967536308e6"}, - {file = "shapely-2.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b4431f522b277c79c34b65da128029a9955e4481462cbf7ebec23aab61fc58"}, - {file = "shapely-2.0.4-cp310-cp310-win32.whl", hash = "sha256:92a41d936f7d6743f343be265ace93b7c57f5b231e21b9605716f5a47c2879e7"}, - {file = "shapely-2.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:30982f79f21bb0ff7d7d4a4e531e3fcaa39b778584c2ce81a147f95be1cd58c9"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:de0205cb21ad5ddaef607cda9a3191eadd1e7a62a756ea3a356369675230ac35"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7d56ce3e2a6a556b59a288771cf9d091470116867e578bebced8bfc4147fbfd7"}, - {file = "shapely-2.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:58b0ecc505bbe49a99551eea3f2e8a9b3b24b3edd2a4de1ac0dc17bc75c9ec07"}, - {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:790a168a808bd00ee42786b8ba883307c0e3684ebb292e0e20009588c426da47"}, - {file = "shapely-2.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4310b5494271e18580d61022c0857eb85d30510d88606fa3b8314790df7f367d"}, - {file = "shapely-2.0.4-cp311-cp311-win32.whl", hash = "sha256:63f3a80daf4f867bd80f5c97fbe03314348ac1b3b70fb1c0ad255a69e3749879"}, - {file = "shapely-2.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:c52ed79f683f721b69a10fb9e3d940a468203f5054927215586c5d49a072de8d"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:3f9103abd1678cb1b5f7e8e1af565a652e036844166c91ec031eeb25c5ca8af0"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:263bcf0c24d7a57c80991e64ab57cba7a3906e31d2e21b455f493d4aab534aaa"}, - {file = "shapely-2.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ddf4a9bfaac643e62702ed662afc36f6abed2a88a21270e891038f9a19bc08fc"}, - {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:485246fcdb93336105c29a5cfbff8a226949db37b7473c89caa26c9bae52a242"}, - {file = "shapely-2.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8de4578e838a9409b5b134a18ee820730e507b2d21700c14b71a2b0757396acc"}, - {file = "shapely-2.0.4-cp39-cp39-win32.whl", hash = "sha256:9dab4c98acfb5fb85f5a20548b5c0abe9b163ad3525ee28822ffecb5c40e724c"}, - {file = "shapely-2.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:31c19a668b5a1eadab82ff070b5a260478ac6ddad3a5b62295095174a8d26398"}, - {file = "shapely-2.0.4.tar.gz", hash = "sha256:5dc736127fac70009b8d309a0eeb74f3e08979e530cf7017f2f507ef62e6cfb8"}, + {file = "shapely-2.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89d34787c44f77a7d37d55ae821f3a784fa33592b9d217a45053a93ade899375"}, + {file = "shapely-2.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:798090b426142df2c5258779c1d8d5734ec6942f778dab6c6c30cfe7f3bf64ff"}, + {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45211276900c4790d6bfc6105cbf1030742da67594ea4161a9ce6812a6721e68"}, + {file = "shapely-2.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e119444bc27ca33e786772b81760f2028d930ac55dafe9bc50ef538b794a8e1"}, + {file = "shapely-2.0.5-cp310-cp310-win32.whl", hash = "sha256:9a4492a2b2ccbeaebf181e7310d2dfff4fdd505aef59d6cb0f217607cb042fb3"}, + {file = "shapely-2.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:1e5cb5ee72f1bc7ace737c9ecd30dc174a5295fae412972d3879bac2e82c8fae"}, + {file = "shapely-2.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5bbfb048a74cf273db9091ff3155d373020852805a37dfc846ab71dde4be93ec"}, + {file = "shapely-2.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93be600cbe2fbaa86c8eb70656369f2f7104cd231f0d6585c7d0aa555d6878b8"}, + {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f8e71bb9a46814019f6644c4e2560a09d44b80100e46e371578f35eaaa9da1c"}, + {file = "shapely-2.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5251c28a29012e92de01d2e84f11637eb1d48184ee8f22e2df6c8c578d26760"}, + {file = "shapely-2.0.5-cp311-cp311-win32.whl", hash = "sha256:35110e80070d664781ec7955c7de557456b25727a0257b354830abb759bf8311"}, + {file = "shapely-2.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c6b78c0007a34ce7144f98b7418800e0a6a5d9a762f2244b00ea560525290c9"}, + {file = "shapely-2.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7545a39c55cad1562be302d74c74586f79e07b592df8ada56b79a209731c0219"}, + {file = "shapely-2.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4c83a36f12ec8dee2066946d98d4d841ab6512a6ed7eb742e026a64854019b5f"}, + {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89e640c2cd37378480caf2eeda9a51be64201f01f786d127e78eaeff091ec897"}, + {file = "shapely-2.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06efe39beafde3a18a21dde169d32f315c57da962826a6d7d22630025200c5e6"}, + {file = "shapely-2.0.5-cp39-cp39-win32.whl", hash = 
"sha256:8203a8b2d44dcb366becbc8c3d553670320e4acf0616c39e218c9561dd738d92"}, + {file = "shapely-2.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:7fed9dbfbcfec2682d9a047b9699db8dcc890dfca857ecba872c42185fc9e64e"}, + {file = "shapely-2.0.5.tar.gz", hash = "sha256:bff2366bc786bfa6cb353d6b47d0443c570c32776612e527ee47b6df63fcfe32"}, ] [[package]] @@ -4322,13 +4303,13 @@ files = [ [[package]] name = "sqlparse" -version = "0.5.0" +version = "0.5.1" requires_python = ">=3.8" summary = "A non-validating SQL parser." groups = ["default"] files = [ - {file = "sqlparse-0.5.0-py3-none-any.whl", hash = "sha256:c204494cd97479d0e39f28c93d46c0b2d5959c7b9ab904762ea6c7af211c8663"}, - {file = "sqlparse-0.5.0.tar.gz", hash = "sha256:714d0a4932c059d16189f58ef5411ec2287a4360f17cdd0edd2d09d4c5087c93"}, + {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, + {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, ] [[package]] @@ -4491,13 +4472,13 @@ files = [ [[package]] name = "tomlkit" -version = "0.12.5" -requires_python = ">=3.7" +version = "0.13.0" +requires_python = ">=3.8" summary = "Style preserving TOML library" groups = ["default", "dev"] files = [ - {file = "tomlkit-0.12.5-py3-none-any.whl", hash = "sha256:af914f5a9c59ed9d0762c7b64d3b5d5df007448eb9cd2edc8a46b1eafead172f"}, - {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, + {file = "tomlkit-0.13.0-py3-none-any.whl", hash = "sha256:7075d3042d03b80f603482d69bf0c8f345c2b30e41699fd8883227f89972b264"}, + {file = "tomlkit-0.13.0.tar.gz", hash = "sha256:08ad192699734149f5b97b45f1f18dad7eb1b6d16bc72ad0c2335772650d7b72"}, ] [[package]] @@ -4698,7 +4679,7 @@ dependencies = [ "PyMySQL==1.1.0", "adlfs==2023.8.0", "boxfs==0.2.1", - "dropboxdrivefs==1.3.1", + "dropboxdrivefs==1.4.1", "gcsfs==2023.6.0", "google-auth==2.20.0", "google-cloud-bigquery==3.11.4", @@ -4829,13 +4810,13 @@ files = [ [[package]] name = "validators" -version = "0.31.0" +version = "0.33.0" requires_python = ">=3.8" summary = "Python Data Validation for Humans™" groups = ["default", "dev"] files = [ - {file = "validators-0.31.0-py3-none-any.whl", hash = "sha256:e15a600d81555a4cd409b17bf55946c5edec7748e776afc85ed0a19bdee54e56"}, - {file = "validators-0.31.0.tar.gz", hash = "sha256:de7574fc56a231c788162f3e7da15bc2053c5ff9e0281d9ff1afb3a7b69498df"}, + {file = "validators-0.33.0-py3-none-any.whl", hash = "sha256:134b586a98894f8139865953899fc2daeb3d0c35569552c5518f089ae43ed075"}, + {file = "validators-0.33.0.tar.gz", hash = "sha256:535867e9617f0100e676a1257ba1e206b9bfd847ddc171e4d44811f07ff0bfbf"}, ] [[package]] diff --git a/unstract/connectors/pyproject.toml b/unstract/connectors/pyproject.toml index fc292d93d..f25839f3e 100644 --- a/unstract/connectors/pyproject.toml +++ b/unstract/connectors/pyproject.toml @@ -16,7 +16,7 @@ dependencies = [ "s3fs[boto3]==2023.6.0", # For Minio "PyDrive2[fsspec]==1.15.4", # For GDrive "oauth2client==4.1.3", # For GDrive - "dropboxdrivefs==1.3.1", # For Dropbox + "dropboxdrivefs==1.4.1", # For Dropbox "boxfs==0.2.1", # For Box "gcsfs==2023.6.0", # For GoogleCloudStorage "adlfs==2023.8.0", # For AzureCloudStorage diff --git a/unstract/connectors/src/unstract/connectors/databases/unstract_db.py b/unstract/connectors/src/unstract/connectors/databases/unstract_db.py index d426c26e6..7ec82abc2 100644 --- a/unstract/connectors/src/unstract/connectors/databases/unstract_db.py 
+++ b/unstract/connectors/src/unstract/connectors/databases/unstract_db.py @@ -68,7 +68,7 @@ def test_credentials(self) -> bool: try: self.get_engine() except Exception as e: - raise ConnectorError(str(e)) from e + raise ConnectorError(f"Error while connecting to DB: {str(e)}") from e return True def execute(self, query: str) -> Any: diff --git a/unstract/connectors/src/unstract/connectors/filesystems/azure_cloud_storage/azure_cloud_storage.py b/unstract/connectors/src/unstract/connectors/filesystems/azure_cloud_storage/azure_cloud_storage.py index d83426e2c..fe61fd6f2 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/azure_cloud_storage/azure_cloud_storage.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/azure_cloud_storage/azure_cloud_storage.py @@ -65,7 +65,11 @@ def get_fsspec_fs(self) -> AzureBlobFileSystem: def test_credentials(self) -> bool: """To test credentials for Azure Cloud Storage.""" try: - self.get_fsspec_fs().ls(f"{self.bucket}") + is_dir = bool(self.get_fsspec_fs().isdir(self.bucket)) + if not is_dir: + raise RuntimeError(f"'{self.bucket}' is not a valid bucket.") except Exception as e: - raise ConnectorError(str(e)) + raise ConnectorError( + f"Error from Azure Cloud Storage while testing connection: {str(e)}" + ) from e return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/box/box.py b/unstract/connectors/src/unstract/connectors/filesystems/box/box.py index e30cd264d..2989e7916 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/box/box.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/box/box.py @@ -24,7 +24,7 @@ def __init__(self, settings: dict[str, Any]): settings_dict = json.loads(settings["box_app_settings"]) if not isinstance(settings_dict, dict): raise ConnectorError( - "Box app settings is expected to be a valid JSON", + "Box app settings should be a valid JSON.", treat_as_user_message=True, ) except JSONDecodeError as e: @@ -112,8 +112,15 @@ def get_fsspec_fs(self) -> BoxFileSystem: def test_credentials(self) -> bool: """To test credentials for the Box connector.""" + is_dir = False try: - self.get_fsspec_fs().isdir("/") + is_dir = bool(self.get_fsspec_fs().isdir("/")) except Exception as e: - raise ConnectorError(str(e)) + raise ConnectorError( + f"Error from Box while testing connection: {str(e)}" + ) from e + if not is_dir: + raise ConnectorError( + "Unable to connect to Box, please check the connection settings." 
+ ) return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/google_cloud_storage/google_cloud_storage.py b/unstract/connectors/src/unstract/connectors/filesystems/google_cloud_storage/google_cloud_storage.py index 915cb2b2c..b79bb8742 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/google_cloud_storage/google_cloud_storage.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/google_cloud_storage/google_cloud_storage.py @@ -64,7 +64,11 @@ def get_fsspec_fs(self) -> GCSFileSystem: def test_credentials(self) -> bool: """To test credentials for Google Cloud Storage.""" try: - is_dir = bool(self.get_fsspec_fs().isdir(f"{self.bucket}")) - return is_dir + is_dir = bool(self.get_fsspec_fs().isdir(self.bucket)) + if not is_dir: + raise RuntimeError(f"'{self.bucket}' is not a valid bucket.") except Exception as e: - raise ConnectorError(str(e)) + raise ConnectorError( + f"Error from Google Cloud Storage while testing connection: {str(e)}" + ) from e + return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/google_drive/google_drive.py b/unstract/connectors/src/unstract/connectors/filesystems/google_drive/google_drive.py index 7beafa412..01b574273 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/google_drive/google_drive.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/google_drive/google_drive.py @@ -91,10 +91,18 @@ def get_fsspec_fs(self) -> GDriveFileSystem: def test_credentials(self) -> bool: """To test credentials for Google Drive.""" + is_dir = False try: - self.get_fsspec_fs().isdir("root") + is_dir = bool(self.get_fsspec_fs().isdir("root")) except Exception as e: - raise ConnectorError(str(e)) + raise ConnectorError( + f"Error from Google Drive while testing connection: {str(e)}" + ) from e + if not is_dir: + raise ConnectorError( + "Unable to connect to Google Drive, " + "please check the connection settings." + ) return True @staticmethod diff --git a/unstract/connectors/src/unstract/connectors/filesystems/http/http.py b/unstract/connectors/src/unstract/connectors/filesystems/http/http.py index c12236e15..bf0a29dd6 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/http/http.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/http/http.py @@ -71,8 +71,16 @@ def get_fsspec_fs(self) -> HTTPFileSystem: def test_credentials(self) -> bool: """To test credentials for HTTP(S).""" + is_dir = False try: - self.get_fsspec_fs().isdir("/") + is_dir = bool(self.get_fsspec_fs().isdir("/")) except Exception as e: - raise ConnectorError(str(e)) + raise ConnectorError( + f"Error while connecting to HTTP server: {str(e)}" + ) from e + if not is_dir: + raise ConnectorError( + "Unable to connect to HTTP server, " + "please check the connection settings." 
+ ) return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/local_storage/local_storage.py b/unstract/connectors/src/unstract/connectors/filesystems/local_storage/local_storage.py index ec179e990..783803fe3 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/local_storage/local_storage.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/local_storage/local_storage.py @@ -4,6 +4,7 @@ from fsspec.implementations.local import LocalFileSystem +from unstract.connectors.exceptions import ConnectorError from unstract.connectors.filesystems.unstract_file_system import UnstractFileSystem logger = logging.getLogger(__name__) @@ -60,9 +61,16 @@ def get_fsspec_fs(self) -> Any: def test_credentials(self, *args, **kwargs) -> bool: # type:ignore """To test credentials for LocalStorage.""" + is_dir = False try: - self.get_fsspec_fs().isdir("/") + is_dir = bool(self.get_fsspec_fs().isdir("/")) except Exception as e: - logger.error(f"Test creds failed: {e}") - return False + raise ConnectorError( + f"Error while connecting to local storage: {str(e)}" + ) from e + if not is_dir: + raise ConnectorError( + "Unable to connect to local storage, " + "please check the connection settings." + ) return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/minio/exceptions.py b/unstract/connectors/src/unstract/connectors/filesystems/minio/exceptions.py new file mode 100644 index 000000000..2b9b6ea0a --- /dev/null +++ b/unstract/connectors/src/unstract/connectors/filesystems/minio/exceptions.py @@ -0,0 +1,26 @@ +from unstract.connectors.exceptions import ConnectorError + +S3FS_EXC_TO_UNSTRACT_EXC = { + "The AWS Access Key Id you provided does not exist in our records": ( + "Invalid Key (Access Key ID) provided, please provide a valid one." + ), + "The request signature we calculated does not match the signature you provided": ( + "Invalid Secret (Secret Access Key) provided, please provide a valid one." + ), + "[Errno 22] S3 API Requests must be made to API port": ( # Minio only + "Request made to invalid port, please check the port of the endpoint URL." + ), +} + + +def handle_s3fs_exception(e: Exception) -> ConnectorError: + original_exc = str(e) + user_msg = "Error from S3 / MinIO while testing connection: " + exc_to_append = "" + for s3fs_exc, user_friendly_msg in S3FS_EXC_TO_UNSTRACT_EXC.items(): + if s3fs_exc in original_exc: + exc_to_append = user_friendly_msg + break + + user_msg += exc_to_append if exc_to_append else str(e) + return ConnectorError(message=user_msg) diff --git a/unstract/connectors/src/unstract/connectors/filesystems/minio/minio.py b/unstract/connectors/src/unstract/connectors/filesystems/minio/minio.py index 4bdf2c110..b676cda42 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/minio/minio.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/minio/minio.py @@ -4,9 +4,10 @@ from s3fs.core import S3FileSystem -from unstract.connectors.exceptions import ConnectorError from unstract.connectors.filesystems.unstract_file_system import UnstractFileSystem +from .exceptions import handle_s3fs_exception + logger = logging.getLogger(__name__) @@ -37,11 +38,11 @@ def get_id() -> str: @staticmethod def get_name() -> str: - return "MinioFS/S3" + return "S3/Minio" @staticmethod def get_description() -> str: - return "All MinioFS compatible, including AWS S3" + return "Connect to AWS S3 and other compatible storage such as Minio." 
@staticmethod def get_icon() -> str: @@ -76,7 +77,9 @@ def get_fsspec_fs(self) -> S3FileSystem: def test_credentials(self) -> bool: """To test credentials for Minio.""" try: - self.get_fsspec_fs().isdir(f"{self.bucket}") + is_dir = bool(self.get_fsspec_fs().isdir(self.bucket)) + if not is_dir: + raise RuntimeError(f"'{self.bucket}' is not a valid bucket.") except Exception as e: - raise ConnectorError(str(e)) + raise handle_s3fs_exception(e) from e return True diff --git a/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/exceptions.py b/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/exceptions.py index 1453a8f2c..bab073573 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/exceptions.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/exceptions.py @@ -7,16 +7,22 @@ def handle_dropbox_exception(e: DropboxException) -> ConnectorError: - user_msg = "" + user_msg = "Error from Dropbox while testing connection: " if isinstance(e, ExcAuthError): if isinstance(e.error, AuthError): if e.error.is_expired_access_token(): - user_msg = "Expired access token" + user_msg += ( + "Expired access token, please regenerate it " + "through the Dropbox console." + ) elif e.error.is_invalid_access_token(): - user_msg = "Invalid access token" + user_msg += ( + "Invalid access token, please enter a valid token " + "from the Dropbox console." + ) else: - user_msg = e.error._tag + user_msg += e.error._tag elif isinstance(e, ApiError): if e.user_message_text is not None: - user_msg = e.user_message_text + user_msg += e.user_message_text return ConnectorError(message=user_msg, treat_as_user_message=True) diff --git a/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/zs_dropbox.py b/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/zs_dropbox.py index 797198caa..e7d5a7d37 100644 --- a/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/zs_dropbox.py +++ b/unstract/connectors/src/unstract/connectors/filesystems/zs_dropbox/zs_dropbox.py @@ -69,11 +69,9 @@ def test_credentials(self) -> bool: # self.get_fsspec_fs().connect() self.get_fsspec_fs().ls("") except DropboxException as e: - logger.error(f"Test creds failed: {e}") - raise handle_dropbox_exception(e) + raise handle_dropbox_exception(e) from e except Exception as e: - logger.error(f"Test creds failed: {e}") - raise ConnectorError(str(e)) + raise ConnectorError(f"Error while connecting to Dropbox: {str(e)}") from e return True @staticmethod From f705ee6cefca4c1cadf42ec8d423e80fe82dcb1a Mon Sep 17 00:00:00 2001 From: Rahul Johny <116638720+johnyrahul@users.noreply.github.com> Date: Thu, 18 Jul 2024 14:03:58 +0530 Subject: [PATCH 2/3] Shared tool export fix (#491) --- .../prompt_studio_registry/prompt_studio_registry_helper.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py b/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py index 67f84b450..8d25f3f40 100644 --- a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py +++ b/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py @@ -176,7 +176,6 @@ def update_or_create_psr_tool( obj, created = PromptStudioRegistry.objects.update_or_create( custom_tool=custom_tool, created_by=custom_tool.created_by, - modified_by=custom_tool.modified_by, defaults={ "name": custom_tool.tool_name, "tool_property": 
properties.to_dict(), @@ -190,7 +189,7 @@ def update_or_create_psr_tool( logger.info(f"PSR {obj.prompt_registry_id} was created") else: logger.info(f"PSR {obj.prompt_registry_id} was updated") - + obj.modified_by = custom_tool.modified_by obj.shared_to_org = shared_with_org if not shared_with_org: obj.shared_users.clear() From 9e27a4eeb654fc4b03891a7dacf72c3f0171affd Mon Sep 17 00:00:00 2001 From: Deepak K <89829542+Deepak-Kesavan@users.noreply.github.com> Date: Thu, 18 Jul 2024 14:17:53 +0530 Subject: [PATCH 3/3] [FIX] Made workflow to use latest tool version (#499) Made workflow to use latest tool version --- .../prompt_studio_registry/constants.py | 3 --- .../prompt_studio_registry_helper.py | 20 +++---------------- 2 files changed, 3 insertions(+), 20 deletions(-) diff --git a/backend/prompt_studio/prompt_studio_registry/constants.py b/backend/prompt_studio/prompt_studio_registry/constants.py index 6dbde1c09..7fea2c73c 100644 --- a/backend/prompt_studio/prompt_studio_registry/constants.py +++ b/backend/prompt_studio/prompt_studio_registry/constants.py @@ -89,9 +89,6 @@ class JsonSchemaKey: ENABLE_CHALLENGE = "enable_challenge" CHALLENGE_LLM = "challenge_llm" ENABLE_SINGLE_PASS_EXTRACTION = "enable_single_pass_extraction" - IMAGE_URL = "image_url" - IMAGE_NAME = "image_name" - IMAGE_TAG = "image_tag" SUMMARIZE_PROMPT = "summarize_prompt" SUMMARIZE_AS_SOURCE = "summarize_as_source" ENABLE_HIGHLIGHT = "enable_highlight" diff --git a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py b/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py index 8d25f3f40..ef8ecd4aa 100644 --- a/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py +++ b/backend/prompt_studio/prompt_studio_registry/prompt_studio_registry_helper.py @@ -119,25 +119,14 @@ def get_tool_by_prompt_registry_id( f"ID {prompt_registry_id}: {e} " ) return None - # The below properties are introduced after 0.20.0 - # So defaulting to 0.20.0 if the properties are not found - image_url = prompt_registry_tool.tool_metadata.get( - JsonSchemaKey.IMAGE_URL, "docker:unstract/tool-structure:0.0.20" - ) - image_name = prompt_registry_tool.tool_metadata.get( - JsonSchemaKey.IMAGE_NAME, "unstract/tool-structure" - ) - image_tag = prompt_registry_tool.tool_metadata.get( - JsonSchemaKey.IMAGE_TAG, "0.0.20" - ) return Tool( tool_uid=prompt_registry_tool.prompt_registry_id, properties=Properties.from_dict(prompt_registry_tool.tool_property), spec=Spec.from_dict(prompt_registry_tool.tool_spec), icon=prompt_registry_tool.icon, - image_url=image_url, - image_name=image_name, - image_tag=image_tag, + image_url=settings.STRUCTURE_TOOL_IMAGE_URL, + image_name=settings.STRUCTURE_TOOL_IMAGE_NAME, + image_tag=settings.STRUCTURE_TOOL_IMAGE_TAG, ) @staticmethod @@ -241,9 +230,6 @@ def frame_export_json( export_metadata[JsonSchemaKey.DESCRIPTION] = tool.description export_metadata[JsonSchemaKey.AUTHOR] = tool.author export_metadata[JsonSchemaKey.TOOL_ID] = str(tool.tool_id) - export_metadata[JsonSchemaKey.IMAGE_URL] = settings.STRUCTURE_TOOL_IMAGE_URL - export_metadata[JsonSchemaKey.IMAGE_NAME] = settings.STRUCTURE_TOOL_IMAGE_NAME - export_metadata[JsonSchemaKey.IMAGE_TAG] = settings.STRUCTURE_TOOL_IMAGE_TAG default_llm_profile = ProfileManager.get_default_llm_profile(tool) challenge_llm_instance: Optional[AdapterInstance] = tool.challenge_llm