diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 52c68eb00..deb24de93 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -57,7 +57,7 @@ repos:
           unstract/flags/src/unstract/flags/evaluation_.*\.py|
         )$
   - repo: https://github.com/pycqa/flake8
-    rev: 7.0.0
+    rev: 7.1.0
    hooks:
      - id: flake8
        args: [--max-line-length=88]
@@ -96,7 +96,7 @@ repos:
        types:
          - python
   - repo: https://github.com/gitleaks/gitleaks
-    rev: v8.18.3
+    rev: v8.18.4
    hooks:
      - id: gitleaks
  # - repo: https://github.com/hadolint/hadolint
diff --git a/backend/backend/urls.py b/backend/backend/urls.py
index 725a8f7b6..9a4f70bc5 100644
--- a/backend/backend/urls.py
+++ b/backend/backend/urls.py
@@ -30,6 +30,7 @@
     path("", include("tool_instance.urls")),
     path("", include("pipeline.urls")),
     path("", include("apps.urls")),
+    path("", include("feature_flag.urls")),
     path("workflow/", include("workflow_manager.urls")),
     path("platform/", include("platform_settings.urls")),
     path("api/", include("api.urls")),
diff --git a/backend/feature_flag/urls.py b/backend/feature_flag/urls.py
index c90426d1e..3952e27f9 100644
--- a/backend/feature_flag/urls.py
+++ b/backend/feature_flag/urls.py
@@ -3,9 +3,20 @@
 This module defines the URL patterns for the feature_flags app.
 """
 
-import feature_flag.views as views
 from django.urls import path
+from feature_flag.views import FeatureFlagViewSet
+from rest_framework.urlpatterns import format_suffix_patterns
 
-urlpatterns = [
-    path("evaluate/", views.evaluate_feature_flag, name="evaluate_feature_flag"),
-]
+feature_flags_list = FeatureFlagViewSet.as_view(
+    {
+        "post": "evaluate",
+        "get": "list",
+    }
+)
+
+urlpatterns = format_suffix_patterns(
+    [
+        path("evaluate/", feature_flags_list, name="evaluate_feature_flag"),
+        path("flags/", feature_flags_list, name="list_feature_flags"),
+    ]
+)
diff --git a/backend/feature_flag/views.py b/backend/feature_flag/views.py
index 6e155f5e3..ab318c112 100644
--- a/backend/feature_flag/views.py
+++ b/backend/feature_flag/views.py
@@ -6,52 +6,43 @@
 import logging
 
-from rest_framework import status
-from rest_framework.decorators import api_view
-from rest_framework.request import Request
+from rest_framework import status, viewsets
 from rest_framework.response import Response
-
-from unstract.flags.client import EvaluationClient
+from utils.request.feature_flag import check_feature_flag_status, list_all_flags
 
 logger = logging.getLogger(__name__)
 
 
-@api_view(["POST"])
-def evaluate_feature_flag(request: Request) -> Response:
-    """Function to evaluate the feature flag.
-
-    To-Do: Refactor to a class based view, use serializers (DRF).
+class FeatureFlagViewSet(viewsets.ViewSet): + """A simple ViewSet for evaluating feature flag.""" - Args: - request: request object + def evaluate(self, request): + try: + flag_key = request.data.get("flag_key") - Returns: - evaluate response - """ - try: - namespace_key = request.data.get("namespace_key") - flag_key = request.data.get("flag_key") - entity_id = request.data.get("entity_id") - context = request.data.get("context") + if not flag_key: + return Response( + {"message": "Request parameters are missing."}, + status=status.HTTP_400_BAD_REQUEST, + ) - if not namespace_key or not flag_key or not entity_id: + flag_enabled = check_feature_flag_status(flag_key) + return Response({"flag_status": flag_enabled}, status=status.HTTP_200_OK) + except Exception as e: + logger.error("No response from server: %s", e) return Response( - {"message": "Request paramteres are missing."}, - status=status.HTTP_400_BAD_REQUEST, + {"message": "No response from server"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, ) - evaluation_client = EvaluationClient() - response = evaluation_client.boolean_evaluate_feature_flag( - namespace_key=namespace_key, - flag_key=flag_key, - entity_id=entity_id, - context=context, - ) - - return Response({"enabled": response}, status=status.HTTP_200_OK) - except Exception as e: - logger.error("No response from server: %s", e) - return Response( - {"message": "No response from server"}, - status=status.HTTP_500_INTERNAL_SERVER_ERROR, - ) + def list(self, request): + try: + namespace_key = request.query_params.get("namespace", "default") + feature_flags = list_all_flags(namespace_key) + return Response({"feature_flags": feature_flags}, status=status.HTTP_200_OK) + except Exception as e: + logger.error("No response from server: %s", e) + return Response( + {"message": "No response from server"}, + status=status.HTTP_500_INTERNAL_SERVER_ERROR, + ) diff --git a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py b/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py index 00d8d4309..ae6dbfde6 100644 --- a/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py +++ b/backend/prompt_studio/prompt_studio_core/prompt_studio_helper.py @@ -302,7 +302,7 @@ def index_document( """ tool: CustomTool = CustomTool.objects.get(pk=tool_id) if is_summary: - profile_manager = ProfileManager.objects.get( + profile_manager: ProfileManager = ProfileManager.objects.get( prompt_studio_tool=tool, is_summarize_llm=True ) default_profile = profile_manager diff --git a/backend/prompt_studio/prompt_studio_core/urls.py b/backend/prompt_studio/prompt_studio_core/urls.py index 3a32f63c4..0abc1c436 100644 --- a/backend/prompt_studio/prompt_studio_core/urls.py +++ b/backend/prompt_studio/prompt_studio_core/urls.py @@ -1,4 +1,6 @@ +from django.db import transaction from django.urls import path +from django.utils.decorators import method_decorator from rest_framework.urlpatterns import format_suffix_patterns from .views import PromptStudioCoreView @@ -77,7 +79,9 @@ ), path( "prompt-studio/index-document/", - prompt_studio_prompt_index, + method_decorator(transaction.non_atomic_requests)( + prompt_studio_prompt_index + ), name="prompt-studio-prompt-index", ), path( diff --git a/backend/prompt_studio/prompt_studio_core/views.py b/backend/prompt_studio/prompt_studio_core/views.py index 235e266b4..9093efd42 100644 --- a/backend/prompt_studio/prompt_studio_core/views.py +++ b/backend/prompt_studio/prompt_studio_core/views.py @@ -189,7 +189,7 @@ def make_profile_default(self, 
request: HttpRequest, pk: Any = None) -> Response data={"default_profile": profile_manager.profile_id}, ) - @action(detail=True, methods=["get"]) + @action(detail=True, methods=["post"]) def index_document(self, request: HttpRequest, pk: Any = None) -> Response: """API Entry point method to index input file. diff --git a/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py b/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py index 69fd5ba5b..49cae9141 100644 --- a/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py +++ b/backend/prompt_studio/prompt_studio_index_manager/prompt_studio_index_helper.py @@ -1,6 +1,7 @@ import json import logging +from django.db import transaction from prompt_studio.prompt_profile_manager.models import ProfileManager from prompt_studio.prompt_studio_core.exceptions import IndexingAPIError from prompt_studio.prompt_studio_document_manager.models import DocumentManager @@ -18,46 +19,48 @@ def handle_index_manager( profile_manager: ProfileManager, doc_id: str, ) -> IndexManager: - document: DocumentManager = DocumentManager.objects.get(pk=document_id) + try: - index_id = "raw_index_id" - if is_summary: - index_id = "summarize_index_id" + with transaction.atomic(): - args: dict[str, str] = dict() - args["document_manager"] = document - args["profile_manager"] = profile_manager + document: DocumentManager = DocumentManager.objects.get(pk=document_id) - try: - # Create or get the existing record for this document and - # profile combo - index_manager, success = IndexManager.objects.get_or_create(**args) - - if success: - logger.info( - f"Index manager doc_id: {doc_id} for " - f"profile {profile_manager.profile_id} created" - ) - else: - logger.info( - f"Index manager doc_id: {doc_id} for " - f"profile {profile_manager.profile_id} updated" - ) - - index_ids = index_manager.index_ids_history - index_ids_list = json.loads(index_ids) if index_ids else [] - if doc_id not in index_ids: - index_ids_list.append(doc_id) - - args[index_id] = doc_id - args["index_ids_history"] = json.dumps(index_ids_list) - - # Update the record with the index id - result: IndexManager = IndexManager.objects.filter( - index_manager_id=index_manager.index_manager_id - ).update(**args) + index_id = "raw_index_id" + if is_summary: + index_id = "summarize_index_id" + + args: dict[str, str] = dict() + args["document_manager"] = document + args["profile_manager"] = profile_manager + + # Create or get the existing record for this document and + # profile combo + index_manager, success = IndexManager.objects.get_or_create(**args) + if success: + logger.info( + f"Index manager doc_id: {doc_id} for " + f"profile {profile_manager.profile_id} created" + ) + else: + logger.info( + f"Index manager doc_id: {doc_id} for " + f"profile {profile_manager.profile_id} updated" + ) + + index_ids = index_manager.index_ids_history + index_ids_list = json.loads(index_ids) if index_ids else [] + if doc_id not in index_ids: + index_ids_list.append(doc_id) + + args[index_id] = doc_id + args["index_ids_history"] = json.dumps(index_ids_list) + + # Update the record with the index id + result: IndexManager = IndexManager.objects.filter( + index_manager_id=index_manager.index_manager_id + ).update(**args) + return result except Exception as e: + transaction.rollback() raise IndexingAPIError("Error updating indexing status") from e - - return result diff --git a/backend/sample.env b/backend/sample.env index c5901856b..7c33854e2 100644 --- 
a/backend/sample.env +++ b/backend/sample.env @@ -89,9 +89,9 @@ PROMPT_PORT=3003 PROMPT_STUDIO_FILE_PATH=/app/prompt-studio-data # Structure Tool -STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.25" +STRUCTURE_TOOL_IMAGE_URL="docker:unstract/tool-structure:0.0.26" STRUCTURE_TOOL_IMAGE_NAME="unstract/tool-structure" -STRUCTURE_TOOL_IMAGE_TAG="0.0.25" +STRUCTURE_TOOL_IMAGE_TAG="0.0.26" # Feature Flags EVALUATION_SERVER_IP=localhost diff --git a/backend/tenant_account/migrations/0001_initial.py b/backend/tenant_account/migrations/0001_initial.py index cad92899c..9cf3e1e69 100644 --- a/backend/tenant_account/migrations/0001_initial.py +++ b/backend/tenant_account/migrations/0001_initial.py @@ -14,8 +14,9 @@ class Migration(migrations.Migration): ] operations = [ + # Updated the name here as the 002, 0002 step is just name change migrations.CreateModel( - name="User", + name="OrganizationMember", fields=[ ( "user_ptr", @@ -28,6 +29,8 @@ class Migration(migrations.Migration): to=settings.AUTH_USER_MODEL, ), ), + # Added column which is used in 0002 here + ("role", models.CharField(default="admin")), ], options={ "verbose_name": "user", diff --git a/backend/tenant_account/migrations/0002_organizationmember_delete_user.py b/backend/tenant_account/migrations/0002_organizationmember_delete_user.py index 6a41c4aa8..68b4a63a2 100644 --- a/backend/tenant_account/migrations/0002_organizationmember_delete_user.py +++ b/backend/tenant_account/migrations/0002_organizationmember_delete_user.py @@ -1,9 +1,6 @@ # Generated by Django 4.2.1 on 2023-08-21 11:12 -import django.contrib.auth.models -import django.db.models.deletion -from django.conf import settings -from django.db import migrations, models +from django.db import migrations class Migration(migrations.Migration): @@ -13,33 +10,36 @@ class Migration(migrations.Migration): ] operations = [ - migrations.CreateModel( - name="OrganizationMember", - fields=[ - ( - "user_ptr", - models.OneToOneField( - auto_created=True, - on_delete=django.db.models.deletion.CASCADE, - parent_link=True, - primary_key=True, - serialize=False, - to=settings.AUTH_USER_MODEL, - ), - ), - ("role", models.CharField(default="admin")), - ], - options={ - "verbose_name": "user", - "verbose_name_plural": "users", - "abstract": False, - }, - bases=("account.user",), - managers=[ - ("objects", django.contrib.auth.models.UserManager()), - ], - ), - migrations.DeleteModel( - name="User", - ), + # # Commenting out here as this is taken care in 0001 + # migrations.CreateModel( + # name="OrganizationMember", + # fields=[ + # ( + # "user_ptr", + # models.OneToOneField( + # auto_created=True, + # on_delete=django.db.models.deletion.CASCADE, + # parent_link=True, + # primary_key=True, + # serialize=False, + # to=settings.AUTH_USER_MODEL, + # ), + # ), + # ("role", models.CharField(default="admin")), + # ], + # options={ + # "verbose_name": "user", + # "verbose_name_plural": "users", + # "abstract": False, + # }, + # bases=("account.user",), + # managers=[ + # ("objects", django.contrib.auth.models.UserManager()), + # ], + # ), + # # https://www.geeksforgeeks.org/what-is-access-exclusive-lock-mode-in-postgreysql/ + # # commenting drop table to ignore AccesExclusive Lock + # migrations.DeleteModel( + # name="User", + # ), ] diff --git a/backend/tool_instance/exceptions.py b/backend/tool_instance/exceptions.py index 75c6d652e..69c2c26a5 100644 --- a/backend/tool_instance/exceptions.py +++ b/backend/tool_instance/exceptions.py @@ -38,4 +38,9 @@ class 
ToolInstantiationError(ToolInstanceBaseException): class BadRequestException(ToolInstanceBaseException): status_code = 400 - default_detail = "Invalid input" + default_detail = "Invalid input." + + +class ToolSettingValidationError(APIException): + status_code = 400 + default_detail = "Error while validating tool's setting." diff --git a/backend/tool_instance/tool_instance_helper.py b/backend/tool_instance/tool_instance_helper.py index 85c729c79..16000f7c2 100644 --- a/backend/tool_instance/tool_instance_helper.py +++ b/backend/tool_instance/tool_instance_helper.py @@ -1,7 +1,6 @@ import logging import os import uuid -from json import JSONDecodeError from typing import Any, Optional from account.models import User @@ -10,10 +9,10 @@ from connector.connector_instance_helper import ConnectorInstanceHelper from django.core.exceptions import PermissionDenied from django.core.exceptions import ValidationError as DjangoValidationError -from jsonschema.exceptions import UnknownType from jsonschema.exceptions import ValidationError as JSONValidationError from prompt_studio.prompt_studio_registry.models import PromptStudioRegistry from tool_instance.constants import JsonSchemaKey +from tool_instance.exceptions import ToolSettingValidationError from tool_instance.models import ToolInstance from tool_instance.tool_processor import ToolProcessor from unstract.adapters.enums import AdapterTypes @@ -333,7 +332,7 @@ def reorder_tool_instances(instances_to_reorder: list[uuid.UUID]) -> None: @staticmethod def validate_tool_settings( user: User, tool_uid: str, tool_meta: dict[str, Any] - ) -> tuple[bool, str]: + ) -> bool: """Function to validate Tools settings.""" # check if exported tool is valid for the user who created workflow @@ -351,14 +350,22 @@ def validate_tool_settings( ) try: DefaultsGeneratingValidator(schema_json).validate(tool_meta) - return True, "" - except JSONDecodeError as e: - return False, str(e) except JSONValidationError as e: - logger.error(e) - return False, str(tool_name + ": " + e.schema["description"]) - except UnknownType as e: - return False, str(e) + logger.error(e, stack_info=True, exc_info=True) + err_msg = e.message + # TODO: Support other JSON validation errors or consider following + # https://github.com/networknt/json-schema-validator/blob/master/doc/cust-msg.md + if e.validator == "required": + for validator_val in e.validator_value: + required_prop = e.schema.get("properties").get(validator_val) + required_display_name = required_prop.get("title") + err_msg = err_msg.replace(validator_val, required_display_name) + else: + logger.warning(f"Unformatted exception sent to user: {err_msg}") + raise ToolSettingValidationError( + f"Error validating tool settings for '{tool_name}': {err_msg}" + ) + return True @staticmethod def validate_adapter_permissions( diff --git a/backend/utils/request/feature_flag.py b/backend/utils/request/feature_flag.py index a62d5a790..d258b377e 100644 --- a/backend/utils/request/feature_flag.py +++ b/backend/utils/request/feature_flag.py @@ -2,7 +2,8 @@ from typing import Optional -from unstract.flags.client import EvaluationClient +from unstract.flags.client.evaluation import EvaluationClient +from unstract.flags.client.flipt import FliptClient def check_feature_flag_status( @@ -38,3 +39,17 @@ def check_feature_flag_status( except Exception as e: print(f"Error: {str(e)}") return False + + +def list_all_flags( + namespace_key: str, +) -> dict: + try: + flipt_client = FliptClient() + response = flipt_client.list_feature_flags( + 
namespace_key=namespace_key, + ) + return response + except Exception as e: + print(f"Error: {str(e)}") + return {} diff --git a/backend/workflow_manager/endpoint/database_utils.py b/backend/workflow_manager/endpoint/database_utils.py index 28587a427..f79f87c6d 100644 --- a/backend/workflow_manager/endpoint/database_utils.py +++ b/backend/workflow_manager/endpoint/database_utils.py @@ -65,13 +65,6 @@ def get_sql_values_for_query( sql_values[column] = f"parse_json($${values[column]}$$)" else: sql_values[column] = f"{values[column]}" - elif cls_name == DBConnectionClass.BIGQUERY: - col = column.lower() - type_x = column_types[col] - if type_x in BigQuery.COLUMN_TYPES: - sql_values[column] = f"{type_x}({values[column]})" - else: - sql_values[column] = f"{values[column]}" else: # Default to Other SQL DBs # TODO: Handle numeric types with no quotes diff --git a/backend/workflow_manager/endpoint/tests/__init__.py b/backend/workflow_manager/endpoint/tests/__init__.py index fca2b2401..6ec742384 100644 --- a/backend/workflow_manager/endpoint/tests/__init__.py +++ b/backend/workflow_manager/endpoint/tests/__init__.py @@ -1,3 +1,3 @@ -from backend.celery import app as celery_app +from backend.celery_service import app as celery_app # type: ignore __all__ = ["celery_app"] diff --git a/backend/workflow_manager/workflow/exceptions.py b/backend/workflow_manager/workflow/exceptions.py index fb6d5a1d9..39087ab8e 100644 --- a/backend/workflow_manager/workflow/exceptions.py +++ b/backend/workflow_manager/workflow/exceptions.py @@ -54,8 +54,3 @@ class InternalException(APIException): class WorkflowExecutionNotExist(APIException): status_code = 404 default_detail = "Workflow execution does not exist" - - -class ToolValidationError(APIException): - status_code = 400 - default_detail = "Tool validation error" diff --git a/backend/workflow_manager/workflow/execution.py b/backend/workflow_manager/workflow/execution.py index 9160deccb..b249e53dc 100644 --- a/backend/workflow_manager/workflow/execution.py +++ b/backend/workflow_manager/workflow/execution.py @@ -198,38 +198,40 @@ def execute(self, single_step: bool = False) -> None: if single_step: execution_type = ExecutionType.STEP - if self.compilation_result["success"] is True: - if ( - self.execution_mode == WorkflowExecution.Mode.INSTANT - or self.execution_mode == WorkflowExecution.Mode.QUEUE - ): - start_time = time.time() - try: - self.execute_workflow(execution_type=execution_type) - end_time = time.time() - execution_time = end_time - start_time - except StopExecution as exception: - end_time = time.time() - execution_time = end_time - start_time - logger.info(f"Execution {self.execution_id} stopped") - raise exception - except Exception as exception: - end_time = time.time() - execution_time = end_time - start_time - message = str(exception)[:EXECUTION_ERROR_LENGTH] - logger.info( - f"Execution {self.execution_id} in {execution_time}s, " - f" Error {exception}" - ) - raise WorkflowExecutionError(message) from exception - else: - error_message = f"Unknown Execution Method {self.execution_mode}" - raise WorkflowExecutionError(error_message) + if self.compilation_result["success"] is False: + error_message = ( + f"Errors while compiling workflow " + f"{self.compilation_result['problems'][0]}" + ) + raise WorkflowExecutionError(error_message) - else: - error_message = f"Errors while compiling workflow {self.compilation_result['problems'][0]}" # noqa + if self.execution_mode not in ( + WorkflowExecution.Mode.INSTANT, + WorkflowExecution.Mode.QUEUE, + ): + 
error_message = f"Unknown Execution Method {self.execution_mode}" raise WorkflowExecutionError(error_message) + start_time = time.time() + try: + self.execute_workflow(execution_type=execution_type) + end_time = time.time() + execution_time = end_time - start_time + except StopExecution as exception: + end_time = time.time() + execution_time = end_time - start_time + logger.info(f"Execution {self.execution_id} stopped") + raise exception + except Exception as exception: + end_time = time.time() + execution_time = end_time - start_time + message = str(exception)[:EXECUTION_ERROR_LENGTH] + logger.info( + f"Execution {self.execution_id} in {execution_time}s, " + f" Error {exception}" + ) + raise WorkflowExecutionError(message) from exception + def publish_initial_workflow_logs(self, total_files: int) -> None: """Publishes the initial logs for the workflow. diff --git a/backend/workflow_manager/workflow/workflow_helper.py b/backend/workflow_manager/workflow/workflow_helper.py index 0849b07ef..8b94d8edd 100644 --- a/backend/workflow_manager/workflow/workflow_helper.py +++ b/backend/workflow_manager/workflow/workflow_helper.py @@ -37,7 +37,6 @@ from workflow_manager.workflow.exceptions import ( InvalidRequest, TaskDoesNotExistError, - ToolValidationError, WorkflowDoesNotExistError, WorkflowExecutionNotExist, ) @@ -217,13 +216,11 @@ def validate_tool_instances_meta( tool_instances: list[ToolInstance], ) -> None: for tool in tool_instances: - valid, message = ToolInstanceHelper.validate_tool_settings( + ToolInstanceHelper.validate_tool_settings( user=tool.workflow.created_by, tool_uid=tool.tool_id, tool_meta=tool.metadata, ) - if not valid: - raise ToolValidationError(message) @staticmethod def run_workflow( diff --git a/frontend/src/assets/index.js b/frontend/src/assets/index.js index eb10ae3fc..5115b0663 100644 --- a/frontend/src/assets/index.js +++ b/frontend/src/assets/index.js @@ -31,6 +31,7 @@ import { ReactComponent as OrgSelection } from "./org-selection.svg"; import { ReactComponent as RedGradCircle } from "./red-grad-circle.svg"; import { ReactComponent as YellowGradCircle } from "./yellow-grad-circle.svg"; import { ReactComponent as ExportToolIcon } from "./export-tool.svg"; +import { ReactComponent as PlaceholderImg } from "./placeholder.svg"; export { SunIcon, @@ -66,4 +67,5 @@ export { RedGradCircle, YellowGradCircle, ExportToolIcon, + PlaceholderImg, }; diff --git a/frontend/src/assets/placeholder.svg b/frontend/src/assets/placeholder.svg new file mode 100644 index 000000000..fc76cc7a7 --- /dev/null +++ b/frontend/src/assets/placeholder.svg @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/frontend/src/components/navigations/side-nav-bar/SideNavBar.jsx b/frontend/src/components/navigations/side-nav-bar/SideNavBar.jsx index 9c1add0ce..2e881a32b 100644 --- a/frontend/src/components/navigations/side-nav-bar/SideNavBar.jsx +++ b/frontend/src/components/navigations/side-nav-bar/SideNavBar.jsx @@ -7,7 +7,6 @@ const { Sider } = Layout; import Workflows from "../../../assets/Workflows.svg"; import apiDeploy from "../../../assets/api-deployments.svg"; -import appdev from "../../../assets/appdev.svg"; import CustomTools from "../../../assets/custom-tools-icon.svg"; import EmbeddingIcon from "../../../assets/embedding.svg"; import etl from "../../../assets/etl.svg"; @@ -18,10 +17,17 @@ import VectorDbIcon from "../../../assets/vector-db.svg"; import TextExtractorIcon from "../../../assets/text-extractor.svg"; import { useSessionStore } from "../../../store/session-store"; 
+let getMenuItem;
+try {
+  getMenuItem = require("../../../plugins/app-deployments/app-deployment-components/helpers/getMenuItem");
+} catch (err) {
+  // Plugin unavailable.
+}
+
 
 const SideNavBar = ({ collapsed }) => {
   const navigate = useNavigate();
   const { sessionDetails } = useSessionStore();
-  const { orgName } = sessionDetails;
+  const { orgName, flags } = sessionDetails;
   const data = [
     {
@@ -36,15 +42,6 @@ const SideNavBar = ({ collapsed }) => {
         path: `/${orgName}/api`,
         active: window.location.pathname.startsWith(`/${orgName}/api`),
       },
-      {
-        id: 1.2,
-        title: "App Deployments",
-        description: "Standalone unstructured data apps",
-        icon: BranchesOutlined,
-        image: appdev,
-        path: `/${orgName}/app`,
-        active: window.location.pathname.startsWith(`/${orgName}/app`),
-      },
       {
         id: 1.3,
         title: "ETL Pipelines",
@@ -147,6 +144,10 @@ const SideNavBar = ({ collapsed }) => {
     },
   ];
+
+  if (getMenuItem && flags.app_deployment) {
+    data[0].subMenu.splice(1, 0, getMenuItem.default(orgName));
+  }
 
   return (
[Extraction residue omitted: the remaining JSX context lines of this hunk and the next file's diff — the removal of what appears to be a standalone app-deployment creation modal (a Modal with okText "Save and Deploy", width 800, maskClosable disabled, and form fields "Project Name", "Workflow", and "Frequency of runs", using Select/Option and Input/TextArea) — lost their markup during extraction and cannot be reliably reconstructed.]
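
Usage sketch for the new feature-flag endpoints: backend/feature_flag/urls.py routes POST evaluate/ and GET flags/ to FeatureFlagViewSet, which is mounted at the project root in backend/backend/urls.py. The base URL, any tenant/organization prefix, and authentication below are deployment-specific assumptions, and "app_deployment" is only an example key (it mirrors the frontend's flags.app_deployment check):

    import requests

    BASE_URL = "http://localhost:8000"  # assumption: local dev server; add auth/tenant prefix as required

    # POST /evaluate/ -> FeatureFlagViewSet.evaluate: expects "flag_key" in the body,
    # returns {"flag_status": <bool>}, or HTTP 400 when "flag_key" is missing.
    resp = requests.post(f"{BASE_URL}/evaluate/", json={"flag_key": "app_deployment"})
    print(resp.status_code, resp.json())

    # GET /flags/?namespace=<key> -> FeatureFlagViewSet.list: "namespace" defaults to
    # "default" and the response is {"feature_flags": ...} built by list_all_flags().
    resp = requests.get(f"{BASE_URL}/flags/", params={"namespace": "default"})
    print(resp.status_code, resp.json())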
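
The reworked ToolInstanceHelper.validate_tool_settings rewrites jsonschema "required" errors so the message shows a property's human-readable "title" rather than its raw key before raising ToolSettingValidationError. A self-contained sketch of that rewriting idea, using the stock Draft7Validator in place of the project's DefaultsGeneratingValidator and a made-up schema/property name:

    from jsonschema import Draft7Validator
    from jsonschema.exceptions import ValidationError

    schema = {
        "type": "object",
        "properties": {
            # Hypothetical tool setting used only for illustration.
            "outputFolder": {"type": "string", "title": "Output folder"},
        },
        "required": ["outputFolder"],
    }

    try:
        Draft7Validator(schema).validate({})  # settings payload missing the required key
    except ValidationError as e:
        err_msg = e.message  # "'outputFolder' is a required property"
        if e.validator == "required":
            for validator_val in e.validator_value:
                prop = e.schema.get("properties", {}).get(validator_val, {})
                title = prop.get("title")
                if title:
                    err_msg = err_msg.replace(validator_val, title)
        print(err_msg)  # "'Output folder' is a required property"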
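
The index-document route is now excluded from request-wide transactions (method_decorator(transaction.non_atomic_requests) in prompt_studio_core/urls.py) while prompt_studio_index_helper scopes only its own writes with transaction.atomic(). A minimal function-based sketch of that combination; the view name and body are illustrative, not the project's actual code:

    from django.db import transaction
    from django.http import JsonResponse


    @transaction.non_atomic_requests
    def index_document(request):
        # Long-running indexing work runs outside the transaction that
        # ATOMIC_REQUESTS would otherwise hold open for the whole request.
        ...
        with transaction.atomic():
            # Only the writes that must succeed or fail together are wrapped;
            # Django rolls this block back automatically if it raises.
            ...
        return JsonResponse({"status": "ok"})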