From ef3719a07ad2cc0c40d08b124b0bf0bb26e29c76 Mon Sep 17 00:00:00 2001 From: Callum Wells <68609181+swells2020@users.noreply.github.com> Date: Thu, 24 Aug 2023 10:07:44 +0100 Subject: [PATCH 01/10] [RPD-299] Create CLI stack remove command (#204) * [RPD-239] Improve the way the UI handles latency when contacting Azure services (#198) * [RPD-248] Add a basic example demonstrating how to use the API (#199) * updates docs * removes scratch.py * fixes typos * updates for comments * adds cli command to cli module * fix circular import (#201) * bumping to version v0.2.9 for release * adds tests --------- Co-authored-by: KirsoppJ <40233184+KirsoppJ@users.noreply.github.com> Co-authored-by: Jonathan Carlton --- RELEASE_NOTES.md | 18 +++++++++ docs/getting-started.md | 40 +++++++++++++++++- pyproject.toml | 2 +- src/matcha_ml/VERSION | 2 +- src/matcha_ml/cli/_validation.py | 4 ++ src/matcha_ml/cli/cli.py | 29 +++++++++++++ src/matcha_ml/core/__init__.py | 2 + src/matcha_ml/core/core.py | 4 ++ src/matcha_ml/runners/base_runner.py | 14 ++++--- tests/test_cli/test_stack.py | 56 +++++++++++++++++++++++++- tests/test_runners/test_base_runner.py | 14 ------- 11 files changed, 161 insertions(+), 24 deletions(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 4e473491..d0b7e4fc 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,3 +1,21 @@ +# v0.2.9 + +This is a minor release to address a bug and improve documentation based on the changes introduced in v0.2.8. + +Date: 23rd August 2023 + +## Bug Fixes + +* Fixed a circular import bug ([#201](https://github.com/fuzzylabs/matcha/pull/201)) + +## Documentation + +* Adds API-based examples to the getting started guide ([#119](https://github.com/fuzzylabs/matcha/pull/199)) + +See all changes here: https://github.com/fuzzylabs/matcha/compare/v0.2.8...v0.2.9 + +--- + # Stacks 📚 LLMs are all the rage at the moment, with new and improved models being released almost daily. 
These models are quite large (as implied by the name) and cannot be hosted on standard personal computers, therefore we need to use cloud infrastructure to manage and deploy these models. However, standing up and managing these cloud resources isn't typically the forte of a lot of those interested in LLMs. diff --git a/docs/getting-started.md b/docs/getting-started.md index bb9f3909..fe5174af 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -2,6 +2,8 @@ In this guide, we'll walk you through how to provision your first machine learning infrastructure to Azure, and then use that infrastructure to train and deploy a model. The model we're using is a movie recommender, and we picked this because it's one that beginners can get up and running with quickly. +There are two ways to interact with Matcha; via the CLI tool, or through the API. Throughout this guide we'll demonstrate how to get started using either method. + There are five things we'll cover: * [Pre-requisites](#pre-requisites): everything you need to set up before starting. @@ -78,16 +80,27 @@ When you run this command, you'll be taken to the Azure login screen in a web br Next, let's provision: +CLI: ```bash matcha provision ``` -Initially, Matcha will ask you a few questions about how you'd like your infrastructure to be set up. Specifically, it will ask for a _name_ for your infrastructure, a _region_ to deploy it to. Once these details are provided, Matcha will proceed to initialize a remote state manager and ask for a password. After that, it will go ahead of provision infrastructure. +> Note: users have the choice of passing optional arguments representing the location, prefix, and password parameters by using '--location', '--prefix', or '--password'. For example; `--location uksouth --prefix test123 --password strong_password`. 
+ +API: +```python +import matcha_ml.core as matcha + +matcha_state_object: MatchaState = matcha.provision(location="uksouth", prefix="test123", password="strong_password") +``` + +Initially, Matcha will ask you a few questions about how you'd like your infrastructure to be set up. Specifically, it will ask for a _location_ for your infrastructure, a _prefix_ to deploy it to. Once these details are provided, Matcha will proceed to initialize a remote state manager and ask for a password. After that, it will go ahead of provision infrastructure. > Note: provisioning can take up to 20 minutes. Once provisioning is completed, you can query Matcha, using the `get` command: +CLI: ```bash matcha get ``` @@ -152,6 +165,23 @@ Experiment tracker By default, Matcha will hide sensitive resource properties. If you need one of these properties, then you can add the `--show-sensitive` flag to your `get` command. +API: +```python +import matcha_ml.core as matcha + +matcha_state_object: MatchaState = matcha.get() +``` + +As with the CLI tool, users have the ability to 'get' specific resources by passing optional `resource_name` and `property_name` arguments to the get function, as demonstrated below: + +```python +import matcha_ml.core as matcha + +matcha_state_object: MatchaState = matcha.get(resource_name="experiment_tracker", property_name="flavor") +``` + +> Note: the `get()` method will return a `MatchaState` object which represents the provisioned state. The `MatchaState` object contains the `get_component()` method, which will return (where applicable) a `MatchaStateComponent` object representing the specified Matcha state component. In turn, each `MatchaStateComponent` object has a `find_property()` method that will allow the user to be able to access individual component properties. + # 🤝 Sharing resources You'll notice that a configuration file is create as part of the provisioning process - it's called `matcha.config.json`. 
This file stores the information necessary for Matcha to identify the resource group and storage container that holds the details of the provisioned resources. @@ -236,10 +266,18 @@ This will result in a score, which represents how strongly we recommend movie ID The final thing you'll want to do is decommission the infrastructure that Matcha has set up during this guide. Matcha includes a `destroy` command which will remove everything that has been provisioned, which avoids running up an Azure bill! +CLI: ```bash matcha destroy ``` +API: +```python +import matcha_ml.core as matcha + +matcha.destroy() +``` + > Note: that this command is irreversible will remove all the resources deployed by `matcha provision` including the resource group, so make sure you save any data you wish to keep before running this command. > > You may also notice that an additional resource has appeared in Azure called 'NetworkWatcherRG' (if it wasn't already there). This is a resource that is automatically provisioned by Azure in each region when there is in-coming traffic to a provisioned resource and isn't controlled by Matcha. More information can be found [here](https://learn.microsoft.com/en-us/azure/network-watcher/network-watcher-monitoring-overview) on how to manage or remove this resource. diff --git a/pyproject.toml b/pyproject.toml index b372dccf..a7b0118f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "matcha-ml" -version = "0.2.8" +version = "0.2.9" description = "Matcha: An open source tool for provisioning MLOps environments to the cloud." 
authors = ["FuzzyLabs "] license = "Apache-2.0" diff --git a/src/matcha_ml/VERSION b/src/matcha_ml/VERSION index a45be462..1866a362 100644 --- a/src/matcha_ml/VERSION +++ b/src/matcha_ml/VERSION @@ -1 +1 @@ -0.2.8 +0.2.9 diff --git a/src/matcha_ml/cli/_validation.py b/src/matcha_ml/cli/_validation.py index d9e4dadd..5afef1b7 100644 --- a/src/matcha_ml/cli/_validation.py +++ b/src/matcha_ml/cli/_validation.py @@ -4,6 +4,8 @@ from typer import BadParameter +from matcha_ml.cli.ui.print_messages import print_status +from matcha_ml.cli.ui.status_message_builders import build_status from matcha_ml.core._validation import is_valid_prefix, is_valid_region from matcha_ml.errors import MatchaInputError from matcha_ml.services import AzureClient @@ -66,6 +68,7 @@ def region_typer_callback(region: str) -> str: Returns: str: the region after checks are passed. """ + print_status(build_status("Validating region selection with Azure...")) if not region: return region @@ -90,6 +93,7 @@ def prefix_typer_callback(prefix: str) -> str: Returns: str: if valid, the prefix is returned. """ + print_status(build_status("Validating prefix selection with Azure...")) if not prefix: return prefix diff --git a/src/matcha_ml/cli/cli.py b/src/matcha_ml/cli/cli.py index 2bfaa837..8cc1ff2f 100644 --- a/src/matcha_ml/cli/cli.py +++ b/src/matcha_ml/cli/cli.py @@ -23,6 +23,7 @@ build_step_success_status, ) from matcha_ml.cli.ui.user_approval_functions import is_user_approved +from matcha_ml.core.core import stack_remove from matcha_ml.errors import MatchaError, MatchaInputError app = typer.Typer(no_args_is_help=True, pretty_exceptions_show_locals=False) @@ -266,5 +267,33 @@ def set(stack: str = typer.Argument("default")) -> None: raise typer.Exit() +@stack_app.command(help="Remove a module from the current Matcha stack.") +def remove(module: str = typer.Argument(None)) -> None: + """Remove a module from the current Matcha stack. + + Args: + module (str): the name of the module to be removed. 
+ """ + if module: + try: + stack_remove(module) + print_status( + build_status( + f"Matcha '{module}' module has been removed from the current stack." + ) + ) + except MatchaInputError as e: + print_error(str(e)) + raise typer.Exit() + except MatchaError as e: + print_error(str(e)) + raise typer.Exit() + else: + print_error( + "No module specified. Please run `matcha stack remove` again and provide the name of the module you wish to remove." + ) + raise typer.Exit() + + if __name__ == "__main__": app() diff --git a/src/matcha_ml/core/__init__.py b/src/matcha_ml/core/__init__.py index 97bac49d..86077f68 100644 --- a/src/matcha_ml/core/__init__.py +++ b/src/matcha_ml/core/__init__.py @@ -7,6 +7,7 @@ provision, remove_state_lock, stack_set, + stack_remove, ) __all__ = [ @@ -17,4 +18,5 @@ "destroy", "provision", "stack_set", + "stack_remove", ] diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index 5e3ea522..acdaab77 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -366,3 +366,7 @@ def stack_set(stack_name: str) -> None: ) MatchaConfigService.update(stack) + +def stack_remove(module_name: str) -> str: + """A placeholder for the stack remove logic in core.""" + return module_name \ No newline at end of file diff --git a/src/matcha_ml/runners/base_runner.py b/src/matcha_ml/runners/base_runner.py index 5875c94b..97ff75f8 100644 --- a/src/matcha_ml/runners/base_runner.py +++ b/src/matcha_ml/runners/base_runner.py @@ -1,7 +1,8 @@ """Run terraform templates to provision and deprovision resources.""" import os +from abc import abstractmethod from multiprocessing.pool import ThreadPool -from typing import Optional, Tuple +from typing import Any, Optional import typer @@ -18,7 +19,6 @@ TerraformConfig, TerraformService, ) -from matcha_ml.state.matcha_state import MatchaStateService SPINNER = "dots" @@ -183,10 +183,12 @@ def _destroy_terraform(self, msg: str = "") -> None: if tf_result.return_code != 0: raise 
MatchaTerraformError(tf_error=tf_result.std_err) - def provision(self) -> MatchaStateService: + @abstractmethod + def provision(self) -> Any: """Provision resources required for the deployment.""" - raise NotImplementedError + pass - def deprovision(self) -> None: + @abstractmethod + def deprovision(self) -> Any: """Destroy the provisioned resources.""" - raise NotImplementedError + pass diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index 29f763cb..846c6c7f 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -1,6 +1,7 @@ """Test suit to test the stack command and all its subcommands.""" import os +from unittest.mock import MagicMock, patch from typer.testing import CliRunner @@ -8,7 +9,7 @@ from matcha_ml.config import MatchaConfig, MatchaConfigService from matcha_ml.state.remote_state_manager import RemoteStateManager -INTERNAL_FUNCTION_STUB = "matcha_ml.core" +INTERNAL_FUNCTION_STUB = "matcha_ml.core.core" def test_cli_stack_command_help_option(runner: CliRunner) -> None: @@ -158,3 +159,56 @@ def test_stack_set_file_modified( assert "stack" in new_config_dict assert new_config_dict["stack"]["name"] == "llm" assert config_dict.items() <= new_config_dict.items() + + +def test_cli_stack_set_remove_help_option(runner: CliRunner) -> None: + """Tests the --help option for the cli stack remove sub-command. + + Args: + runner (CliRunner): typer CLI runner. + """ + result = runner.invoke(app, ["stack", "remove", "--help"]) + + assert result.exit_code == 0 + + assert "Remove a module from the current Matcha stack." in result.stdout + + +def test_cli_stack_remove_command_without_args(runner: CliRunner) -> None: + """Tests the --help option for the cli stack remove sub-command. + + Args: + runner (CliRunner): typer CLI runner. + """ + result = runner.invoke(app, ["stack", "remove"]) + + assert result.exit_code == 0 + + assert ( + "No module specified. 
Please run `matcha stack remove` again and provide the name\nof the module you wish to remove.\n" + in result.stdout + ) + + +@patch(f"{INTERNAL_FUNCTION_STUB}.stack_remove") +def test_cli_stack_remove_command_with_args( + mocked_stack_remove: MagicMock, + matcha_testing_directory: str, + runner: CliRunner, +) -> None: + """Tests the cli stack set sub-command with args. + + Args: + mocked_stack_remove (MagicMock): a mocked stack_remove function. + matcha_testing_directory (str): a temporary working directory. + runner (CliRunner): typer CLI runner. + """ + os.chdir(matcha_testing_directory) + result = runner.invoke(app, ["stack", "remove", "experiment_tracker"]) + + assert result.exit_code == 0 + assert mocked_stack_remove.assert_called_once + assert ( + "Matcha 'experiment_tracker' module has been removed from the current stack." + in result.stdout + ) diff --git a/tests/test_runners/test_base_runner.py b/tests/test_runners/test_base_runner.py index 77d30656..5cc43986 100644 --- a/tests/test_runners/test_base_runner.py +++ b/tests/test_runners/test_base_runner.py @@ -168,17 +168,3 @@ def test_destroy_terraform(capsys: SysCapture): str(exc_info.value) == "Terraform failed because of the following error: 'Destroy failed'." 
) - - -def test_provision(): - """Test provision function in BaseRunner class raises NotImplemented exception.""" - template_runner = BaseRunner() - with pytest.raises(NotImplementedError): - template_runner.provision() - - -def test_deprovision(): - """Test deprovision function in BaseRunner class raises NotImplemented exception.""" - template_runner = BaseRunner() - with pytest.raises(NotImplementedError): - template_runner.deprovision() From 040962389918967744b4d5d0d6a0d8770ee217cc Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Thu, 24 Aug 2023 10:20:04 +0100 Subject: [PATCH 02/10] [RPD-298] Create stack add command (#203) * Add CLI stack add command * Update cli command with implementation and test cli command --- src/matcha_ml/cli/cli.py | 30 ++++++++++++++++++- src/matcha_ml/core/__init__.py | 4 ++- src/matcha_ml/core/core.py | 16 +++++++++- tests/test_cli/test_stack.py | 55 ++++++++++++++++++++++++++++++++-- 4 files changed, 99 insertions(+), 6 deletions(-) diff --git a/src/matcha_ml/cli/cli.py b/src/matcha_ml/cli/cli.py index 8cc1ff2f..a668dcf7 100644 --- a/src/matcha_ml/cli/cli.py +++ b/src/matcha_ml/cli/cli.py @@ -23,7 +23,7 @@ build_step_success_status, ) from matcha_ml.cli.ui.user_approval_functions import is_user_approved -from matcha_ml.core.core import stack_remove +from matcha_ml.core.core import stack_add, stack_remove from matcha_ml.errors import MatchaError, MatchaInputError app = typer.Typer(no_args_is_help=True, pretty_exceptions_show_locals=False) @@ -267,6 +267,34 @@ def set(stack: str = typer.Argument("default")) -> None: raise typer.Exit() +@stack_app.command(help="Add a module to the stack.") +def add(module: str = typer.Argument(None)) -> None: + """Add a module to the stack for Matcha to provision. + + Args: + module (str): the name of the module to add (e.g. 'seldon'). 
+ """ + if module: + try: + stack_add(module) + print_status( + build_status( + f"Matcha '{module}' module has been added to the current stack." + ) + ) + except MatchaInputError as e: + print_error(str(e)) + raise typer.Exit() + except MatchaError as e: + print_error(str(e)) + raise typer.Exit() + else: + print_error( + "No module specified. Please run `matcha stack add` again and provide the name of the module you wish to add." + ) + raise typer.Exit() + + @stack_app.command(help="Remove a module from the current Matcha stack.") def remove(module: str = typer.Argument(None)) -> None: """Remove a module from the current Matcha stack. diff --git a/src/matcha_ml/core/__init__.py b/src/matcha_ml/core/__init__.py index 86077f68..9b1572be 100644 --- a/src/matcha_ml/core/__init__.py +++ b/src/matcha_ml/core/__init__.py @@ -6,8 +6,9 @@ get, provision, remove_state_lock, - stack_set, + stack_add, stack_remove, + stack_set, ) __all__ = [ @@ -18,5 +19,6 @@ "destroy", "provision", "stack_set", + "stack_add", "stack_remove", ] diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index acdaab77..24673d5c 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -367,6 +367,20 @@ def stack_set(stack_name: str) -> None: MatchaConfigService.update(stack) + +def stack_add(module: str) -> None: + """A function for adding a module by name to the stack. + + Args: + module (str): The name of the module to add. + + Raises: + MatchaInputError: if the stack_name is not a valid stack type + MatchaError: if there are already resources provisioned. + """ + ... 
+ + def stack_remove(module_name: str) -> str: """A placeholder for the stack remove logic in core.""" - return module_name \ No newline at end of file + return module_name diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index 846c6c7f..d9f5d499 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -161,6 +161,19 @@ def test_stack_set_file_modified( assert config_dict.items() <= new_config_dict.items() +def test_cli_stack_set_add_help_option(runner: CliRunner) -> None: + """Tests the --help option for the cli stack add sub-command. + + Args: + runner (CliRunner): typer CLI runner. + """ + result = runner.invoke(app, ["stack", "add", "--help"]) + + assert result.exit_code == 0 + + assert "Add a module to the stack." in result.stdout + + def test_cli_stack_set_remove_help_option(runner: CliRunner) -> None: """Tests the --help option for the cli stack remove sub-command. @@ -174,14 +187,28 @@ def test_cli_stack_set_remove_help_option(runner: CliRunner) -> None: assert "Remove a module from the current Matcha stack." in result.stdout +def test_cli_stack_add_command_without_args(runner: CliRunner) -> None: + """Tests the cli stack add sub-command without passing an argument. + + Args: + runner (CliRunner): typer CLI runner. + """ + result = runner.invoke(app, ["stack", "add"]) + + assert result.exit_code == 0 + + assert ( + "No module specified. Please run `matcha stack add` again and" in result.stdout + ) + + def test_cli_stack_remove_command_without_args(runner: CliRunner) -> None: - """Tests the --help option for the cli stack remove sub-command. + """Tests the cli stack remove sub-command without passing an argument. Args: runner (CliRunner): typer CLI runner. 
""" result = runner.invoke(app, ["stack", "remove"]) - assert result.exit_code == 0 assert ( @@ -190,13 +217,35 @@ def test_cli_stack_remove_command_without_args(runner: CliRunner) -> None: ) +def test_cli_stack_add_command_with_args( + matcha_testing_directory: str, + runner: CliRunner, +) -> None: + """Tests the cli stack add sub-command with args. + + Args: + matcha_testing_directory (str): a temporary working directory. + runner (CliRunner): typer CLI runner. + """ + os.chdir(matcha_testing_directory) + with patch(f"{INTERNAL_FUNCTION_STUB}.stack_add") as mocked_stack_add: + result = runner.invoke(app, ["stack", "add", "experiment_tracker"]) + + assert result.exit_code == 0 + assert mocked_stack_add.assert_called_once + assert ( + "Matcha 'experiment_tracker' module has been added to the current stack." + in result.stdout + ) + + @patch(f"{INTERNAL_FUNCTION_STUB}.stack_remove") def test_cli_stack_remove_command_with_args( mocked_stack_remove: MagicMock, matcha_testing_directory: str, runner: CliRunner, ) -> None: - """Tests the cli stack set sub-command with args. + """Tests the cli stack remove sub-command with args. Args: mocked_stack_remove (MagicMock): a mocked stack_remove function. 
From 6722ba6dcdd5780077e7bb471c4039b0f92a1c5f Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Thu, 24 Aug 2023 11:44:08 +0100 Subject: [PATCH 03/10] [RPD-308] Add stack module validation #206 --- src/matcha_ml/core/_validation.py | 44 +++++++++++++++++++++++++ tests/test_core/test_core_validation.py | 16 +++++++++ 2 files changed, 60 insertions(+) diff --git a/src/matcha_ml/core/_validation.py b/src/matcha_ml/core/_validation.py index 1e0984e1..721e646c 100644 --- a/src/matcha_ml/core/_validation.py +++ b/src/matcha_ml/core/_validation.py @@ -1,5 +1,7 @@ """"Validation for core commands.""" +from enum import Enum, EnumMeta + from matcha_ml.errors import MatchaInputError from matcha_ml.services import AzureClient @@ -8,6 +10,36 @@ MAXIMUM_RESOURCE_NAME_LEN = 24 +class StackModuleMeta(EnumMeta): + """Metaclass for the StackModule Enum.""" + + def __contains__(self, item: str) -> bool: # type: ignore + """Method for checking if an item is a member of the enum. + + Args: + item (str): the quantity to check for in the Enum. + + Returns: + True if item is a member of the Enum, False otherwise. + """ + try: + self(item) + except ValueError: + return False + else: + return True + + +class StackModule(Enum, metaclass=StackModuleMeta): + """Enum defining valid matcha stack modules.""" + + ZENML = "zenml" + COMMON = "common" + DVC = "dvc" + MLFLOW = "mlflow" + SELDON = "seldon" + + def _is_alphanumeric(prefix: str) -> bool: """Check whether the prefix is an alphanumeric string. @@ -100,3 +132,15 @@ def is_valid_region(region: str) -> bool: """ azure_client = AzureClient() return bool(azure_client.is_valid_region(region)) + + +def stack_module_is_valid(module: str) -> bool: + """Checks whether a module name is valid. + + Args: + module (str): The name of the stack module. + + Returns: + bool: True, if the module exists in the StackModule enum, otherwise, False. 
+ """ + return module in StackModule diff --git a/tests/test_core/test_core_validation.py b/tests/test_core/test_core_validation.py index ffb98029..dd27c03c 100644 --- a/tests/test_core/test_core_validation.py +++ b/tests/test_core/test_core_validation.py @@ -9,6 +9,7 @@ _is_alphanumeric, _is_not_digits, is_valid_prefix, + stack_module_is_valid, ) from matcha_ml.errors import MatchaInputError @@ -96,3 +97,18 @@ def test_is_valid_prefix_invalid( is_valid_prefix(prefix) assert str(err.value) == error_msg + + +def test_stack_module_is_valid_with_valid_module(): + """Test stack module validation returns True when the module is valid.""" + assert stack_module_is_valid("zenml") + + +def test_stack_module_is_valid_with_valid_module_with_upper_case(): + """Test stack module validation returns False when the module is fully upper case and not valid.""" + assert not stack_module_is_valid("ZENML") + + +def test_stack_module_is_valid_with_invalid_module(): + """Test stack module validation returns False when the module does not exist.""" + assert not stack_module_is_valid("invalidmodule") From b9c1bd15319036813c673178dfb967164a08d47d Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Fri, 25 Aug 2023 13:47:36 +0100 Subject: [PATCH 04/10] [RPD-310] Update MatchaConfig and MatchaConfigComponent to include adding and removal of properties without overwriting (#207) * Add initial add/remove commands * Test remove property for MatchaConfigComponent * Fix docstring * Tests for MatchaConfigService * Update docstring with raises --- src/matcha_ml/config/matcha_config.py | 60 +++++++++++++++-- tests/test_config/test_matcha_config.py | 85 +++++++++++++++++++++++++ 2 files changed, 140 insertions(+), 5 deletions(-) diff --git a/src/matcha_ml/config/matcha_config.py b/src/matcha_ml/config/matcha_config.py index e2340b18..1f6a7858 100644 --- a/src/matcha_ml/config/matcha_config.py +++ b/src/matcha_ml/config/matcha_config.py @@ -45,13 +45,18 @@ 
def find_property( None, ) - # if property is None: - # raise MatchaError( - # f"The property with the name '{property_name}' could not be found." - # ) - return property + def remove_property(self, property_name: str) -> None: + """Removes a property by name if it exists in the component. + + Args: + property_name (str): the name of the property to remove. + """ + self.properties = [ + item for item in self.properties if item.name != property_name + ] + @dataclass class MatchaConfig: @@ -207,6 +212,51 @@ def update( MatchaConfigService.write_matcha_config(config) + @staticmethod + def add_property( + component_name: str, component_property: MatchaConfigComponentProperty + ) -> None: + """Method to add a MatchaConfigComponentProperty to a Component, if it does not exist, this will create the component too. + + Args: + component_name (str): Name of the component. + component_property (MatchaConfigComponentProperty): Property to add to the component. + """ + if MatchaConfigService.config_file_exists(): + config = MatchaConfigService.read_matcha_config() + component = config.find_component(component_name) + if component is not None: + component.properties.append(component_property) + MatchaConfigService.update(component) + return + + MatchaConfigService.update( + MatchaConfigComponent(component_name, [component_property]) + ) + + @staticmethod + def remove_property(component_name: str, property_name: str) -> None: + """Method to remove a MatchaConfigComponentProperty to a Component. + + Args: + component_name (str): Name of the component. + property_name (str): Name of the property within the component. + + Raises: + MatchaError: raises a MatchaError if the local config file does not exist. + MatchaError: raises a MatchaError if the specified component does not exist. 
+ """ + if MatchaConfigService.config_file_exists(): + config = MatchaConfigService.read_matcha_config() + component = config.find_component(component_name) + if component is not None: + component.remove_property(property_name) + MatchaConfigService.update(component) + else: + raise MatchaError(f"Error - The {component_name} does not exist.") + else: + raise MatchaError("Error - The Matcha config file does not exist.") + @staticmethod def delete_matcha_config() -> None: """A function for deleting the local Matcha config file. diff --git a/tests/test_config/test_matcha_config.py b/tests/test_config/test_matcha_config.py index 8e3ba2d2..d27a762c 100644 --- a/tests/test_config/test_matcha_config.py +++ b/tests/test_config/test_matcha_config.py @@ -260,3 +260,88 @@ def test_matcha_config_service_update( assert config_dict.items() <= updated_config_dict.items() assert updated_config_dict["test"]["name"] == "passed" assert updated_config_dict["test2"]["name"] == "passed_again" + + +def test_remove_property_expected( + mocked_matcha_config_component: MatchaConfigComponent, +): + """Test that a component is removed if it exists. + + Args: + mocked_matcha_config_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance. + """ + mocked_matcha_config_component.remove_property(property_name="account_name") + mocked_matcha_config_component.remove_property(property_name="resource_group_name") + mocked_matcha_config_component.remove_property( + property_name="not_an_existing_property_name" + ) + + assert mocked_matcha_config_component == MatchaConfigComponent( + name="remote_state_bucket", + properties=[ + MatchaConfigComponentProperty(name="container_name", value="test-container") + ], + ) + + +def test_add_property_expected( + matcha_testing_directory, mocked_matcha_config_json_object +): + """Test adding a property. + + Args: + matcha_testing_directory (str): A temporary working directory. 
+ mocked_matcha_config_json_object (dict): A dictionary representation of a matcha config json file. + """ + os.chdir(matcha_testing_directory) + config = MatchaConfig.from_dict(mocked_matcha_config_json_object) + config_dict = config.to_dict() + + MatchaConfigService.write_matcha_config(config) + + component_property = MatchaConfigComponentProperty(name="name", value="passed") + second_component_property = MatchaConfigComponentProperty( + name="location", value="ukwest" + ) + + MatchaConfigService.add_property( + component_name="test", component_property=component_property + ) + MatchaConfigService.add_property( + component_name="test", component_property=second_component_property + ) + + updated_config = MatchaConfigService.read_matcha_config() + updated_config_dict = updated_config.to_dict() + + assert len(updated_config_dict) - 1 == len(config_dict) + assert config_dict.items() <= updated_config_dict.items() + assert updated_config_dict["test"]["name"] == "passed" + assert updated_config_dict["test"]["location"] == "ukwest" + + +def test_remove_property_config_service_expected( + matcha_testing_directory, mocked_matcha_config_json_object +): + """Test removing a property from the ConfigService level. + + Args: + matcha_testing_directory (str): A temporary working directory. + mocked_matcha_config_json_object (dict): A dictionary representation of a matcha config json file. 
+ """ + os.chdir(matcha_testing_directory) + config = MatchaConfig.from_dict(mocked_matcha_config_json_object) + + MatchaConfigService.write_matcha_config(config) + + config_dict = MatchaConfigService.read_matcha_config().to_dict() + assert config_dict["remote_state_bucket"]["account_name"] == "test-account" + + MatchaConfigService.remove_property( + component_name="remote_state_bucket", property_name="account_name" + ) + + updated_config = MatchaConfigService.read_matcha_config() + updated_config_dict = updated_config.to_dict() + + assert updated_config_dict.get("remote_state_bucket").get("account_name") is None From f6518a79005e60f460ea0ebea164878a56085bde Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Tue, 29 Aug 2023 13:01:01 +0100 Subject: [PATCH 05/10] [RPD-304] Migrate existing Terraform code into a modular format (#197) * Migrate Terraform files to modules * Fix Seldon folder and add storage module to mlflow * Delete outputs.tf in Chroma module * Delete outputs.tf in Chroma/Chroma used for test * Remove ZenML ingress comments and move zenml namespace * Add variable VM size, remove testing default variables * Simplify precommit regex * Add vm size variable to aks module --- .pre-commit-config.yaml | 2 +- .../chroma/chroma/chroma_helm/Chart.yaml | 4 + .../chroma_helm/templates/deployment.yaml | 29 ++ .../chroma/chroma_helm/templates/pvc.yaml | 12 + .../chroma/chroma_helm/templates/service.yaml | 18 + .../chroma/chroma/chroma_helm/values.yaml | 18 + .../modules/chroma/chroma/main.tf | 8 + .../infrastructure/modules/chroma/main.tf | 5 + .../modules/common/aks/README.md | 41 +++ .../infrastructure/modules/common/aks/main.tf | 19 + .../modules/common/aks/output.tf | 46 +++ .../modules/common/aks/variables.tf | 19 + .../common/azure_container_registry/README.md | 36 ++ .../common/azure_container_registry/main.tf | 13 + .../common/azure_container_registry/output.tf | 10 + .../azure_container_registry/variables.tf | 
19 + .../modules/common/configure_kubectl.tf | 12 + .../infrastructure/modules/common/helm.tf | 10 + .../modules/common/kubernetes.tf | 30 ++ .../infrastructure/modules/common/main.tf | 31 ++ .../infrastructure/modules/common/outputs.tf | 29 ++ .../modules/common/providers.tf | 46 +++ .../modules/common/resource_group/README.md | 32 ++ .../modules/common/resource_group/main.tf | 3 + .../modules/common/resource_group/output.tf | 4 + .../common/resource_group/variables.tf | 4 + .../modules/common/variables.tf | 15 + .../data_version_control_storage/README.md | 44 +++ .../dvc/data_version_control_storage/main.tf | 22 ++ .../data_version_control_storage/output.tf | 15 + .../data_version_control_storage/providers.tf | 8 + .../data_version_control_storage/variables.tf | 14 + .../infrastructure/modules/dvc/main.tf | 7 + .../infrastructure/modules/dvc/outputs.tf | 15 + .../infrastructure/modules/mlflow/main.tf | 18 + .../modules/mlflow/mlflow_module/getURI.tf | 8 + .../modules/mlflow/mlflow_module/main.tf | 44 +++ .../modules/mlflow/mlflow_module/outputs.tf | 4 + .../modules/mlflow/mlflow_module/providers.tf | 12 + .../modules/mlflow/mlflow_module/variables.tf | 29 ++ .../infrastructure/modules/mlflow/outputs.tf | 10 + .../modules/mlflow/storage/README.md | 44 +++ .../modules/mlflow/storage/main.tf | 22 ++ .../modules/mlflow/storage/output.tf | 50 +++ .../modules/mlflow/storage/providers.tf | 8 + .../modules/mlflow/storage/variables.tf | 14 + .../infrastructure/modules/seldon/main.tf | 11 + .../infrastructure/modules/seldon/outputs.tf | 9 + .../modules/seldon/seldon/README.md | 49 +++ .../modules/seldon/seldon/istio.tf | 65 ++++ .../modules/seldon/seldon/main.tf | 47 +++ .../modules/seldon/seldon/outputs.tf | 16 + .../modules/seldon/seldon/permissions.tf | 39 +++ .../modules/seldon/seldon/providers.tf | 12 + .../modules/seldon/seldon/variables.tf | 15 + .../modules/seldon/variables.tf | 13 + .../infrastructure/modules/zenml/main.tf | 25 ++ 
.../infrastructure/modules/zenml/outputs.tf | 26 ++ .../infrastructure/modules/zenml/variables.tf | 17 + .../modules/zenml/zen_server/README.md | 69 ++++ .../modules/zenml/zen_server/getURL.tf | 10 + .../modules/zenml/zen_server/main.tf | 66 ++++ .../modules/zenml/zen_server/outputs.tf | 15 + .../modules/zenml/zen_server/providers.tf | 16 + .../modules/zenml/zen_server/sql.tf | 57 +++ .../modules/zenml/zen_server/variables.tf | 167 +++++++++ .../zenml/zen_server/zenml_helm/Chart.yaml | 13 + .../zen_server/zenml_helm/templates/NOTES.txt | 36 ++ .../zenml_helm/templates/_helpers.tpl | 70 ++++ .../zenml_helm/templates/cert-secret.yaml | 45 +++ .../zen_server/zenml_helm/templates/hpa.yaml | 28 ++ .../templates/server-deployment.yaml | 242 +++++++++++++ .../zenml_helm/templates/server-ingress.yaml | 59 ++++ .../zenml_helm/templates/server-secret.yaml | 70 ++++ .../zenml_helm/templates/server-service.yaml | 15 + .../zenml_helm/templates/serviceaccount.yaml | 12 + .../templates/tests/test-connection.yaml | 15 + .../zenml/zen_server/zenml_helm/values.yaml | 326 ++++++++++++++++++ .../modules/zenml/zenml_namespace.tf | 5 + .../modules/zenml/zenml_storage/README.md | 45 +++ .../modules/zenml/zenml_storage/main.tf | 31 ++ .../modules/zenml/zenml_storage/output.tf | 50 +++ .../modules/zenml/zenml_storage/variables.tf | 19 + 83 files changed, 2727 insertions(+), 1 deletion(-) create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/Chart.yaml create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/deployment.yaml create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/pvc.yaml create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/service.yaml create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/values.yaml create mode 100644 src/matcha_ml/infrastructure/modules/chroma/chroma/main.tf create mode 100644 
src/matcha_ml/infrastructure/modules/chroma/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/aks/README.md create mode 100644 src/matcha_ml/infrastructure/modules/common/aks/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/aks/output.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/aks/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/azure_container_registry/README.md create mode 100644 src/matcha_ml/infrastructure/modules/common/azure_container_registry/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/azure_container_registry/output.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/azure_container_registry/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/configure_kubectl.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/helm.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/kubernetes.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/providers.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/resource_group/README.md create mode 100644 src/matcha_ml/infrastructure/modules/common/resource_group/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/resource_group/output.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/resource_group/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/common/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/README.md create mode 100644 src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/output.tf create mode 100644 
src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/providers.tf create mode 100644 src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/dvc/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/dvc/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/getURI.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/providers.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/storage/README.md create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/storage/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/storage/output.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/storage/providers.tf create mode 100644 src/matcha_ml/infrastructure/modules/mlflow/storage/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/README.md create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/istio.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/permissions.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/seldon/providers.tf create mode 100644 
src/matcha_ml/infrastructure/modules/seldon/seldon/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/seldon/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/README.md create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/getURL.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/outputs.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/providers.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/sql.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/variables.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/Chart.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/NOTES.txt create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/_helpers.tpl create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/cert-secret.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/hpa.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-deployment.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-ingress.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-secret.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-service.yaml create mode 100644 
src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/serviceaccount.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/tests/test-connection.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/values.yaml create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zenml_namespace.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zenml_storage/README.md create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zenml_storage/main.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zenml_storage/output.tf create mode 100644 src/matcha_ml/infrastructure/modules/zenml/zenml_storage/variables.tf diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fdd404d8..710b34f6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ repos: - id: check-toml - id: check-yaml args: ["--unsafe"] # only check syntax for yaml files - exclude: ^src/matcha_ml/infrastructure/llm/zen_server + exclude: ^(src/matcha_ml/infrastructure/) - id: check-json - id: mixed-line-ending files: "\\.(py|txt|yaml|json|md|toml|lock|cfg|html|sh|js|yml)$" diff --git a/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/Chart.yaml b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/Chart.yaml new file mode 100644 index 00000000..3932fbba --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/Chart.yaml @@ -0,0 +1,4 @@ +apiVersion: v2 +name: chroma +description: Chroma Server Helm Chart +version: 0.1.0 diff --git a/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/deployment.yaml b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/deployment.yaml new file mode 100644 index 00000000..36e3ab40 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/deployment.yaml @@ -0,0 +1,29 @@ +# 
templates/deployment.yaml +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ .Release.Name }}-server +spec: + selector: + matchLabels: + app: {{ .Release.Name }}-server + template: + metadata: + labels: + app: {{ .Release.Name }}-server + spec: + containers: + - name: chroma-server + image: {{ .Values.image.repository }}:{{ .Values.image.tag }} + ports: + - containerPort: 8000 + resources: +{{ toYaml .Values.resources | indent 12 }} + volumeMounts: + - mountPath: /index_data + name: {{ .Release.Name }}-server-index + restartPolicy: Always + volumes: + - name: chroma-server-index + persistentVolumeClaim: + claimName: {{ .Release.Name }}-server-index diff --git a/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/pvc.yaml b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/pvc.yaml new file mode 100644 index 00000000..56d3b0fe --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/pvc.yaml @@ -0,0 +1,12 @@ +# templates/pvc.yaml +apiVersion: v1 +kind: PersistentVolumeClaim +metadata: + name: {{ .Release.Name }}-server-index +spec: + accessModes: + - ReadWriteOnce + storageClassName: {{ .Values.pvc.storageClassName }} + resources: + requests: + storage: {{ .Values.pvc.requestsStorage }} diff --git a/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/service.yaml b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/service.yaml new file mode 100644 index 00000000..9ce103d6 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/templates/service.yaml @@ -0,0 +1,18 @@ +# templates/service.yaml +apiVersion: v1 +kind: Service +metadata: + name: {{ .Release.Name }}-service +spec: + selector: + app: {{ .Release.Name }}-server + ports: + - name: "8123" + port: 8123 + targetPort: 8123 + - name: "9000" + port: 9000 + targetPort: 9000 + - name: "8000" + port: 8000 + targetPort: 8000 diff --git 
a/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/values.yaml b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/values.yaml new file mode 100644 index 00000000..c38d484a --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/chroma_helm/values.yaml @@ -0,0 +1,18 @@ +# values.yaml +image: + repository: ghcr.io/chroma-core/chroma + tag: 0.4.3 + +resources: + requests: + memory: "256Mi" + cpu: "256m" + limits: + memory: "2Gi" + cpu: "2" + +pvc: + accessModes: + - ReadWriteOnce + storageClassName: default + requestsStorage: "100Mi" diff --git a/src/matcha_ml/infrastructure/modules/chroma/chroma/main.tf b/src/matcha_ml/infrastructure/modules/chroma/chroma/main.tf new file mode 100644 index 00000000..055f02f5 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/chroma/main.tf @@ -0,0 +1,8 @@ + +resource "helm_release" "chroma" { + name = "chroma" + chart = "${path.module}/chroma_helm" + namespace = "default" + + values = [file("${path.module}/chroma_helm/values.yaml")] +} diff --git a/src/matcha_ml/infrastructure/modules/chroma/main.tf b/src/matcha_ml/infrastructure/modules/chroma/main.tf new file mode 100644 index 00000000..1883e941 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/chroma/main.tf @@ -0,0 +1,5 @@ +module "chroma" { + source = "./chroma" + + depends_on = [null_resource.configure_local_kubectl] +} diff --git a/src/matcha_ml/infrastructure/modules/common/aks/README.md b/src/matcha_ml/infrastructure/modules/common/aks/README.md new file mode 100644 index 00000000..8b7b77bb --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/aks/README.md @@ -0,0 +1,41 @@ +## Requirements + +No requirements. + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | + +## Modules + +No modules. 
+ +## Resources + +| Name | Type | +|------|------| +| [azurerm_kubernetes_cluster.main](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/kubernetes_cluster) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [location](#input\_location) | The Azure region where the Kubernetes cluster will be created | `string` | n/a | yes | +| [prefix](#input\_prefix) | Prefix to be used for all resources in this module | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The name of the resource group to create the Kubernetes cluster in | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [aks\_cluster\_id](#output\_aks\_cluster\_id) | ID of the created Kubernetes cluster | +| [aks\_cluster\_name](#output\_aks\_cluster\_name) | Name of the created Kubernetes cluster | +| [aks\_object\_id](#output\_aks\_object\_id) | Object ID for the Kubernetes cluster | +| [aks\_principal\_id](#output\_aks\_principal\_id) | Principal ID for the Kubernetes cluster | +| [client\_certificate](#output\_client\_certificate) | Client certificate for accessing the Kubernetes cluster | +| [client\_key](#output\_client\_key) | Client key for accessing the Kubernetes cluster | +| [cluster\_ca\_certificate](#output\_cluster\_ca\_certificate) | Cluster CA certificate for the Kubernetes cluster | +| [host](#output\_host) | Host address for the Kubernetes cluster | +| [kube\_config](#output\_kube\_config) | Raw Kubernetes configuration for the created cluster | diff --git a/src/matcha_ml/infrastructure/modules/common/aks/main.tf b/src/matcha_ml/infrastructure/modules/common/aks/main.tf new file mode 100644 index 00000000..55bb2cbb --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/aks/main.tf @@ -0,0 +1,19 @@ +resource "azurerm_kubernetes_cluster" "main" { + name = "${var.prefix}-k8s" + location = var.location + 
resource_group_name = var.resource_group_name + dns_prefix = "${var.prefix}-k8s" + + default_node_pool { + name = "default" + vm_size = var.vm_size + + enable_auto_scaling = true + max_count = 3 + min_count = 1 + } + + identity { + type = "SystemAssigned" + } +} diff --git a/src/matcha_ml/infrastructure/modules/common/aks/output.tf b/src/matcha_ml/infrastructure/modules/common/aks/output.tf new file mode 100644 index 00000000..de131e60 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/aks/output.tf @@ -0,0 +1,46 @@ +output "kube_config" { + description = "Raw Kubernetes configuration for the created cluster" + value = azurerm_kubernetes_cluster.main.kube_config_raw + sensitive = true +} + +output "client_key" { + description = "Client key for accessing the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.kube_config.0.client_key +} + +output "client_certificate" { + description = "Client certificate for accessing the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.kube_config.0.client_certificate + sensitive = true +} + +output "cluster_ca_certificate" { + description = "Cluster CA certificate for the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.kube_config.0.cluster_ca_certificate +} + +output "host" { + description = "Host address for the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.kube_config.0.host +} + +output "aks_cluster_id" { + description = "ID of the created Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.id +} + +output "aks_cluster_name" { + description = "Name of the created Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.name +} + +output "aks_principal_id" { + description = "Principal ID for the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.identity[0].principal_id +} + +output "aks_object_id" { + description = "Object ID for the Kubernetes cluster" + value = azurerm_kubernetes_cluster.main.kubelet_identity[0].object_id +} diff 
--git a/src/matcha_ml/infrastructure/modules/common/aks/variables.tf b/src/matcha_ml/infrastructure/modules/common/aks/variables.tf new file mode 100644 index 00000000..409203cf --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/aks/variables.tf @@ -0,0 +1,19 @@ +variable "prefix" { + description = "Prefix to be used for all resources in this module" + type = string +} + +variable "location" { + description = "The Azure region where the Kubernetes cluster will be created" + type = string +} + +variable "resource_group_name" { + description = "The name of the resource group to create the Kubernetes cluster in" + type = string +} + +variable "vm_size" { + description = "The Azure VM size to use." + type = string +} diff --git a/src/matcha_ml/infrastructure/modules/common/azure_container_registry/README.md b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/README.md new file mode 100644 index 00000000..70af6ba7 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/README.md @@ -0,0 +1,36 @@ +## Requirements + +No requirements. + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [azurerm_container_registry.main](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/container_registry) | resource | +| [azurerm_role_assignment.aks_acr_access](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/role_assignment) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [aks\_object\_id](#input\_aks\_object\_id) | Object id for aks cluster | `string` | n/a | yes | +| [location](#input\_location) | The Azure region in which this resources should be created. 
| `string` | n/a | yes | +| [prefix](#input\_prefix) | A prefix used for all resources | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The resource group name which is used to create the resource group | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [container\_registry\_name](#output\_container\_registry\_name) | The name of the container registry | +| [container\_registry\_url](#output\_container\_registry\_url) | The URL used to log into the container registry | diff --git a/src/matcha_ml/infrastructure/modules/common/azure_container_registry/main.tf b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/main.tf new file mode 100644 index 00000000..9c7ec142 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/main.tf @@ -0,0 +1,13 @@ +resource "azurerm_container_registry" "main" { + name = "cr${var.prefix}" + resource_group_name = var.resource_group_name + location = var.location + sku = "Standard" +} + +resource "azurerm_role_assignment" "aks_acr_access" { + scope = azurerm_container_registry.main.id + role_definition_name = "AcrPull" + principal_id = var.aks_object_id + skip_service_principal_aad_check = true +} diff --git a/src/matcha_ml/infrastructure/modules/common/azure_container_registry/output.tf b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/output.tf new file mode 100644 index 00000000..a2069133 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/output.tf @@ -0,0 +1,10 @@ +# output for container registry +output "container_registry_url" { + description = "The URL used to log into the container registry" + value = azurerm_container_registry.main.login_server +} + +output "container_registry_name" { + description = "The name of the container registry" + value = azurerm_container_registry.main.name +} diff --git 
a/src/matcha_ml/infrastructure/modules/common/azure_container_registry/variables.tf b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/variables.tf new file mode 100644 index 00000000..513a9768 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/azure_container_registry/variables.tf @@ -0,0 +1,19 @@ +variable "prefix" { + description = "A prefix used for all resources" + type = string +} + +variable "resource_group_name" { + description = "The resource group name which is used to create the resource group" + type = string +} + +variable "location" { + description = "The Azure region in which this resources should be created." + type = string +} + +variable "aks_object_id" { + description = "Object id for aks cluster" + type = string +} diff --git a/src/matcha_ml/infrastructure/modules/common/configure_kubectl.tf b/src/matcha_ml/infrastructure/modules/common/configure_kubectl.tf new file mode 100644 index 00000000..6e69e9de --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/configure_kubectl.tf @@ -0,0 +1,12 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/aws-minimal/configure_kubectl.tf + +# set up local kubectl client to access the newly created cluster +resource "null_resource" "configure_local_kubectl" { + provisioner "local-exec" { + command = "az aks get-credentials --resource-group ${module.resource_group.name} --name ${module.aks.aks_cluster_name} --context ${local.kubectl_context} --overwrite-existing" + } +} + +locals { + kubectl_context = "terraform-${module.aks.aks_cluster_name}-${replace(substr(timestamp(), 0, 16), ":", "_")}" +} diff --git a/src/matcha_ml/infrastructure/modules/common/helm.tf b/src/matcha_ml/infrastructure/modules/common/helm.tf new file mode 100644 index 00000000..dac70cd9 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/helm.tf @@ -0,0 +1,10 @@ +provider "helm" { + kubernetes { + host = 
module.aks.host + + client_certificate = base64decode(module.aks.client_certificate) + client_key = base64decode(module.aks.client_key) + cluster_ca_certificate = base64decode(module.aks.cluster_ca_certificate) + config_path = local.kubectl_config_path + } +} diff --git a/src/matcha_ml/infrastructure/modules/common/kubernetes.tf b/src/matcha_ml/infrastructure/modules/common/kubernetes.tf new file mode 100644 index 00000000..304dcc5e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/kubernetes.tf @@ -0,0 +1,30 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/aws-minimal/kubernetes.tf + +# check if the host OS is Linux or Windows +data "external" "os" { + working_dir = path.module + program = ["printf", "{\"os\": \"Linux\"}"] +} + +locals { + os = data.external.os.result.os + kubectl_config_path = local.os == "Windows" ? "%USERPROFILE%\\.kube\\config" : "~/.kube/config" +} + +# a default (non-aliased) provider configuration for "kubernetes" +provider "kubernetes" { + host = module.aks.host + + client_certificate = base64decode(module.aks.client_certificate) + client_key = base64decode(module.aks.client_key) + cluster_ca_certificate = base64decode(module.aks.cluster_ca_certificate) + config_path = local.kubectl_config_path +} + +provider "kubectl" { + host = module.aks.host + + client_certificate = base64decode(module.aks.client_certificate) + client_key = base64decode(module.aks.client_key) + cluster_ca_certificate = base64decode(module.aks.cluster_ca_certificate) +} diff --git a/src/matcha_ml/infrastructure/modules/common/main.tf b/src/matcha_ml/infrastructure/modules/common/main.tf new file mode 100644 index 00000000..9107bc70 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/main.tf @@ -0,0 +1,31 @@ +provider "azurerm" { + features { + resource_group { + prevent_deletion_if_contains_resources = false + } + } +} + +module "resource_group" { + source = 
"./resource_group" + + prefix = var.prefix +} + +module "aks" { + source = "./aks" + + prefix = var.prefix + location = var.location + resource_group_name = module.resource_group.name + vm_size = var.vm_size +} + +module "acr" { + source = "./azure_container_registry" + + prefix = var.prefix + resource_group_name = module.resource_group.name + location = var.location + aks_object_id = module.aks.aks_object_id +} diff --git a/src/matcha_ml/infrastructure/modules/common/outputs.tf b/src/matcha_ml/infrastructure/modules/common/outputs.tf new file mode 100644 index 00000000..9e4c560a --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/outputs.tf @@ -0,0 +1,29 @@ +output "orchestrator_aks_k8s_context" { + description = "The name of the Kubernetes context used for deployment" + value = local.kubectl_context +} + +output "container_registry_azure_registry_url" { + description = "The URL for the Azure Container Registry" + value = module.acr.container_registry_url +} + +output "container_registry_azure_registry_name" { + description = "The name of the Azure Container Registry" + value = module.acr.container_registry_name +} + +output "cloud_azure_resource_group_name" { + description = "Name of the Azure resource group" + value = module.resource_group.name +} + +output "cloud_azure_prefix"{ + description = "The Azure resource group name prefix" + value = var.prefix +} + +output "cloud_azure_location"{ + description = "The Azure location in which the resources are provisioned" + value = var.location +} diff --git a/src/matcha_ml/infrastructure/modules/common/providers.tf b/src/matcha_ml/infrastructure/modules/common/providers.tf new file mode 100644 index 00000000..b3876b23 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/providers.tf @@ -0,0 +1,46 @@ +# defining the providers for the all module +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = ">=3.16.0" + } + + random = { + source = "hashicorp/random" + 
version = "3.1.0" + } + + helm = { + source = "hashicorp/helm" + version = "~> 2.0.1" + } + + local = { + source = "hashicorp/local" + version = "2.1.0" + } + + null = { + source = "hashicorp/null" + version = "3.2.1" + } + + kubernetes = { + source = "hashicorp/kubernetes" + version = "~> 2.11.0" + } + + kubectl = { + source = "gavinbunney/kubectl" + version = "1.14.0" + } + + htpasswd = { + source = "loafoe/htpasswd" + version = "1.0.4" + } + } + + required_version = ">= 0.14.8" +} diff --git a/src/matcha_ml/infrastructure/modules/common/resource_group/README.md b/src/matcha_ml/infrastructure/modules/common/resource_group/README.md new file mode 100644 index 00000000..72624d2e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/resource_group/README.md @@ -0,0 +1,32 @@ +## Requirements + +No requirements. + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [azurerm_resource_group.main](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/resource_group) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [location](#input\_location) | The Azure region in which resource group should be provisioned | `string` | n/a | yes | +| [prefix](#input\_prefix) | A prefix used for all resources | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [name](#output\_name) | Name of the resource group | diff --git a/src/matcha_ml/infrastructure/modules/common/resource_group/main.tf b/src/matcha_ml/infrastructure/modules/common/resource_group/main.tf new file mode 100644 index 00000000..6afb0a9e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/resource_group/main.tf @@ -0,0 +1,3 @@ +data "azurerm_resource_group" "main" { + name = "${var.prefix}-resources" +} diff --git 
a/src/matcha_ml/infrastructure/modules/common/resource_group/output.tf b/src/matcha_ml/infrastructure/modules/common/resource_group/output.tf new file mode 100644 index 00000000..55f05726 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/resource_group/output.tf @@ -0,0 +1,4 @@ +output "name" { + description = "Name of the resource group" + value = data.azurerm_resource_group.main.name +} diff --git a/src/matcha_ml/infrastructure/modules/common/resource_group/variables.tf b/src/matcha_ml/infrastructure/modules/common/resource_group/variables.tf new file mode 100644 index 00000000..0325a60e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/resource_group/variables.tf @@ -0,0 +1,4 @@ +variable "prefix" { + description = "A prefix used for all resources" + type = string +} diff --git a/src/matcha_ml/infrastructure/modules/common/variables.tf b/src/matcha_ml/infrastructure/modules/common/variables.tf new file mode 100644 index 00000000..bd9a1bf4 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/common/variables.tf @@ -0,0 +1,15 @@ +variable "prefix" { + description = "A prefix used for all resources" + type = string +} + +variable "location" { + description = "The Azure Region in which all resources should be provisioned" + type = string +} + +variable "vm_size" { + description = "The Azure VM size to use." + type = string + default = "Standard_DS3_v2" +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/README.md b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/README.md new file mode 100644 index 00000000..cae1bfe3 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/README.md @@ -0,0 +1,44 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azurerm](#requirement\_azurerm) | 3.48.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | 3.48.0 | + +## Modules + +No modules. 
+ +## Resources + +| Name | Type | +|------|------| +| [azurerm_storage_account.storageaccount](https://registry.terraform.io/providers/hashicorp/azurerm/3.48.0/docs/resources/storage_account) | resource | +| [azurerm_storage_container.storagecontainer](https://registry.terraform.io/providers/hashicorp/azurerm/3.48.0/docs/resources/storage_container) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [location](#input\_location) | The Azure Region in which this resources should be created. | `string` | n/a | yes | +| [prefix](#input\_prefix) | The prefix which should be used for naming storage account ({prefix}storageacc) and container ({prefix}storagecontainer) | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The resource group name which is used to create the resource group | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [blobstorage\_container\_path](#output\_blobstorage\_container\_path) | The Azure Blob Storage Container path for storing your artifacts | +| [primary\_access\_key](#output\_primary\_access\_key) | Azure Storage Account - Primary access key | +| [primary\_blob\_connection\_string](#output\_primary\_blob\_connection\_string) | Azure Storage Account - Primary Blob service connection string | +| [primary\_connection\_string](#output\_primary\_connection\_string) | Azure Storage Account - Primary connection string | +| [secondary\_access\_key](#output\_secondary\_access\_key) | Azure Storage Account - Secondary access key | +| [secondary\_blob\_connection\_string](#output\_secondary\_blob\_connection\_string) | Azure Storage Account - Secondary Blob service connection string | +| [secondary\_connection\_string](#output\_secondary\_connection\_string) | Azure Storage Account - Secondary connection string | +| [storage\_account\_name](#output\_storage\_account\_name) | The name of the Azure 
Storage Account. | +| [storage\_container\_name](#output\_storage\_container\_name) | The name of the Azure Storage Container. | diff --git a/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/main.tf b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/main.tf new file mode 100644 index 00000000..72b26b18 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/main.tf @@ -0,0 +1,22 @@ +# Reference: https://github.com/hashicorp/terraform-provider-azurerm/tree/main/examples/storage/storage-container + +# create a storage account +resource "azurerm_storage_account" "storageaccount" { + name = "${var.prefix}dvcacc" + resource_group_name = var.resource_group_name + location = var.location + + account_tier = "Standard" + account_kind = "StorageV2" + account_replication_type = "LRS" + enable_https_traffic_only = true + access_tier = "Hot" + allow_nested_items_to_be_public = true +} + +# create a storage container inside created storage account +resource "azurerm_storage_container" "storagecontainer" { + name = "${var.prefix}dvcstore" + storage_account_name = azurerm_storage_account.storageaccount.name + container_access_type = "container" +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/output.tf b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/output.tf new file mode 100644 index 00000000..45a5f5ce --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/output.tf @@ -0,0 +1,15 @@ +output "storage_container_name" { + description = "The name of the Azure Storage Container." + value = azurerm_storage_container.storagecontainer.name +} + +output "storage_account_name" { + description = "The name of the Azure Storage Account." 
+ value = azurerm_storage_account.storageaccount.name +} + +output "primary_connection_string" { + description = "Azure Storage Account - Primary connection string" + value = azurerm_storage_account.storageaccount.primary_connection_string + sensitive = true +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/providers.tf b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/providers.tf new file mode 100644 index 00000000..e2d7507d --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/providers.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "3.48.0" + } + } +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/variables.tf b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/variables.tf new file mode 100644 index 00000000..8a3fab49 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/data_version_control_storage/variables.tf @@ -0,0 +1,14 @@ +variable "resource_group_name" { + description = "The resource group name which is used to create the resource group" + type = string +} + +variable "prefix" { + description = "The prefix which should be used for naming storage account ({prefix}dvcacc) and container ({prefix}dvcstore)" + type = string +} + +variable "location" { + description = "The Azure Region in which this resources should be created." 
+ type = string +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/main.tf b/src/matcha_ml/infrastructure/modules/dvc/main.tf new file mode 100644 index 00000000..fc8db048 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/main.tf @@ -0,0 +1,7 @@ +module "data_version_control_storage" { + source = "./data_version_control_storage" + + resource_group_name = module.resource_group.name + prefix = var.prefix + location = var.location +} diff --git a/src/matcha_ml/infrastructure/modules/dvc/outputs.tf b/src/matcha_ml/infrastructure/modules/dvc/outputs.tf new file mode 100644 index 00000000..65a99440 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/dvc/outputs.tf @@ -0,0 +1,15 @@ +output "data_version_control_primary_connection_string"{ + description = "The primary connection string for the ZenML Azure Storage Account" + value = module.data_version_control_storage.primary_connection_string + sensitive = true +} + +output "data_version_control_storage_container_name"{ + description = "The name of the container used for data version control" + value = module.data_version_control_storage.storage_container_name +} + +output "data_version_control_storage_account_name"{ + description = "The name of the storage account for data version control" + value = module.data_version_control_storage.storage_account_name +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/main.tf b/src/matcha_ml/infrastructure/modules/mlflow/main.tf new file mode 100644 index 00000000..356b47ba --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/main.tf @@ -0,0 +1,18 @@ +module "mlflow" { + source = "./mlflow_module" + + depends_on = [null_resource.configure_local_kubectl] + + # storage variables + storage_account_name = module.storage.storage_account_name + storage_container_name = module.storage.storage_container_name + artifact_azure_access_key = module.storage.primary_access_key +} + +module "storage" { + source = "./storage" + + resource_group_name = 
module.resource_group.name + prefix = var.prefix + location = var.location +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/getURI.tf b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/getURI.tf new file mode 100644 index 00000000..e4a6890c --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/getURI.tf @@ -0,0 +1,8 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/azure-minimal/get_URIs.tf + +# get URI for MLflow tracking server +data "kubernetes_service" "mlflow_tracking" { + metadata { + name = helm_release.mlflow_tracking.name + } +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/main.tf b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/main.tf new file mode 100644 index 00000000..eb97dfaa --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/main.tf @@ -0,0 +1,44 @@ +# create the mlflow tracking server deployment using mlflow helm charts +# Reference: https://github.com/community-charts/helm-charts/blob/main/charts/mlflow/values.yaml +resource "helm_release" "mlflow_tracking" { + + name = "mlflow-tracking" + repository = "https://community-charts.github.io/helm-charts" + chart = "mlflow" + + # Change type from "ClusterIP" to "LoadBalancer" + set { + name = "service.type" + value = "LoadBalancer" + } + # set proxied access to artifact storage + set { + name = "artifactRoot.proxiedArtifactStorage" + value = var.artifact_proxied_access + type = "auto" + } + + # Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/aws-minimal/mlflow-module/mlflow.tf#L39 + # set values for Azure Blob Storage + set { + name = "artifactRoot.azureBlob.enabled" + value = var.artifact_azure + type = "auto" + } + set { + name = "artifactRoot.azureBlob.storageAccount" + value = var.storage_account_name + type = 
"string" + } + set { + name = "artifactRoot.azureBlob.container" + value = var.storage_container_name + type = "string" + } + set_sensitive { + name = "artifactRoot.azureBlob.accessKey" + value = var.artifact_azure_access_key + type = "string" + } + +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/outputs.tf b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/outputs.tf new file mode 100644 index 00000000..d73628a7 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/outputs.tf @@ -0,0 +1,4 @@ +output "mlflow_tracking_url" { + description = "The tracking URL for MLFlow dashboard" + value = "http://${data.kubernetes_service.mlflow_tracking.status.0.load_balancer.0.ingress.0.ip}:${data.kubernetes_service.mlflow_tracking.spec.0.port.0.port}" +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/providers.tf b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/providers.tf new file mode 100644 index 00000000..f46344cb --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/providers.tf @@ -0,0 +1,12 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/aws-minimal/mlflow-module/providers.tf + +# defining the providers required by the mlflow module +terraform { + required_providers { + htpasswd = { + source = "loafoe/htpasswd" + version = "1.0.4" + } + } + required_version = ">= 0.14.8" +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/variables.tf b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/variables.tf new file mode 100644 index 00000000..058a5057 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/mlflow_module/variables.tf @@ -0,0 +1,29 @@ +# artifact storage variables +variable "artifact_proxied_access" { + description = "Boolean to indicate if we are using proxied artifact storage" + type = bool + default = false +} + 
+variable "storage_account_name" { + description = "Name of Azure Storage Container already created inside Azure Blob Storage" + type = string +} + +variable "storage_container_name" { + description = "Name of container to create inside Azure Storage Account to store artifacts" + type = string +} + +variable "artifact_azure" { + description = "Boolean to indicate if we are using Azure Blob Storage as storage for MLFlow" + type = bool + default = true +} + + +variable "artifact_azure_access_key" { + description = "Access Key for Azure Blob Storage" + type = string + default = "" +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/outputs.tf b/src/matcha_ml/infrastructure/modules/mlflow/outputs.tf new file mode 100644 index 00000000..aa75c9d3 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/outputs.tf @@ -0,0 +1,10 @@ +output "experiment_tracker_mlflow_tracking_url" { + description = "The URL for the MLflow tracking server" + value = module.mlflow.mlflow_tracking_url +} + +output "experiment_tracker_mlflow_azure_connection_string" { + description = "The Azure connection string for the MLflow artifact storage" + value = module.storage.primary_connection_string + sensitive = true +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/storage/README.md b/src/matcha_ml/infrastructure/modules/mlflow/storage/README.md new file mode 100644 index 00000000..cae1bfe3 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/storage/README.md @@ -0,0 +1,44 @@ +## Requirements + +| Name | Version | +|------|---------| +| [azurerm](#requirement\_azurerm) | 3.48.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | 3.48.0 | + +## Modules + +No modules. 
+ +## Resources + +| Name | Type | +|------|------| +| [azurerm_storage_account.storageaccount](https://registry.terraform.io/providers/hashicorp/azurerm/3.48.0/docs/resources/storage_account) | resource | +| [azurerm_storage_container.storagecontainer](https://registry.terraform.io/providers/hashicorp/azurerm/3.48.0/docs/resources/storage_container) | resource | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [location](#input\_location) | The Azure Region in which this resources should be created. | `string` | n/a | yes | +| [prefix](#input\_prefix) | The prefix which should be used for naming storage account ({prefix}storageacc) and container ({prefix}storagecontainer) | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The resource group name which is used to create the resource group | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [blobstorage\_container\_path](#output\_blobstorage\_container\_path) | The Azure Blob Storage Container path for storing your artifacts | +| [primary\_access\_key](#output\_primary\_access\_key) | Azure Storage Account - Primary access key | +| [primary\_blob\_connection\_string](#output\_primary\_blob\_connection\_string) | Azure Storage Account - Primary Blob service connection string | +| [primary\_connection\_string](#output\_primary\_connection\_string) | Azure Storage Account - Primary connection string | +| [secondary\_access\_key](#output\_secondary\_access\_key) | Azure Storage Account - Secondary access key | +| [secondary\_blob\_connection\_string](#output\_secondary\_blob\_connection\_string) | Azure Storage Account - Secondary Blob service connection string | +| [secondary\_connection\_string](#output\_secondary\_connection\_string) | Azure Storage Account - Secondary connection string | +| [storage\_account\_name](#output\_storage\_account\_name) | The name of the Azure 
Storage Account. | +| [storage\_container\_name](#output\_storage\_container\_name) | The name of the Azure Storage Container. | diff --git a/src/matcha_ml/infrastructure/modules/mlflow/storage/main.tf b/src/matcha_ml/infrastructure/modules/mlflow/storage/main.tf new file mode 100644 index 00000000..b448a625 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/storage/main.tf @@ -0,0 +1,22 @@ +# Reference: https://github.com/hashicorp/terraform-provider-azurerm/tree/main/examples/storage/storage-container + +# create a storage account +resource "azurerm_storage_account" "storageaccount" { + name = "st${var.prefix}acc" + resource_group_name = var.resource_group_name + location = var.location + + account_tier = "Standard" + account_kind = "StorageV2" + account_replication_type = "LRS" + enable_https_traffic_only = true + access_tier = "Hot" + allow_nested_items_to_be_public = true +} + +# create a storage container inside created storage account +resource "azurerm_storage_container" "storagecontainer" { + name = "${var.prefix}store" + storage_account_name = azurerm_storage_account.storageaccount.name + container_access_type = "container" +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/storage/output.tf b/src/matcha_ml/infrastructure/modules/mlflow/storage/output.tf new file mode 100644 index 00000000..ef9a9916 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/storage/output.tf @@ -0,0 +1,50 @@ +output "storage_container_name" { + description = "The name of the Azure Storage Container." + value = azurerm_storage_container.storagecontainer.name +} + +output "blobstorage_container_path" { + description = "The Azure Blob Storage Container path for storing your artifacts" + value = "az://${azurerm_storage_container.storagecontainer.name}" +} + +output "storage_account_name" { + description = "The name of the Azure Storage Account." 
+ value = azurerm_storage_account.storageaccount.name +} + +output "primary_access_key" { + description = "Azure Storage Account - Primary access key" + value = azurerm_storage_account.storageaccount.primary_access_key + sensitive = true +} + +output "secondary_access_key" { + description = "Azure Storage Account - Secondary access key" + value = azurerm_storage_account.storageaccount.secondary_access_key + sensitive = true +} + +output "primary_connection_string" { + description = "Azure Storage Account - Primary connection string" + value = azurerm_storage_account.storageaccount.primary_connection_string + sensitive = true +} + +output "secondary_connection_string" { + description = "Azure Storage Account - Secondary connection string" + value = azurerm_storage_account.storageaccount.secondary_connection_string + sensitive = true +} + +output "primary_blob_connection_string" { + description = "Azure Storage Account - Primary Blob service connection string" + value = azurerm_storage_account.storageaccount.primary_blob_connection_string + sensitive = true +} + +output "secondary_blob_connection_string" { + description = "Azure Storage Account - Secondary Blob service connection string" + value = azurerm_storage_account.storageaccount.secondary_blob_connection_string + sensitive = true +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/storage/providers.tf b/src/matcha_ml/infrastructure/modules/mlflow/storage/providers.tf new file mode 100644 index 00000000..e2d7507d --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/storage/providers.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + azurerm = { + source = "hashicorp/azurerm" + version = "3.48.0" + } + } +} diff --git a/src/matcha_ml/infrastructure/modules/mlflow/storage/variables.tf b/src/matcha_ml/infrastructure/modules/mlflow/storage/variables.tf new file mode 100644 index 00000000..23388ac3 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/mlflow/storage/variables.tf @@ -0,0 
+1,14 @@ +variable "resource_group_name" { + description = "The resource group name which is used to create the resource group" + type = string +} + +variable "prefix" { + description = "The prefix which should be used for naming storage account ({prefix}storageacc) and container ({prefix}storagecontainer)" + type = string +} + +variable "location" { + description = "The Azure Region in which this resources should be created." + type = string +} diff --git a/src/matcha_ml/infrastructure/modules/seldon/main.tf b/src/matcha_ml/infrastructure/modules/seldon/main.tf new file mode 100644 index 00000000..b5c22a5f --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/seldon/main.tf @@ -0,0 +1,11 @@ + +module "seldon" { + source = "./seldon" + + depends_on = [null_resource.configure_local_kubectl] + + # details about the seldon deployment + seldon_name = var.seldon_name + seldon_namespace = var.seldon_namespace + +} diff --git a/src/matcha_ml/infrastructure/modules/seldon/outputs.tf b/src/matcha_ml/infrastructure/modules/seldon/outputs.tf new file mode 100644 index 00000000..4f6fc8af --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/seldon/outputs.tf @@ -0,0 +1,9 @@ +output "model_deployer_seldon_workloads_namespace" { + description = "The Kubernetes namespace for Seldon workloads" + value = module.seldon.workloads_namespace +} + +output "model_deployer_seldon_base_url" { + description = "The base URL for the Seldon API server" + value = module.seldon.base_url +} diff --git a/src/matcha_ml/infrastructure/modules/seldon/seldon/README.md b/src/matcha_ml/infrastructure/modules/seldon/seldon/README.md new file mode 100644 index 00000000..9ceb938e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/seldon/seldon/README.md @@ -0,0 +1,49 @@ +## Requirements + +| Name | Version | +|------|---------| +| [terraform](#requirement\_terraform) | >= 0.14.8 | +| [kubectl](#requirement\_kubectl) | 1.14.0 | + +## Providers + +| Name | Version | +|------|---------| +| 
[helm](#provider\_helm) | n/a | +| [kubectl](#provider\_kubectl) | 1.14.0 | +| [kubernetes](#provider\_kubernetes) | n/a | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [helm_release.istio_base](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | +| [helm_release.istio_ingress](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | +| [helm_release.istiod](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | +| [helm_release.seldon](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | +| [kubectl_manifest.gateway](https://registry.terraform.io/providers/gavinbunney/kubectl/1.14.0/docs/resources/manifest) | resource | +| [kubernetes_cluster_role_binding_v1.seldon_machinelearning-permission_binding](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/cluster_role_binding_v1) | resource | +| [kubernetes_cluster_role_v1.seldon-machinelearning_permission](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/cluster_role_v1) | resource | +| [kubernetes_namespace.istio_ns](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | resource | +| [kubernetes_namespace.seldon_ns](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | resource | +| [kubernetes_namespace.seldon_workloads](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | resource | +| [kubernetes_service.seldon_ingress](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/data-sources/service) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [seldon\_name](#input\_seldon\_name) | Seldon Helm deployment 
name | `string` | n/a | yes | +| [seldon\_namespace](#input\_seldon\_namespace) | Seldon system namespace | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [base\_url](#output\_base\_url) | The base URL of the Seldon deployment | +| [ingress\_gateway\_spec](#output\_ingress\_gateway\_spec) | The YAML specification for the Istio ingress gateway | +| [workloads\_namespace](#output\_workloads\_namespace) | The namespace for Seldon workloads | diff --git a/src/matcha_ml/infrastructure/modules/seldon/seldon/istio.tf b/src/matcha_ml/infrastructure/modules/seldon/seldon/istio.tf new file mode 100644 index 00000000..7c4d9a9a --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/seldon/seldon/istio.tf @@ -0,0 +1,65 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/aws-minimal/seldon/istio.tf + +# create a namespace for istio resources +resource "kubernetes_namespace" "istio_ns" { + metadata { + name = "istio-system" + labels = { + istio-injection = "enabled" + } + } +} + +# istio-base creates the istio definitions that will be used going forward +resource "helm_release" "istio_base" { + name = "istio-base-seldon" + repository = "https://istio-release.storage.googleapis.com/charts" + chart = "base" + + # adding a dependency on the istio-namespace + namespace = kubernetes_namespace.istio_ns.metadata[0].name +} + +# the istio daemon +resource "helm_release" "istiod" { + name = "istiod-seldon" + repository = helm_release.istio_base.repository # dependency on istio-base + chart = "istiod" + + namespace = kubernetes_namespace.istio_ns.metadata[0].name +} + +# the istio ingress gateway +# cannot use kubernetes_manifest resource since it practically +# doesn't support CRDs. Going with kubectl instead. 
+resource "kubectl_manifest" "gateway" { + yaml_body = < [terraform](#requirement\_terraform) | >= 0.14.8 | +| [htpasswd](#requirement\_htpasswd) | 1.0.4 | +| [kubectl](#requirement\_kubectl) | 1.14.0 | + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | +| [helm](#provider\_helm) | n/a | +| [kubernetes](#provider\_kubernetes) | n/a | +| [random](#provider\_random) | n/a | + +## Modules + +No modules. + +## Resources + +| Name | Type | +|------|------| +| [azurerm_mysql_flexible_database.db](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/mysql_flexible_database) | resource | +| [azurerm_mysql_flexible_server.mysql](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/mysql_flexible_server) | resource | +| [azurerm_mysql_flexible_server_configuration.require_ssl](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/mysql_flexible_server_configuration) | resource | +| [azurerm_mysql_flexible_server_firewall_rule.allow_IPs](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/mysql_flexible_server_firewall_rule) | resource | +| [helm_release.zen_server](https://registry.terraform.io/providers/hashicorp/helm/latest/docs/resources/release) | resource | +| [kubernetes_namespace.zen_server](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/resources/namespace) | resource | +| [random_password.mysql_password](https://registry.terraform.io/providers/hashicorp/random/latest/docs/resources/password) | resource | +| [kubernetes_service.zen_server](https://registry.terraform.io/providers/hashicorp/kubernetes/latest/docs/data-sources/service) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [analytics\_opt\_in](#input\_analytics\_opt\_in) | The flag to enable/disable analytics | `bool` | `false` | no | +| 
[database\_password](#input\_database\_password) | The password for the CloudSQL store | `string` | `""` | no | +| [database\_ssl\_ca](#input\_database\_ssl\_ca) | The server ca for the Flexible MySQL instance | `string` | `""` | no | +| [database\_ssl\_cert](#input\_database\_ssl\_cert) | The client cert for the Flexible MySQL instance | `string` | `""` | no | +| [database\_ssl\_key](#input\_database\_ssl\_key) | The client key for the Flexible MySQL instance | `string` | `""` | no | +| [database\_ssl\_verify\_server\_cert](#input\_database\_ssl\_verify\_server\_cert) | Should SSL be verified? | `bool` | `false` | no | +| [database\_url](#input\_database\_url) | The URL for the Flexible MySQL instance | `string` | `""` | no | +| [database\_username](#input\_database\_username) | The username for the CloudSQL store | `string` | `"user"` | no | +| [db\_disk\_size](#input\_db\_disk\_size) | The allocated storage in gigabytes | `number` | `20` | no | +| [db\_instance\_name](#input\_db\_instance\_name) | The name for the Flexible MySQL store | `string` | `"zenmlserver"` | no | +| [db\_name](#input\_db\_name) | The name for the database | `string` | `"zendb"` | no | +| [db\_sku\_name](#input\_db\_sku\_name) | The sku\_name for the database resource | `string` | `"B_Standard_B1s"` | no | +| [db\_version](#input\_db\_version) | The version of MySQL to use | `string` | `"5.7"` | no | +| [deploy\_db](#input\_deploy\_db) | Should a Flexible MySQL instance be created? 
| `bool` | `true` | no | +| [kubectl\_config\_path](#input\_kubectl\_config\_path) | The path to the kube config | `string` | `""` | no | +| [location](#input\_location) | The location for your Azure resources | `string` | n/a | yes | +| [namespace](#input\_namespace) | The namespace to install the ZenML server Helm chart in | `string` | `"terraform-server"` | no | +| [password](#input\_password) | Password for the default ZenML server account | `string` | n/a | yes | +| [prefix](#input\_prefix) | A prefix used for all resources | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The resource group in Azure that you want to deploy ZenML to | `string` | n/a | yes | +| [username](#input\_username) | Username for the default ZenML server account | `string` | `"default"` | no | +| [zenmlserver\_image\_repo](#input\_zenmlserver\_image\_repo) | The repository to use for the zenmlserver docker image. | `string` | `"zenmldocker/zenml-server"` | no | +| [zenmlserver\_image\_tag](#input\_zenmlserver\_image\_tag) | The tag to use for the zenmlserver docker image. 
| `string` | `"latest"` | no | + +## Outputs + +| Name | Description | +|------|-------------| +| [zenserver\_password](#output\_zenserver\_password) | The password used to access the ZenML server | +| [zenserver\_url](#output\_zenserver\_url) | The URL for the ZenML server | +| [zenserver\_username](#output\_zenserver\_username) | The username used to access the ZenML server | diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/getURL.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/getURL.tf new file mode 100644 index 00000000..a45aff5b --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/getURL.tf @@ -0,0 +1,10 @@ +data "kubernetes_service" "zen_server" { + metadata { + name = "${helm_release.zen_server.name}-zenml" + namespace = helm_release.zen_server.namespace + } + + depends_on = [ + helm_release.zen_server + ] +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/main.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/main.tf new file mode 100644 index 00000000..31696669 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/main.tf @@ -0,0 +1,66 @@ +# Derived from ZenML's stack recipes; source: https://github.com/zenml-io/mlops-stacks/blob/8eb06596bf836d3a3dd2634fbc7f2b5687421811/modules/zenml-module/zen_server.tf + +# create the ZenServer deployment +resource "kubernetes_namespace" "zen_server" { + metadata { + name = "${var.prefix}-${var.namespace}" + } +} + +resource "helm_release" "zen_server" { + + name = "${var.prefix}-zenserver" + chart = "${path.module}/zenml_helm" + namespace = kubernetes_namespace.zen_server.metadata[0].name + + set { + name = "zenml.image.repository" + value = var.zenmlserver_image_repo + } + + set { + name = "zenml.defaultUsername" + value = var.username + } + set { + name = "zenml.defaultPassword" + value = var.password + } + set { + name = "zenml.deploymentType" + value = "azure" + } + set { + name = "zenml.analyticsOptIn" + value = 
var.analytics_opt_in + } + + # set parameters for the mysql database + set { + name = "zenml.database.url" + value = var.deploy_db ? "mysql://${var.database_username}:${azurerm_mysql_flexible_server.mysql[0].administrator_password}@${azurerm_mysql_flexible_server.mysql[0].name}.mysql.database.azure.com:3306/${var.db_name}" : var.database_url + } + set { + name = "zenml.database.sslCa" + value = var.deploy_db ? "" : var.database_ssl_ca + } + set { + name = "zenml.database.sslCert" + value = var.deploy_db ? "" : var.database_ssl_cert + } + set { + name = "zenml.database.sslKey" + value = var.deploy_db ? "" : var.database_ssl_key + } + set { + name = "zenml.database.sslVerifyServerCert" + value = var.deploy_db ? false : var.database_ssl_verify_server_cert + } + set { + name = "zenml.image.tag" + value = var.zenmlserver_version + } + depends_on = [ + resource.kubernetes_namespace.zen_server + ] +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/outputs.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/outputs.tf new file mode 100644 index 00000000..9e8f482d --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/outputs.tf @@ -0,0 +1,15 @@ +output "zenserver_url" { + description = "The URL for the ZenML server" + value = "http://${data.kubernetes_service.zen_server.status.0.load_balancer.0.ingress.0.ip}" +} + +output "zenserver_username" { + description = "The username used to access the ZenML server" + value = var.username +} + +output "zenserver_password" { + description = "The password used to access the ZenML server" + value = var.password + sensitive = true +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/providers.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/providers.tf new file mode 100644 index 00000000..a418423b --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/providers.tf @@ -0,0 +1,16 @@ +# defining the providers for the zenserver module +terraform { + 
required_providers { + kubectl = { + source = "gavinbunney/kubectl" + version = "1.14.0" + } + + htpasswd = { + source = "loafoe/htpasswd" + version = "1.0.4" + } + } + + required_version = ">= 0.14.8" +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/sql.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/sql.tf new file mode 100644 index 00000000..4e87117e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/sql.tf @@ -0,0 +1,57 @@ +resource "azurerm_mysql_flexible_server" "mysql" { + count = var.deploy_db ? 1 : 0 + name = "${var.prefix}${var.db_instance_name}" + resource_group_name = var.resource_group_name + location = var.location + administrator_login = var.database_username + administrator_password = var.database_password == "" ? random_password.mysql_password.result : var.database_password + version = var.db_version + storage { + size_gb = var.db_disk_size + } + sku_name = var.db_sku_name +} + +resource "azurerm_mysql_flexible_database" "db" { + count = var.deploy_db ? 1 : 0 + name = "${var.prefix}${var.db_name}" + resource_group_name = var.resource_group_name + server_name = azurerm_mysql_flexible_server.mysql[0].name + charset = "utf8" + collation = "utf8_unicode_ci" +} + +resource "azurerm_mysql_flexible_server_firewall_rule" "allow_IPs" { + count = var.deploy_db ? 1 : 0 + name = "all_traffic" + resource_group_name = var.resource_group_name + server_name = azurerm_mysql_flexible_server.mysql[0].name + start_ip_address = "0.0.0.0" + end_ip_address = "255.255.255.255" +} + +resource "azurerm_mysql_flexible_server_configuration" "require_ssl" { + count = var.deploy_db ? 
1 : 0 + name = "require_secure_transport" + resource_group_name = var.resource_group_name + server_name = azurerm_mysql_flexible_server.mysql[0].name + value = "OFF" +} + +resource "random_password" "mysql_password" { + length = 12 + special = false + min_lower = 1 + min_numeric = 1 + min_upper = 1 +} + +# # download SSL certificate +# resource "null_resource" "download-SSL-certificate" { +# count = var.deploy_db ? 1 : 0 + +# provisioner "local-exec" { +# command = "wget https://dl.cacerts.digicert.com/DigiCertGlobalRootCA.crt.pem" +# } + +# } diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/variables.tf b/src/matcha_ml/infrastructure/modules/zenml/zen_server/variables.tf new file mode 100644 index 00000000..ae4a8d66 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/variables.tf @@ -0,0 +1,167 @@ +variable "prefix" { + description = "A prefix used for all resources" + type = string +} + +variable "resource_group_name" { + description = "The resource group in Azure that you want to deploy ZenML to" + type = string +} + +variable "location" { + description = "The location for your Azure resources" + type = string +} + +# ZenServer credentials +variable "username" { + description = "Username for the default ZenML server account" + default = "default" + type = string +} + +variable "password" { + description = "Password for the default ZenML server account" + type = string +} + +variable "namespace" { + description = "The namespace to install the ZenML server Helm chart in" + default = "terraform-server" + type = string +} + +variable "kubectl_config_path" { + description = "The path to the kube config" + default = "" + type = string +} + +variable "analytics_opt_in" { + description = "The flag to enable/disable analytics" + default = false + type = bool +} + +# If you want a new Flexible Server, choose a name and a password. If you already +# have an instance, provide the name and the password here too. 
+variable "database_username" { + description = "The username for the CloudSQL store" + default = "user" + type = string +} +variable "database_password" { + description = "The password for the CloudSQL store" + default = "" + type = string +} + +# if you enable the deploy_db option, this will +# create a new Flexible MySQL instance and then use it for this +# ZenServer. If disabled, you have to supply connection details +# in the section below. +variable "deploy_db" { + description = "Should a Flexible MySQL instance be created?" + default = true + type = bool +} +variable "db_instance_name" { + description = "The name for the Flexible MySQL store" + default = "zenmlserver" + type = string +} +variable "db_name" { + description = "The name for the database" + default = "zendb" + type = string +} +variable "db_version" { + description = "The version of MySQL to use" + default = "5.7" +} +variable "db_sku_name" { + description = "The sku_name for the database resource" + default = "B_Standard_B1s" + type = string +} +variable "db_disk_size" { + description = "The allocated storage in gigabytes" + default = 20 + type = number +} + +# If you haven't enabled the deploy_db option, provide +# the following value in addition to setting the username and +# password in the values.tfvars.json file. +variable "database_url" { + description = "The URL for the Flexible MySQL instance" + default = "" + type = string +} +variable "database_ssl_ca" { + description = "The server ca for the Flexible MySQL instance" + default = "" + type = string +} +variable "database_ssl_cert" { + description = "The client cert for the Flexible MySQL instance" + default = "" + type = string +} +variable "database_ssl_key" { + description = "The client key for the Flexible MySQL instance" + default = "" + type = string +} +variable "database_ssl_verify_server_cert" { + description = "Should SSL be verified?" 
+ default = false + type = bool +} + +# # Ingress variables +# variable "ingress_path" { +# description = "The path on the Ingress URL to expose ZenML at" +# default = "zenml" +# type = string +# } + +# # set to true if you don't already have an nginx ingress +# # controller in your cluster +# variable "create_ingress_controller" { +# description = "set to true if you want to create an ingress controller in your cluster" +# default = true +# type = bool +# } + +# # if you already have an ingress controller, supply it's URL +# variable "ingress_controller_hostname" { +# description = "The hostname for the ingress controller on your cluster" +# default = "" +# type = string +# } +# variable "ingress_tls" { +# description = "Whether to enable tls on the ingress or not" +# default = true +# type = bool +# } +# variable "ingress_tls_generate_certs" { +# description = "Whether to enable tls certificates or not" +# default = true +# type = bool +# } +# variable "ingress_tls_secret_name" { +# description = "Name for the Kubernetes secret that stores certificates" +# default = "zenml-tls-certs" +# type = string +# } + +variable "zenmlserver_image_repo" { + description = "The repository to use for the zenmlserver docker image." + default = "zenmldocker/zenml-server" + type = string +} +variable "zenmlserver_version" { + description = "The tag to use for the zenmlserver docker image." 
+ type = string +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/Chart.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/Chart.yaml new file mode 100644 index 00000000..51fbb0fe --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/Chart.yaml @@ -0,0 +1,13 @@ +apiVersion: v2 +name: zenml +version: "1.0" +description: Open source MLOps framework for portable production ready ML pipelines +keywords: +- mlops +- zenml +- server +home: https://zenml.io +sources: +- https://github.com/zenml-io/zenml +icon: https://raw.githubusercontent.com/zenml-io/zenml/main/docs/book/.gitbook/assets/zenml_logo.png +appVersion: "0.42.1" diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/NOTES.txt b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/NOTES.txt new file mode 100644 index 00000000..a4de5753 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/NOTES.txt @@ -0,0 +1,36 @@ +{{- if .Values.zenml.ingress.enabled }} +{{- if .Values.zenml.ingress.host }} +You can access the ZenML server at: + + http{{ if $.Values.zenml.ingress.tls.enabled }}s{{ end }}://{{ .Values.zenml.ingress.host }}{{ .Values.zenml.ingress.path }} + +with the following credentials: + + username: {{ .Values.zenml.defaultUsername }} + password: {{ .Values.zenml.defaultPassword }} + +{{- else }} + + +{{- end }} +{{- else }} + +You can get the ZenML server URL by running these commands: + +{{- if contains "NodePort" .Values.zenml.service.type }} + export NODE_PORT=$(kubectl get --namespace {{ .Release.Namespace }} -o jsonpath="{.spec.ports[0].nodePort}" services {{ include "zenml.fullname" . 
}}) + export NODE_IP=$(kubectl get nodes --namespace {{ .Release.Namespace }} -o jsonpath="{.items[0].status.addresses[0].address}") + echo http://$NODE_IP:$NODE_PORT +{{- else if contains "LoadBalancer" .Values.zenml.service.type }} + NOTE: It may take a few minutes for the LoadBalancer IP to be available. + You can watch the status of by running 'kubectl get --namespace {{ .Release.Namespace }} svc -w {{ include "zenml.fullname" . }}' + export SERVICE_IP=$(kubectl get svc --namespace {{ .Release.Namespace }} {{ include "zenml.fullname" . }} --template "{{"{{ range (index .status.loadBalancer.ingress 0) }}{{.}}{{ end }}"}}") + echo http://$SERVICE_IP:{{ .Values.zenml.service.port }} +{{- else if contains "ClusterIP" .Values.zenml.service.type }} + export POD_NAME=$(kubectl get pods --namespace {{ .Release.Namespace }} -l "app.kubernetes.io/name={{ include "zenml.name" . }},app.kubernetes.io/instance={{ .Release.Name }}" -o jsonpath="{.items[0].metadata.name}") + export CONTAINER_PORT=$(kubectl get pod --namespace {{ .Release.Namespace }} $POD_NAME -o jsonpath="{.spec.containers[0].ports[0].containerPort}") + echo "Visit http://127.0.0.1:8080 to use your application" + kubectl --namespace {{ .Release.Namespace }} port-forward $POD_NAME 8080:$CONTAINER_PORT +{{- end }} + +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/_helpers.tpl b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/_helpers.tpl new file mode 100644 index 00000000..6732baae --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/_helpers.tpl @@ -0,0 +1,70 @@ +{{/* +Expand the name of the chart. +*/}} +{{- define "zenml.name" -}} +{{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Create a default fully qualified app name. +We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec). 
+If release name contains chart name it will be used as a full name. +*/}} +{{- define "zenml.fullname" -}} +{{- if .Values.fullnameOverride }} +{{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- $name := default .Chart.Name .Values.nameOverride }} +{{- if contains $name .Release.Name }} +{{- .Release.Name | trunc 63 | trimSuffix "-" }} +{{- else }} +{{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }} +{{- end }} +{{- end }} +{{- end }} + +{{/* +Create chart name and version as used by the chart label. +*/}} +{{- define "zenml.chart" -}} +{{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }} +{{- end }} + +{{/* +Common labels +*/}} +{{- define "zenml.labels" -}} +helm.sh/chart: {{ include "zenml.chart" . }} +{{ include "zenml.selectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} +{{- define "zenml.metadataLabels" -}} +helm.sh/chart: {{ include "zenml.chart" . }} +{{ include "zenml.metadataSelectorLabels" . }} +{{- if .Chart.AppVersion }} +app.kubernetes.io/version: {{ .Chart.AppVersion | quote }} +{{- end }} +app.kubernetes.io/managed-by: {{ .Release.Service }} +{{- end }} + +{{/* +Selector labels +*/}} +{{- define "zenml.selectorLabels" -}} +app.kubernetes.io/name: {{ include "zenml.name" . }} +app.kubernetes.io/instance: {{ .Release.Name }} +{{- end }} + +{{/* +Create the name of the service account to use +*/}} +{{- define "zenml.serviceAccountName" -}} +{{- if .Values.serviceAccount.create }} +{{- default (include "zenml.fullname" .) 
.Values.serviceAccount.name }} +{{- else }} +{{- default "default" .Values.serviceAccount.name }} +{{- end }} +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/cert-secret.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/cert-secret.yaml new file mode 100644 index 00000000..c10854db --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/cert-secret.yaml @@ -0,0 +1,45 @@ +{{- if and .Values.zenml.ingress.enabled .Values.zenml.ingress.tls.enabled .Values.zenml.ingress.tls.generateCerts -}} + +{{- $certSubjectName := .Values.zenml.ingress.host -}} + +{{- $prevServerSecret := (lookup "v1" "Secret" .Release.Namespace .Values.zenml.ingress.tls.secretName) -}} +{{- if or .Release.IsInstall (not $prevServerSecret) }} +{{- $_ := set . "regenerateCerts" true -}} +{{- else if eq (index $prevServerSecret.metadata.annotations "zenml.certs/subject-name") $certSubjectName }} +{{- $_ := set . "regenerateCerts" false -}} +{{- else }} +{{- $_ := set . "regenerateCerts" true -}} +{{- end }} + +{{- if .regenerateCerts }} + +{{- $caCert := genCA "zenml-ca" 365 -}} +{{- $serverCert := genSignedCert $certSubjectName nil (list $certSubjectName) 365 $caCert -}} + +{{- $_ := set . "caCert" $caCert.Cert -}} +{{- $_ := set . "serverCert" $serverCert.Cert -}} +{{- $_ := set . "serverKey" $serverCert.Key -}} + +{{- else }} + +{{- $_ := set . "caCert" (index $prevServerSecret.data "ca.crt" | b64dec) -}} +{{- $_ := set . "serverCert" (index $prevServerSecret.data "tls.crt" | b64dec) -}} +{{- $_ := set . "serverKey" (index $prevServerSecret.data "tls.key" | b64dec) -}} + +{{- end }} + +apiVersion: v1 +kind: Secret +metadata: + name: {{ .Values.zenml.ingress.tls.secretName }} + labels: + {{- include "zenml.labels" . 
| nindent 4 }} + annotations: + zenml.certs/subject-name: {{ $certSubjectName}} +type: kubernetes.io/tls +data: + tls.crt: {{ .serverCert | b64enc | quote }} + tls.key: {{ .serverKey | b64enc | quote }} + ca.crt: {{ .caCert | b64enc | quote }} + +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/hpa.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/hpa.yaml new file mode 100644 index 00000000..da913c40 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/hpa.yaml @@ -0,0 +1,28 @@ +{{- if .Values.autoscaling.enabled }} +apiVersion: autoscaling/v2beta1 +kind: HorizontalPodAutoscaler +metadata: + name: {{ include "zenml.fullname" . }} + labels: + {{- include "zenml.labels" . | nindent 4 }} +spec: + scaleTargetRef: + apiVersion: apps/v1 + kind: Deployment + name: {{ include "zenml.fullname" . }} + minReplicas: {{ .Values.autoscaling.minReplicas }} + maxReplicas: {{ .Values.autoscaling.maxReplicas }} + metrics: + {{- if .Values.autoscaling.targetCPUUtilizationPercentage }} + - type: Resource + resource: + name: cpu + targetAverageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }} + {{- end }} + {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }} + - type: Resource + resource: + name: memory + targetAverageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }} + {{- end }} +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-deployment.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-deployment.yaml new file mode 100644 index 00000000..6e28098f --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-deployment.yaml @@ -0,0 +1,242 @@ +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "zenml.fullname" . }} + labels: + {{- include "zenml.labels" . 
| nindent 4 }} +spec: + {{- if .Values.zenml.database.url }} + {{- if not .Values.autoscaling.enabled }} + replicas: {{ .Values.zenml.replicaCount }} + {{- end }} + {{- else }} + replicas: 1 + {{- end }} + selector: + matchLabels: + {{- include "zenml.selectorLabels" . | nindent 6 }} + template: + metadata: + annotations: + checksum/secret: {{ include (print $.Template.BasePath "/server-secret.yaml") . | sha256sum }} + {{- with .Values.podAnnotations }} + {{- toYaml . | nindent 8 }} + {{- end }} + labels: + {{- include "zenml.selectorLabels" . | nindent 8 }} + spec: + {{- with .Values.imagePullSecrets }} + imagePullSecrets: + {{- toYaml . | nindent 8 }} + {{- end }} + serviceAccountName: {{ include "zenml.serviceAccountName" . }} + securityContext: + {{- toYaml .Values.podSecurityContext | nindent 8 }} + {{- if and (eq .Values.zenml.secretsStore.type "gcp") .Values.zenml.secretsStore.gcp.google_application_credentials }} + volumes: + - name: gcp-credentials + secret: + secretName: {{ include "zenml.fullname" . 
}} + items: + - key: GOOGLE_APPLICATION_CREDENTIALS_FILE + path: credentials.json + {{- end }} + {{- if .Values.zenml.database.url }} + initContainers: + - name: {{ .Chart.Name }}-db-init + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.zenml.image.repository }}:{{ .Values.zenml.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.zenml.image.pullPolicy }} + args: ["status"] + command: ['zenml'] + env: + {{- if .Values.zenml.debug }} + - name: ZENML_LOGGING_VERBOSITY + value: "DEBUG" + {{- end }} + - name: ZENML_ANALYTICS_OPT_IN + value: "False" + - name: ZENML_DEFAULT_PROJECT_NAME + value: {{ .Values.zenml.defaultProject | quote }} + - name: ZENML_DEFAULT_USER_NAME + value: {{ .Values.zenml.defaultUsername | quote }} + {{- if .Values.zenml.database.url }} + - name: ZENML_STORE_TYPE + value: sql + - name: ZENML_STORE_SSL_VERIFY_SERVER_CERT + value: {{ .Values.zenml.database.sslVerifyServerCert | default "false" | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.enabled }} + - name: ZENML_SECRETS_STORE_TYPE + value: {{ .Values.zenml.secretsStore.type | quote }} + {{- if eq .Values.zenml.secretsStore.type "aws" }} + - name: ZENML_SECRETS_STORE_REGION_NAME + value: {{ .Values.zenml.secretsStore.aws.region_name | quote }} + - name: ZENML_SECRETS_STORE_SECRET_LIST_REFRESH_TIMEOUT + value: {{ .Values.zenml.secretsStore.aws.secret_list_refresh_timeout | quote }} + {{- else if eq .Values.zenml.secretsStore.type "gcp" }} + - name: ZENML_SECRETS_STORE_PROJECT_ID + value: {{ .Values.zenml.secretsStore.gcp.project_id | quote }} + {{- if .Values.zenml.secretsStore.gcp.google_application_credentials }} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: /gcp-credentials/credentials.json + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "azure" }} + - name: ZENML_SECRETS_STORE_KEY_VAULT_NAME + value: {{ .Values.zenml.secretsStore.azure.key_vault_name | quote }} + {{- else if eq 
.Values.zenml.secretsStore.type "hashicorp" }} + - name: ZENML_SECRETS_STORE_VAULT_ADDR + value: {{ .Values.zenml.secretsStore.hashicorp.vault_addr | quote }} + {{- if .Values.zenml.secretsStore.hashicorp.vault_namespace }} + - name: ZENML_SECRETS_STORE_VAULT_NAMESPACE + value: {{ .Values.zenml.secretsStore.hashicorp.vault_namespace | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.hashicorp.max_versions }} + - name: ZENML_SECRETS_STORE_MAX_VERSIONS + value: {{ .Values.zenml.secretsStore.hashicorp.max_versions | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "custom" }} + - name: ZENML_SECRETS_STORE_CLASS_PATH + value: {{ .Values.zenml.secretsStore.custom.class_path | quote }} + {{- end }} + {{- else }} + - name: ZENML_SECRETS_STORE_TYPE + value: none + {{- end }} + {{- if .Values.zenml.environment }} + {{- range $key, $value := .Values.zenml.environment }} + - name: {{ $key }} + value: {{ $value | quote }} + {{- end }} + {{- end }} + envFrom: + - secretRef: + name: {{ include "zenml.fullname" . 
}} + {{- if and (eq .Values.zenml.secretsStore.type "gcp") .Values.zenml.secretsStore.gcp.google_application_credentials }} + volumeMounts: + - name: gcp-credentials + mountPath: /gcp-credentials + readOnly: true + {{- end }} + {{- end }} + containers: + - name: {{ .Chart.Name }} + securityContext: + {{- toYaml .Values.securityContext | nindent 12 }} + image: "{{ .Values.zenml.image.repository }}:{{ .Values.zenml.image.tag | default .Chart.AppVersion }}" + imagePullPolicy: {{ .Values.zenml.image.pullPolicy }} + env: + {{- if .Values.zenml.debug }} + - name: ZENML_LOGGING_VERBOSITY + value: "DEBUG" + {{- end }} + {{- if .Values.zenml.analyticsOptIn }} + - name: ZENML_ANALYTICS_OPT_IN + value: "True" + {{- else if not .Values.zenml.analyticsOptIn }} + - name: ZENML_ANALYTICS_OPT_IN + value: "False" + {{- end }} + - name: ZENML_AUTH_TYPE + value: {{ .Values.zenml.authType | quote }} + {{- if .Values.zenml.rootUrlPath }} + - name: ZENML_SERVER_ROOT_URL_PATH + value: {{ .Values.zenml.rootUrlPath | quote }} + {{- end }} + - name: ZENML_DEFAULT_PROJECT_NAME + value: {{ .Values.zenml.defaultProject | quote }} + - name: ZENML_DEFAULT_USER_NAME + value: {{ .Values.zenml.defaultUsername | quote }} + {{- if .Values.zenml.enableImplicitAuthMethods }} + - name: ZENML_ENABLE_IMPLICIT_AUTH_METHODS + value: "True" + {{- end }} + {{- if .Values.zenml.database.url }} + - name: ZENML_STORE_TYPE + value: sql + - name: DISABLE_DATABASE_MIGRATION + value: "True" + - name: ZENML_STORE_SSL_VERIFY_SERVER_CERT + value: {{ .Values.zenml.database.sslVerifyServerCert | default "false" | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.enabled }} + - name: ZENML_SECRETS_STORE_TYPE + value: {{ .Values.zenml.secretsStore.type | quote }} + {{- if eq .Values.zenml.secretsStore.type "aws" }} + - name: ZENML_SECRETS_STORE_REGION_NAME + value: {{ .Values.zenml.secretsStore.aws.region_name | quote }} + - name: ZENML_SECRETS_STORE_SECRET_LIST_REFRESH_TIMEOUT + value: {{ 
.Values.zenml.secretsStore.aws.secret_list_refresh_timeout | quote }} + {{- else if eq .Values.zenml.secretsStore.type "gcp" }} + - name: ZENML_SECRETS_STORE_PROJECT_ID + value: {{ .Values.zenml.secretsStore.gcp.project_id | quote }} + {{- if .Values.zenml.secretsStore.gcp.google_application_credentials }} + - name: GOOGLE_APPLICATION_CREDENTIALS + value: /gcp-credentials/credentials.json + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "azure" }} + - name: ZENML_SECRETS_STORE_KEY_VAULT_NAME + value: {{ .Values.zenml.secretsStore.azure.key_vault_name | quote }} + {{- else if eq .Values.zenml.secretsStore.type "hashicorp" }} + - name: ZENML_SECRETS_STORE_VAULT_ADDR + value: {{ .Values.zenml.secretsStore.hashicorp.vault_addr | quote }} + {{- if .Values.zenml.secretsStore.hashicorp.vault_namespace }} + - name: ZENML_SECRETS_STORE_VAULT_NAMESPACE + value: {{ .Values.zenml.secretsStore.hashicorp.vault_namespace | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.hashicorp.max_versions }} + - name: ZENML_SECRETS_STORE_MAX_VERSIONS + value: {{ .Values.zenml.secretsStore.hashicorp.max_versions | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "custom" }} + - name: ZENML_SECRETS_STORE_CLASS_PATH + value: {{ .Values.zenml.secretsStore.custom.class_path | quote }} + {{- end }} + {{- else }} + - name: ZENML_SECRETS_STORE_TYPE + value: none + {{- end }} + - name: ZENML_SERVER_DEPLOYMENT_TYPE + value: {{ .Values.zenml.deploymentType | default "kubernetes" }} + {{- if .Values.zenml.environment }} + {{- range $key, $value := .Values.zenml.environment }} + - name: {{ $key }} + value: {{ $value | quote }} + {{- end }} + {{- end }} + envFrom: + - secretRef: + name: {{ include "zenml.fullname" . 
}} + {{- if and (eq .Values.zenml.secretsStore.type "gcp") .Values.zenml.secretsStore.gcp.google_application_credentials }} + volumeMounts: + - name: gcp-credentials + mountPath: /gcp-credentials + readOnly: true + {{- end }} + ports: + - name: http + containerPort: 8080 + protocol: TCP + livenessProbe: + httpGet: + path: /health + port: http + readinessProbe: + httpGet: + path: /health + port: http + resources: + {{- toYaml .Values.resources | nindent 12 }} + {{- with .Values.nodeSelector }} + nodeSelector: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.affinity }} + affinity: + {{- toYaml . | nindent 8 }} + {{- end }} + {{- with .Values.tolerations }} + tolerations: + {{- toYaml . | nindent 8 }} + {{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-ingress.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-ingress.yaml new file mode 100644 index 00000000..d2e0423e --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-ingress.yaml @@ -0,0 +1,59 @@ +{{- if .Values.zenml.ingress.enabled -}} +{{- $fullName := include "zenml.fullname" . 
-}} +{{- $svcPort := .Values.zenml.service.port -}} +{{- if and .Values.zenml.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }} + {{- if not (hasKey .Values.zenml.ingress.annotations "kubernetes.io/ingress.class") }} + {{- $_ := set .Values.zenml.ingress.annotations "kubernetes.io/ingress.class" .Values.zenml.ingress.className}} + {{- end }} +{{- end }} +{{- if and $.Values.zenml.ingress.tls.enabled (eq .Values.zenml.ingress.className "nginx") }} + {{- $_ := set .Values.zenml.ingress.annotations "nginx.ingress.kubernetes.io/ssl-redirect" "true"}} +{{- end }} + +{{- if semverCompare ">=1.19-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1 +{{- else if semverCompare ">=1.14-0" .Capabilities.KubeVersion.GitVersion -}} +apiVersion: networking.k8s.io/v1beta1 +{{- else -}} +apiVersion: extensions/v1beta1 +{{- end }} +kind: Ingress +metadata: + name: {{ $fullName }} + labels: + {{- include "zenml.labels" . | nindent 4 }} + {{- with .Values.zenml.ingress.annotations }} + annotations: + {{- toYaml . 
| nindent 4 }} + {{- end }} +spec: + {{- if and .Values.zenml.ingress.className (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion) }} + ingressClassName: {{ .Values.zenml.ingress.className }} + {{- end }} + {{- if .Values.zenml.ingress.tls.enabled }} + tls: + - hosts: + - {{ .Values.zenml.ingress.host | quote }} + secretName: {{ .Values.zenml.ingress.tls.secretName }} + {{- end }} + rules: + - http: + paths: + - path: {{ .Values.zenml.ingress.path }} + {{- if semverCompare ">=1.18-0" $.Capabilities.KubeVersion.GitVersion }} + pathType: Prefix + {{- end }} + backend: + {{- if semverCompare ">=1.19-0" $.Capabilities.KubeVersion.GitVersion }} + service: + name: {{ $fullName }} + port: + number: {{ $svcPort }} + {{- else }} + serviceName: {{ $fullName }} + servicePort: {{ $svcPort }} + {{- end }} + {{- if .Values.zenml.ingress.host }} + host: {{ .Values.zenml.ingress.host | quote }} + {{- end }} +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-secret.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-secret.yaml new file mode 100644 index 00000000..45f8fd11 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-secret.yaml @@ -0,0 +1,70 @@ +apiVersion: v1 +kind: Secret +type: Opaque +metadata: + name: {{ include "zenml.fullname" . }} + labels: + {{- include "zenml.labels" . 
| nindent 4 }} +data: + ZENML_DEFAULT_USER_PASSWORD: {{ .Values.zenml.defaultPassword | b64enc | quote }} + {{- if .Values.zenml.jwtSecretKey }} + ZENML_JWT_SECRET_KEY: {{ .Values.zenml.jwtSecretKey | b64enc | quote }} + {{- else if .Release.IsInstall }} + ZENML_JWT_SECRET_KEY: {{ randAlphaNum 32 | b64enc | quote }} + {{- else }} + ZENML_JWT_SECRET_KEY: {{ (lookup "v1" "Secret" .Release.Namespace (include "zenml.fullname" .)).data.ZENML_JWT_SECRET_KEY | default (randAlphaNum 32 | b64enc | quote) }} + {{- end }} + {{- if .Values.zenml.database.url }} + ZENML_STORE_URL: {{ .Values.zenml.database.url | b64enc | quote }} + {{- if .Values.zenml.database.sslCa }} + ZENML_STORE_SSL_CA: {{ .Files.Get .Values.zenml.database.sslCa | b64enc }} + {{- end }} + {{- if .Values.zenml.database.sslCert }} + ZENML_STORE_SSL_CERT: {{ .Files.Get .Values.zenml.database.sslCert | b64enc }} + {{- end }} + {{- if .Values.zenml.database.sslKey }} + ZENML_STORE_SSL_KEY: {{ .Files.Get .Values.zenml.database.sslKey | b64enc }} + {{- end }} + {{- end }} + {{- if .Values.zenml.secretsStore.enabled }} + {{- if eq .Values.zenml.secretsStore.type "sql" }} + {{- if .Values.zenml.secretsStore.sql.encryptionKey }} + ZENML_SECRETS_STORE_ENCRYPTION_KEY: {{ .Values.zenml.secretsStore.sql.encryptionKey | b64enc | quote }} + {{- else if .Values.zenml.secretsStore.encryptionKey }} + ZENML_SECRETS_STORE_ENCRYPTION_KEY: {{ .Values.zenml.secretsStore.encryptionKey | b64enc | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "aws" }} + {{- if .Values.zenml.secretsStore.aws.aws_access_key_id }} + ZENML_SECRETS_STORE_AWS_ACCESS_KEY_ID: {{ .Values.zenml.secretsStore.aws.aws_access_key_id | b64enc | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.aws.aws_secret_access_key }} + ZENML_SECRETS_STORE_AWS_SECRET_ACCESS_KEY: {{ .Values.zenml.secretsStore.aws.aws_secret_access_key | b64enc | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.aws.aws_session_token }} + 
ZENML_SECRETS_STORE_AWS_SESSION_TOKEN: {{ .Values.zenml.secretsStore.aws.aws_session_token | b64enc | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "azure" }} + {{- if .Values.zenml.secretsStore.azure.azure_client_id }} + ZENML_SECRETS_STORE_AZURE_CLIENT_ID: {{ .Values.zenml.secretsStore.azure.azure_client_id | b64enc | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.azure.azure_client_secret }} + ZENML_SECRETS_STORE_AZURE_CLIENT_SECRET: {{ .Values.zenml.secretsStore.azure.azure_client_secret | b64enc | quote }} + {{- end }} + {{- if .Values.zenml.secretsStore.azure.azure_tenant_id }} + ZENML_SECRETS_STORE_AZURE_TENANT_ID: {{ .Values.zenml.secretsStore.azure.azure_tenant_id | b64enc | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "gcp" }} + {{- if .Values.zenml.secretsStore.gcp.google_application_credentials }} + GOOGLE_APPLICATION_CREDENTIALS_FILE: {{ .Files.Get .Values.zenml.secretsStore.gcp.google_application_credentials | b64enc | quote }} + {{- end }} + {{- else if eq .Values.zenml.secretsStore.type "hashicorp" }} + {{- if .Values.zenml.secretsStore.hashicorp.vault_token }} + ZENML_SECRETS_STORE_VAULT_TOKEN: {{ .Values.zenml.secretsStore.hashicorp.vault_token | b64enc | quote }} + {{- end }} + {{- end }} + {{- end }} + {{- if .Values.zenml.environment }} + {{- range $key, $value := .Values.zenml.environment }} + {{ $key }}: {{ $value | b64enc | quote }} + {{- end }} + {{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-service.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-service.yaml new file mode 100644 index 00000000..74d00f3a --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/server-service.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Service +metadata: + name: {{ include "zenml.fullname" . }} + labels: + {{- include "zenml.labels" . 
| nindent 4 }} +spec: + type: {{ .Values.zenml.service.type }} + ports: + - port: {{ .Values.zenml.service.port }} + targetPort: 8080 + protocol: TCP + name: http + selector: + {{- include "zenml.selectorLabels" . | nindent 4 }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/serviceaccount.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/serviceaccount.yaml new file mode 100644 index 00000000..79eacbc8 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/serviceaccount.yaml @@ -0,0 +1,12 @@ +{{- if .Values.serviceAccount.create -}} +apiVersion: v1 +kind: ServiceAccount +metadata: + name: {{ include "zenml.serviceAccountName" . }} + labels: + {{- include "zenml.labels" . | nindent 4 }} + {{- with .Values.serviceAccount.annotations }} + annotations: + {{- toYaml . | nindent 4 }} + {{- end }} +{{- end }} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/tests/test-connection.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/tests/test-connection.yaml new file mode 100644 index 00000000..1ff5a91f --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/templates/tests/test-connection.yaml @@ -0,0 +1,15 @@ +apiVersion: v1 +kind: Pod +metadata: + name: "{{ include "zenml.fullname" . }}-test-connection" + labels: + {{- include "zenml.labels" . | nindent 4 }} + annotations: + "helm.sh/hook": test +spec: + containers: + - name: wget + image: busybox + command: ['wget'] + args: ['{{ include "zenml.fullname" . 
}}:{{ .Values.zenml.service.port }}'] + restartPolicy: Never diff --git a/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/values.yaml b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/values.yaml new file mode 100644 index 00000000..e03a5cd7 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zen_server/zenml_helm/values.yaml @@ -0,0 +1,326 @@ +# Default values for zenml. + +imagePullSecrets: [] +nameOverride: "" +fullnameOverride: "" + + +# ZenML server related options. +zenml: + + replicaCount: 1 + + image: + repository: zenmldocker/zenml-server + pullPolicy: Always + # Overrides the image tag whose default is the chart appVersion. + tag: + + debug: true + + # Flag to enable/disable the tracking process of the analytics + analyticsOptIn: true + + # ZenML server deployment type. This field is used for telemetry purposes. + # Example values are "local", "kubernetes", "aws", "gcp", "azure". + deploymentType: + + # The ZenML authentication scheme. Use one of: + # + # NO_AUTH - No authentication + # HTTP_BASIC - HTTP Basic authentication + # OAUTH2_PASSWORD_BEARER - OAuth2 password bearer with JWT tokens + authType: OAUTH2_PASSWORD_BEARER + + # The secret key used to sign JWT tokens. Only relevant if the + # OAUTH2_PASSWORD_BEARER authentication scheme is used. This should be set to + # a random string with a recommended length of at least 32 characters, e.g.: + # + # ```python + # from secrets import token_hex + # token_hex(32) + # ``` + # + # or: + # + # ```shell + # openssl rand -hex 32 + # ``` + # + # If not explicitly set, a random key will be generated when the helm + # chart is installed and reused for all subsequent upgrades. + jwtSecretKey: + + # The root URL path to use when behind a proxy. 
This is useful when the + # `rewrite-target` annotation is used in the ingress controller, e.g.: + # + # ```yaml + # rootUrlPath: /zenml + # + # ingress: + # enabled: true + # className: "nginx" + # annotations: + # nginx.ingress.kubernetes.io/rewrite-target: /$1 + # host: + # path: /zenml/?(.*) + # ``` + rootUrlPath: + + defaultProject: default + defaultUsername: default + # Use your own password here + defaultPassword: zenml + + # Implicit authentication methods featured by service connectors that support + # them are disabled by default, for security reasons. This is because they + # allow users to authenticate to the cloud provider where ZenML is running + # without having to provide any credentials. + enableImplicitAuthMethods: false + + # MySQL database configuration. If not set, a local sqlite database will be + # used, which will not be persisted across pod restarts. + # NOTE: the certificate files need to be copied in the helm chart folder and + # the paths configured here need to be relative to the root of the helm chart. + database: {} + # url: "mysql://admin:password@zenml-mysql:3306/database" + # sslCa: /path/to/ca.pem + # sslCert: /path/to/client-cert.pem + # sslKey: /path/to/client-key.pem + # sslVerifyServerCert: True + + + # Secrets store settings. This is used to store centralized secrets. + secretsStore: + + # Set to false to disable the secrets store. + enabled: true + + # The type of secrets store to use. 
Use one of: + # + # sql - Use the same SQL database as the ZenML server + # aws - Use the AWS Secrets Manager as a secrets store + # gcp - Use the GCP Secrets Manager as a secrets store + # azure - Use the Azure Key Vault as a secrets store + # hashicorp - Use the HashiCorp Vault as a secrets store + # custom - Use a custom secrets store implementation (needs a custom + # ZenML server image with the custom secrets store implementation + # installed) + # + # Depending on the type, additional configuration options may be required + # under the respective sections. + # + type: sql + + # SQL secrets store configuration. Only relevant if the `sql` secrets store + # type is configured. + sql: + + # The secret key used to encrypt secrets in the SQL database. Only relevant + # if the SQL secrets store type is used. This should be set to a random + # string with a recommended length of at least 32 characters, e.g.: + # + # ```python + # from secrets import token_hex + # token_hex(32) + # ``` + # + # or: + # + # ```shell + # openssl rand -hex 32 + # ``` + # + # If not set, database secret encryption will be disabled. + # + # IMPORTANT: If you configure encryption for your SQL database secrets + # store, you should keep this value somewhere safe and secure, as it will be + # required to decrypt the secrets in the database. If you lose the + # encryption key, you will not be able to decrypt the secrets in the + # database and will have to reset them. You should not change this value + # after you have already configured encryption for your SQL database + # secrets store. + encryptionKey: + + # AWS secrets store configuration. Only relevant if the `aws` secrets store + # type is configured. + aws: + + # The AWS region to use. This must be set to the region where the AWS + # Secrets Manager service that you want to use is located. + region_name: us-east-1 + + # The AWS credentials to use to authenticate with the AWS Secrets + # Manager instance. 
You can omit these if you are running the ZenML server + # in an AWS EKS cluster that has an IAM role attached to it that has + # permissions to access the AWS Secrets Manager instance. + # NOTE: setting this is the same as setting the AWS_ACCESS_KEY_ID, + # AWS_SECRET_ACCESS_KEY, and AWS_SESSION_TOKEN environment variables + # in the zenml.secretEnvironment variable. + aws_access_key_id: + aws_secret_access_key: + aws_session_token: + + # The AWS Secrets Manager has a known issue where it does not immediately + # reflect new and updated secrets in the `list_secrets` results. To work + # around this issue, you can set this value to a non-zero value to + # get the ZenML server to wait after creating or updating an AWS secret + # until the changes are reflected in the secrets returned by + # `list_secrets` or the number of seconds specified by this value has + # elapsed. Should not be set to a high value as it may cause thread + # starvation in the ZenML server on high load. + secret_list_refresh_timeout: 0 + + + # GCP secrets store configuration. Only relevant if the `gcp` secrets store + # type is configured. + gcp: + + # The GCP project ID to use. This must be set to the project ID where the + # GCP Secrets Manager service that you want to use is located. + project_id: my-gcp-project + + # Path to the GCP credentials file to use to authenticate with the GCP Secrets + # Manager instance. You can omit this if you are running the ZenML server + # in a GCP GKE cluster that uses workload identity to authenticate with + # GCP services without the need for credentials. + # NOTE: the credentials file needs to be copied in the helm chart folder + # and the path configured here needs to be relative to the root of the + # helm chart. + google_application_credentials: + + # AWS Key Vault secrets store configuration. Only relevant if the `azure` + # secrets store type is configured. + azure: + + # The name of the Azure Key Vault. 
This must be set to point to the Azure + # Key Vault instance that you want to use. + key_vault_name: + + # The Azure application service principal credentials to use to + # authenticate with the Azure Key Vault API. You can omit these if you are + # running the ZenML server hosted in Azure and are using a managed + # identity to access the Azure Key Vault service. + # NOTE: setting this is the same as setting the AZURE_CLIENT_ID, + # AZURE_CLIENT_SECRET, and AZURE_TENANT_ID environment variables + # in the zenml.secretEnvironment variable. + azure_client_id: + azure_client_secret: + azure_tenant_id: + + # HashiCorp Vault secrets store configuration. Only relevant if the `hashicorp` + # secrets store type is configured + hashicorp: + + # The url of the HashiCorp Vault server + vault_addr: https://vault.example.com + # The token used to authenticate with the Vault server + vault_token: + # The Vault Enterprise namespace. Not required for Vault OSS. + vault_namespace: + # The maximum number of secret versions to keep. If not set, the default + # value of 1 will be used (only the latest version will be kept). + max_versions: + + # Custom secrets store configuration. Only relevant if the `custom` secrets + # store type is configured. + custom: + + # The class path of the custom secrets store implementation. This should + # point to a full Python class that extends the + # `zenml.zen_stores.secrets_stores.base_secrets_store.BaseSecretsStore` + # base class. The class should be importable from the container image + # that you are using for the ZenML server. + # + # Any additional configuration options for the custom secrets store + # implementation should be passed through the `environment` and the + # `secretEnvironment` variables and using the `ZENML_SECRETS_STORE_` + # environment variable naming convention. 
For example, if the custom + # secrets store implementation expects an `aws_access_key_id` option, you + # should set the `ZENML_SECRETS_STORE_AWS_ACCESS_KEY_ID` environment + # variable in the `zenml.secretEnvironment` variable. + class_path: my.custom.secrets.store.MyCustomSecretsStore + + # Extra environment variables to set in the ZenML server container. + environment: {} + + # Extra environment variables to set in the ZenML server container that + # should be kept secret. These will be set as Kubernetes secrets and + # mounted as environment variables in the ZenML server container. + secretEnvironment: {} + + service: + type: LoadBalancer # changed from ClusterIP + port: 80 + + ingress: + enabled: false # changed from true + className: "nginx" + annotations: + nginx.ingress.kubernetes.io/ssl-redirect: "true" + # nginx.ingress.kubernetes.io/rewrite-target: /$1 + # kubernetes.io/ingress.class: nginx + # kubernetes.io/tls-acme: "true" + # cert-manager.io/cluster-issuer: "letsencrypt" + + # hint: you can use a service like nip.io to get a wildcard DNS for your + # ingress IP address. For example, if your ingress IP is 192.168.0.1, you + # can use a host name like zenml.192.168.0.1.nip.io. This allows you to + # reuse the same ingress for multiple deployments and/or services. + host: + path: / + tls: + enabled: false + # NOTE: if enabled, this will generate self-signed certificates during + # installation. This also requires that the ingress host be set to the + # domain name or IP address used to access the ZenML server from outside + # the cluster. + generateCerts: false + secretName: zenml-tls-certs + + +serviceAccount: + # Specifies whether a service account should be created + create: true + # Annotations to add to the service account + annotations: {} + # The name of the service account to use. 
+ # If not set and create is true, a name is generated using the fullname template + name: "zenml" + +podAnnotations: {} + +podSecurityContext: {} + # fsGroup: 2000 + +securityContext: + runAsNonRoot: true + runAsUser: 1000 + # capabilities: + # drop: + # - ALL + +resources: {} + # We usually recommend not to specify default resources and to leave this as a conscious + # choice for the user. This also increases chances charts run on environments with little + # resources, such as Minikube. If you do want to specify resources, uncomment the following + # lines, adjust them as necessary, and remove the curly braces after 'resources:'. + # limits: + # cpu: 100m + # memory: 128Mi + # requests: + # cpu: 100m + # memory: 128Mi + +autoscaling: + enabled: false + minReplicas: 1 + maxReplicas: 100 + targetCPUUtilizationPercentage: 80 + # targetMemoryUtilizationPercentage: 80 + +nodeSelector: {} + +tolerations: [] + +affinity: {} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zenml_namespace.tf b/src/matcha_ml/infrastructure/modules/zenml/zenml_namespace.tf new file mode 100644 index 00000000..c0705a8a --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zenml_namespace.tf @@ -0,0 +1,5 @@ +resource "kubernetes_namespace" "k8s_ns" { + metadata { + name = "zenml" + } +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/README.md b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/README.md new file mode 100644 index 00000000..3787ccea --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/README.md @@ -0,0 +1,45 @@ +## Requirements + +No requirements. + +## Providers + +| Name | Version | +|------|---------| +| [azurerm](#provider\_azurerm) | n/a | + +## Modules + +No modules. 
+ +## Resources + +| Name | Type | +|------|------| +| [azurerm_role_assignment.zenmlstorage](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/role_assignment) | resource | +| [azurerm_storage_account.zenmlaccount](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_account) | resource | +| [azurerm_storage_container.zenmlstoragecontainer](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/storage_container) | resource | +| [azurerm_storage_account.zenmlaccount](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/data-sources/storage_account) | data source | + +## Inputs + +| Name | Description | Type | Default | Required | +|------|-------------|------|---------|:--------:| +| [aks\_principal\_id](#input\_aks\_principal\_id) | Principal id for aks cluster | `string` | n/a | yes | +| [location](#input\_location) | The Azure Region in which this resources should be created. | `string` | n/a | yes | +| [prefix](#input\_prefix) | A prefix used for all resources | `string` | n/a | yes | +| [resource\_group\_name](#input\_resource\_group\_name) | The resource group name which is used to create the resource group | `string` | n/a | yes | + +## Outputs + +| Name | Description | +|------|-------------| +| [zenml\_blobstorage\_container\_path](#output\_zenml\_blobstorage\_container\_path) | The Azure Blob Storage Container path for storing zenml artifacts | +| [zenml\_primary\_access\_key](#output\_zenml\_primary\_access\_key) | ZenML Azure Storage Account - Primary access key | +| [zenml\_primary\_blob\_connection\_string](#output\_zenml\_primary\_blob\_connection\_string) | ZenML Azure Storage Account - Primary Blob service connection string | +| [zenml\_primary\_connection\_string](#output\_zenml\_primary\_connection\_string) | ZenML Azure Storage Account - Primary connection string | +| [zenml\_secondary\_access\_key](#output\_zenml\_secondary\_access\_key) 
| ZenML Azure Storage Account - Secondary access key | +| [zenml\_secondary\_blob\_connection\_string](#output\_zenml\_secondary\_blob\_connection\_string) | ZenML Azure Storage Account - Secondary Blob service connection string | +| [zenml\_secondary\_connection\_string](#output\_zenml\_secondary\_connection\_string) | ZenML Azure Storage Account - Secondary connection string | +| [zenml\_storage\_account\_name](#output\_zenml\_storage\_account\_name) | The name of the Azure Storage Account used to store ZenML artifacts. | +| [zenml\_storage\_container\_name](#output\_zenml\_storage\_container\_name) | The name of the Azure Storage container used to store ZenML artifacts. | diff --git a/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/main.tf b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/main.tf new file mode 100644 index 00000000..8a37cb74 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/main.tf @@ -0,0 +1,31 @@ +# Reference: https://github.com/hashicorp/terraform-provider-azurerm/tree/main/examples/storage/storage-container + +# create a storage account +resource "azurerm_storage_account" "zenmlaccount" { + name = "${var.prefix}zenmlacc" + resource_group_name = var.resource_group_name + location = var.location + + account_tier = "Standard" + account_kind = "StorageV2" + account_replication_type = "LRS" +} + +# create a storage container inside created storage account +resource "azurerm_storage_container" "zenmlstoragecontainer" { + name = "${var.prefix}artifactstore" + storage_account_name = azurerm_storage_account.zenmlaccount.name + container_access_type = "private" +} + + +data "azurerm_storage_account" "zenmlaccount" { + name = azurerm_storage_account.zenmlaccount.name + resource_group_name = var.resource_group_name +} + +resource "azurerm_role_assignment" "zenmlstorage" { + scope = azurerm_storage_account.zenmlaccount.id + role_definition_name = "Contributor" + principal_id = var.aks_principal_id +} diff 
--git a/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/output.tf b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/output.tf new file mode 100644 index 00000000..84226fbf --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/output.tf @@ -0,0 +1,50 @@ +output "zenml_storage_container_name" { + description = "The name of the Azure Storage container used to store ZenML artifacts." + value = azurerm_storage_container.zenmlstoragecontainer.name +} + +output "zenml_blobstorage_container_path" { + description = "The Azure Blob Storage Container path for storing zenml artifacts" + value = "az://${azurerm_storage_container.zenmlstoragecontainer.name}" +} + +output "zenml_storage_account_name" { + description = "The name of the Azure Storage Account used to store ZenML artifacts." + value = azurerm_storage_account.zenmlaccount.name +} + +output "zenml_primary_access_key" { + description = "ZenML Azure Storage Account - Primary access key" + value = azurerm_storage_account.zenmlaccount.primary_access_key + sensitive = true +} + +output "zenml_secondary_access_key" { + description = "ZenML Azure Storage Account - Secondary access key" + value = azurerm_storage_account.zenmlaccount.secondary_access_key + sensitive = true +} + +output "zenml_primary_connection_string" { + description = "ZenML Azure Storage Account - Primary connection string" + value = azurerm_storage_account.zenmlaccount.primary_connection_string + sensitive = true +} + +output "zenml_secondary_connection_string" { + description = "ZenML Azure Storage Account - Secondary connection string" + value = azurerm_storage_account.zenmlaccount.secondary_connection_string + sensitive = true +} + +output "zenml_primary_blob_connection_string" { + description = "ZenML Azure Storage Account - Primary Blob service connection string" + value = azurerm_storage_account.zenmlaccount.primary_blob_connection_string + sensitive = true +} + +output "zenml_secondary_blob_connection_string" 
{ + description = "ZenML Azure Storage Account - Secondary Blob service connection string" + value = azurerm_storage_account.zenmlaccount.secondary_blob_connection_string + sensitive = true +} diff --git a/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/variables.tf b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/variables.tf new file mode 100644 index 00000000..c41ea739 --- /dev/null +++ b/src/matcha_ml/infrastructure/modules/zenml/zenml_storage/variables.tf @@ -0,0 +1,19 @@ +variable "prefix" { + description = "A prefix used for all resources" + type = string +} + +variable "resource_group_name" { + description = "The resource group name which is used to create the resource group" + type = string +} + +variable "location" { + description = "The Azure Region in which this resources should be created." + type = string +} + +variable "aks_principal_id" { + description = "Principal id for aks cluster" + type = string +} From b940f47818f9790903ebe0576e40760190f131b9 Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Tue, 29 Aug 2023 14:32:15 +0100 Subject: [PATCH 06/10] [RPD-306] Add core stack add command (#208) * Add initial add function * Remove changes to stack remove command * Fix tests * Add core add method test * Add edge case tests and update stack name to custom on matcha stack add command * Use Typer for error handling and add test for existing state * Update to newer version of typer --- poetry.lock | 787 +++++++++++++++--------- pyproject.toml | 4 +- src/matcha_ml/cli/cli.py | 38 +- src/matcha_ml/config/matcha_config.py | 1 + src/matcha_ml/constants.py | 16 + src/matcha_ml/core/_validation.py | 45 -- src/matcha_ml/core/core.py | 32 +- tests/test_cli/test_stack.py | 13 +- tests/test_core/test_core.py | 117 +++- tests/test_core/test_core_validation.py | 16 - 10 files changed, 704 insertions(+), 365 deletions(-) diff --git a/poetry.lock b/poetry.lock index 10e4cfc6..3f55b940 100644 --- 
a/poetry.lock +++ b/poetry.lock @@ -13,32 +13,32 @@ files = [ [[package]] name = "azure-core" -version = "1.28.0" +version = "1.29.3" description = "Microsoft Azure Core Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-core-1.28.0.zip", hash = "sha256:e9eefc66fc1fde56dab6f04d4e5d12c60754d5a9fa49bdcfd8534fc96ed936bd"}, - {file = "azure_core-1.28.0-py3-none-any.whl", hash = "sha256:dec36dfc8eb0b052a853f30c07437effec2f9e3e1fc8f703d9bdaa5cfc0043d9"}, + {file = "azure-core-1.29.3.tar.gz", hash = "sha256:c92700af982e71c8c73de9f4c20da8b3f03ce2c22d13066e4d416b4629c87903"}, + {file = "azure_core-1.29.3-py3-none-any.whl", hash = "sha256:f8b2910f92b66293d93bd00564924ad20ad48f4a1e150577cf18d1e7d4f9263c"}, ] [package.dependencies] requests = ">=2.18.4" six = ">=1.11.0" -typing-extensions = ">=4.3.0" +typing-extensions = ">=4.6.0" [package.extras] aio = ["aiohttp (>=3.0)"] [[package]] name = "azure-identity" -version = "1.13.0" +version = "1.14.0" description = "Microsoft Azure Identity Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-identity-1.13.0.zip", hash = "sha256:c931c27301ffa86b07b4dcf574e29da73e3deba9ab5d1fe4f445bb6a3117e260"}, - {file = "azure_identity-1.13.0-py3-none-any.whl", hash = "sha256:bd700cebb80cd9862098587c29d8677e819beca33c62568ced6d5a8e5e332b82"}, + {file = "azure-identity-1.14.0.zip", hash = "sha256:72441799f8c5c89bfe21026965e266672a7c5d050c2c65119ef899dd5362e2b1"}, + {file = "azure_identity-1.14.0-py3-none-any.whl", hash = "sha256:edabf0e010eb85760e1dd19424d5e8f97ba2c9caff73a16e7b30ccbdbcce369b"}, ] [package.dependencies] @@ -46,7 +46,6 @@ azure-core = ">=1.11.0,<2.0.0" cryptography = ">=2.5" msal = ">=1.20.0,<2.0.0" msal-extensions = ">=0.3.0,<2.0.0" -six = ">=1.12.0" [[package]] name = "azure-mgmt-authorization" @@ -112,19 +111,19 @@ isodate = ">=0.6.1,<1.0.0" [[package]] name = "azure-mgmt-storage" -version = "21.0.0" +version = "21.1.0" description = "Microsoft Azure 
Storage Management Client Library for Python" optional = false python-versions = ">=3.7" files = [ - {file = "azure-mgmt-storage-21.0.0.zip", hash = "sha256:6eb13eeecf89195b2b5f47be0679e3f27888efd7bd2132eec7ebcbce75cb1377"}, - {file = "azure_mgmt_storage-21.0.0-py3-none-any.whl", hash = "sha256:89d644c6192118b0b097deaa9c4925832d8f7ea4693d38d5fce3f0125b43a1c5"}, + {file = "azure-mgmt-storage-21.1.0.tar.gz", hash = "sha256:d6d3c0e917c988bc9ed0472477d3ef3f90886009eb1d97a711944f8375630162"}, + {file = "azure_mgmt_storage-21.1.0-py3-none-any.whl", hash = "sha256:593f2544fc4f05750c4fe7ca4d83c32ea1e9d266e57899bbf79ce5940124e8cc"}, ] [package.dependencies] azure-common = ">=1.1,<2.0" azure-mgmt-core = ">=1.3.2,<2.0.0" -msrest = ">=0.7.1" +isodate = ">=0.6.1,<1.0.0" [[package]] name = "azure-mgmt-subscription" @@ -162,6 +161,20 @@ typing-extensions = ">=4.3.0" [package.extras] aio = ["azure-core[aio] (>=1.28.0,<2.0.0)"] +[[package]] +name = "babel" +version = "2.12.1" +description = "Internationalization utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "Babel-2.12.1-py3-none-any.whl", hash = "sha256:b4246fb7677d3b98f501a39d43396d3cafdc8eadb045f4a31be01863f655c610"}, + {file = "Babel-2.12.1.tar.gz", hash = "sha256:cc2d99999cd01d44420ae725a21c9e3711b3aadc7976d6147f622d8581963455"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + [[package]] name = "backoff" version = "2.2.1" @@ -173,6 +186,24 @@ files = [ {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] +[[package]] +name = "beautifulsoup4" +version = "4.12.2" +description = "Screen-scraping library" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, + {file = "beautifulsoup4-4.12.2.tar.gz", hash = 
"sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + [[package]] name = "certifi" version = "2023.7.22" @@ -262,13 +293,13 @@ pycparser = "*" [[package]] name = "cfgv" -version = "3.3.1" +version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.8" files = [ - {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, - {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] [[package]] @@ -357,13 +388,13 @@ files = [ [[package]] name = "click" -version = "8.1.6" +version = "8.1.7" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" files = [ - {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, - {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [package.dependencies] @@ -380,87 +411,65 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] -[[package]] -name = "commonmark" -version = "0.9.1" -description = "Python parser for the CommonMark Markdown spec" -optional = 
false -python-versions = "*" -files = [ - {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, - {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, -] - -[package.extras] -test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] - [[package]] name = "coverage" -version = "7.2.7" +version = "7.3.0" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, - {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, - {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, - {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, - {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = 
"sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, - {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, - {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, - {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, - {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, - {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, - {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, - {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, - {file = 
"coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, - {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, - {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, - {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, - {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, - {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, - {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, - {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, - {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, - {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, - {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, - {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, - {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, - {file = 
"coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, - {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, - {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, - {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, - {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, - {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, - {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, - {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, - {file = 
"coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, - {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:db76a1bcb51f02b2007adacbed4c88b6dee75342c37b05d1822815eed19edee5"}, + {file = "coverage-7.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c02cfa6c36144ab334d556989406837336c1d05215a9bdf44c0bc1d1ac1cb637"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:477c9430ad5d1b80b07f3c12f7120eef40bfbf849e9e7859e53b9c93b922d2af"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce2ee86ca75f9f96072295c5ebb4ef2a43cecf2870b0ca5e7a1cbdd929cf67e1"}, + {file = "coverage-7.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68d8a0426b49c053013e631c0cdc09b952d857efa8f68121746b339912d27a12"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b3eb0c93e2ea6445b2173da48cb548364f8f65bf68f3d090404080d338e3a689"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:90b6e2f0f66750c5a1178ffa9370dec6c508a8ca5265c42fbad3ccac210a7977"}, + {file = "coverage-7.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96d7d761aea65b291a98c84e1250cd57b5b51726821a6f2f8df65db89363be51"}, + {file = "coverage-7.3.0-cp310-cp310-win32.whl", hash = "sha256:63c5b8ecbc3b3d5eb3a9d873dec60afc0cd5ff9d9f1c75981d8c31cfe4df8527"}, + {file = "coverage-7.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:97c44f4ee13bce914272589b6b41165bbb650e48fdb7bd5493a38bde8de730a1"}, + {file = "coverage-7.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74c160285f2dfe0acf0f72d425f3e970b21b6de04157fc65adc9fd07ee44177f"}, + {file = 
"coverage-7.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b543302a3707245d454fc49b8ecd2c2d5982b50eb63f3535244fd79a4be0c99d"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad0f87826c4ebd3ef484502e79b39614e9c03a5d1510cfb623f4a4a051edc6fd"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13c6cbbd5f31211d8fdb477f0f7b03438591bdd077054076eec362cf2207b4a7"}, + {file = "coverage-7.3.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fac440c43e9b479d1241fe9d768645e7ccec3fb65dc3a5f6e90675e75c3f3e3a"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3c9834d5e3df9d2aba0275c9f67989c590e05732439b3318fa37a725dff51e74"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4c8e31cf29b60859876474034a83f59a14381af50cbe8a9dbaadbf70adc4b214"}, + {file = "coverage-7.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7a9baf8e230f9621f8e1d00c580394a0aa328fdac0df2b3f8384387c44083c0f"}, + {file = "coverage-7.3.0-cp311-cp311-win32.whl", hash = "sha256:ccc51713b5581e12f93ccb9c5e39e8b5d4b16776d584c0f5e9e4e63381356482"}, + {file = "coverage-7.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:887665f00ea4e488501ba755a0e3c2cfd6278e846ada3185f42d391ef95e7e70"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d000a739f9feed900381605a12a61f7aaced6beae832719ae0d15058a1e81c1b"}, + {file = "coverage-7.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:59777652e245bb1e300e620ce2bef0d341945842e4eb888c23a7f1d9e143c446"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9737bc49a9255d78da085fa04f628a310c2332b187cd49b958b0e494c125071"}, + {file = 
"coverage-7.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5247bab12f84a1d608213b96b8af0cbb30d090d705b6663ad794c2f2a5e5b9fe"}, + {file = "coverage-7.3.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2ac9a1de294773b9fa77447ab7e529cf4fe3910f6a0832816e5f3d538cfea9a"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:85b7335c22455ec12444cec0d600533a238d6439d8d709d545158c1208483873"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:36ce5d43a072a036f287029a55b5c6a0e9bd73db58961a273b6dc11a2c6eb9c2"}, + {file = "coverage-7.3.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:211a4576e984f96d9fce61766ffaed0115d5dab1419e4f63d6992b480c2bd60b"}, + {file = "coverage-7.3.0-cp312-cp312-win32.whl", hash = "sha256:56afbf41fa4a7b27f6635bc4289050ac3ab7951b8a821bca46f5b024500e6321"}, + {file = "coverage-7.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:7f297e0c1ae55300ff688568b04ff26b01c13dfbf4c9d2b7d0cb688ac60df479"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac0dec90e7de0087d3d95fa0533e1d2d722dcc008bc7b60e1143402a04c117c1"}, + {file = "coverage-7.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:438856d3f8f1e27f8e79b5410ae56650732a0dcfa94e756df88c7e2d24851fcd"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1084393c6bda8875c05e04fce5cfe1301a425f758eb012f010eab586f1f3905e"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49ab200acf891e3dde19e5aa4b0f35d12d8b4bd805dc0be8792270c71bd56c54"}, + {file = "coverage-7.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67e6bbe756ed458646e1ef2b0778591ed4d1fcd4b146fc3ba2feb1a7afd4254"}, + {file = 
"coverage-7.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f39c49faf5344af36042b293ce05c0d9004270d811c7080610b3e713251c9b0"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7df91fb24c2edaabec4e0eee512ff3bc6ec20eb8dccac2e77001c1fe516c0c84"}, + {file = "coverage-7.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:34f9f0763d5fa3035a315b69b428fe9c34d4fc2f615262d6be3d3bf3882fb985"}, + {file = "coverage-7.3.0-cp38-cp38-win32.whl", hash = "sha256:bac329371d4c0d456e8d5f38a9b0816b446581b5f278474e416ea0c68c47dcd9"}, + {file = "coverage-7.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b859128a093f135b556b4765658d5d2e758e1fae3e7cc2f8c10f26fe7005e543"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed8d310afe013db1eedd37176d0839dc66c96bcfcce8f6607a73ffea2d6ba"}, + {file = "coverage-7.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61260ec93f99f2c2d93d264b564ba912bec502f679793c56f678ba5251f0393"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97af9554a799bd7c58c0179cc8dbf14aa7ab50e1fd5fa73f90b9b7215874ba28"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3558e5b574d62f9c46b76120a5c7c16c4612dc2644c3d48a9f4064a705eaee95"}, + {file = "coverage-7.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37d5576d35fcb765fca05654f66aa71e2808d4237d026e64ac8b397ffa66a56a"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:07ea61bcb179f8f05ffd804d2732b09d23a1238642bf7e51dad62082b5019b34"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:80501d1b2270d7e8daf1b64b895745c3e234289e00d5f0e30923e706f110334e"}, + {file = "coverage-7.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4eddd3153d02204f22aef0825409091a91bf2a20bce06fe0f638f5c19a85de54"}, + 
{file = "coverage-7.3.0-cp39-cp39-win32.whl", hash = "sha256:2d22172f938455c156e9af2612650f26cceea47dc86ca048fa4e0b2d21646ad3"}, + {file = "coverage-7.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:60f64e2007c9144375dd0f480a54d6070f00bb1a28f65c408370544091c9bc9e"}, + {file = "coverage-7.3.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:5492a6ce3bdb15c6ad66cb68a0244854d9917478877a25671d70378bdc8562d0"}, + {file = "coverage-7.3.0.tar.gz", hash = "sha256:49dbb19cdcafc130f597d9e04a29d0a032ceedf729e41b181f51cd170e6ee865"}, ] [package.dependencies] @@ -514,6 +523,17 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "cssselect" +version = "1.2.0" +description = "cssselect parses CSS3 Selectors and translates them to XPath 1.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cssselect-1.2.0-py2.py3-none-any.whl", hash = "sha256:da1885f0c10b60c03ed5eccbb6b68d6eff248d91976fcde348f395d54c9fd35e"}, + {file = "cssselect-1.2.0.tar.gz", hash = "sha256:666b19839cfaddb9ce9d36bfe4c969132c647b92fc9088c4e23f786b30f1b3dc"}, +] + [[package]] name = "dataclasses-json" version = "0.5.9" @@ -546,13 +566,13 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.1.2" +version = "1.1.3" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, - {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, ] [package.extras] @@ -560,18 +580,21 @@ 
test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.3" description = "A platform independent file lock." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""} + [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] [[package]] name = "ghp-import" @@ -592,13 +615,13 @@ dev = ["flake8", "markdown", "twine", "wheel"] [[package]] name = "griffe" -version = "0.32.3" +version = "0.35.2" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." 
optional = false python-versions = ">=3.8" files = [ - {file = "griffe-0.32.3-py3-none-any.whl", hash = "sha256:d9471934225818bf8f309822f70451cc6abb4b24e59e0bb27402a45f9412510f"}, - {file = "griffe-0.32.3.tar.gz", hash = "sha256:14983896ad581f59d5ad7b6c9261ff12bdaa905acccc1129341d13e545da8521"}, + {file = "griffe-0.35.2-py3-none-any.whl", hash = "sha256:9650d6d0369c22f29f2c1bec9548ddc7f448f8ca38698a5799f92f736824e749"}, + {file = "griffe-0.35.2.tar.gz", hash = "sha256:84ecfe3df17454993b8dd485201566609ac6706a2eb22e3f402da2a39f9f6b5f"}, ] [package.dependencies] @@ -606,13 +629,13 @@ colorama = ">=0.4" [[package]] name = "identify" -version = "2.5.26" +version = "2.5.27" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.26-py2.py3-none-any.whl", hash = "sha256:c22a8ead0d4ca11f1edd6c9418c3220669b3b7533ada0a0ffa6cc0ef85cf9b54"}, - {file = "identify-2.5.26.tar.gz", hash = "sha256:7243800bce2f58404ed41b7c002e53d4d22bcf3ae1b7900c2d7aefd95394bf7f"}, + {file = "identify-2.5.27-py2.py3-none-any.whl", hash = "sha256:fdb527b2dfe24602809b2201e033c2a113d7bdf716db3ca8e3243f735dcecaba"}, + {file = "identify-2.5.27.tar.gz", hash = "sha256:287b75b04a0e22d727bc9a41f0d4f3c1bcada97490fa6eabb5b28f0e9097e733"}, ] [package.extras] @@ -690,6 +713,113 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "lxml" +version = "4.9.3" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" +files = [ + {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, + {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, + {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, + {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, + {file = 
"lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, + {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1247694b26342a7bf47c02e513d32225ededd18045264d40758abeb3c838a51f"}, + {file = "lxml-4.9.3-cp310-cp310-win32.whl", hash = "sha256:cdb650fc86227eba20de1a29d4b2c1bfe139dc75a0669270033cb2ea3d391b85"}, + {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, + {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, + {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9e28c51fa0ce5674be9f560c6761c1b441631901993f76700b1b30ca6c8378d6"}, + {file = "lxml-4.9.3-cp311-cp311-win32.whl", hash = "sha256:0bfd0767c5c1de2551a120673b72e5d4b628737cb05414f03c3277bf9bed3305"}, + {file = "lxml-4.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:25f32acefac14ef7bd53e4218fe93b804ef6f6b92ffdb4322bb6d49d94cad2bc"}, + {file = "lxml-4.9.3-cp312-cp312-macosx_11_0_universal2.whl", hash = 
"sha256:d3ff32724f98fbbbfa9f49d82852b159e9784d6094983d9a8b7f2ddaebb063d4"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:48d6ed886b343d11493129e019da91d4039826794a3e3027321c56d9e71505be"}, + {file = "lxml-4.9.3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9a92d3faef50658dd2c5470af249985782bf754c4e18e15afb67d3ab06233f13"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b4e4bc18382088514ebde9328da057775055940a1f2e18f6ad2d78aa0f3ec5b9"}, + {file = "lxml-4.9.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fc9b106a1bf918db68619fdcd6d5ad4f972fdd19c01d19bdb6bf63f3589a9ec5"}, + {file = "lxml-4.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d37017287a7adb6ab77e1c5bee9bcf9660f90ff445042b790402a654d2ad81d8"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:56dc1f1ebccc656d1b3ed288f11e27172a01503fc016bcabdcbc0978b19352b7"}, + {file = "lxml-4.9.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:578695735c5a3f51569810dfebd05dd6f888147a34f0f98d4bb27e92b76e05c2"}, + {file = "lxml-4.9.3-cp35-cp35m-win32.whl", hash = "sha256:704f61ba8c1283c71b16135caf697557f5ecf3e74d9e453233e4771d68a1f42d"}, + {file = "lxml-4.9.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c41bfca0bd3532d53d16fd34d20806d5c2b1ace22a2f2e4c0008570bf2c58833"}, + {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:0c0850c8b02c298d3c7006b23e98249515ac57430e16a166873fc47a5d549287"}, + {file = "lxml-4.9.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aca086dc5f9ef98c512bac8efea4483eb84abbf926eaeedf7b91479feb092458"}, + {file = "lxml-4.9.3-cp36-cp36m-win32.whl", hash = "sha256:50baa9c1c47efcaef189f31e3d00d697c6d4afda5c3cde0302d063492ff9b477"}, + {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, + {file = 
"lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:081d32421db5df44c41b7f08a334a090a545c54ba977e47fd7cc2deece78809a"}, + {file = "lxml-4.9.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:23eed6d7b1a3336ad92d8e39d4bfe09073c31bfe502f20ca5116b2a334f8ec02"}, + {file = "lxml-4.9.3-cp37-cp37m-win32.whl", hash = "sha256:1509dd12b773c02acd154582088820893109f6ca27ef7291b003d0e81666109f"}, + {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3e9bdd30efde2b9ccfa9cb5768ba04fe71b018a25ea093379c857c9dad262c40"}, + {file = "lxml-4.9.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fcdd00edfd0a3001e0181eab3e63bd5c74ad3e67152c84f93f13769a40e073a7"}, + 
{file = "lxml-4.9.3-cp38-cp38-win32.whl", hash = "sha256:57aba1bbdf450b726d58b2aea5fe47c7875f5afb2c4a23784ed78f19a0462574"}, + {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, + {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:b6420a005548ad52154c8ceab4a1290ff78d757f9e5cbc68f8c77089acd3c432"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bb3bb49c7a6ad9d981d734ef7c7193bc349ac338776a0360cc671eaee89bcf69"}, + {file = "lxml-4.9.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d27be7405547d1f958b60837dc4c1007da90b8b23f54ba1f8b728c78fdb19d50"}, + {file = "lxml-4.9.3-cp39-cp39-win32.whl", hash = "sha256:8df133a2ea5e74eef5e8fc6f19b9e085f758768a16e9877a60aec455ed2609b2"}, + {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, + {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, + {file = 
"lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, + {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.35)"] + [[package]] name = "markdown" version = "3.4.4" @@ -708,6 +838,46 @@ importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.0)", "mkdocs-nature (>=0.4)"] testing = ["coverage", "pyyaml"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + +[[package]] +name = "markdown2" +version = "2.4.10" +description = "A fast and complete Python implementation of Markdown" +optional = false 
+python-versions = ">=3.5, <4" +files = [ + {file = "markdown2-2.4.10-py2.py3-none-any.whl", hash = "sha256:e6105800483783831f5dc54f827aa5b44eb137ecef5a70293d8ecfbb4109ecc6"}, + {file = "markdown2-2.4.10.tar.gz", hash = "sha256:cdba126d90dc3aef6f4070ac342f974d63f415678959329cc7909f96cc235d72"}, +] + +[package.extras] +all = ["pygments (>=2.7.3)", "wavedrom"] +code-syntax-highlighting = ["pygments (>=2.7.3)"] +wavedrom = ["wavedrom"] + [[package]] name = "markupsafe" version = "2.1.3" @@ -801,6 +971,17 @@ files = [ [package.dependencies] marshmallow = ">=2.0.0" +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mergedeep" version = "1.3.4" @@ -871,23 +1052,27 @@ files = [ [[package]] name = "mkdocs-material" -version = "9.1.21" +version = "9.2.5" description = "Documentation that simply works" optional = false python-versions = ">=3.7" files = [ - {file = "mkdocs_material-9.1.21-py3-none-any.whl", hash = "sha256:58bb2f11ef240632e176d6f0f7d1cff06be1d11c696a5a1b553b808b4280ed47"}, - {file = "mkdocs_material-9.1.21.tar.gz", hash = "sha256:71940cdfca84ab296b6362889c25395b1621273fb16c93deda257adb7ff44ec8"}, + {file = "mkdocs_material-9.2.5-py3-none-any.whl", hash = "sha256:315a59725f0565bccfec7f9d1313beae7658bf874a176264b98f804a0cbc1298"}, + {file = "mkdocs_material-9.2.5.tar.gz", hash = "sha256:02b4d1f662bc022e9497411e679323c30185e031a08a7004c763aa8d47ae9a29"}, ] [package.dependencies] +babel = ">=2.10.3" colorama = ">=0.4" jinja2 = ">=3.0" +lxml = ">=4.6" markdown = ">=3.2" -mkdocs = ">=1.5.0" +mkdocs = ">=1.5.2" mkdocs-material-extensions = ">=1.1" +paginate = ">=0.5.6" pygments = ">=2.14" pymdown-extensions = 
">=9.9.1" +readtime = ">=2.0" regex = ">=2022.4.24" requests = ">=2.26" @@ -1014,37 +1199,38 @@ async = ["aiodns", "aiohttp (>=3.0)"] [[package]] name = "mypy" -version = "1.4.1" +version = "1.5.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = 
"mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = 
"mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, + {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, + {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, + {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, + {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, + {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, + {file = "mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, + {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, + {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, + {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, + {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, + {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, + {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, + {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, + {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, + {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, + {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, + {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, + {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, + {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, + {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, + {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, + {file = "mypy-1.5.1.tar.gz", hash = 
"sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, ] [package.dependencies] @@ -1055,7 +1241,6 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] reports = ["lxml"] [[package]] @@ -1110,6 +1295,16 @@ files = [ {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] +[[package]] +name = "paginate" +version = "0.5.6" +description = "Divides large result sets into pages for easier browsing" +optional = false +python-versions = "*" +files = [ + {file = "paginate-0.5.6.tar.gz", hash = "sha256:5e6007b6a9398177a7e1648d04fdd9f8c9766a1a945bceac82f1929e8c78af2d"}, +] + [[package]] name = "pathspec" version = "0.11.2" @@ -1138,13 +1333,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -1201,13 +1396,13 @@ files = [ [[package]] name = "pygments" -version = "2.15.1" +version = "2.16.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.7" files = [ - {file = "Pygments-2.15.1-py3-none-any.whl", hash = "sha256:db2db3deb4b4179f399a09054b023b6a586b76499d36965813c71aa8ed7b5fd1"}, - {file = "Pygments-2.15.1.tar.gz", hash = "sha256:8ace4d3c1dd481894b2005f560ead0f9f19ee64fe983366be1a21e171d12775c"}, + {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, + {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, ] [package.extras] @@ -1248,6 +1443,24 @@ files = [ markdown = ">=3.2" pyyaml = "*" +[[package]] +name = "pyquery" +version = "2.0.0" +description = "A jquery-like library for python" +optional = false +python-versions = "*" +files = [ + {file = "pyquery-2.0.0-py3-none-any.whl", hash = "sha256:8dfc9b4b7c5f877d619bbae74b1898d5743f6ca248cfd5d72b504dd614da312f"}, + {file = "pyquery-2.0.0.tar.gz", hash = "sha256:963e8d4e90262ff6d8dec072ea97285dc374a2f69cad7776f4082abcf6a1d8ae"}, +] + +[package.dependencies] +cssselect = ">=1.2.0" +lxml = ">=2.1" + +[package.extras] +test = ["pytest", "pytest-cov", "requests", "webob", "webtest"] + [[package]] name = "pytest" version = "7.4.0" @@ -1312,6 +1525,17 @@ files = [ {file = "python-terraform-0.10.1.tar.gz", hash = "sha256:0f4d1648c4fec61d5aa53142558ba3761c8f9ee7acce57505a1245d4dd85f5cf"}, ] +[[package]] +name = "pytz" +version = "2023.3" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, +] + [[package]] name = "pywin32" version = "306" @@ -1398,101 +1622,116 @@ files = [ [package.dependencies] pyyaml = "*" +[[package]] +name = "readtime" +version = "3.0.0" +description = 
"Calculates the time some text takes the average human to read, based on Medium's read time forumula" +optional = false +python-versions = "*" +files = [ + {file = "readtime-3.0.0.tar.gz", hash = "sha256:76c5a0d773ad49858c53b42ba3a942f62fbe20cc8c6f07875797ac7dc30963a9"}, +] + +[package.dependencies] +beautifulsoup4 = ">=4.0.1" +markdown2 = ">=2.4.3" +pyquery = ">=1.2" + [[package]] name = "regex" -version = "2023.6.3" +version = "2023.8.8" description = "Alternative regular expression module, to replace re." optional = false python-versions = ">=3.6" files = [ - {file = "regex-2023.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:824bf3ac11001849aec3fa1d69abcb67aac3e150a933963fb12bda5151fe1bfd"}, - {file = "regex-2023.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05ed27acdf4465c95826962528f9e8d41dbf9b1aa8531a387dee6ed215a3e9ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b49c764f88a79160fa64f9a7b425620e87c9f46095ef9c9920542ab2495c8bc"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8e3f1316c2293e5469f8f09dc2d76efb6c3982d3da91ba95061a7e69489a14ef"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:43e1dd9d12df9004246bacb79a0e5886b3b6071b32e41f83b0acbf293f820ee8"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4959e8bcbfda5146477d21c3a8ad81b185cd252f3d0d6e4724a5ef11c012fb06"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af4dd387354dc83a3bff67127a124c21116feb0d2ef536805c454721c5d7993d"}, - {file = "regex-2023.6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2239d95d8e243658b8dbb36b12bd10c33ad6e6933a54d36ff053713f129aa536"}, - {file = 
"regex-2023.6.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:890e5a11c97cf0d0c550eb661b937a1e45431ffa79803b942a057c4fb12a2da2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a8105e9af3b029f243ab11ad47c19b566482c150c754e4c717900a798806b222"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:25be746a8ec7bc7b082783216de8e9473803706723b3f6bef34b3d0ed03d57e2"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:3676f1dd082be28b1266c93f618ee07741b704ab7b68501a173ce7d8d0d0ca18"}, - {file = "regex-2023.6.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:10cb847aeb1728412c666ab2e2000ba6f174f25b2bdc7292e7dd71b16db07568"}, - {file = "regex-2023.6.3-cp310-cp310-win32.whl", hash = "sha256:dbbbfce33cd98f97f6bffb17801b0576e653f4fdb1d399b2ea89638bc8d08ae1"}, - {file = "regex-2023.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:c5f8037000eb21e4823aa485149f2299eb589f8d1fe4b448036d230c3f4e68e0"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c123f662be8ec5ab4ea72ea300359023a5d1df095b7ead76fedcd8babbedf969"}, - {file = "regex-2023.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9edcbad1f8a407e450fbac88d89e04e0b99a08473f666a3f3de0fd292badb6aa"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcba6dae7de533c876255317c11f3abe4907ba7d9aa15d13e3d9710d4315ec0e"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29cdd471ebf9e0f2fb3cac165efedc3c58db841d83a518b082077e612d3ee5df"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12b74fbbf6cbbf9dbce20eb9b5879469e97aeeaa874145517563cca4029db65c"}, - {file = "regex-2023.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c29ca1bd61b16b67be247be87390ef1d1ef702800f91fbd1991f5c4421ebae8"}, - {file = 
"regex-2023.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77f09bc4b55d4bf7cc5eba785d87001d6757b7c9eec237fe2af57aba1a071d9"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ea353ecb6ab5f7e7d2f4372b1e779796ebd7b37352d290096978fea83c4dba0c"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:10590510780b7541969287512d1b43f19f965c2ece6c9b1c00fc367b29d8dce7"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2fbd6236aae3b7f9d514312cdb58e6494ee1c76a9948adde6eba33eb1c4264f"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:6b2675068c8b56f6bfd5a2bda55b8accbb96c02fd563704732fd1c95e2083461"}, - {file = "regex-2023.6.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:74419d2b50ecb98360cfaa2974da8689cb3b45b9deff0dcf489c0d333bcc1477"}, - {file = "regex-2023.6.3-cp311-cp311-win32.whl", hash = "sha256:fb5ec16523dc573a4b277663a2b5a364e2099902d3944c9419a40ebd56a118f9"}, - {file = "regex-2023.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:09e4a1a6acc39294a36b7338819b10baceb227f7f7dbbea0506d419b5a1dd8af"}, - {file = "regex-2023.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0654bca0cdf28a5956c83839162692725159f4cda8d63e0911a2c0dc76166525"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:463b6a3ceb5ca952e66550a4532cef94c9a0c80dc156c4cc343041951aec1697"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87b2a5bb5e78ee0ad1de71c664d6eb536dc3947a46a69182a90f4410f5e3f7dd"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6343c6928282c1f6a9db41f5fd551662310e8774c0e5ebccb767002fcf663ca9"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b6192d5af2ccd2a38877bfef086d35e6659566a335b1492786ff254c168b1693"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74390d18c75054947e4194019077e243c06fbb62e541d8817a0fa822ea310c14"}, - {file = "regex-2023.6.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:742e19a90d9bb2f4a6cf2862b8b06dea5e09b96c9f2df1779e53432d7275331f"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8abbc5d54ea0ee80e37fef009e3cec5dafd722ed3c829126253d3e22f3846f1e"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c2b867c17a7a7ae44c43ebbeb1b5ff406b3e8d5b3e14662683e5e66e6cc868d3"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d831c2f8ff278179705ca59f7e8524069c1a989e716a1874d6d1aab6119d91d1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ee2d1a9a253b1729bb2de27d41f696ae893507c7db224436abe83ee25356f5c1"}, - {file = "regex-2023.6.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:61474f0b41fe1a80e8dfa70f70ea1e047387b7cd01c85ec88fa44f5d7561d787"}, - {file = "regex-2023.6.3-cp36-cp36m-win32.whl", hash = "sha256:0b71e63226e393b534105fcbdd8740410dc6b0854c2bfa39bbda6b0d40e59a54"}, - {file = "regex-2023.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bbb02fd4462f37060122e5acacec78e49c0fbb303c30dd49c7f493cf21fc5b27"}, - {file = "regex-2023.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b862c2b9d5ae38a68b92e215b93f98d4c5e9454fa36aae4450f61dd33ff48487"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:976d7a304b59ede34ca2921305b57356694f9e6879db323fd90a80f865d355a3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:83320a09188e0e6c39088355d423aa9d056ad57a0b6c6381b300ec1a04ec3d16"}, - {file = 
"regex-2023.6.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9427a399501818a7564f8c90eced1e9e20709ece36be701f394ada99890ea4b3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178bbc1b2ec40eaca599d13c092079bf529679bf0371c602edaa555e10b41c3"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:837328d14cde912af625d5f303ec29f7e28cdab588674897baafaf505341f2fc"}, - {file = "regex-2023.6.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d44dc13229905ae96dd2ae2dd7cebf824ee92bc52e8cf03dcead37d926da019"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d54af539295392611e7efbe94e827311eb8b29668e2b3f4cadcfe6f46df9c777"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7117d10690c38a622e54c432dfbbd3cbd92f09401d622902c32f6d377e2300ee"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bb60b503ec8a6e4e3e03a681072fa3a5adcbfa5479fa2d898ae2b4a8e24c4591"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:65ba8603753cec91c71de423a943ba506363b0e5c3fdb913ef8f9caa14b2c7e0"}, - {file = "regex-2023.6.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:271f0bdba3c70b58e6f500b205d10a36fb4b58bd06ac61381b68de66442efddb"}, - {file = "regex-2023.6.3-cp37-cp37m-win32.whl", hash = "sha256:9beb322958aaca059f34975b0df135181f2e5d7a13b84d3e0e45434749cb20f7"}, - {file = "regex-2023.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fea75c3710d4f31389eed3c02f62d0b66a9da282521075061ce875eb5300cf23"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f56fcb7ff7bf7404becdfc60b1e81a6d0561807051fd2f1860b0d0348156a07"}, - {file = "regex-2023.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:d2da3abc88711bce7557412310dfa50327d5769a31d1c894b58eb256459dc289"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a99b50300df5add73d307cf66abea093304a07eb017bce94f01e795090dea87c"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5708089ed5b40a7b2dc561e0c8baa9535b77771b64a8330b684823cfd5116036"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:687ea9d78a4b1cf82f8479cab23678aff723108df3edeac098e5b2498879f4a7"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d3850beab9f527f06ccc94b446c864059c57651b3f911fddb8d9d3ec1d1b25d"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8915cc96abeb8983cea1df3c939e3c6e1ac778340c17732eb63bb96247b91d2"}, - {file = "regex-2023.6.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:841d6e0e5663d4c7b4c8099c9997be748677d46cbf43f9f471150e560791f7ff"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9edce5281f965cf135e19840f4d93d55b3835122aa76ccacfd389e880ba4cf82"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b956231ebdc45f5b7a2e1f90f66a12be9610ce775fe1b1d50414aac1e9206c06"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:36efeba71c6539d23c4643be88295ce8c82c88bbd7c65e8a24081d2ca123da3f"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:cf67ca618b4fd34aee78740bea954d7c69fdda419eb208c2c0c7060bb822d747"}, - {file = "regex-2023.6.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b4598b1897837067a57b08147a68ac026c1e73b31ef6e36deeeb1fa60b2933c9"}, - {file = "regex-2023.6.3-cp38-cp38-win32.whl", hash = 
"sha256:f415f802fbcafed5dcc694c13b1292f07fe0befdb94aa8a52905bd115ff41e88"}, - {file = "regex-2023.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:d4f03bb71d482f979bda92e1427f3ec9b220e62a7dd337af0aa6b47bf4498f72"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ccf91346b7bd20c790310c4147eee6ed495a54ddb6737162a36ce9dbef3e4751"}, - {file = "regex-2023.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b28f5024a3a041009eb4c333863d7894d191215b39576535c6734cd88b0fcb68"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0bb18053dfcfed432cc3ac632b5e5e5c5b7e55fb3f8090e867bfd9b054dbcbf"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5bfb3004f2144a084a16ce19ca56b8ac46e6fd0651f54269fc9e230edb5e4a"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c6b48d0fa50d8f4df3daf451be7f9689c2bde1a52b1225c5926e3f54b6a9ed1"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:051da80e6eeb6e239e394ae60704d2b566aa6a7aed6f2890a7967307267a5dc6"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4c3b7fa4cdaa69268748665a1a6ff70c014d39bb69c50fda64b396c9116cf77"}, - {file = "regex-2023.6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:457b6cce21bee41ac292d6753d5e94dcbc5c9e3e3a834da285b0bde7aa4a11e9"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aad51907d74fc183033ad796dd4c2e080d1adcc4fd3c0fd4fd499f30c03011cd"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0385e73da22363778ef2324950e08b689abdf0b108a7d8decb403ad7f5191938"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:c6a57b742133830eec44d9b2290daf5cbe0a2f1d6acee1b3c7b1c7b2f3606df7"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3e5219bf9e75993d73ab3d25985c857c77e614525fac9ae02b1bebd92f7cecac"}, - {file = "regex-2023.6.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e5087a3c59eef624a4591ef9eaa6e9a8d8a94c779dade95d27c0bc24650261cd"}, - {file = "regex-2023.6.3-cp39-cp39-win32.whl", hash = "sha256:20326216cc2afe69b6e98528160b225d72f85ab080cbdf0b11528cbbaba2248f"}, - {file = "regex-2023.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:bdff5eab10e59cf26bc479f565e25ed71a7d041d1ded04ccf9aee1d9f208487a"}, - {file = "regex-2023.6.3.tar.gz", hash = "sha256:72d1a25bf36d2050ceb35b517afe13864865268dfb45910e2e17a84be6cbfeb0"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88900f521c645f784260a8d346e12a1590f79e96403971241e64c3a265c8ecdb"}, + {file = "regex-2023.8.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3611576aff55918af2697410ff0293d6071b7e00f4b09e005d614686ac4cd57c"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8a0ccc8f2698f120e9e5742f4b38dc944c38744d4bdfc427616f3a163dd9de5"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c662a4cbdd6280ee56f841f14620787215a171c4e2d1744c9528bed8f5816c96"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cf0633e4a1b667bfe0bb10b5e53fe0d5f34a6243ea2530eb342491f1adf4f739"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551ad543fa19e94943c5b2cebc54c73353ffff08228ee5f3376bd27b3d5b9800"}, + {file = "regex-2023.8.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54de2619f5ea58474f2ac211ceea6b615af2d7e4306220d4f3fe690c91988a61"}, + {file = 
"regex-2023.8.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ec4b3f0aebbbe2fc0134ee30a791af522a92ad9f164858805a77442d7d18570"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ae646c35cb9f820491760ac62c25b6d6b496757fda2d51be429e0e7b67ae0ab"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca339088839582d01654e6f83a637a4b8194d0960477b9769d2ff2cfa0fa36d2"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d9b6627408021452dcd0d2cdf8da0534e19d93d070bfa8b6b4176f99711e7f90"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:bd3366aceedf274f765a3a4bc95d6cd97b130d1dda524d8f25225d14123c01db"}, + {file = "regex-2023.8.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7aed90a72fc3654fba9bc4b7f851571dcc368120432ad68b226bd593f3f6c0b7"}, + {file = "regex-2023.8.8-cp310-cp310-win32.whl", hash = "sha256:80b80b889cb767cc47f31d2b2f3dec2db8126fbcd0cff31b3925b4dc6609dcdb"}, + {file = "regex-2023.8.8-cp310-cp310-win_amd64.whl", hash = "sha256:b82edc98d107cbc7357da7a5a695901b47d6eb0420e587256ba3ad24b80b7d0b"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1e7d84d64c84ad97bf06f3c8cb5e48941f135ace28f450d86af6b6512f1c9a71"}, + {file = "regex-2023.8.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ce0f9fbe7d295f9922c0424a3637b88c6c472b75eafeaff6f910494a1fa719ef"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06c57e14ac723b04458df5956cfb7e2d9caa6e9d353c0b4c7d5d54fcb1325c46"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e7a9aaa5a1267125eef22cef3b63484c3241aaec6f48949b366d26c7250e0357"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:9b7408511fca48a82a119d78a77c2f5eb1b22fe88b0d2450ed0756d194fe7a9a"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14dc6f2d88192a67d708341f3085df6a4f5a0c7b03dec08d763ca2cd86e9f559"}, + {file = "regex-2023.8.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48c640b99213643d141550326f34f0502fedb1798adb3c9eb79650b1ecb2f177"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0085da0f6c6393428bf0d9c08d8b1874d805bb55e17cb1dfa5ddb7cfb11140bf"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:964b16dcc10c79a4a2be9f1273fcc2684a9eedb3906439720598029a797b46e6"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7ce606c14bb195b0e5108544b540e2c5faed6843367e4ab3deb5c6aa5e681208"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:40f029d73b10fac448c73d6eb33d57b34607f40116e9f6e9f0d32e9229b147d7"}, + {file = "regex-2023.8.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3b8e6ea6be6d64104d8e9afc34c151926f8182f84e7ac290a93925c0db004bfd"}, + {file = "regex-2023.8.8-cp311-cp311-win32.whl", hash = "sha256:942f8b1f3b223638b02df7df79140646c03938d488fbfb771824f3d05fc083a8"}, + {file = "regex-2023.8.8-cp311-cp311-win_amd64.whl", hash = "sha256:51d8ea2a3a1a8fe4f67de21b8b93757005213e8ac3917567872f2865185fa7fb"}, + {file = "regex-2023.8.8-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e951d1a8e9963ea51efd7f150450803e3b95db5939f994ad3d5edac2b6f6e2b4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:704f63b774218207b8ccc6c47fcef5340741e5d839d11d606f70af93ee78e4d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22283c769a7b01c8ac355d5be0715bf6929b6267619505e289f792b01304d898"}, + {file = 
"regex-2023.8.8-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91129ff1bb0619bc1f4ad19485718cc623a2dc433dff95baadbf89405c7f6b57"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de35342190deb7b866ad6ba5cbcccb2d22c0487ee0cbb251efef0843d705f0d4"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b993b6f524d1e274a5062488a43e3f9f8764ee9745ccd8e8193df743dbe5ee61"}, + {file = "regex-2023.8.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3026cbcf11d79095a32d9a13bbc572a458727bd5b1ca332df4a79faecd45281c"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:293352710172239bf579c90a9864d0df57340b6fd21272345222fb6371bf82b3"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d909b5a3fff619dc7e48b6b1bedc2f30ec43033ba7af32f936c10839e81b9217"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3d370ff652323c5307d9c8e4c62efd1956fb08051b0e9210212bc51168b4ff56"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b076da1ed19dc37788f6a934c60adf97bd02c7eea461b73730513921a85d4235"}, + {file = "regex-2023.8.8-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:e9941a4ada58f6218694f382e43fdd256e97615db9da135e77359da257a7168b"}, + {file = "regex-2023.8.8-cp36-cp36m-win32.whl", hash = "sha256:a8c65c17aed7e15a0c824cdc63a6b104dfc530f6fa8cb6ac51c437af52b481c7"}, + {file = "regex-2023.8.8-cp36-cp36m-win_amd64.whl", hash = "sha256:aadf28046e77a72f30dcc1ab185639e8de7f4104b8cb5c6dfa5d8ed860e57236"}, + {file = "regex-2023.8.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:423adfa872b4908843ac3e7a30f957f5d5282944b81ca0a3b8a7ccbbfaa06103"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4ae594c66f4a7e1ea67232a0846649a7c94c188d6c071ac0210c3e86a5f92109"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e51c80c168074faa793685656c38eb7a06cbad7774c8cbc3ea05552d615393d8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09b7f4c66aa9d1522b06e31a54f15581c37286237208df1345108fcf4e050c18"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e73e5243af12d9cd6a9d6a45a43570dbe2e5b1cdfc862f5ae2b031e44dd95a8"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:941460db8fe3bd613db52f05259c9336f5a47ccae7d7def44cc277184030a116"}, + {file = "regex-2023.8.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f0ccf3e01afeb412a1a9993049cb160d0352dba635bbca7762b2dc722aa5742a"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2e9216e0d2cdce7dbc9be48cb3eacb962740a09b011a116fd7af8c832ab116ca"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:5cd9cd7170459b9223c5e592ac036e0704bee765706445c353d96f2890e816c8"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4873ef92e03a4309b3ccd8281454801b291b689f6ad45ef8c3658b6fa761d7ac"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:239c3c2a339d3b3ddd51c2daef10874410917cd2b998f043c13e2084cb191684"}, + {file = "regex-2023.8.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:1005c60ed7037be0d9dea1f9c53cc42f836188227366370867222bda4c3c6bd7"}, + {file = "regex-2023.8.8-cp37-cp37m-win32.whl", hash = "sha256:e6bd1e9b95bc5614a7a9c9c44fde9539cba1c823b43a9f7bc11266446dd568e3"}, + {file = "regex-2023.8.8-cp37-cp37m-win_amd64.whl", hash = "sha256:9a96edd79661e93327cfeac4edec72a4046e14550a1d22aa0dd2e3ca52aec921"}, + 
{file = "regex-2023.8.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2181c20ef18747d5f4a7ea513e09ea03bdd50884a11ce46066bb90fe4213675"}, + {file = "regex-2023.8.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a2ad5add903eb7cdde2b7c64aaca405f3957ab34f16594d2b78d53b8b1a6a7d6"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9233ac249b354c54146e392e8a451e465dd2d967fc773690811d3a8c240ac601"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:920974009fb37b20d32afcdf0227a2e707eb83fe418713f7a8b7de038b870d0b"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd2b6c5dfe0929b6c23dde9624483380b170b6e34ed79054ad131b20203a1a63"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96979d753b1dc3b2169003e1854dc67bfc86edf93c01e84757927f810b8c3c93"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ae54a338191e1356253e7883d9d19f8679b6143703086245fb14d1f20196be9"}, + {file = "regex-2023.8.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2162ae2eb8b079622176a81b65d486ba50b888271302190870b8cc488587d280"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c884d1a59e69e03b93cf0dfee8794c63d7de0ee8f7ffb76e5f75be8131b6400a"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf9273e96f3ee2ac89ffcb17627a78f78e7516b08f94dc435844ae72576a276e"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:83215147121e15d5f3a45d99abeed9cf1fe16869d5c233b08c56cdf75f43a504"}, + {file = "regex-2023.8.8-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3f7454aa427b8ab9101f3787eb178057c5250478e39b99540cfc2b889c7d0586"}, + {file = 
"regex-2023.8.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0640913d2c1044d97e30d7c41728195fc37e54d190c5385eacb52115127b882"}, + {file = "regex-2023.8.8-cp38-cp38-win32.whl", hash = "sha256:0c59122ceccb905a941fb23b087b8eafc5290bf983ebcb14d2301febcbe199c7"}, + {file = "regex-2023.8.8-cp38-cp38-win_amd64.whl", hash = "sha256:c12f6f67495ea05c3d542d119d270007090bad5b843f642d418eb601ec0fa7be"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:82cd0a69cd28f6cc3789cc6adeb1027f79526b1ab50b1f6062bbc3a0ccb2dbc3"}, + {file = "regex-2023.8.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bb34d1605f96a245fc39790a117ac1bac8de84ab7691637b26ab2c5efb8f228c"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:987b9ac04d0b38ef4f89fbc035e84a7efad9cdd5f1e29024f9289182c8d99e09"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dd6082f4e2aec9b6a0927202c85bc1b09dcab113f97265127c1dc20e2e32495"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7eb95fe8222932c10d4436e7a6f7c99991e3fdd9f36c949eff16a69246dee2dc"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7098c524ba9f20717a56a8d551d2ed491ea89cbf37e540759ed3b776a4f8d6eb"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b694430b3f00eb02c594ff5a16db30e054c1b9589a043fe9174584c6efa8033"}, + {file = "regex-2023.8.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b2aeab3895d778155054abea5238d0eb9a72e9242bd4b43f42fd911ef9a13470"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:988631b9d78b546e284478c2ec15c8a85960e262e247b35ca5eaf7ee22f6050a"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:67ecd894e56a0c6108ec5ab1d8fa8418ec0cff45844a855966b875d1039a2e34"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:14898830f0a0eb67cae2bbbc787c1a7d6e34ecc06fbd39d3af5fe29a4468e2c9"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:f2200e00b62568cfd920127782c61bc1c546062a879cdc741cfcc6976668dfcf"}, + {file = "regex-2023.8.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9691a549c19c22d26a4f3b948071e93517bdf86e41b81d8c6ac8a964bb71e5a6"}, + {file = "regex-2023.8.8-cp39-cp39-win32.whl", hash = "sha256:6ab2ed84bf0137927846b37e882745a827458689eb969028af8032b1b3dac78e"}, + {file = "regex-2023.8.8-cp39-cp39-win_amd64.whl", hash = "sha256:5543c055d8ec7801901e1193a51570643d6a6ab8751b1f7dd9af71af467538bb"}, + {file = "regex-2023.8.8.tar.gz", hash = "sha256:fcbdc5f2b0f1cd0f6a56cdb46fe41d2cce1e644e3b68832f3eeebc5fb0f7712e"}, ] [[package]] @@ -1536,22 +1775,22 @@ rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rich" -version = "12.6.0" +version = "13.5.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.6.3,<4.0.0" +python-versions = ">=3.7.0" files = [ - {file = "rich-12.6.0-py3-none-any.whl", hash = "sha256:a4eb26484f2c82589bd9a17c73d32a010b1e29d89f1604cd9bf3a2097b81bb5e"}, - {file = "rich-12.6.0.tar.gz", hash = "sha256:ba3a3775974105c221d31141f2c116f4fd65c5ceb0698657a11e9f295ec93fd0"}, + {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, + {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, ] [package.dependencies] -commonmark = ">=0.9.0,<0.10.0" -pygments = ">=2.6.0,<3.0.0" +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} [package.extras] -jupyter = ["ipywidgets 
(>=7.5.1,<8.0.0)"] +jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "segment-analytics-python" @@ -1575,31 +1814,20 @@ test = ["flake8 (==3.7.9)", "mock (==2.0.0)", "pylint (==2.8.0)"] [[package]] name = "setuptools" -version = "68.0.0" +version = "68.1.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, - {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, + {file = "setuptools-68.1.2-py3-none-any.whl", hash = "sha256:3d8083eed2d13afc9426f227b24fd1659489ec107c0e86cec2ffdde5c92e790b"}, + {file = "setuptools-68.1.2.tar.gz", hash = "sha256:3d4dfa6d95f1b101d695a6160a7626e15583af71a5f52176efa5d39a054d475d"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5,<=7.1.2)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] -[[package]] -name = "shellingham" -version = "1.5.0.post1" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -files = [ - {file = "shellingham-1.5.0.post1-py2.py3-none-any.whl", hash = "sha256:368bf8c00754fd4f55afb7bbb86e272df77e4dc76ac29dbcbb81a59e9fc15744"}, - {file = "shellingham-1.5.0.post1.tar.gz", hash = "sha256:823bc5fb5c34d60f285b624e7264f4dda254bc803a3774a147bf99c0e3004a28"}, -] - [[package]] name = "six" version = "1.16.0" @@ -1611,6 +1839,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -1624,26 +1863,24 @@ files = [ [[package]] name = "typer" -version = "0.7.0" +version = "0.9.0" description = "Typer, build great CLIs. Easy to code. Based on Python type hints." 
optional = false python-versions = ">=3.6" files = [ - {file = "typer-0.7.0-py3-none-any.whl", hash = "sha256:b5e704f4e48ec263de1c0b3a2387cd405a13767d2f907f44c1a08cbad96f606d"}, - {file = "typer-0.7.0.tar.gz", hash = "sha256:ff797846578a9f2a201b53442aedeb543319466870fbe1c701eab66dd7681165"}, + {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, + {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, ] [package.dependencies] click = ">=7.1.1,<9.0.0" -colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"all\""} -rich = {version = ">=10.11.0,<13.0.0", optional = true, markers = "extra == \"all\""} -shellingham = {version = ">=1.3.0,<2.0.0", optional = true, markers = "extra == \"all\""} +typing-extensions = ">=3.7.4.3" [package.extras] -all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] -test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<13.0.0)", "shellingham (>=1.3.0,<2.0.0)"] +test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] [[package]] name = "types-pyyaml" @@ -1711,13 +1948,13 @@ socks = 
["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.24.2" +version = "20.24.3" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.24.2-py3-none-any.whl", hash = "sha256:43a3052be36080548bdee0b42919c88072037d50d56c28bd3f853cbe92b953ff"}, - {file = "virtualenv-20.24.2.tar.gz", hash = "sha256:fd8a78f46f6b99a67b7ec5cf73f92357891a7b3a40fd97637c27f854aae3b9e0"}, + {file = "virtualenv-20.24.3-py3-none-any.whl", hash = "sha256:95a6e9398b4967fbcb5fef2acec5efaf9aa4972049d9ae41f95e0972a683fd02"}, + {file = "virtualenv-20.24.3.tar.gz", hash = "sha256:e5c3b4ce817b0b328af041506a2a299418c98747c4b1e68cb7527e74ced23efc"}, ] [package.dependencies] @@ -1786,4 +2023,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "c50632b24854733473ee1cebce99b747a46c0b26aed706589c2226ee1c5077ad" +content-hash = "341c748ac176d0e7828d7674e5dbefb5c1e98eda5bf02b252e8d1a64e29730a7" diff --git a/pyproject.toml b/pyproject.toml index a7b0118f..a8f496b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ matcha = "matcha_ml.cli.cli:app" [tool.poetry.dependencies] python = "^3.8" -typer = {extras = ["all"], version = "^0.7.0"} +typer = "0.9.0" python-terraform = "^0.10.1" azure-identity = "^1.12.0" azure-mgmt-resource = "^23.0.0" @@ -59,6 +59,8 @@ azure-storage-blob = "^12.16.0" urllib3 = "1.26.6" types-urllib3 = "^1.26.25.13" requests = "^2.31.0" +typing-extensions = "^4.7.1" +rich = "^13.5.2" [tool.poetry.group.dev.dependencies] diff --git a/src/matcha_ml/cli/cli.py b/src/matcha_ml/cli/cli.py index a668dcf7..f1cfd5be 100644 --- a/src/matcha_ml/cli/cli.py +++ b/src/matcha_ml/cli/cli.py @@ -2,6 +2,7 @@ from typing import Optional, Tuple import typer +from typing_extensions import Annotated from matcha_ml import __version__, core from matcha_ml.cli._validation import ( @@ -268,30 
+269,31 @@ def set(stack: str = typer.Argument("default")) -> None: @stack_app.command(help="Add a module to the stack.") -def add(module: str = typer.Argument(None)) -> None: +def add( + module: Annotated[str, typer.Argument(help="The module name.")], + flavor: Annotated[str, typer.Argument(help="the flavor of the module.")], +) -> None: """Add a module to the stack for Matcha to provision. Args: - module (str): the name of the module to add (e.g. 'seldon'). + module (str): the name of the module to add (e.g. 'orchestrator', 'experiment_tracker'). + flavor (str): the sub-type of the module (e.g. 'mlflow' for the module experiment_tracker) + + Example usage: + matcha stack add experiment_tracker mlflow """ - if module: - try: - stack_add(module) - print_status( - build_status( - f"Matcha '{module}' module has been added to the current stack." - ) + try: + stack_add(module, flavor) + print_status( + build_status( + f"Matcha '{module}' module of flavor '{flavor}' has been added to the current stack." ) - except MatchaInputError as e: - print_error(str(e)) - raise typer.Exit() - except MatchaError as e: - print_error(str(e)) - raise typer.Exit() - else: - print_error( - "No module specified. Please run `matcha stack add` again and provide the name of the module you wish to add." ) + except MatchaInputError as e: + print_error(str(e)) + raise typer.Exit() + except MatchaError as e: + print_error(str(e)) raise typer.Exit() diff --git a/src/matcha_ml/config/matcha_config.py b/src/matcha_ml/config/matcha_config.py index 1f6a7858..d4aef102 100644 --- a/src/matcha_ml/config/matcha_config.py +++ b/src/matcha_ml/config/matcha_config.py @@ -242,6 +242,7 @@ def remove_property(component_name: str, property_name: str) -> None: component_name (str): Name of the component. property_name (str): Name of the property within the component. + Raises: MatchaError: raises a MatchaError if the local config file does not exist. 
MatchaError: raises a MatchaError if the specified component does not exist. diff --git a/src/matcha_ml/constants.py b/src/matcha_ml/constants.py index 5e16250a..d0cf14f4 100644 --- a/src/matcha_ml/constants.py +++ b/src/matcha_ml/constants.py @@ -1,5 +1,21 @@ """General constants file.""" import os +from matcha_ml.config.matcha_config import MatchaConfigComponentProperty + LOCK_FILE_NAME = "matcha.lock" MATCHA_STATE_PATH = os.path.join(".matcha", "infrastructure", "matcha.state") + +STACK_MODULES = { + "orchestrator": {"zenml": MatchaConfigComponentProperty("orchestrator", "zenml")}, + "experiment_tracker": { + "mlflow": MatchaConfigComponentProperty("experiment_tracker", "mlflow") + }, + "data_version_control": { + "dvc": MatchaConfigComponentProperty("data_version_control", "dvc") + }, + "vector_database": { + "chroma": MatchaConfigComponentProperty("vector_database", "chroma") + }, + "deployer": {"seldon": MatchaConfigComponentProperty("deployer", "seldon")}, +} diff --git a/src/matcha_ml/core/_validation.py b/src/matcha_ml/core/_validation.py index 721e646c..2750fd93 100644 --- a/src/matcha_ml/core/_validation.py +++ b/src/matcha_ml/core/_validation.py @@ -1,7 +1,4 @@ """"Validation for core commands.""" - -from enum import Enum, EnumMeta - from matcha_ml.errors import MatchaInputError from matcha_ml.services import AzureClient @@ -10,36 +7,6 @@ MAXIMUM_RESOURCE_NAME_LEN = 24 -class StackModuleMeta(EnumMeta): - """Metaclass for the StackModule Enum.""" - - def __contains__(self, item: str) -> bool: # type: ignore - """Method for checking if an item is a member of the enum. - - Args: - item (str): the quantity to check for in the Enum. - - Returns: - True if item is a member of the Enum, False otherwise. 
- """ - try: - self(item) - except ValueError: - return False - else: - return True - - -class StackModule(Enum, metaclass=StackModuleMeta): - """Enum defining valid matcha stack modules.""" - - ZENML = "zenml" - COMMON = "common" - DVC = "dvc" - MLFLOW = "mlflow" - SELDON = "seldon" - - def _is_alphanumeric(prefix: str) -> bool: """Check whether the prefix is an alphanumeric string. @@ -132,15 +99,3 @@ def is_valid_region(region: str) -> bool: """ azure_client = AzureClient() return bool(azure_client.is_valid_region(region)) - - -def stack_module_is_valid(module: str) -> bool: - """Checks whether a module name is valid. - - Args: - module (str): The name of the stack module. - - Returns: - bool: True, if the module exists in the StackModule enum, otherwise, False. - """ - return module in StackModule diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index 24673d5c..0f92e967 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -18,6 +18,7 @@ MatchaConfigComponentProperty, MatchaConfigService, ) +from matcha_ml.constants import STACK_MODULES from matcha_ml.core._validation import is_valid_prefix, is_valid_region from matcha_ml.errors import MatchaError, MatchaInputError from matcha_ml.runners import AzureRunner @@ -368,17 +369,42 @@ def stack_set(stack_name: str) -> None: MatchaConfigService.update(stack) -def stack_add(module: str) -> None: +def stack_add(module_type: str, module_flavor: str) -> None: """A function for adding a module by name to the stack. Args: - module (str): The name of the module to add. + module_type (str): The type of the module to add e.g. 'experiment_tracker'. + module_flavor (str): The flavor of module to add e.g. 'mlflow'. Raises: MatchaInputError: if the stack_name is not a valid stack type MatchaError: if there are already resources provisioned. """ - ... 
+ module_type = module_type.lower() + module_flavor = module_flavor.lower() + + if RemoteStateManager().is_state_provisioned(): + raise MatchaError( + "The remote resources are already provisioned. Changing the stack now will not " + "change the remote state." + ) + + if STACK_MODULES.get(module_type) is None: + raise MatchaInputError(f"The module type '{module_type}' does not exist.") + + module_properties = STACK_MODULES.get(module_type, {}).get(module_flavor) + + if module_properties is None: + raise MatchaInputError( + f"The module type '{module_type}' does not have a flavor '{module_flavor}'." + ) + + MatchaConfigService.add_property("stack", module_properties) + + # Update stack name to custom + MatchaConfigService.add_property( + "stack", MatchaConfigComponentProperty("name", "custom") + ) def stack_remove(module_name: str) -> str: diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index d9f5d499..baa95144 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -187,6 +187,9 @@ def test_cli_stack_set_remove_help_option(runner: CliRunner) -> None: assert "Remove a module from the current Matcha stack." in result.stdout +TWO_EXIT_CODE = 2 + + def test_cli_stack_add_command_without_args(runner: CliRunner) -> None: """Tests the cli stack add sub-command without passing an argument. @@ -195,11 +198,9 @@ def test_cli_stack_add_command_without_args(runner: CliRunner) -> None: """ result = runner.invoke(app, ["stack", "add"]) - assert result.exit_code == 0 + assert result.exit_code == TWO_EXIT_CODE - assert ( - "No module specified. Please run `matcha stack add` again and" in result.stdout - ) + assert "Missing argument 'MODULE'." 
in result.stdout def test_cli_stack_remove_command_without_args(runner: CliRunner) -> None: @@ -229,12 +230,12 @@ def test_cli_stack_add_command_with_args( """ os.chdir(matcha_testing_directory) with patch(f"{INTERNAL_FUNCTION_STUB}.stack_add") as mocked_stack_add: - result = runner.invoke(app, ["stack", "add", "experiment_tracker"]) + result = runner.invoke(app, ["stack", "add", "experiment_tracker", "mlflow"]) assert result.exit_code == 0 assert mocked_stack_add.assert_called_once assert ( - "Matcha 'experiment_tracker' module has been added to the current stack." + "Matcha 'experiment_tracker' module of flavor 'mlflow' has been added to the \ncurrent stack.\n" in result.stdout ) diff --git a/tests/test_core/test_core.py b/tests/test_core/test_core.py index 3d1fe53b..3382a71d 100644 --- a/tests/test_core/test_core.py +++ b/tests/test_core/test_core.py @@ -11,8 +11,10 @@ import yaml from matcha_ml.cli.cli import app +from matcha_ml.config.matcha_config import MatchaConfigComponentProperty from matcha_ml.core import get, remove_state_lock -from matcha_ml.errors import MatchaInputError +from matcha_ml.core.core import stack_add +from matcha_ml.errors import MatchaError, MatchaInputError from matcha_ml.services.global_parameters_service import GlobalParameters from matcha_ml.state.matcha_state import ( MatchaState, @@ -356,3 +358,116 @@ def test_get_downloads_matcha_state_directory(mock_state_file, state_file_as_obj assert os.path.exists(state_file_location) assert state_file_as_object == get_result + + +def test_stack_add_expected(matcha_testing_directory: str): + """Tests that the core stack_add function works as expected for a given input. + + Args: + matcha_testing_directory (str): Mock directory for testing. 
+ """ + os.chdir(matcha_testing_directory) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, mock.patch( + "matcha_ml.core.core.MatchaConfigService.add_property" + ) as add_property: + mock_remote_state_manager = MagicMock() + provisioned_state.return_value = mock_remote_state_manager + mock_remote_state_manager.is_state_provisioned.return_value = False + add_property.return_value = None + + stack_add(module_type="experiment_tracker", module_flavor="mlflow") + + add_property.assert_has_calls( + [ + mock.call( + "stack", + MatchaConfigComponentProperty( + name="experiment_tracker", value="mlflow" + ), + ), + mock.call( + "stack", MatchaConfigComponentProperty(name="name", value="custom") + ), + ] + ) + + +def test_stack_add_invalid_flavor(matcha_testing_directory: str): + """Tests that the core stack_add function raises an exception when a flavor is invalid. + + Args: + matcha_testing_directory (str): Mock directory for testing. + """ + os.chdir(matcha_testing_directory) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, mock.patch( + "matcha_ml.core.core.MatchaConfigService.add_property" + ) as add_property: + mock_remote_state_manager = MagicMock() + provisioned_state.return_value = mock_remote_state_manager + mock_remote_state_manager.is_state_provisioned.return_value = False + add_property.return_value = None + + with pytest.raises(MatchaInputError) as e: + stack_add(module_type="experiment_tracker", module_flavor="flavor") + + assert ( + "The module type 'experiment_tracker' does not have a flavor 'flavor'." + in str(e) + ) + + +def test_stack_add_invalid_module_type(matcha_testing_directory: str): + """Tests that the core stack_add function raises an exception when a module type is invalid. + + Args: + matcha_testing_directory (str): Mock directory for testing. 
+ """ + os.chdir(matcha_testing_directory) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, mock.patch( + "matcha_ml.core.core.MatchaConfigService.add_property" + ) as add_property: + mock_remote_state_manager = MagicMock() + provisioned_state.return_value = mock_remote_state_manager + mock_remote_state_manager.is_state_provisioned.return_value = False + add_property.return_value = None + + with pytest.raises(MatchaInputError) as e: + stack_add(module_type="invalid_module", module_flavor="flavor") + + assert "The module type 'invalid_module' does not exist." in str(e) + + +def test_stack_add_with_existing_deployment(matcha_testing_directory: str): + """Tests that the core stack_add function raises an exception when a deployment already exists. + + Args: + matcha_testing_directory (str): Mock directory for testing. + """ + os.chdir(matcha_testing_directory) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, mock.patch( + "matcha_ml.core.core.MatchaConfigService.add_property" + ) as add_property: + mock_remote_state_manager = MagicMock() + provisioned_state.return_value = mock_remote_state_manager + mock_remote_state_manager.is_state_provisioned.return_value = True + add_property.return_value = None + + with pytest.raises(MatchaError) as e: + stack_add(module_type="invalid_module", module_flavor="flavor") + + assert ( + "The remote resources are already provisioned. Changing the stack now will not change the remote state." 
+ in str(e) + ) diff --git a/tests/test_core/test_core_validation.py b/tests/test_core/test_core_validation.py index dd27c03c..ffb98029 100644 --- a/tests/test_core/test_core_validation.py +++ b/tests/test_core/test_core_validation.py @@ -9,7 +9,6 @@ _is_alphanumeric, _is_not_digits, is_valid_prefix, - stack_module_is_valid, ) from matcha_ml.errors import MatchaInputError @@ -97,18 +96,3 @@ def test_is_valid_prefix_invalid( is_valid_prefix(prefix) assert str(err.value) == error_msg - - -def test_stack_module_is_valid_with_valid_module(): - """Test stack module validation returns True when the module is valid.""" - assert stack_module_is_valid("zenml") - - -def test_stack_module_is_valid_with_valid_module_with_upper_case(): - """Test stack module validation returns False when the module is fully upper case and not valid.""" - assert not stack_module_is_valid("ZENML") - - -def test_stack_module_is_valid_with_invalid_module(): - """Test stack module validation returns False when the module does not exist.""" - assert not stack_module_is_valid("invalidmodule") From 676d42f7976e0d8763aabb0490911ca9856195ca Mon Sep 17 00:00:00 2001 From: Callum Wells <68609181+swells2020@users.noreply.github.com> Date: Thu, 31 Aug 2023 11:28:03 +0100 Subject: [PATCH 07/10] [RPD-307] add core stack remove command (#211) * adds stack remove command * merges changes and finishes tests * updates logic and tests * fixes CI tests. 
* fixes CI * fixes CI * fixes CI * fixes CI * fixes CI --- src/matcha_ml/cli/cli.py | 32 +++---- src/matcha_ml/config/matcha_config.py | 2 +- src/matcha_ml/core/core.py | 43 ++++++++- tests/conftest.py | 42 ++++++++- tests/test_cli/test_stack.py | 20 ++-- tests/test_config/test_matcha_config.py | 21 +++-- tests/test_core/test_core.py | 117 +++++++++++++++++++++++- tests/test_core/test_stack_set.py | 3 +- 8 files changed, 231 insertions(+), 49 deletions(-) diff --git a/src/matcha_ml/cli/cli.py b/src/matcha_ml/cli/cli.py index f1cfd5be..edab87d3 100644 --- a/src/matcha_ml/cli/cli.py +++ b/src/matcha_ml/cli/cli.py @@ -271,7 +271,7 @@ def set(stack: str = typer.Argument("default")) -> None: @stack_app.command(help="Add a module to the stack.") def add( module: Annotated[str, typer.Argument(help="The module name.")], - flavor: Annotated[str, typer.Argument(help="the flavor of the module.")], + flavor: Annotated[str, typer.Argument(help="The flavor of the module.")], ) -> None: """Add a module to the stack for Matcha to provision. @@ -298,30 +298,26 @@ def add( @stack_app.command(help="Remove a module from the current Matcha stack.") -def remove(module: str = typer.Argument(None)) -> None: +def remove( + module: Annotated[str, typer.Argument(help="The module name.")], +) -> None: """Remove a module from the current Matcha stack. Args: module (str): the name of the module to be removed. """ - if module: - try: - stack_remove(module) - print_status( - build_status( - f"Matcha '{module}' module has been removed from the current stack." - ) + try: + stack_remove(module) + print_status( + build_status( + f"Matcha '{module}' module has been removed from the current stack." ) - except MatchaInputError as e: - print_error(str(e)) - raise typer.Exit() - except MatchaError as e: - print_error(str(e)) - raise typer.Exit() - else: - print_error( - "No module specified. Please run `matcha stack remove` again and provide the name of the module you wish to remove." 
) + except MatchaInputError as e: + print_error(str(e)) + raise typer.Exit() + except MatchaError as e: + print_error(str(e)) raise typer.Exit() diff --git a/src/matcha_ml/config/matcha_config.py b/src/matcha_ml/config/matcha_config.py index d4aef102..f20a8a59 100644 --- a/src/matcha_ml/config/matcha_config.py +++ b/src/matcha_ml/config/matcha_config.py @@ -236,7 +236,7 @@ def add_property( @staticmethod def remove_property(component_name: str, property_name: str) -> None: - """Method to remove a MatchaConfigComponentProperty to a Component. + """Method to remove a MatchaConfigComponentProperty from a Component. Args: component_name (str): Name of the component. diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index 0f92e967..692fe593 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -19,7 +19,10 @@ MatchaConfigService, ) from matcha_ml.constants import STACK_MODULES -from matcha_ml.core._validation import is_valid_prefix, is_valid_region +from matcha_ml.core._validation import ( + is_valid_prefix, + is_valid_region, +) from matcha_ml.errors import MatchaError, MatchaInputError from matcha_ml.runners import AzureRunner from matcha_ml.services.analytics_service import AnalyticsEvent, track @@ -407,6 +410,38 @@ def stack_add(module_type: str, module_flavor: str) -> None: ) -def stack_remove(module_name: str) -> str: - """A placeholder for the stack remove logic in core.""" - return module_name +def stack_remove(module_type: str) -> None: + """A function for removing a module by name in the stack. + + Args: + module_type (str): The name of the module to remove. + + Raises: + MatchaError: if there are already resources provisioned. + MatchaInputError: if the module_type is not a valid module within the current stack. + """ + if RemoteStateManager().is_state_provisioned(): + raise MatchaError( + "The remote resources are already provisioned. Changing the stack now will not " + "change the remote state." 
+ ) + + module_type = module_type.lower() + + matcha_config = MatchaConfigService.read_matcha_config() + matcha_stack_component = matcha_config.find_component("stack") + + if matcha_stack_component: + if matcha_stack_component.find_property(module_type): + MatchaConfigService.remove_property("stack", module_type) + MatchaConfigService.add_property( + "stack", MatchaConfigComponentProperty("name", "custom") + ) + else: + raise MatchaInputError( + f"Module '{module_type}' does not exist in the current stack." + ) + else: + raise MatchaError( + "No Matcha 'stack' component found in the local 'matcha.config.json' file. Please run 'matcha stack set' or matcha stack add'." + ) diff --git a/tests/conftest.py b/tests/conftest.py index 6e9340fa..75bca109 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -257,11 +257,15 @@ def mocked_matcha_config_json_object() -> Dict[str, Dict[str, str]]: matcha_config (Dict[str, Dict[str, str]]): a dictionary representation of the matcha.config.json file """ matcha_config = { + "stack": { + "name": "default", + "experiment_tracker": "mlflow", + }, "remote_state_bucket": { "account_name": "test-account", "container_name": "test-container", "resource_group_name": "test-rg", - } + }, } return matcha_config @@ -278,7 +282,9 @@ def mocked_matcha_config_component_property() -> MatchaConfigComponentProperty: @pytest.fixture -def mocked_matcha_config_component(mocked_matcha_config_json_object): +def mocked_matcha_config_remote_state_bucket_component( + mocked_matcha_config_json_object, +): """A fixture returning a mocked MatchaConfigComponentProperty instance. Args: @@ -295,16 +301,42 @@ def mocked_matcha_config_component(mocked_matcha_config_json_object): @pytest.fixture -def mocked_matcha_config(mocked_matcha_config_component): +def mocked_matcha_config_stack_component(mocked_matcha_config_json_object): + """A fixture returning a mocked MatchaConfigComponentProperty instance. 
+ + Args: + mocked_matcha_config_json_object (Dict[str, Dict[str, str]]): a dictionary representation of the matcha.config.json file + + Returns: + (MatchaConfigComponentProperty): a mocked MatchaConfigComponentProperty instance. + """ + properties = [] + for key, value in mocked_matcha_config_json_object["stack"].items(): + properties.append(MatchaConfigComponentProperty(name=key, value=value)) + + return MatchaConfigComponent(name="stack", properties=properties) + + +@pytest.fixture +def mocked_matcha_config( + mocked_matcha_config_remote_state_bucket_component, + mocked_matcha_config_stack_component, +): """A fixture returning a mocked MatchaConfig instance. Args: - mocked_matcha_config_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance. + mocked_matcha_config_remote_state_bucket_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance representing the remote storage bucket component. + mocked_matcha_config_stack_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance representing the stack component. Returns: (MatchaConfig): a mocked MatchaConfig instance. 
""" - return MatchaConfig([mocked_matcha_config_component]) + return MatchaConfig( + [ + mocked_matcha_config_stack_component, + mocked_matcha_config_remote_state_bucket_component, + ] + ) @pytest.fixture diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index baa95144..4a1ae998 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -9,7 +9,8 @@ from matcha_ml.config import MatchaConfig, MatchaConfigService from matcha_ml.state.remote_state_manager import RemoteStateManager -INTERNAL_FUNCTION_STUB = "matcha_ml.core.core" +INTERNAL_FUNCTION_STUB = "matcha_ml.cli.cli" +TWO_EXIT_CODE = 2 def test_cli_stack_command_help_option(runner: CliRunner) -> None: @@ -155,10 +156,13 @@ def test_stack_set_file_modified( new_config_dict = new_config.to_dict() - assert len(new_config_dict) == len(config_dict) + 1 + assert len(new_config_dict) == len(config_dict) assert "stack" in new_config_dict assert new_config_dict["stack"]["name"] == "llm" - assert config_dict.items() <= new_config_dict.items() + assert ( + config_dict["remote_state_bucket"].items() + == new_config_dict["remote_state_bucket"].items() + ) def test_cli_stack_set_add_help_option(runner: CliRunner) -> None: @@ -187,9 +191,6 @@ def test_cli_stack_set_remove_help_option(runner: CliRunner) -> None: assert "Remove a module from the current Matcha stack." in result.stdout -TWO_EXIT_CODE = 2 - - def test_cli_stack_add_command_without_args(runner: CliRunner) -> None: """Tests the cli stack add sub-command without passing an argument. @@ -210,12 +211,9 @@ def test_cli_stack_remove_command_without_args(runner: CliRunner) -> None: runner (CliRunner): typer CLI runner. """ result = runner.invoke(app, ["stack", "remove"]) - assert result.exit_code == 0 + assert result.exit_code == TWO_EXIT_CODE - assert ( - "No module specified. 
Please run `matcha stack remove` again and provide the name\nof the module you wish to remove.\n" - in result.stdout - ) + assert "Missing argument 'MODULE'." in result.stdout def test_cli_stack_add_command_with_args( diff --git a/tests/test_config/test_matcha_config.py b/tests/test_config/test_matcha_config.py index d27a762c..d17a1fab 100644 --- a/tests/test_config/test_matcha_config.py +++ b/tests/test_config/test_matcha_config.py @@ -43,6 +43,11 @@ def test_matcha_config_from_dict( mocked_matcha_config_json_object (Dict[str, Dict[str, str]]): a dictionary representation of the matcha.config.json file mocked_matcha_config (MatchaConfig): a mocked MatchaConfig instance. """ + print( + mocked_matcha_config, + mocked_matcha_config.from_dict(mocked_matcha_config_json_object), + sep="\n", + ) assert ( mocked_matcha_config.from_dict(mocked_matcha_config_json_object) == mocked_matcha_config @@ -263,20 +268,24 @@ def test_matcha_config_service_update( def test_remove_property_expected( - mocked_matcha_config_component: MatchaConfigComponent, + mocked_matcha_config_remote_state_bucket_component: MatchaConfigComponent, ): """Test that a component is removed if it exists. Args: - mocked_matcha_config_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance. + mocked_matcha_config_remote_state_bucket_component (MatchaConfigComponent): a mocked MatchaConfigComponent instance. 
""" - mocked_matcha_config_component.remove_property(property_name="account_name") - mocked_matcha_config_component.remove_property(property_name="resource_group_name") - mocked_matcha_config_component.remove_property( + mocked_matcha_config_remote_state_bucket_component.remove_property( + property_name="account_name" + ) + mocked_matcha_config_remote_state_bucket_component.remove_property( + property_name="resource_group_name" + ) + mocked_matcha_config_remote_state_bucket_component.remove_property( property_name="not_an_existing_property_name" ) - assert mocked_matcha_config_component == MatchaConfigComponent( + assert mocked_matcha_config_remote_state_bucket_component == MatchaConfigComponent( name="remote_state_bucket", properties=[ MatchaConfigComponentProperty(name="container_name", value="test-container") diff --git a/tests/test_core/test_core.py b/tests/test_core/test_core.py index 3382a71d..24586050 100644 --- a/tests/test_core/test_core.py +++ b/tests/test_core/test_core.py @@ -11,9 +11,13 @@ import yaml from matcha_ml.cli.cli import app -from matcha_ml.config.matcha_config import MatchaConfigComponentProperty +from matcha_ml.config.matcha_config import ( + MatchaConfig, + MatchaConfigComponentProperty, + MatchaConfigService, +) from matcha_ml.core import get, remove_state_lock -from matcha_ml.core.core import stack_add +from matcha_ml.core.core import stack_add, stack_remove from matcha_ml.errors import MatchaError, MatchaInputError from matcha_ml.services.global_parameters_service import GlobalParameters from matcha_ml.state.matcha_state import ( @@ -471,3 +475,112 @@ def test_stack_add_with_existing_deployment(matcha_testing_directory: str): "The remote resources are already provisioned. Changing the stack now will not change the remote state." in str(e) ) + + +def test_stack_remove_with_state_provisioned(matcha_testing_directory): + """Tests that the core stack_remove function raises an exception when a deployment already exists. 
+ + Args: + matcha_testing_directory (str): Mock directory for testing. + """ + os.chdir(matcha_testing_directory) + + with mock.patch("matcha_ml.core.core.RemoteStateManager") as provisioned_state: + mock_remote_state_manager = MagicMock() + mock_remote_state_manager.is_state_provisioned.return_value = True + provisioned_state.return_value = mock_remote_state_manager + + with pytest.raises(MatchaError) as e: + stack_remove(module_type="test_module") + + assert ( + "The remote resources are already provisioned. Changing the stack now will not change the remote state." + in str(e) + ) + + +def test_stack_remove_with_module_present( + matcha_testing_directory: str, mocked_matcha_config: MatchaConfig +): + """Tests that the core stack_remove function removes a module when then module exists. + + Args: + matcha_testing_directory (str): Mock directory for testing. + mocked_matcha_config (MatchaConfig): A mocked MatchaConfig object for testing. + """ + os.chdir(matcha_testing_directory) + MatchaConfigService.write_matcha_config(mocked_matcha_config) + + with mock.patch("matcha_ml.core.core.RemoteStateManager") as provisioned_state: + mock_remote_state_manager = MagicMock() + mock_remote_state_manager.is_state_provisioned.return_value = False + provisioned_state.return_value = mock_remote_state_manager + stack_remove(module_type="experiment_tracker") + + new_matcha_config = MatchaConfigService.read_matcha_config() + + new_matcha_config.to_dict() + mocked_matcha_config.to_dict() + + assert mocked_matcha_config.find_component( + "remote_state_bucket" + ) == new_matcha_config.find_component("remote_state_bucket") + assert ( + new_matcha_config.find_component("stack").find_property("name").value + == "custom" + ) + assert not new_matcha_config.find_component("stack").find_property( + "experiment_tracker" + ) + + +def test_stack_remove_with_no_module( + matcha_testing_directory: str, mocked_matcha_config: MatchaConfig +): + """Tests that the core stack_remove function raises 
an exception when a module does not exist. + + Args: + matcha_testing_directory (str): Mock directory for testing. + mocked_matcha_config (MatchaConfig): A mocked MatchaConfig object for testing. + """ + os.chdir(matcha_testing_directory) + MatchaConfigService.write_matcha_config(mocked_matcha_config) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, pytest.raises(MatchaInputError) as e: + mock_remote_state_manager = MagicMock() + mock_remote_state_manager.is_state_provisioned.return_value = False + provisioned_state.return_value = mock_remote_state_manager + stack_remove(module_type="test_module") + + assert "Module 'test_module' does not exist in the current stack." in str(e) + + +def test_stack_remove_with_no_stack( + matcha_testing_directory: str, + mocked_matcha_config_remote_state_bucket_component: MatchaConfig, +): + """Tests that the core stack_remove function raises an exception when a stack component does not exist. + + Args: + matcha_testing_directory (str): Mock directory for testing. + mocked_matcha_config_remote_state_bucket_component (MatchaConfigComponent): A mocked MatchaConfigComponent object for testing. + """ + os.chdir(matcha_testing_directory) + MatchaConfigService.write_matcha_config( + MatchaConfig([mocked_matcha_config_remote_state_bucket_component]) + ) + + with mock.patch( + "matcha_ml.core.core.RemoteStateManager" + ) as provisioned_state, pytest.raises(MatchaError) as e: + mock_remote_state_manager = MagicMock() + mock_remote_state_manager.is_state_provisioned.return_value = False + provisioned_state.return_value = mock_remote_state_manager + stack_remove(module_type="test_module") + + assert ( + "No Matcha 'stack' component found in the local 'matcha.config.json' file. Please run 'matcha stack set' or matcha stack add'." 
+ in str(e) + ) diff --git a/tests/test_core/test_stack_set.py b/tests/test_core/test_stack_set.py index 8a38df08..f1ac88c6 100644 --- a/tests/test_core/test_stack_set.py +++ b/tests/test_core/test_stack_set.py @@ -69,10 +69,9 @@ def test_stack_set_existing_file( new_config = MatchaConfigService.read_matcha_config() new_config_dict = new_config.to_dict() - assert len(new_config_dict) == len(config_dict) + 1 + assert len(new_config_dict) == len(config_dict) assert "stack" in new_config_dict assert new_config_dict["stack"]["name"] == "llm" - assert config_dict.items() <= new_config_dict.items() def test_stack_set_resources_already_provisioned(): From a235e128f830dc0edc7c392fab6577dc0a497bcb Mon Sep 17 00:00:00 2001 From: Chris <32800386+Christopher-Norman@users.noreply.github.com> Date: Thu, 31 Aug 2023 11:42:40 +0100 Subject: [PATCH 08/10] [RPD-312] Update base_template/azure_template files to handle new modular Terraform files (#209) * Initial azure template updates * Add tests for new functions * Use print_error instead of print * Fix tests --- src/matcha_ml/core/core.py | 2 +- src/matcha_ml/templates/azure_template.py | 150 +++++++++++++- tests/test_templates/test_azure_template.py | 207 ++++++++++++++++++++ 3 files changed, 355 insertions(+), 4 deletions(-) diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index 692fe593..40c12718 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -314,7 +314,7 @@ def provision( os.path.dirname(__file__), os.pardir, "infrastructure", - stack_name, + "modules", ) azure_template = AzureTemplate( diff --git a/src/matcha_ml/templates/azure_template.py b/src/matcha_ml/templates/azure_template.py index f8ff3510..4a170b7a 100644 --- a/src/matcha_ml/templates/azure_template.py +++ b/src/matcha_ml/templates/azure_template.py @@ -1,6 +1,18 @@ """Build a template for provisioning resources on Azure using terraform files.""" +import json +import os +import shutil +from shutil import rmtree from 
typing import List, Optional +from matcha_ml.cli.ui.print_messages import print_error, print_status +from matcha_ml.cli.ui.status_message_builders import ( + build_status, + build_step_success_status, + build_substep_success_status, +) +from matcha_ml.config.matcha_config import MatchaConfigService +from matcha_ml.errors import MatchaPermissionError from matcha_ml.state import MatchaState, MatchaStateService from matcha_ml.templates.base_template import BaseTemplate, TemplateVariables @@ -39,6 +51,64 @@ def __init__(self, submodule_names: List[str]) -> None: """ super().__init__(submodule_names) + @staticmethod + def empty_directory_except_files(directory: str, except_files: List[str]) -> None: + """Empties a directory of all files and folders except for a list of file names. + + Args: + directory (str): Directory name to clean. + except_files (List[str]): List of file names to be excluded from removal. + """ + try: + for item in os.listdir(directory): + item_path = os.path.join(directory, item) + if os.path.isfile(item_path) and item not in except_files: + os.remove(item_path) + elif os.path.isdir(item_path) and item not in except_files: + shutil.rmtree(item_path) + except (OSError, FileNotFoundError) as e: + print_error(f"Error while emptying directory '{directory}': {e}") + + @staticmethod + def concatenate_files(source_file: str, target_file: str) -> None: + """Takes the contents of one file and concatenates it to a target file. + + Args: + source_file (str): File to copy contents from. + target_file (str): Destination to add source file contents to. + """ + try: + with open(source_file) as source, open(target_file, "a") as target: + target.write(source.read()) + except (OSError, FileNotFoundError) as e: + print_error(f"Error while concatenating files: {e}") + + @staticmethod + def recursively_copy_files(source_dir: str, target_dir: str) -> None: + """Copy all files within a source location to a target location. 
+ + Args: + source_dir (str): The source directory containing files to copy. + target_dir (str): The target directory to copy files to. + """ + try: + if not os.path.exists(target_dir): + os.makedirs(target_dir) + + for item in os.listdir(source_dir): + source_item = os.path.join(source_dir, item) + target_item = os.path.join(target_dir, item) + + if os.path.isdir(source_item): + shutil.copytree(source_item, target_item) + elif os.path.exists(target_item): + # Concatenate the source file content to the target file + AzureTemplate.concatenate_files(source_item, target_item) + else: + shutil.copy2(source_item, target_item) + except (OSError, FileNotFoundError) as e: + print_error(f"Error while copying files: {e}") + def build_template( self, config: TemplateVariables, @@ -46,15 +116,89 @@ def build_template( destination: str, verbose: Optional[bool] = False, ) -> None: - """Builds a template using the provided configuration and copies it to the destination. + """Build and copy the template to the project directory. Args: config (TemplateVariables): variables to apply to the template. template_src (str): path of the template to use. destination (str): destination path to write template to. - verbose (Optional[bool]): additional output is shown when True. Defaults to False. + verbose (bool, optional): additional output is shown when True. Defaults to False. 
+ + Raises: + MatchaPermissionError: when there are no write permissions on the configuration destination """ - super().build_template(config, template_src, destination, verbose) + try: + print_status(build_status("\nBuilding configuration template...")) + + # Override configuration if it already exists + if os.path.exists(destination): + rmtree(destination) + + os.makedirs(destination, exist_ok=True) + + if verbose: + print_status( + build_substep_success_status( + f"Ensure template destination directory: {destination}" + ) + ) + + stack = {"common"} + stack_component = MatchaConfigService.read_matcha_config().find_component( + "stack" + ) + if stack_component is not None: + for item in stack_component.properties: + if item.name != "name": + stack.add(item.value) + + self.empty_directory_except_files( + destination, [".terraform", ".terraform.lock.hcl", "terraform.tfstate"] + ) + for module in stack: + source_directory = os.path.join(template_src, f"{module}") + self.recursively_copy_files(source_directory, destination) + + if verbose: + print_status( + build_substep_success_status( + f"{module} module configuration was copied" + ) + ) + + if verbose: + print_status( + build_substep_success_status("Configurations were copied.") + ) + + configuration_destination = os.path.join( + destination, "terraform.tfvars.json" + ) + with open(configuration_destination, "w") as f: + json.dump(vars(config), f) + + if verbose: + print_status( + build_substep_success_status("Template variables were added.") + ) + + except PermissionError: + raise MatchaPermissionError( + f"Error - You do not have permission to write the configuration. Check if you have write permissions for '{destination}'." 
+ ) + + if verbose: + print_status( + build_substep_success_status("Template configuration has finished!") + ) + + print_status( + build_step_success_status( + f"The configuration template was written to {destination}" + ) + ) + + print() # Add matcha.state file one directory above the template config_dict = vars(config) diff --git a/tests/test_templates/test_azure_template.py b/tests/test_templates/test_azure_template.py index 416c2f51..94e85e59 100644 --- a/tests/test_templates/test_azure_template.py +++ b/tests/test_templates/test_azure_template.py @@ -1,4 +1,8 @@ """Test suite to test the azure template.""" +import os +import shutil +import tempfile + import pytest from matcha_ml.templates import AzureTemplate @@ -12,3 +16,206 @@ def azure_template() -> AzureTemplate: AzureTemplate: the Azure template. """ return AzureTemplate() + + +@pytest.fixture +def temp_directory() -> str: + """Temporary testing directory. + + Returns: + str: Location of the directory. + + Yields: + Iterator[str]: Location of the directory. + """ + temp_dir = tempfile.mkdtemp() + yield temp_dir + shutil.rmtree(temp_dir) + + +def test_empty_directory_except_files(temp_directory: str) -> None: + """Test the 'empty_directory_except_files' function. + + Args: + temp_directory (str): Temporary directory path for testing. + """ + # Create some files and directories in the temporary directory + with open(os.path.join(temp_directory, "file1.txt"), "w") as f: + f.write("Content of file1") + with open(os.path.join(temp_directory, "file2.txt"), "w") as f: + f.write("Content of file2") + + # Call the function to empty the directory except for file2.txt + AzureTemplate.empty_directory_except_files( + temp_directory, except_files=["file2.txt"] + ) + + assert not os.path.exists(os.path.join(temp_directory, "file1.txt")) + assert os.path.exists(os.path.join(temp_directory, "file2.txt")) + + +def test_concatenate_files(temp_directory: str) -> None: + """Test the 'concatenate_files' function. 
+ + Args: + temp_directory (str): Temporary directory path for testing. + """ + source_file = os.path.join(temp_directory, "source.txt") + target_file = os.path.join(temp_directory, "target.txt") + + # Create source and target files with some content + with open(source_file, "w") as source: + source.write("Source file content") + with open(target_file, "w") as target: + target.write("Target file content") + + # Call the function to concatenate source_file to target_file + AzureTemplate.concatenate_files(source_file, target_file) + + # Check if the contents of target_file have been updated + with open(target_file) as target: + assert target.read() == "Target file contentSource file content" + + +def test_recursively_copy_files(temp_directory: str) -> None: + """Test the 'recursively_copy_files' function. + + Args: + temp_directory (str): Temporary directory path for testing. + """ + source_dir = os.path.join(temp_directory, "source") + target_dir = os.path.join(temp_directory, "target") + + # Create some files and directories in the source directory + os.mkdir(source_dir) + os.mkdir(os.path.join(source_dir, "subfolder")) + with open(os.path.join(source_dir, "file1.txt"), "w") as f: + f.write("Content of file1") + with open(os.path.join(source_dir, "subfolder", "file2.txt"), "w") as f: + f.write("Content of file2") + + # Call the function to copy files from source_dir to target_dir + AzureTemplate.recursively_copy_files(source_dir, target_dir) + + # Check if the files and directories have been copied correctly + assert os.path.exists(os.path.join(target_dir, "file1.txt")) + assert os.path.exists(os.path.join(target_dir, "subfolder", "file2.txt")) + + # Check if the content of 'file1.txt' and 'file2.txt' has been copied correctly + with open(os.path.join(target_dir, "file1.txt")) as f: + assert f.read() == "Content of file1" + with open(os.path.join(target_dir, "subfolder", "file2.txt")) as f: + assert f.read() == "Content of file2" + + +def 
test_empty_directory_except_files_error_handling( + temp_directory: str, capsys +) -> None: + """Test error handling in AzureTemplate.empty_directory_except_files function. + + Args: + temp_directory (str): The path to a temporary directory. + capsys: Pytest fixture for capturing stdout and stderr. + """ + # Run the function that raises an exception + AzureTemplate.empty_directory_except_files("nonexistent_directory", []) + + # Capture the output (stdout and stderr) + captured = capsys.readouterr() + + # Check if the error message is present in the captured output + assert "Error while emptying directory" in captured.err + + +def test_concatenate_files_error_handling(temp_directory: str, capsys) -> None: + """Test error handling in AzureTemplate.concatenate_files function. + + Args: + temp_directory (str): The path to a temporary directory. + capsys: Pytest fixture for capturing stdout and stderr. + """ + os.path.join(temp_directory, "source.txt") + target_file = os.path.join(temp_directory, "target.txt") + + # Run the function that raises an exception + AzureTemplate.concatenate_files("nonexistent_file.txt", target_file) + + # Capture the output (stdout and stderr) + captured = capsys.readouterr() + + # Check if the error message is present in the captured output + assert "Error while concatenating files:" in captured.err + + +def test_recursively_copy_files_error_handling_directory_not_exist( + temp_directory: str, capsys +) -> None: + """Test error handling in AzureTemplate.recursively_copy_files function when source directory does not exist. + + Args: + temp_directory (str): The path to a temporary directory. + capsys: Pytest fixture for capturing stdout and stderr. 
+ """ + # Run the function that raises an exception + AzureTemplate.recursively_copy_files("nonexistent_source", "target") + + # Capture the output (stdout and stderr) + captured = capsys.readouterr() + + # Check if the error message is present in the captured output + assert ( + "Error while copying files: [Errno 2] No such file or directory:" + in captured.err + ) + + +def test_recursively_copy_files_error_handling_target_dir_exists( + temp_directory: str, capsys +) -> None: + """Test error handling in AzureTemplate.recursively_copy_files function when the target directory cannot be created. + + Args: + temp_directory (str): The path to a temporary directory. + capsys: Pytest fixture for capturing stdout and stderr. + """ + # Create a file with the same name as the target directory + target_dir_as_file = os.path.join(temp_directory, "target") + with open(target_dir_as_file, "w") as file: + file.write("content") + + # Run the function that raises an exception + AzureTemplate.recursively_copy_files(temp_directory, target_dir_as_file) + + # Capture the output (stdout and stderr) + captured = capsys.readouterr() + + # Check if the error message is present in the captured output + assert "Error while copying files:" in captured.err + + +def test_recursively_copy_files_error_handling_permission_error( + temp_directory: str, capsys +) -> None: + """Test error handling in AzureTemplate.recursively_copy_files function when a source file cannot be copied due to permission error. + + Args: + temp_directory (str): The path to a temporary directory. + capsys: Pytest fixture for capturing stdout and stderr. 
+ """ + source_file = os.path.join(temp_directory, "source.txt") + with open(source_file, "w") as file: + file.write("content") + + # Create a read-only target directory + target_dir = os.path.join(temp_directory, "target") + os.makedirs(target_dir, exist_ok=True) + os.chmod(target_dir, 0o444) # Make it read-only + + # Run the function that raises an exception + AzureTemplate.recursively_copy_files(temp_directory, target_dir) + + # Capture the output (stdout and stderr) + captured = capsys.readouterr() + + # Check if the error message is present in the captured output + assert "Permission denied" in captured.err From ccf2e799691766091fa4c28c8cd0873ca1253492 Mon Sep 17 00:00:00 2001 From: Jonathan Carlton Date: Thu, 31 Aug 2023 13:14:53 +0100 Subject: [PATCH 09/10] [RPD-309] Update stack set to add individual modules to the matcha.config.json file (#210) --- src/matcha_ml/constants.py | 9 +++ src/matcha_ml/core/core.py | 44 +++++++++---- src/matcha_ml/templates/azure_template.py | 4 +- tests/test_cli/test_provision.py | 4 +- tests/test_cli/test_stack.py | 12 +++- tests/test_core/test_core_provision.py | 12 +++- tests/test_core/test_stack_set.py | 80 +++++++++++++++++++++-- 7 files changed, 139 insertions(+), 26 deletions(-) diff --git a/src/matcha_ml/constants.py b/src/matcha_ml/constants.py index d0cf14f4..599aead4 100644 --- a/src/matcha_ml/constants.py +++ b/src/matcha_ml/constants.py @@ -19,3 +19,12 @@ }, "deployer": {"seldon": MatchaConfigComponentProperty("deployer", "seldon")}, } + +DEFAULT_STACK = [ + MatchaConfigComponentProperty("orchestrator", "zenml"), + MatchaConfigComponentProperty("experiment_tracker", "mlflow"), + MatchaConfigComponentProperty("data_version_control", "dvc"), + MatchaConfigComponentProperty("deployer", "seldon"), +] + +LLM_STACK = DEFAULT_STACK + [MatchaConfigComponentProperty("vector_database", "chroma")] diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index 40c12718..ed94f84e 100644 --- 
a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -18,18 +18,20 @@ MatchaConfigComponentProperty, MatchaConfigService, ) -from matcha_ml.constants import STACK_MODULES -from matcha_ml.core._validation import ( - is_valid_prefix, - is_valid_region, -) + +from matcha_ml.constants import DEFAULT_STACK, LLM_STACK, STACK_MODULES +from matcha_ml.core._validation import is_valid_prefix, is_valid_region from matcha_ml.errors import MatchaError, MatchaInputError from matcha_ml.runners import AzureRunner from matcha_ml.services.analytics_service import AnalyticsEvent, track from matcha_ml.services.global_parameters_service import GlobalParameters from matcha_ml.state import MatchaStateService, RemoteStateManager from matcha_ml.state.matcha_state import MatchaState -from matcha_ml.templates.azure_template import DEFAULT_STACK, LLM_STACK, AzureTemplate +from matcha_ml.templates.azure_template import ( + DEFAULT_STACK_TF, + LLM_STACK_TF, + AzureTemplate, +) class StackTypeMeta( @@ -318,7 +320,7 @@ def provision( ) azure_template = AzureTemplate( - LLM_STACK if stack_name == StackType.LLM.value else DEFAULT_STACK + LLM_STACK_TF if stack_name == StackType.LLM.value else DEFAULT_STACK_TF ) zenml_version = infer_zenml_version() @@ -353,6 +355,27 @@ def stack_set(stack_name: str) -> None: MatchaInputError: if the stack_name is not a valid stack type MatchaError: if there are already resources provisioned. """ + + def _create_stack_component(stack_type: StackType) -> MatchaConfigComponent: + """Create the set of configuration component for the stack. + + Args: + stack_type (StackType): the type of stack to create. + + Returns: + MatchaConfigComponent: the stack component. 
+ """ + stack = MatchaConfigComponent( + name="stack", + properties=[ + MatchaConfigComponentProperty(name="name", value=stack_type.value) + ], + ) + + stack.properties += LLM_STACK if stack_type == StackType.LLM else DEFAULT_STACK + + return stack + if RemoteStateManager().is_state_provisioned(): raise MatchaError( "The remote resources are already provisioned. Changing the stack now will not " @@ -362,12 +385,7 @@ def stack_set(stack_name: str) -> None: if stack_name.lower() not in StackType: raise MatchaInputError(f"{stack_name} is not a valid stack type.") - stack_enum = StackType(stack_name.lower()) - - stack = MatchaConfigComponent( - name="stack", - properties=[MatchaConfigComponentProperty(name="name", value=stack_enum.value)], - ) + stack = _create_stack_component(stack_type=StackType(stack_name.lower())) MatchaConfigService.update(stack) diff --git a/src/matcha_ml/templates/azure_template.py b/src/matcha_ml/templates/azure_template.py index 4a170b7a..e5a40908 100644 --- a/src/matcha_ml/templates/azure_template.py +++ b/src/matcha_ml/templates/azure_template.py @@ -16,7 +16,7 @@ from matcha_ml.state import MatchaState, MatchaStateService from matcha_ml.templates.base_template import BaseTemplate, TemplateVariables -DEFAULT_STACK = [ +DEFAULT_STACK_TF = [ "aks", "resource_group", "mlflow_module", @@ -29,7 +29,7 @@ "zen_server/zenml_helm/templates", "data_version_control_storage", ] -LLM_STACK = DEFAULT_STACK + [ +LLM_STACK_TF = DEFAULT_STACK_TF + [ "chroma", "chroma/chroma_helm", "chroma/chroma_helm/templates", diff --git a/tests/test_cli/test_provision.py b/tests/test_cli/test_provision.py index de912e78..060f3013 100644 --- a/tests/test_cli/test_provision.py +++ b/tests/test_cli/test_provision.py @@ -9,7 +9,7 @@ from typer.testing import CliRunner from matcha_ml.cli.cli import app -from matcha_ml.templates.azure_template import DEFAULT_STACK +from matcha_ml.templates.azure_template import DEFAULT_STACK_TF BASE_DIR = 
os.path.dirname(os.path.abspath(__file__)) TEMPLATE_DIR = os.path.join( @@ -66,7 +66,7 @@ def assert_infrastructure( module_file_path = os.path.join(destination_path, module_file_name) assert os.path.exists(module_file_path) - for module_name in DEFAULT_STACK: + for module_name in DEFAULT_STACK_TF: for module_file_name in glob.glob( os.path.join(TEMPLATE_DIR, module_name, "*.tf") ): diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index 4a1ae998..2f5f8192 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -126,7 +126,17 @@ def test_stack_set_file_created( assert result.exit_code == 0 config = MatchaConfigService.read_matcha_config() - assert config.to_dict() == {"stack": {"name": "llm"}} + expected_stack = { + "stack": { + "name": "llm", + "orchestrator": "zenml", + "experiment_tracker": "mlflow", + "data_version_control": "dvc", + "deployer": "seldon", + "vector_database": "chroma", + } + } + assert config.to_dict() == expected_stack def test_stack_set_file_modified( diff --git a/tests/test_core/test_core_provision.py b/tests/test_core/test_core_provision.py index c55a82bf..9a934122 100644 --- a/tests/test_core/test_core_provision.py +++ b/tests/test_core/test_core_provision.py @@ -23,7 +23,7 @@ from matcha_ml.state.matcha_state import ( MatchaState, ) -from matcha_ml.templates.azure_template import DEFAULT_STACK +from matcha_ml.templates.azure_template import DEFAULT_STACK_TF BASE_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -154,7 +154,7 @@ def assert_infrastructure( module_file_path = os.path.join(destination_path, module_file_name) assert os.path.exists(module_file_path) - for module_name in DEFAULT_STACK: + for module_name in DEFAULT_STACK_TF: for module_file_name in glob.glob( os.path.join(TEMPLATE_DIR, module_name, "*.tf") ): @@ -329,7 +329,13 @@ def test_stale_remote_state_file_is_removed(matcha_testing_directory: str): "container_name": "test-container", "resource_group_name": "test-rg", }, - 
"stack": {"name": "default"}, + "stack": { + "name": "default", + "orchestrator": "zenml", + "experiment_tracker": "mlflow", + "data_version_control": "dvc", + "deployer": "seldon", + }, } with mock.patch( diff --git a/tests/test_core/test_stack_set.py b/tests/test_core/test_stack_set.py index f1ac88c6..3a416af4 100644 --- a/tests/test_core/test_stack_set.py +++ b/tests/test_core/test_stack_set.py @@ -4,33 +4,103 @@ import pytest -from matcha_ml.config import MatchaConfig, MatchaConfigService +from matcha_ml.config import ( + MatchaConfig, + MatchaConfigComponent, + MatchaConfigComponentProperty, + MatchaConfigService, +) +from matcha_ml.constants import DEFAULT_STACK, LLM_STACK from matcha_ml.core import stack_set from matcha_ml.errors import MatchaError, MatchaInputError +@pytest.fixture +def expected_matcha_config_llm_stack() -> MatchaConfig: + """A mocked version of the MatchaConfig for the LLM stack. + + Returns: + MatchaConfig: the mocked llm stack config. + """ + return MatchaConfig( + components=[ + MatchaConfigComponent( + name="stack", + properties=[MatchaConfigComponentProperty(name="name", value="llm")] + + LLM_STACK, + ) + ] + ) + + +@pytest.fixture +def expected_matcha_config_default_stack() -> MatchaConfig: + """A mocked version of the MatchaConfig for the default stack. + + Returns: + MatchaConfig: the mocked default stack config. + """ + return MatchaConfig( + components=[ + MatchaConfigComponent( + name="stack", + properties=[MatchaConfigComponentProperty(name="name", value="default")] + + DEFAULT_STACK, + ) + ] + ) + + def test_stack_set_valid_no_existing_file( - matcha_testing_directory, mocked_remote_state_manager_is_state_provisioned_false + matcha_testing_directory, + mocked_remote_state_manager_is_state_provisioned_false, + expected_matcha_config_llm_stack, ): """Test that stack_set creates a config file if one doesn't exist and that it can be read properly. 
Args: matcha_testing_directory (str): temporary working directory mocked_remote_state_manager_is_state_provisioned_false (RemoteStateManager): A mocked remote state manager + expected_matcha_config_llm_stack (MatchaConfig): the expected configuration if the LLM stack is used. + """ + os.chdir(matcha_testing_directory) + + stack_set(stack_name="llm") + + config = MatchaConfigService.read_matcha_config() + assert config == expected_matcha_config_llm_stack + + + def test_change_stack_expected( + matcha_testing_directory, + mocked_remote_state_manager_is_state_provisioned_false, + expected_matcha_config_llm_stack, + expected_matcha_config_default_stack, + ): + """Test that when a stack is changed, the components of that stack change as expected. + + Args: + matcha_testing_directory (str): a temporary working directory. + mocked_remote_state_manager_is_state_provisioned_false (RemoteStateManager): a mocked remote state manager. + expected_matcha_config_llm_stack (MatchaConfig): the expected configuration for the LLM stack. + expected_matcha_config_default_stack (MatchaConfig): the expected configuration for the default stack. """ + # create the stack in the testing directory and assert that it's what we expect os.chdir(matcha_testing_directory) stack_set(stack_name="llm") config = MatchaConfigService.read_matcha_config() - assert config.to_dict() == {"stack": {"name": "llm"}} + assert config == expected_matcha_config_llm_stack + # TODO Having to delete the file is a bit clunky and could be improved.
MatchaConfigService.delete_matcha_config() stack_set(stack_name="default") - config = MatchaConfigService.read_matcha_config() - assert config.to_dict() == {"stack": {"name": "default"}} + default_config = MatchaConfigService.read_matcha_config() + assert default_config == expected_matcha_config_default_stack + assert default_config != config def test_stack_set_invalid( From 8aa4dd35240a80ecd4705939f8f1b1be102118d9 Mon Sep 17 00:00:00 2001 From: Jonathan Carlton Date: Fri, 1 Sep 2023 15:31:45 +0100 Subject: [PATCH 10/10] [RPD-315] Update provisioning/destroy message for modular stack (#213) * RPD-315 updated resource msg + made changes to get_stack() * RPD-315 removed old resource message constant variable * RPD-315 fixing 3.8 typing * RPD-315 fixing typing in the tests - missed on prev commit * RPD-315 fixing missed typing * RPD-315 updated tests based on failing CI --- src/matcha_ml/cli/cli.py | 13 ++- src/matcha_ml/cli/constants.py | 29 ++++-- .../cli/ui/status_message_builders.py | 34 ++++++- src/matcha_ml/config/matcha_config.py | 14 +-- src/matcha_ml/core/core.py | 5 +- tests/test_cli/test_stack.py | 2 +- .../test_status_message_builders.py | 91 +++++++++++++++++++ tests/test_templates/test_azure_template.py | 5 +- 8 files changed, 163 insertions(+), 30 deletions(-) diff --git a/src/matcha_ml/cli/cli.py b/src/matcha_ml/cli/cli.py index edab87d3..4488eeca 100644 --- a/src/matcha_ml/cli/cli.py +++ b/src/matcha_ml/cli/cli.py @@ -9,7 +9,6 @@ prefix_typer_callback, region_typer_callback, ) -from matcha_ml.cli.constants import RESOURCE_MSG, STATE_RESOURCE_MSG from matcha_ml.cli.ui.print_messages import ( print_error, print_resource_output, @@ -20,10 +19,12 @@ hide_sensitive_in_output, ) from matcha_ml.cli.ui.status_message_builders import ( + build_resources_msg_content, build_status, build_step_success_status, ) from matcha_ml.cli.ui.user_approval_functions import is_user_approved +from matcha_ml.config import MatchaConfigService from matcha_ml.core.core import 
stack_add, stack_remove from matcha_ml.errors import MatchaError, MatchaInputError @@ -112,7 +113,10 @@ def provision( Exit: Exit if resources are already provisioned. """ location, prefix, password = fill_provision_variables(location, prefix, password) - if is_user_approved(verb="provision", resources=RESOURCE_MSG): + + resource_msg = build_resources_msg_content(stack=MatchaConfigService.get_stack()) + + if is_user_approved(verb="provision", resources=resource_msg): try: _ = core.provision(location, prefix, password, verbose) except MatchaError as e: @@ -179,7 +183,10 @@ def destroy() -> None: Raises: Exit: Exit if core.destroy throws a MatchaError. """ - if is_user_approved(verb="destroy", resources=RESOURCE_MSG + STATE_RESOURCE_MSG): + resource_msg = build_resources_msg_content( + stack=MatchaConfigService.get_stack(), destroy=True + ) + if is_user_approved(verb="destroy", resources=resource_msg): try: core.destroy() print_status(build_step_success_status("Destroying resources is complete!")) diff --git a/src/matcha_ml/cli/constants.py b/src/matcha_ml/cli/constants.py index 800452c5..7e135919 100644 --- a/src/matcha_ml/cli/constants.py +++ b/src/matcha_ml/cli/constants.py @@ -1,20 +1,31 @@ """Constants for use within the Matcha CLI.""" -RESOURCE_MSG = [ +RESOURCE_MSG_CORE = [ ("Azure Kubernetes Service (AKS)", "A kubernetes cluster"), ( - "Two Storage Containers", - "A storage container for experiment tracking artifacts and a second for model training artifacts", + "Azure Container Registry", + "A container registry for storing docker images", ), - ( +] + +RESOURCE_MSG_MODULES = { + "deployer": ( "Seldon Core", "A framework for model deployment on top of a kubernetes cluster", ), - ( - "Azure Container Registry", - "A container registry for storing docker images", + "orchestrator": ( + "ZenServer", + "A zenml server required for remote orchestration and a storage container", ), - ("ZenServer", "A zenml server required for remote orchestration"), -] + 
"data_version_control": ( + "Data Version Control", + "A storage container to hold data versions", + ), + "experiment_tracker": ( + "MLflow", + "An experiment tracker backed by a storage container", + ), + "vector_database": ("Chroma", "A vector database"), +} STATE_RESOURCE_MSG = [ ("Azure Resource Group", "The resource group containing the provisioned resources"), diff --git a/src/matcha_ml/cli/ui/status_message_builders.py b/src/matcha_ml/cli/ui/status_message_builders.py index ec64362e..ad376008 100644 --- a/src/matcha_ml/cli/ui/status_message_builders.py +++ b/src/matcha_ml/cli/ui/status_message_builders.py @@ -5,12 +5,44 @@ from rich.console import Console -from matcha_ml.cli.constants import INFRA_FACTS +from matcha_ml.cli.constants import ( + INFRA_FACTS, + RESOURCE_MSG_CORE, + RESOURCE_MSG_MODULES, + STATE_RESOURCE_MSG, +) from matcha_ml.cli.ui.spinner import Spinner +from matcha_ml.config.matcha_config import MatchaConfigComponent +from matcha_ml.constants import DEFAULT_STACK err_console = Console(stderr=True) +def build_resources_msg_content( + stack: Optional[MatchaConfigComponent] = None, destroy: Optional[bool] = False +) -> List[Tuple[str, str]]: + """Build the resource message that is outputted to the user at provision and destroy. + + Args: + stack (Optional[MatchaConfigComponent]): the stack to build the resource message from. Defaults to None. + destroy (Optional[bool]): the message is different when destroying, set this flag when destroying. Defaults to False. + + Returns: + List[Tuple[str, str]]: the resource message. 
+ """ + stack_properties = DEFAULT_STACK if stack is None else stack.properties + + modules = [ + RESOURCE_MSG_MODULES[stack_property.name] + for stack_property in stack_properties + if stack_property.name != "name" + ] + + message = RESOURCE_MSG_CORE + modules + + return message + STATE_RESOURCE_MSG if destroy else message + + def build_resource_confirmation( header: str, resources: List[Tuple[str, str]], footer: Optional[str] = None ) -> str: diff --git a/src/matcha_ml/config/matcha_config.py b/src/matcha_ml/config/matcha_config.py index f20a8a59..a8a7aed0 100644 --- a/src/matcha_ml/config/matcha_config.py +++ b/src/matcha_ml/config/matcha_config.py @@ -126,26 +126,18 @@ class MatchaConfigService: """A service for handling the Matcha config file.""" @staticmethod - def get_stack() -> Optional[MatchaConfigComponentProperty]: + def get_stack() -> Optional[MatchaConfigComponent]: """Gets the current stack name from the Matcha Config if it exists. Returns: - Optional[MatchaConfigComponentProperty]: The name of the current stack being used as a config component object. + Optional[MatchaConfigComponent]: The stack config component. 
""" try: stack = MatchaConfigService.read_matcha_config().find_component("stack") except MatchaError: stack = None - if stack is None: - return None - - name = stack.find_property("name") - - if name is None: - return None - - return name + return None if stack is None else stack @staticmethod def write_matcha_config(matcha_config: MatchaConfig) -> None: diff --git a/src/matcha_ml/core/core.py b/src/matcha_ml/core/core.py index ed94f84e..b04e3c16 100644 --- a/src/matcha_ml/core/core.py +++ b/src/matcha_ml/core/core.py @@ -18,7 +18,6 @@ MatchaConfigComponentProperty, MatchaConfigService, ) - from matcha_ml.constants import DEFAULT_STACK, LLM_STACK, STACK_MODULES from matcha_ml.core._validation import is_valid_prefix, is_valid_region from matcha_ml.errors import MatchaError, MatchaInputError @@ -310,7 +309,9 @@ def provision( stack = MatchaConfigService.get_stack() if stack is not None: - stack_name = stack.value + stack_property = stack.find_property("name") + if stack_property is not None: + stack_name = stack_property.value template = os.path.join( os.path.dirname(__file__), diff --git a/tests/test_cli/test_stack.py b/tests/test_cli/test_stack.py index 2f5f8192..bb778c06 100644 --- a/tests/test_cli/test_stack.py +++ b/tests/test_cli/test_stack.py @@ -243,7 +243,7 @@ def test_cli_stack_add_command_with_args( assert result.exit_code == 0 assert mocked_stack_add.assert_called_once assert ( - "Matcha 'experiment_tracker' module of flavor 'mlflow' has been added to the \ncurrent stack.\n" + "Matcha 'experiment_tracker' module of flavor 'mlflow' has been added" in result.stdout ) diff --git a/tests/test_cli/test_ui_primitives/test_status_message_builders.py b/tests/test_cli/test_ui_primitives/test_status_message_builders.py index 74d8e954..124436f5 100644 --- a/tests/test_cli/test_ui_primitives/test_status_message_builders.py +++ b/tests/test_cli/test_ui_primitives/test_status_message_builders.py @@ -3,12 +3,39 @@ import pytest +from matcha_ml.cli.constants import ( + 
RESOURCE_MSG_CORE, + RESOURCE_MSG_MODULES, + STATE_RESOURCE_MSG, +) from matcha_ml.cli.ui.status_message_builders import ( build_resource_confirmation, + build_resources_msg_content, build_status, build_step_success_status, build_substep_success_status, ) +from matcha_ml.config.matcha_config import MatchaConfigComponent +from matcha_ml.constants import DEFAULT_STACK + + +@pytest.fixture +def matcha_stack_component_names( + mocked_matcha_config_stack_component: MatchaConfigComponent, +) -> List[str]: + """A fixture to get the names of the modules in the stack. + + Args: + mocked_matcha_config_stack_component (MatchaConfigComponent): the default stack as a component. + + Returns: + List[str]: the names of the modules as a list. + """ + return [ + prop.name + for prop in mocked_matcha_config_stack_component.properties + if prop.name != "name" + ] @pytest.mark.parametrize( @@ -76,3 +103,67 @@ def test_build_substep_success_status(): """Test build substep success status formats status message correctly.""" expected = "[green]Step finished![/green]" assert build_substep_success_status("Step finished!") == expected + + +def test_build_resource_msg_content_expected( + mocked_matcha_config_stack_component: MatchaConfigComponent, + matcha_stack_component_names: List[str], +): + """Test that the resource message has the content that we would expect for a default stack. + + Args: + mocked_matcha_config_stack_component (MatchaConfigComponent): the default stack as a component. + matcha_stack_component_names (List[str]): the names of the modules in the default stack. 
+ """ + stack_modules = [ + RESOURCE_MSG_MODULES[name] for name in matcha_stack_component_names + ] + expected_result = RESOURCE_MSG_CORE + stack_modules + + resource_msg = build_resources_msg_content( + stack=mocked_matcha_config_stack_component, destroy=False + ) + + assert resource_msg == expected_result + + +def test_build_resource_msg_content_expected_destroy( + mocked_matcha_config_stack_component: MatchaConfigComponent, + matcha_stack_component_names: List[str], +): + """Test that the resource message has the content that we would expect when destroying the default stack. + + Args: + mocked_matcha_config_stack_component (MatchaConfigComponent): the default stack as a component. + matcha_stack_component_names (List[str]): the names of the modules in the default stack. + """ + stack_modules = [ + RESOURCE_MSG_MODULES[name] for name in matcha_stack_component_names + ] + expected_result = RESOURCE_MSG_CORE + stack_modules + STATE_RESOURCE_MSG + + resource_msg = build_resources_msg_content( + stack=mocked_matcha_config_stack_component, destroy=True + ) + + assert resource_msg == expected_result + + +def test_build_resource_msg_content_no_stack(): + """Test that the resource message is accurate when no stack is specified.""" + stack_modules = [RESOURCE_MSG_MODULES[prop.name] for prop in DEFAULT_STACK] + expected_result = RESOURCE_MSG_CORE + stack_modules + + resource_msg = build_resources_msg_content() + + assert resource_msg == expected_result + + +def test_build_resource_msg_content_no_stack_destroy(): + """Test that the resource messages is accurate when no stack is specific and we're destroying.""" + stack_modules = [RESOURCE_MSG_MODULES[prop.name] for prop in DEFAULT_STACK] + expected_result = RESOURCE_MSG_CORE + stack_modules + STATE_RESOURCE_MSG + + resource_msg = build_resources_msg_content(destroy=True) + + assert resource_msg == expected_result diff --git a/tests/test_templates/test_azure_template.py b/tests/test_templates/test_azure_template.py index 
94e85e59..06b5ef45 100644 --- a/tests/test_templates/test_azure_template.py +++ b/tests/test_templates/test_azure_template.py @@ -163,9 +163,8 @@ def test_recursively_copy_files_error_handling_directory_not_exist( captured = capsys.readouterr() # Check if the error message is present in the captured output - assert ( - "Error while copying files: [Errno 2] No such file or directory:" - in captured.err + assert ("Error while copying files" in captured.err) and ( + "No such file or directory" in captured.err )